+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.KC84DoWNuA --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [8436/8436 modules configured] [1896/4837 modules rendered]
[2 ymakes processing] [8436/8436 modules configured] [4712/4837 modules rendered]
[2 ymakes processing] [8436/8436 modules configured] [4837/4837 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8442/8442 modules configured] [4837/4837 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 3.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
| 6.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
| 7.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a
| 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a
| 8.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a
| 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a
| 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a
| 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a
| 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a
| 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a
| 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a
| 8.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a
| 8.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a
| 8.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a
| 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a
| 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a
| 9.2%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a
| 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a
| 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a
| 9.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform
|10.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a
|11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a
|11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a
|11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a
|12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a
|13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a
|14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a
|14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a
|14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a
|13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a
|13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a
|14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a
|14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a
|14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a
|14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a
|14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a
|14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a
|14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a
|14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a
|14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a
|15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a
|15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a
|15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a
|15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a
|15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a
|15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a
|15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a
|15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a
|16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a
|16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a
|16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a
|16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a
|16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a
|16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a
|16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a
|16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a
|16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a
|16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a
|16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a
|16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a
|17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a
|17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a
|17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a
|17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a
|17.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp
|17.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp
|17.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp
|17.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp
|17.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp
|17.7%| PREPARE $(VCS)
|17.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp
|17.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp
|18.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp
|17.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp
|18.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp
|18.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp
|18.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp
|18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a
|18.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp
|18.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp
|18.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp
|19.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp
|19.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp
|19.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp
|19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a
|19.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp
|20.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp
|20.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp
|21.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp
|21.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp
|21.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp
|21.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp
|21.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp
|22.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp
|22.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp
|23.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp
|23.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp
|23.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp
|23.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp
|23.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp
|23.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp
|23.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp
|24.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp
|24.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp
|24.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp
|24.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp
|24.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp
|24.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp
|24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp
|25.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp
|25.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp
|24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp
|25.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp
|25.1%| PREPARE $(YMAKE_PYTHON3-4256832079)
|25.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp
|25.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp
|25.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp
|25.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a
|25.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp
|25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a
|25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a
|26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a
|26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a
|26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a
|25.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp
|25.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp
|25.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp
|26.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp
|26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp
|26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp
|26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a
|26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a
|26.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp
|26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a
|26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a
|26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a
|26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a
|26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a
|26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a
|27.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp
|26.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp
|27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a
|27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a
|27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp
|27.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp
|27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a
|27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a
|27.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp
|27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a
|27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a
|27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a
|27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a
|27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a
|28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a
|28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a
|28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a
|28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a
|28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a
|28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a
|28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a
|28.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp
|28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a
|28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a
|28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a
|28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a
|28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a
|28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp
|28.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp
|29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a
|29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a
|29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a
|29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a
|29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a
|29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a
|29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a
|29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a
|29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a
|30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a
|30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a
|30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a
|30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a
|30.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a
|30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a
|30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a
|31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a
|30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a
|30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a
|31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a
|31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a
|31.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a
|31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a
|31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a
|31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a
|31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a
|31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a
|31.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a
|31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a
|32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a
|32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a
|32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a
|32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a
|32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a
|32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a
|32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a
|32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a
|32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a
|32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a
|33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a
|33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a
|32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a
|32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a
|32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a
|32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a
|32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a
|33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a
|33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a
|33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a
|33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a
|33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a
|33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a
|33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a
|33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a
|33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a
|34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a
|34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a
|34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a
|34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a
|34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a
|34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a
|34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a
|34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a
|34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a
|34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a
|35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a
|35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a
|35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a
|35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a
|35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a
|35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a
|35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a
|35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a
|35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a
|35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a
|35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a
|35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a
|35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a
|35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a
|35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a
|35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a
|36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a
|36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a
|36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a
|36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a
|36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a
|36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a
|36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a
|36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a
|36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a
|36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a
|36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a
|36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a
|36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a
|36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a
|36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a
|36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a
|36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a
|37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a
|37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a
|37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a
|37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a
|37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a
|37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a
|37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a
|37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a
|37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a
|37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a
|37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a
|37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a
|37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a
|38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a
|38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a
|38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a
|38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a
|38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a
|38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a
|38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a
|38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a
|38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a
|38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a
|39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a
|39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a
|39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a
|39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a
|39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a
|39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a
|39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a
|39.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a
|39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a
|39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a
|39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a
|40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a
|40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a
|40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a
|40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a
|40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a
|40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a
|40.4%| PREPARE $(LLD_ROOT-3808007503)
|40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a
|40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a
|40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a
|40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a
|41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a
|41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a
|41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a
|41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a
|41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a
|41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a
|41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a
|41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a
|41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a
|41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a
|44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a
|44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a
|44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a
|44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a
|44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a
|44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a
|44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a
|44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a
|44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a
|44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a
|45.2%| PREPARE $(CLANG_FORMAT-2212207123)
|45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a
|45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a
|45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a
|45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a
|45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a
|45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a
|45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a
|45.7%| PREPARE $(PYTHON)
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a
|45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a
|45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a
|46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a
|46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a
|46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a
|46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a
|46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a
|46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a
|46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a
|46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a
|46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a
|46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a
|46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a
|46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a
|46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a
|46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a
|47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a
|47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a
|47.6%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a
|48.2%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a
|47.9%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a
|48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/login/libydb-library-login.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a
|48.2%| PREPARE $(FLAKE8_PY3-715603131)
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a
|48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a
|48.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/login/login.cpp
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a
|48.5%| PREPARE $(FLAKE8_PY2-2255386470)
|48.6%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403)
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a
$(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |48.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a 
|48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |48.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |48.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |48.9%| {BAZEL_DOWNLOAD} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |49.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |48.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/control/lib/libcore-control-lib.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |47.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/common/libfq-libs-common.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |47.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |46.4%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |44.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |44.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_a5c82b9ecb3bf738ea9e628123.o |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |45.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/load_test/libydb-core-load_test.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |45.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |45.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/formatter.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |43.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/caching_channel_factory.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/string_builder_stream.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp 
|41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/scheduler/spec_patch.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/column_sort_schema.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |39.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |39.5%| PREPARE $(WITH_JDK17-sbr:7832760150) |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |39.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |39.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |39.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |38.8%| PREPARE $(CLANG-1922233694) |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |38.2%| PREPARE $(JDK_DEFAULT-472926544) |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |38.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |37.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |37.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |37.6%| PREPARE $(JDK17-472926544) |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |38.1%| PREPARE $(GDB) |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |38.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |38.5%| PREPARE $(WITH_JDK-sbr:7832760150) |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |38.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |38.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__data_erasure_manager.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |37.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp 
|37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |37.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |37.1%| PREPARE $(CLANG-2518231432) |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |37.3%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |37.6%| PREPARE $(CLANG18-3363451693) |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |37.6%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |37.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |37.7%| RESOURCE $(sbr:4966407557) |37.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |37.7%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |37.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |37.7%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |37.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |37.8%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |37.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |37.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |37.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |37.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |37.9%| PREPARE $(CLANG16-1380963495) |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |37.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |38.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |38.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |38.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |38.3%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |38.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |38.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |38.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_data_erasure_manager.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |38.7%| PREPARE $(BLACK_LINTER-sbr:8415400280) |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |38.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |38.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |38.9%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |38.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |38.9%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |38.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/facade/libessentials-core-facade.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |39.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a >> test.py::py2_flake8 [GOOD] |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |39.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a >> conftest.py::black [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_workload.py::flake8 [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_case.py::flake8 [GOOD] |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a >> test.py::py2_flake8 [GOOD] |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |39.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a >> test.py::py2_flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a >> test.py::py2_flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a 
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a >> test.py::py2_flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a >> test.py::py2_flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> test_mixed.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a >> test_restarts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> 
docker_wrapper_test.py::flake8 [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a >> test.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |39.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |39.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a >> test.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |39.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |39.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a >> test.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |39.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |39.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |39.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |39.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |39.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |40.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |40.1%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |40.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] >> compare.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |40.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |40.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a >> test.py::py2_flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |39.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a >> test.py::py2_flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a >> test.py::py2_flake8 [GOOD] |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a >> __main__.py::flake8 [GOOD] |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a >> test.py::py2_flake8 [GOOD] |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a >> test_workload.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |40.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a >> test.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |40.6%| [TS] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |40.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_transform.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] 
>> test_simple.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a >> helpers.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |40.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a 
|40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a >> overlapping_portions.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |40.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |40.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |41.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |41.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp >> test_crud.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp >> test_discovery.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |41.2%| [TS] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |41.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |41.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |41.3%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD]
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp
|41.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD]
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp
>> test_liveness_wardens.py::flake8 [GOOD]
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp
|41.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD]
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp
>> collection.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> select_datetime.py::flake8 [GOOD]
>> select_positive.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> test_postgres.py::flake8 [GOOD]
|41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp
|41.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp
|41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp
>> test_multinode_cluster.py::flake8 [GOOD]
>> test_recompiles_requests.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> test_clickhouse.py::flake8 [GOOD]
>> test_greenplum.py::flake8 [GOOD]
>> test_join.py::flake8 [GOOD]
>> test_mysql.py::flake8 [GOOD]
>> test_postgresql.py::flake8 [GOOD]
>> test_ydb.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> common.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> test_rename.py::flake8 [GOOD]
>> test.py::py2_flake8 [GOOD]
>> test.py::py2_flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp
|41.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp
|41.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp
|41.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD]
>> base.py::flake8 [GOOD]
>> data_correctness.py::flake8 [GOOD]
>> data_migration_when_alter_ttl.py::flake8 [GOOD]
>> ttl_delete_s3.py::flake8 [GOOD]
>> ttl_unavailable_s3.py::flake8 [GOOD]
>> unstable_connection.py::flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp
|41.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD]
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp
>> test_query_cache.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> collection.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> select_positive.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> test_tpcds.py::flake8 [GOOD]
>> test_tpch_spilling.py::flake8 [GOOD]
|41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp
|41.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD]
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp
|41.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp
>> test.py::py2_flake8 [GOOD]
|41.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD]
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp
|41.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp
>> column_table_helper.py::flake8 [GOOD]
>> range_allocator.py::flake8 [GOOD]
>> s3_client.py::flake8 [GOOD]
>> thread_helper.py::flake8 [GOOD]
>> time_histogram.py::flake8 [GOOD]
>> utils.py::flake8 [GOOD]
>> ydb_client.py::flake8 [GOOD]
>> test.py::py2_flake8 [GOOD]
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp
|41.6%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp
>> collection.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> select_datetime.py::flake8 [GOOD]
>> select_positive.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> helpers.py::flake8 [GOOD]
>> test_ctas.py::flake8 [GOOD]
>> test_yt_reading.py::flake8 [GOOD]
|41.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD]
|41.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD]
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp
|41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp
|41.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD]
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp
|41.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp
|41.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD]
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp
|41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp
>> test_leader_start_inflight.py::flake8 [GOOD]
>> gen-report.py::flake8 [GOOD]
>> test_encryption.py::flake8 [GOOD]
|41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp
|41.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD]
|41.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD]
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp
>> test_base.py::flake8 [GOOD]
>> test_http_api.py::flake8 [GOOD]
>> collection.py::flake8 [GOOD]
>> tpc_tests.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> select_datetime_with_service_name.py::flake8 [GOOD]
>> select_positive_with_service_name.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
>> collection.py::flake8 [GOOD]
>> conftest.py::flake8 [GOOD]
>> select_datetime.py::flake8 [GOOD]
>> select_positive.py::flake8 [GOOD]
>> select_positive_with_schema.py::flake8 [GOOD]
>> test.py::flake8 [GOOD]
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD]
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD]
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD]
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD]
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD]
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD]
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD]
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a
|41.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut
|41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut
|41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut
|41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut
|42.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a
|41.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a
|42.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a
|42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD]
|42.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD]
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index
|41.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD]
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a
|41.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a
|41.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
|42.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a
|42.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|42.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a
|42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD]
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a
|42.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a
|42.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD]
|42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD]
|42.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a
|42.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a
|42.3%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql
|42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a
>> conftest.py::flake8 [GOOD]
>> test_auth_system_views.py::flake8 [GOOD]
>> test_create_users.py::flake8 [GOOD]
>> test_create_users_strict_acl_checks.py::flake8 [GOOD]
>> test_db_counters.py::flake8 [GOOD]
>> test_dynamic_tenants.py::flake8 [GOOD]
>> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD]
>> test_storage_config.py::flake8 [GOOD]
>> test_system_views.py::flake8 [GOOD]
>> test_tenants.py::flake8 [GOOD]
>> test_user_administration.py::flake8 [GOOD]
>> test_users_groups_with_acl.py::flake8 [GOOD]
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a
|42.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut
|42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a
|42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD]
|42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD]
|42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD]
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a
|42.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD]
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a
|42.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD]
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a
|42.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD]
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a
|42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD]
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a
|42.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD]
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_c693478edc1220e7a9143567d1.o
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_342e8590e41686b18307d054a9.o
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_a0bee0ed11edab150a8172af5c.o
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1515671fe2dfb16894dfbe901e.o
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp
|42.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD]
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o
|42.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_5525925030ba2866c1b1040841.o
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a
|42.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_0665be2c60952715f39eb25568.o
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp
|42.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/libclient-nc_private-iam.a
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_prefix_kmeans.cpp
|42.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_4ea639aebd19c36ee3cdb4479d.o
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a
|42.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a
|42.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o
|42.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp
|42.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a
|42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a
>> conftest.py::flake8 [GOOD]
>> test_ydb_backup.py::flake8 [GOOD]
>> test_ydb_flame_graph.py::flake8 [GOOD]
>> test_ydb_impex.py::flake8 [GOOD]
>> test_ydb_recursive_remove.py::flake8 [GOOD]
>> test_ydb_scheme.py::flake8 [GOOD]
>> test_ydb_scripting.py::flake8 [GOOD]
>> test_ydb_sql.py::flake8 [GOOD]
>> test_ydb_table.py::flake8 [GOOD]
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a
|42.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/encryption_ut.cpp
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a
|43.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o
|43.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a
>> conftest.py::flake8 [GOOD]
>> s3_helpers.py::flake8 [GOOD]
>> test_bindings_0.py::flake8 [GOOD]
>> test_bindings_1.py::flake8 [GOOD]
>> test_compressions.py::flake8 [GOOD]
>> test_early_finish.py::flake8 [GOOD]
>> test_empty.py::flake8 [GOOD]
>> test_explicit_partitioning_0.py::flake8 [GOOD]
>> test_explicit_partitioning_1.py::flake8 [GOOD]
>> test_format_setting.py::flake8 [GOOD]
>> test_formats.py::flake8 [GOOD]
>> test_inflight.py::flake8 [GOOD]
>> test_insert.py::flake8 [GOOD]
>> test_public_metrics.py::flake8 [GOOD]
>> test_push_down.py::flake8 [GOOD]
>> test_s3_0.py::flake8 [GOOD]
>> test_s3_1.py::flake8 [GOOD]
>> test_size_limit.py::flake8 [GOOD]
>> test_statistics.py::flake8 [GOOD]
>> test_streaming_join.py::flake8 [GOOD]
>> test_test_connection.py::flake8 [GOOD]
>> test_ydb_over_fq.py::flake8 [GOOD]
>> test_yq_v2.py::flake8 [GOOD]
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp
|43.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o
|43.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp
|43.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_8e9f839326d1a9224e4b2e15e2.o
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a
|43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o
|43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD]
|43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_a40d299361b06d7622f78b2238.o
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a
|43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a
|43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a
|43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a
|43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a
|43.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/df691ac52d0b755cb039db39b5_raw.auxcpp
|43.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_3e8bf44ed681ff82ae143aaec3.o
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|43.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a
|43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o
|43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut
|43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a
|43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/static/libcomplete-name-static.global.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/static/libcomplete-name-static.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a
|43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD]
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a
|43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut
|43.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a
|43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp
|43.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize
|43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a
|43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp
|43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a
|43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_53073eb93c76466fca8f474c5f.o
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp
|43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp
|43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o
|43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_e90c44513b7fb4a0ab1a6e13f2.o
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/552c373b422666221556a5a9bd_raw.auxcpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp
|43.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a
|43.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a
|43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_96d3f689fe7d6b16f9da31e376.o
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/port_discovery_ut.cpp
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_9caa7583d1e4955730dbd6f3fd.o
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a
|43.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_ac7eeedcbf7038a60a7673762a.o
|43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o
|43.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so
|44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_938861be99a6cedecb22904193.o
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp
|43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp
|44.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp
|44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp
|44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp
|44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_55f2556d6eafcd77ebc4c517d4.o
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp
|44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_f42b1add98328abd34a53e4aef.o
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a
|44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a
|44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a
|44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init
|44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o
|44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut
|44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_bf6c9c02784d65e20a01685ce8.o
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a
|44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut
|44.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/ut/ydb-library-login-password_checker-ut
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/interface/libfmr-yt_service-interface.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/libfmr-table_data_service-local.a
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a
|44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/service_node/service_node
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/impl/libfmr-yt_service-impl.a
|44.3%| [LD] {BAZEL_DOWNLOAD, FAILED}
$(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |44.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/ut/ydb-library-login-cache-ut |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp >> PasswordChecker::CannotAcceptTooShortPassword [GOOD] >> PasswordChecker::PasswordCannotContainUsername [GOOD] >> PasswordChecker::AcceptPasswordWithSpecialCharsIfSpecialCharsListIsEmpty [GOOD] >> PasswordChecker::CannotAcceptPasswordWithoutLowerCaseCharacters [GOOD] >> PasswordChecker::CheckCorrectPasswordWithMaxComplexity [GOOD] >> PasswordChecker::CannotAcceptPasswordWithoutUpperCaseCharacters [GOOD] >> PasswordChecker::CannotAcceptPasswordWithoutNumbers [GOOD] >> 
PasswordChecker::AcceptEmptyPassword [GOOD] >> PasswordChecker::CannotAcceptPasswordWithoutLowerCaseAndSpecialCharacters [GOOD] >> PasswordChecker::CannotAcceptEmptyPassword [GOOD] >> PasswordChecker::HashChecker [GOOD] >> PasswordChecker::AcceptPasswordWithCustomSpecialCharactersList [GOOD] >> PasswordChecker::CannotAcceptPasswordWithInvalidCharacters [GOOD] >> PasswordChecker::CannotAcceptPasswordWithoutSpecialCharacters [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_1b4bf9f1f46a6111d16337dee0.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o >> TCacheTest::TestLruCacheWithEmptyCapacity [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCacheTest::TestLruCacheWithSameUserAndPassword [GOOD] >> TCacheTest::TestLruCacheWithDifferentPasswords [GOOD] >> TCacheTest::TestLruCacheResize [GOOD] |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_023a23fcfdf79043d814bb8aab.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_5831cbd77ecc92a241b6cf1ea2.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_4f9d76a39d2f7ba2b9f198f28c.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/login/password_checker/ut/unittest >> PasswordChecker::CannotAcceptPasswordWithoutSpecialCharacters [GOOD] |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |44.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/ut/ydb-library-login-account_lockout-ut |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_51562f83ff52d1ceaac0c36a08.o |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/common/objcopy_a54664d42025a3be375f961b82.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/login/cache/ut/unittest >> TCacheTest::TestLruCacheResize [GOOD] |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_d2e759e2d0ff1243166a3bc7d9.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_8bf6865076719c3317e71551a4.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_79d897640a3a634a87f173e2f4.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_199ab4be3deaff025e1ab92143.o |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |45.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_b0a88dfa3c67850033b8c21ce7.o |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> AccountLockoutTest::InitCorrectValue [GOOD] >> AccountLockoutTest::InitWrongValue [GOOD] >> AccountLockoutTest::InitEmptyValue [GOOD] >> AccountLockoutTest::InitZeroValue [GOOD] |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |44.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/service_node/main.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |44.6%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_1c18035bb4b3759d5e029db746.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_b90d4f51856a4b346b62877f37.o |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_ddabc037fdef8d5f7c6c2f9b47.o |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |45.0%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/login/account_lockout/ut/unittest >> AccountLockoutTest::InitZeroValue [GOOD] |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |45.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |45.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_4cf502b19212965f14d6660a20.o |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_2b60b599fc27771d93e79090fc.o |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |45.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |46.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |46.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/random.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/45be6e48ea8f2ac38577085d0d_raw.auxcpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_3efa41af97c0510be1d2e99f05.o |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_9c56ea1b7d34c7d8f6329bfcfd.o |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |46.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_1a397c908c9859dc40a771ddf1.o |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |46.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tools/stress_tool/ydb_stress_tool |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |46.6%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_f580ed931409135de17b6aff8b.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_862077c20b8d599559a861c2df.o |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7fdc9492198d5f306aa05e0de1_raw.auxcpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_reader.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |46.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_kqp.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_0e928e66807fd553d7fcaa58a3.o |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_696078ddd4c2d0788472b3ebfe.o |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_8f2fbd9f79880fbfa3c1838d80.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_5dc9c76fd90ae0562084321e87.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/init/init_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_a3fc9153ce93c876df4c755b36.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b9d4e191a9fd03221b46c5af49_raw.auxcpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_79fff48e52404c1611400b8a2c.o |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_0221134ccf2a949614ce2a2056.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |47.1%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/tools/nemesis/ut/objcopy_41295709119857c2e0f1a41f31.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_f364ff47dd846bb94c3e83f2a8.o |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_52476c20dac0af4f59edc2917e.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d1dee10c0c00d50989b086bd3f.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8649dacdf340abe7c53df69638.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_909bbfbd36bf4d7cf0544f0406.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_5051832ffa0b6b13cebe014eb1.o |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |47.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_f363a941fa24746cadffc60594.o |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_19dadf8afeb30502d735b660ce.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/e1ff312a3308444783623a7c6e_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_496e4638abf3c5ef12eafab52c.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |47.5%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |47.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_cache_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_stack_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |47.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_7897d1b03fc78e49620c18f81a.o |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |47.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_inplace_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/iceberg_ddl_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |47.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/util/operation_queue_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_173de88696c8239b22567e7ece.o |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |47.7%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_0553360a969b2c9633badb428d.o |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... 
grpc.pb.h} |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/connector.{pb.h ... grpc.pb.h} |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_ec3163328cb5ab8f222e66dd41.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_d1ba757d227a70ff4910717854.o |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} |48.0%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_71d73932c95681fccfc7215041.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_2e1dd9c9bc385e6efd22b78136.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_a99732b1d02edd62e674483ffe.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... 
grpc.pb.h} |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_323a17e94d8d570989807d19d3.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... 
grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_895e78a038dc7069fda56c2e82.o |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |48.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... 
grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_93197284f82f9ae9fc0256ee95.o |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_133cf22e149f008572dab30674.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |48.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... 
grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_0c4ce75555cfd5c0dd63e9dfbd.o |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |48.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... 
grpc.pb.h} |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_c84c8d511807425dc18073129b.o |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/4129dc9878c2058404494fb088_raw.auxcpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |48.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... 
grpc.pb.h} |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/login/ut/ydb-library-login-ut |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |48.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... 
grpc.pb.h} |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |48.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/c52bef66453eb652f14989b79d_raw.auxcpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_320dc1aa155c1ac9123afec54c.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/login/login_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e01aded916ad04e888f13223cf_raw.auxcpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_e9b644ce7912a7deb04bbf40a8.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |49.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |49.1%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_51000f45ee1f1ab0908a7e71c9.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |49.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_53273ad3976098fa8cbd55f5a9.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/e294827eb799173498fe26d398_raw.auxcpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_6f577a0a3d7a659599df51626e.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_452efd8b0828678a61ff4e0569.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_d1da8f48b4e80ef5678b1197a3.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_52647c3535f2451207dfa29a87.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... 
grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_34efc91ed920a8b27d971c44a6.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/blobsan/main.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_0a29eb8c456ab5b998f2d12ba1.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |49.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_ccde7a40b2fd2886f22cd46a85.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_1a75593bdb000d1e31dd6e96d5.o |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_c8e04cf4d110f8c670988beb0f.o |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/olap/objcopy_ef822f612b696eb514a5565056.o |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2de2accab39327e9b10680901f.o |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_e3640190fc6b98b359c2a9e990.o |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_817637e20254f271e9be62d14e.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_340b457b8174f6293d5748588e.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/objcopy_ac8dbe7f54a2cb7efb6636f75f.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |49.5%| [PB] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a867878d783e80bc2d70bd8d0.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/32049c3ef1f885f0e34984b3bf_raw.auxcpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7dbead413d0eb2c0f2ebe75a93.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a637ae81b754dfa4e06b949b8.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_999b0e05144f29a542dbe4b3f5.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_eff0a6b0f75ccb9a2cb742007c.o |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_083605b223ce507d0fef919d0d.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... 
grpc.pb.h} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_69ec8108bd4bdc059abab5b374.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_4246ee6b3505ab22753eb44ce7.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7e7e709046fe8acad91d924675.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/scheme/csv_arrow_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |49.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_3e7b0e88092417daa72b89bfde.o |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... 
grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_88a0e187d0d0f8235a5e3f2fff.o |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... 
grpc.pb.h} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_4c839b0fc6ee60e0bb4adc7079.o |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |49.2%| COMPACTING CACHE 535.0MiB |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |49.2%| [TS] {RESULT} ydb/tests/sql/large/flake8 |49.2%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |49.2%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |49.2%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |49.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |49.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |49.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |49.3%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |49.3%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |49.3%| [TS] {RESULT} ydb/tests/example/flake8 |49.3%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |49.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |49.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |49.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/api/flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |49.4%| [TS] {RESULT} ydb/tests/olap/flake8 |49.4%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/config/flake8 |49.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |49.4%| [TS] {RESULT} ydb/tests/olap/common/flake8 |49.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |49.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |49.4%| [TS] {RESULT} ydb/tests/fq/common/flake8 |49.4%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |49.4%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |49.4%| [TS] {RESULT} 
ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |49.4%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |49.4%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |49.4%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |49.4%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |49.5%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |49.5%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |49.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |49.5%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |49.5%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |49.5%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |49.5%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |49.5%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |49.5%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |49.5%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |49.5%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |49.5%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |49.5%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |49.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |49.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |49.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |49.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |49.5%| [TS] {RESULT} ydb/library/login/cache/ut/unittest |49.6%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |49.6%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |49.6%| [TS] {RESULT} ydb/library/login/password_checker/ut/unittest |49.6%| [TS] {RESULT} ydb/tests/library/ut/flake8 |49.6%| [TS] {RESULT} ydb/library/login/account_lockout/ut/unittest |49.6%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |49.6%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |49.6%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |49.6%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |49.6%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |49.6%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |49.7%| [TS] {RESULT} ydb/tests/sql/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |49.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |49.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |49.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |49.7%| [TS] {RESULT} 
ydb/tests/functional/encryption/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |49.7%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |49.7%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |49.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |49.7%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |49.7%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |49.7%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |49.7%| [TS] {RESULT} ydb/tests/olap/load/flake8 |49.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |49.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |49.8%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |49.8%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |49.8%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |49.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |49.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |49.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |50.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |50.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |50.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |53.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/login/login.cpp |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/login/login.cpp |55.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/login/libydb-library-login.a |55.6%| [AR] {RESULT} $(B)/ydb/library/login/libydb-library-login.a |56.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/login/libydb-library-login.a |62.2%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |62.2%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.2%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.3%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |62.3%| [AR] {RESULT} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |62.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |62.3%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |62.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |62.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |62.3%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |62.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |62.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |62.4%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |62.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |63.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.5%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |64.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |64.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |64.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |64.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |64.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |64.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |64.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |64.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |64.3%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |64.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/login/login_ut.cpp |64.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |64.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/login/login_ut.cpp |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |64.3%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |64.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |64.3%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |64.3%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |64.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.3%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |64.4%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |64.4%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |64.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |64.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |64.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |64.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |64.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |64.4%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |64.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |64.4%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |64.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |64.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut >> TBsVDiskRepl1::ReplProxyKeepBits >> TBsVDiskExtreme::Simple3Put3GetFresh >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskGC::TGCManyVPutsDelTabletTest >> TBsVDiskExtremeHuge::Simple3Put3GetFresh >> TBsVDiskGC::GCPutKeepIntoEmptyDB >> TBsDbStat::ChaoticParallelWrite_DbStat >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh >> TBsLocalRecovery::WriteRestartReadHuge >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TBsVDiskExtreme::SimpleGetFromEmptyDB >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh >> TBsVDiskOutOfSpace::WriteUntilYellowZone >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh >> TBsLocalRecovery::StartStopNotEmptyDB >> TBsVDiskRepl3::SyncLogTest >> TBsVDiskBadBlobId::PutBlobWithBadId >> TDelayedResponsesTests::Test [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range >> TBlobStorageHullCompactDeferredQueueTest::Basic |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] >> ReadBatcher::ReadBatcher [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> 
TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskGC::GCPutBarrierVDisk0NoSync >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly >> TBsVDiskRepl3::SyncLogTest [GOOD] >> THugeMigration::ExtendMap_HugeBlobs |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyPutGet >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] >> TBsVDiskDefrag::DefragEmptyDB >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRepl1::ReplProxyData >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutBarrierSync >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction >> TBsVDiskDefrag::Defrag50PercentGarbage >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet >> 
TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize >> 
TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TMonitoring::ReregisterTest [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> ReadBatcher::Range [GOOD] |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] |64.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |64.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |64.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |64.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction |64.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |64.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |64.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |64.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart |64.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |64.6%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestGarbageCollect >> ReadOnlyVDisk::TestWrites |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestSync |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] >> JsonChangeRecord::Heartbeat [GOOD] |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> PartitionStats::Collector |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> PartitionStats::CollectorOverload [GOOD] |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] Test command err: 2025-04-10T10:52:37.601312Z :BS_VDISK_PUT ERROR: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2025-04-10T10:52:39.144368Z :BS_VDISK_OTHER ERROR: PDiskId# 1 VDISK[0:_:0:0:0]: (0) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2025-04-10T10:52:39.144436Z :BS_SKELETON ERROR: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? 
Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 >> TInterconnectTest::TestSimplePingPong |64.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> TestProtocols::TestConnectProtocol >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestBlobEvent >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> TInterconnectTest::OldFormat >> TInterconnectTest::TestBlobEvent220BytesPreSerialized |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |64.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] >> TBsVDiskRepl1::ReadOnly >> TInterconnectTest::TestNotifyUndelivered >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> TInterconnectTest::TestManyEvents >> TestProtocols::TestResolveProtocol >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |64.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestReconnect >> TInterconnectTest::TestBlobEventPreSerialized >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld |64.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |64.8%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect |64.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |64.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-04-10T10:52:51.147646Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2025-04-10T10:52:51.147713Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-04-10T10:52:51.149570Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2025-04-10T10:52:51.149629Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-04-10T10:52:51.149810Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-04-10T10:52:51.151085Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:2818 2025-04-10T10:52:51.151219Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-10T10:52:51.151665Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-04-10T10:52:51.151831Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-04-10T10:52:51.152631Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-04-10T10:52:51.153085Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35936 2025-04-10T10:52:51.153496Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-04-10T10:52:51.154430Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 1066618 ProgramStartTime: 20644577252470 Serial: 4030183067 ReceiverNodeId: 6 SenderActorId: "[5:4030183067:0]" 
SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 1066618" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1066618" AcceptUUID: "Cluster for process with id: 1066618" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\216y\345\"\323\3153\371h\302N\355o\315\271\'\213~\273\032k\2673\276\310Z\370\235\210!\323~" RequestXxhash: true RequestXdcShuffle: true 2025-04-10T10:52:51.154839Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 1066618 ProgramStartTime: 20644577252470 Serial: 4030183067 ReceiverNodeId: 6 SenderActorId: "[5:4030183067:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 1066618" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1066618" AcceptUUID: "Cluster for process with id: 1066618" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\216y\345\"\323\3153\371h\302N\355o\315\271\'\213~\273\032k\2673\276\310Z\370\235\210!\323~" RequestXxhash: true RequestXdcShuffle: true 2025-04-10T10:52:51.154893Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-10T10:52:51.155181Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-04-10T10:52:51.156013Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:15693 2025-04-10T10:52:51.156058Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-04-10T10:52:51.156099Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-04-10T10:52:51.156149Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-04-10T10:52:51.156206Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-04-10T10:52:51.156236Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 2025-04-10T10:52:51.156521Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 1066618 ProgramStartTime: 20644593884138 Serial: 1774167217 SenderActorId: "[6:1774167217:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1066618" AcceptUUID: "Cluster for process with id: 1066618" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-10T10:52:51.156892Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 1066618 ProgramStartTime: 20644593884138 Serial: 1774167217 SenderActorId: "[6:1774167217:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1066618" AcceptUUID: "Cluster for process with id: 1066618" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-04-10T10:52:51.156960Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-04-10T10:52:51.157091Z node 5 :INTERCONNECT DEBUG: ICR04 
Host: ::1, RESOLVED address 2025-04-10T10:52:51.157427Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35942 2025-04-10T10:52:51.157751Z node 6 :INTERCONNECT DEBUG: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2025-04-10T10:52:51.158249Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\216y\345\"\323\3153\371h\302N\355o\315\271\'\213~\273\032k\2673\276\310Z\370\235\210!\323~" 2025-04-10T10:52:51.158358Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-04-10T10:52:51.158705Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-04-10T10:52:51.158759Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-04-10T10:52:51.158794Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-04-10T10:52:51.158909Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048] 2025-04-10T10:52:51.158959Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:4030183067:0] peer: [6:1774167217:0] socket: 24 2025-04-10T10:52:51.159007Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2025-04-10T10:52:51.159083Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2025-04-10T10:52:51.159157Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-04-10T10:52:51.159208Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-04-10T10:52:51.159272Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-04-10T10:52:51.159332Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-04-10T10:52:51.159426Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS01 InputSession created 2025-04-10T10:52:51.160146Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-04-10T10:52:51.160216Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-10T10:52:51.160457Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-04-10T10:52:51.160719Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-04-10T10:52:51.160773Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-04-10T10:52:51.160831Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-04-10T10:52:51.160875Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-04-10T10:52:51.160930Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork 2025-04-10T10:52:51.161044Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-04-10T10:52:51.161088Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:1774167217:0] peer: [5:4030183067:0] socket: 25 2025-04-10T10:52:51.161130Z 
node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start
2025-04-10T10:52:51.161205Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor
2025-04-10T10:52:51.161248Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.161281Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1
2025-04-10T10:52:51.161316Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.161376Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS01 InputSession created
2025-04-10T10:52:51.161446Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.161522Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.161578Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-10T10:52:51.161619Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-10T10:52:51.161730Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.161793Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.161835Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called
2025-04-10T10:52:51.161861Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.161905Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.161939Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.161974Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.161992Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.162028Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-10T10:52:51.162064Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-10T10:52:51.162125Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.162168Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0
2025-04-10T10:52:51.162284Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056]
2025-04-10T10:52:51.162453Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84
2025-04-10T10:52:51.162560Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-10T10:52:51.162680Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10: ... 0:52:51.164915Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS07 socket disconnect 24 reason# EndOfStream
2025-04-10T10:52:51.164945Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# EndOfStream
2025-04-10T10:52:51.165017Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS15 start handshake
2025-04-10T10:52:51.165323Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH01 starting outgoing handshake
2025-04-10T10:52:51.165467Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-10T10:52:51.165975Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-10T10:52:51.166237Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH05 connected to peer
2025-04-10T10:52:51.166371Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH07 SendExBlock ExRequest HandshakeId: "[V\336\367\323U\334=\201\033\001\273h1\026=\246j\330)M`\337\332\003\253\261\350\246\373F\370"
2025-04-10T10:52:51.167004Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH05 connected to peer
2025-04-10T10:52:51.167083Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExRequest HandshakeId: "\032\247\331.\202;\035kFq\372pF\236\002\357\221(!Q\350t\217\36055y\013>\253)\241"
2025-04-10T10:52:51.167157Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35946
2025-04-10T10:52:51.167260Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:59998
2025-04-10T10:52:51.167528Z node 6 :INTERCONNECT DEBUG: Handshake [6:32:2060] [node 0] ICH02 starting incoming handshake
2025-04-10T10:52:51.167780Z node 5 :INTERCONNECT DEBUG: Handshake [5:33:2059] [node 0] ICH02 starting incoming handshake
2025-04-10T10:52:51.168274Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:33:2059]) from: [6:1774167217:0] for: [5:4030183067:0]
2025-04-10T10:52:51.168330Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS08 incoming handshake Self# [6:1774167217:0] Peer# [5:4030183067:0] Counter# 1 LastInputSerial# 1
2025-04-10T10:52:51.168389Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:33:2059]) is held
2025-04-10T10:52:51.168428Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:32:2060]) from: [5:4030183067:0] for: [6:1774167217:0]
2025-04-10T10:52:51.168474Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:4030183067:0] Peer# [6:1774167217:0] Counter# 1 LastInputSerial# 1
2025-04-10T10:52:51.168502Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply
2025-04-10T10:52:51.169005Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-10T10:52:51.169483Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\032\247\331.\202;\035kFq\372pF\236\002\357\221(!Q\350t\217\36055y\013>\253)\241"
2025-04-10T10:52:51.169568Z node 5 :INTERCONNECT INFO: Handshake [5:29:2058] [node 6] ICH04 handshake succeeded
2025-04-10T10:52:51.169852Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded
2025-04-10T10:52:51.169896Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:33:2059] poison: true
2025-04-10T10:52:51.169949Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2058] poison: false
2025-04-10T10:52:51.169985Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork
2025-04-10T10:52:51.170034Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:29:2058] self: [5:4030183067:0] peer: [6:1774167217:0] socket: 29
2025-04-10T10:52:51.170071Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start
2025-04-10T10:52:51.170184Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor
2025-04-10T10:52:51.170254Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.170298Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2
2025-04-10T10:52:51.170390Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.170454Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35958
2025-04-10T10:52:51.170783Z node 6 :INTERCONNECT DEBUG: Handshake [6:36:2061] [node 0] ICH02 starting incoming handshake
2025-04-10T10:52:51.171249Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS01 InputSession created
2025-04-10T10:52:51.171305Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called
2025-04-10T10:52:51.171398Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.172303Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:15693) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:32:2060] held: no
2025-04-10T10:52:51.172348Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:28:2059] poison: false
2025-04-10T10:52:51.172390Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on
2025-04-10T10:52:51.172660Z node 6 :INTERCONNECT INFO: Handshake [6:32:2060] [node 5] ICH04 handshake succeeded
2025-04-10T10:52:51.172806Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded
2025-04-10T10:52:51.172849Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:32:2060] poison: false
2025-04-10T10:52:51.172886Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork
2025-04-10T10:52:51.172922Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:32:2060] self: [6:1774167217:0] peer: [5:4030183067:0] socket: 30
2025-04-10T10:52:51.172959Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start
2025-04-10T10:52:51.173024Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor
2025-04-10T10:52:51.173057Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.173099Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages
2025-04-10T10:52:51.173147Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets
2025-04-10T10:52:51.173188Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2
2025-04-10T10:52:51.173211Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.173245Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called
2025-04-10T10:52:51.173284Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.173405Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS01 InputSession created
2025-04-10T10:52:51.173429Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.173507Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err#
2025-04-10T10:52:51.173588Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.173624Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.173738Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called
2025-04-10T10:52:51.173771Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.173810Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.173837Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.173866Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.173930Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.174011Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called
2025-04-10T10:52:51.174030Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.174056Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.174080Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-10T10:52:51.174102Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.174128Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.174193Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056]
2025-04-10T10:52:51.174261Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84
2025-04-10T10:52:51.174322Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.174348Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.174373Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1
2025-04-10T10:52:51.174419Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called
2025-04-10T10:52:51.174455Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err#
2025-04-10T10:52:51.174521Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-10T10:52:51.174569Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2
2025-04-10T10:52:51.174603Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages
2025-04-10T10:52:51.174658Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets
2025-04-10T10:52:51.174698Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2
2025-04-10T10:52:51.174800Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS01 socket: 29 reason#
2025-04-10T10:52:51.174851Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:4030183067:0]
2025-04-10T10:52:51.174896Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation
2025-04-10T10:52:51.174930Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason#
2025-04-10T10:52:51.175007Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout
>> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD]
>> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD]
>> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD]
>> AggregateStatistics::ShouldBePings
>> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD]
>> TInterconnectTest::OldNbs [GOOD]
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> AggregateStatistics::ShouldBePings [GOOD]
>> TBsVDiskRepl1::ReadOnly [GOOD]
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest
>> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD]
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest
>> TInterconnectTest::TestPingPongThroughSubChannel [GOOD]
|64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD]
Test command err:
2025-04-10T10:52:52.951520Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:52.952443Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK
2025-04-10T10:52:52.952782Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.952887Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK
2025-04-10T10:52:52.952923Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.952983Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-10T10:52:52.953083Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK
2025-04-10T10:52:52.953117Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.953211Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK
2025-04-10T10:52:52.953256Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.953337Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK
2025-04-10T10:52:52.953392Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.953449Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-10T10:52:52.953488Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.953517Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5
2025-04-10T10:52:52.953612Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK
2025-04-10T10:52:52.953662Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.953708Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-10T10:52:52.953739Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR
2025-04-10T10:52:52.953756Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.953791Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK
2025-04-10T10:52:52.953826Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.953895Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-10T10:52:52.953922Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.953944Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7
2025-04-10T10:52:52.954037Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR
2025-04-10T10:52:52.954052Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.965229Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed
2025-04-10T10:52:52.965308Z node 1 :STATISTICS ERROR: No result was received from the tablet 4
2025-04-10T10:52:52.965335Z node 1 :STATISTICS DEBUG: Tablet 4 is not local.
2025-04-10T10:52:52.965451Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed
2025-04-10T10:52:52.965488Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed
2025-04-10T10:52:52.965512Z node 1 :STATISTICS ERROR: No result was received from the tablet 2
2025-04-10T10:52:52.965534Z node 1 :STATISTICS DEBUG: Tablet 2 is not local.
2025-04-10T10:52:52.965601Z node 1 :STATISTICS ERROR: No result was received from the tablet 6
2025-04-10T10:52:52.965621Z node 1 :STATISTICS DEBUG: Tablet 6 is not local.
2025-04-10T10:52:52.965649Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:52.965747Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:52.965776Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout
2025-04-10T10:52:52.965828Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR
2025-04-10T10:52:52.965856Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.965898Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR
2025-04-10T10:52:52.965914Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.965942Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR
2025-04-10T10:52:52.965958Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD]
Test command err:
2025-04-10T10:52:52.927987Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:52.928270Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2
2025-04-10T10:52:52.928304Z node 1 :STATISTICS DEBUG: Tablet 2 is not local.
2025-04-10T10:52:52.928405Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-10T10:52:52.929096Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR
2025-04-10T10:52:52.929136Z node 1 :STATISTICS DEBUG: Tablet 1 is not local.
2025-04-10T10:52:52.929236Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-10T10:52:52.929255Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.929286Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR
2025-04-10T10:52:52.929302Z node 1 :STATISTICS DEBUG: Tablet 4 is not local.
2025-04-10T10:52:52.929379Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5
2025-04-10T10:52:52.929404Z node 1 :STATISTICS DEBUG: Tablet 5 is not local.
2025-04-10T10:52:52.929461Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6
2025-04-10T10:52:52.929551Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR
2025-04-10T10:52:52.929577Z node 1 :STATISTICS DEBUG: Tablet 7 is not local.
2025-04-10T10:52:52.929603Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8
2025-04-10T10:52:52.929620Z node 1 :STATISTICS DEBUG: Tablet 8 is not local.
2025-04-10T10:52:52.929638Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:52.929697Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR
2025-04-10T10:52:52.929713Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD]
Test command err:
2025-04-10T10:52:51.297502Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-10T10:52:51.948253Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-10T10:52:52.444958Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-10T10:52:52.447510Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD]
Test command err:
2025-04-10T10:52:52.894915Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:52.895597Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK
2025-04-10T10:52:52.895813Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.896048Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK
2025-04-10T10:52:52.896128Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.896196Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-10T10:52:52.896462Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:52.896579Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:52.896638Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK
2025-04-10T10:52:52.896662Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.896695Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-10T10:52:52.896710Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.896827Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK
2025-04-10T10:52:52.896851Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.896881Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK
2025-04-10T10:52:52.896901Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.897025Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2
2025-04-10T10:52:52.897096Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5
2025-04-10T10:52:52.897135Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:52.897203Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4
2025-04-10T10:52:52.897235Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-10T10:52:52.897313Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR
2025-04-10T10:52:52.897325Z node 3 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.897346Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR
2025-04-10T10:52:52.897359Z node 2 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.897373Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR
2025-04-10T10:52:52.897389Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.897411Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0
2025-04-10T10:52:52.897490Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-10T10:52:52.897504Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.897548Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK
2025-04-10T10:52:52.897578Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:52.897695Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3
2025-04-10T10:52:52.897762Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6
2025-04-10T10:52:52.897788Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-04-10T10:52:52.897867Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR
2025-04-10T10:52:52.897888Z node 4 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:52.898041Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4
2025-04-10T10:52:52.898090Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:52.898277Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2
2025-04-10T10:52:52.898377Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD]
Test command err:
2025-04-10T10:52:53.130192Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.131100Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK
2025-04-10T10:52:53.131345Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.131490Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-10T10:52:53.131660Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.131842Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.131907Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-10T10:52:53.131921Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.132006Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK
2025-04-10T10:52:53.132039Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.132075Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-10T10:52:53.132093Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:53.132201Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0
2025-04-10T10:52:53.132266Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-10T10:52:53.132283Z node 3 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.132319Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK
2025-04-10T10:52:53.132343Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.132411Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3
2025-04-10T10:52:53.132440Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4
2025-04-10T10:52:53.132467Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-04-10T10:52:53.132522Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR
2025-04-10T10:52:53.132538Z node 4 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.132577Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4
2025-04-10T10:52:53.142825Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.142884Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.142908Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.142926Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.153367Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-10T10:52:53.153423Z node 1 :STATISTICS INFO: Node 2 is unavailable
2025-04-10T10:52:53.153463Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:53.153556Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.153581Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-10T10:52:53.153603Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.153620Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.153751Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.153776Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive
|64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest
>> TInterconnectTest::OldNbs [GOOD]
>> TestProtocols::TestHTTPCollectedVerySlow [GOOD]
>> TestProtocols::TestHTTPRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD]
Test command err:
2025-04-10T10:52:53.030423Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.031286Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK
2025-04-10T10:52:53.031594Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.031715Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-10T10:52:53.031933Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.032139Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.032227Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-10T10:52:53.032247Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.032349Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK
2025-04-10T10:52:53.032393Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.032441Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3
2025-04-10T10:52:53.032469Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:53.032585Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0
2025-04-10T10:52:53.032660Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR
2025-04-10T10:52:53.032678Z node 3 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.032724Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK
2025-04-10T10:52:53.032755Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 }
2025-04-10T10:52:53.032830Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3
2025-04-10T10:52:53.032865Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4
2025-04-10T10:52:53.032898Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-04-10T10:52:53.032962Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR
2025-04-10T10:52:53.032981Z node 4 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.033042Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4
2025-04-10T10:52:53.043430Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.043496Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.043531Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.043552Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.054199Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-10T10:52:53.054277Z node 1 :STATISTICS INFO: Node 2 is unavailable
2025-04-10T10:52:53.054331Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:53.054438Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.054469Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout
2025-04-10T10:52:53.054493Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.054514Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-04-10T10:52:53.054653Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0
2025-04-10T10:52:53.054679Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD]
Test command err:
RandomSeed# 5803270368438395903
=== Trying to put and get a blob ===
SEND TEvPut with key [1:1:0:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #0 to read-only ===
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Write 10 blobs, expect some VDisks refuse parts but writes go through ===
SEND TEvPut with key [1:1:1:0:0:32768:0]
2025-04-10T10:52:50.396450Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:2:0:0:131072:0]
2025-04-10T10:52:50.402280Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:3:0:0:32768:0]
2025-04-10T10:52:50.408524Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:4:0:0:131072:0]
2025-04-10T10:52:50.412018Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:5:0:0:32768:0]
TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:6:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:7:0:0:32768:0]
2025-04-10T10:52:50.421568Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:8:0:0:131072:0]
2025-04-10T10:52:50.425005Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:9:0:0:32768:0]
2025-04-10T10:52:50.428655Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:10:0:0:131072:0]
2025-04-10T10:52:50.432067Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 11 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:1:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:2:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:3:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:4:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:5:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:6:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:7:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:8:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:9:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:10:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Put 2 more VDisks to read-only ===
Setting VDisk read-only to 1 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
Setting VDisk read-only to 1 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
=== Write 10 more blobs, expect errors ===
SEND TEvPut with key [1:1:11:0:0:32768:0]
2025-04-10T10:52:52.817191Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.817300Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.817408Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.818079Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [2716e9f3ee3fe7fb] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12
TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:12:0:0:131072:0]
2025-04-10T10:52:52.819419Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.819701Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.820551Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:13:0:0:32768:0]
2025-04-10T10:52:52.822367Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.822948Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.823682Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963}
SEND TEvPut with key [1:1:14:0:0:131072:0]
2025-04-10T10:52:52.824558Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.825387Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.825852Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963}
SEND TEvPut with key [1:1:15:0:0:32768:0]
2025-04-10T10:52:52.826762Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.826827Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.827409Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:16:0:0:131072:0]
2025-04-10T10:52:52.828610Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.828669Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.829385Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:17:0:0:32768:0]
2025-04-10T10:52:52.830617Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.830851Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.830948Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:18:0:0:131072:0]
2025-04-10T10:52:52.832600Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.832758Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
2025-04-10T10:52:52.832872Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:19:0:0:32768:0]
2025-04-10T10:52:52.834778Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.834947Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.835010Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvPut with key [1:1:20:0:0:131072:0]
2025-04-10T10:52:52.837201Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698]
2025-04-10T10:52:52.837326Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712]
2025-04-10T10:52:52.837456Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705]
TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988}
SEND TEvGet with key [1:1:11:0:0:32768:0]
2025-04-10T10:52:52.842727Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:699]
2025-04-10T10:52:52.842955Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:706]
2025-04-10T10:52:52.843039Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:713]
2025-04-10T10:52:52.843624Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [146f8a36ee019c6c] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29
2025-04-10T10:52:52.843727Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:706]
2025-04-10T10:52:52.843778Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:713]
TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD]
Test command err:
2025-04-10T10:52:53.138512Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.138894Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0
2025-04-10T10:52:53.242677Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1
2025-04-10T10:52:53.242792Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2
2025-04-10T10:52:53.242857Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
2025-04-10T10:52:53.243960Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR
2025-04-10T10:52:53.244002Z node 1 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.244066Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR
2025-04-10T10:52:53.244089Z node 2 :STATISTICS DEBUG: Skip EvClientConnected
2025-04-10T10:52:53.244158Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2
2025-04-10T10:52:53.244193Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReadOnly [GOOD]
Test command err:
2025-04-10T10:52:46.705301Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0]
2025-04-10T10:52:46.721560Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8179067486276324939]
2025-04-10T10:52:47.736935Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0
>> ReadOnlyVDisk::TestWrites [GOOD]
>> TestProtocols::TestHTTPRequest [GOOD]
|64.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange
|64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange
>> ReadOnlyVDisk::TestDiscover [GOOD]
|64.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange
|64.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD]
Test command err:
RandomSeed# 10081116320650227769
=== Trying to put and get a blob ===
SEND TEvPut with key [1:1:0:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #0 to read-only ===
Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-10T10:52:50.207062Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-10T10:52:50.211208Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-10T10:52:50.215409Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-10T10:52:50.217755Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-04-10T10:52:50.223585Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-04-10T10:52:50.225500Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-04-10T10:52:50.227630Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-04-10T10:52:50.229393Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 
131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-04-10T10:52:51.428164Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.428284Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:51.428418Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:51.429298Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [83ed0126a3188d65] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { 
OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-04-10T10:52:51.431068Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.431238Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:51.432331Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-04-10T10:52:51.434092Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.434905Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:51.435756Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-04-10T10:52:51.436987Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5322:712] 2025-04-10T10:52:51.438046Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.438670Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only m ... ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks 
refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-04-10T10:52:53.640673Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.640799Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-04-10T10:52:53.643485Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.644468Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-04-10T10:52:53.647731Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-04-10T10:52:53.649610Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.649686Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-04-10T10:52:53.652036Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.652132Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-04-10T10:52:53.654542Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.654612Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-04-10T10:52:53.656560Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.656736Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-04-10T10:52:53.658936Z 3 
00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.659011Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-04-10T10:52:53.661085Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.661233Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK 
Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 1793971987749627315 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-10T10:52:50.387341Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-10T10:52:50.695103Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:698] 2025-04-10T10:52:50.695965Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-04-10T10:52:50.983816Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5324:712] 2025-04-10T10:52:50.984865Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:698] 2025-04-10T10:52:50.985588Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5317:705] 2025-04-10T10:52:50.985900Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [c5c5255da347a8f5] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 
Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 
to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |64.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |64.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |64.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |65.0%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::UuidExchange[PqRunner] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> Cdc::DocApi[PqRunner] >> Cdc::KeysOnlyLog[PqRunner] >> IncrementalBackup::SimpleRestore >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite >> IncrementalBackup::SimpleBackup >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> IncrementalBackup::BackupRestore >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> DataShardReadTableSnapshots::ReadTableSplitBefore >> DataShardReadTableSnapshots::ReadTableDropColumn |65.0%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadTableSnapshots::ReadTableSnapshot >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::Boot >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::StorageBillingLabels >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::Replace |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::TwoTables >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::StorageBilling >> TSchemeShardMoveTest::MoveMigratedTable >> TSchemeShardMoveTest::MoveTableForBackup |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 1246410335147899628 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-10T10:52:50.209801Z 1 00h01m40.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-10T10:52:50.214254Z 1 00h01m40.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-10T10:52:51.245098Z 1 00h03m20.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.245922Z 2 00h03m20.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-10T10:52:51.756936Z 1 00h04m20.151095s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:51.757104Z 2 00h04m20.151095s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-10T10:52:52.163576Z 1 00h05m00.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:52.164941Z 2 00h05m00.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:52.166344Z 3 00h05m00.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:52.166726Z 1 00h05m00.160512s :BS_PROXY_PUT ERROR: [25d67e07dc127f44] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { 
OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-04-10T10:52:52.665138Z 1 00h06m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:52.665269Z 2 00h06m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:52.665312Z 3 00h06m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-10T10:52:53.299118Z 1 00h07m40.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:53.299260Z 2 00h07m40.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.299301Z 3 00h07m40.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.299337Z 4 00h07m40.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-10T10:52:53.512784Z 1 00h08m20.252119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:53.512911Z 2 00h08m20.252119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.512944Z 3 00h08m20.252119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.512972Z 4 00h08m20.252119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:53.513000Z 5 00h08m20.252119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-10T10:52:53.689742Z 1 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:53.689915Z 2 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.689972Z 3 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.690021Z 4 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:53.690069Z 5 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:53.690115Z 6 00h09m00.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] === Putting VDisk #6 to read-only === Setting VDisk 
read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-04-10T10:52:53.848006Z 1 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:698] 2025-04-10T10:52:53.848127Z 2 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:53.848165Z 3 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:53.848196Z 4 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:53.848226Z 5 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:53.848259Z 6 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:53.848290Z 7 00h09m40.302119s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-10T10:52:54.031147Z 2 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:705] 2025-04-10T10:52:54.031242Z 3 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:54.031296Z 4 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:54.031352Z 5 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:54.031406Z 6 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:54.031464Z 7 00h10m20.351607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-04-10T10:52:54.289902Z 3 00h11m00.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:712] 2025-04-10T10:52:54.289983Z 4 00h11m00.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:54.290032Z 5 00h11m00.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:54.290080Z 6 00h11m00.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:54.290124Z 7 00h11m00.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-04-10T10:52:54.599209Z 4 00h11m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5329:719] 2025-04-10T10:52:54.599325Z 5 00h11m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:54.599388Z 6 00h11m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:54.599451Z 7 00h11m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-10T10:52:54.927619Z 5 00h12m20.402631s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5336:726] 2025-04-10T10:52:54.927712Z 6 00h12m20.402631s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:54.927770Z 7 00h12m20.402631s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-10T10:52:55.756277Z 6 00h14m00.451607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5343:733] 2025-04-10T10:52:55.756335Z 7 00h14m00.451607s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-10T10:52:56.127645Z 7 00h14m40.460512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5350:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> TSchemeShardMoveTest::Reject >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::ReplPerf |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] |65.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |65.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |65.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> 
TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted |65.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |65.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |65.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> ReadOnlyVDisk::TestReads [GOOD] >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.445947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.446063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.446172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.446216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.446269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.446307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.446383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.446453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.446968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.511043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.511097Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T10:52:58.526457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.526706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.526863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.533303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.533523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.534209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.534532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.536466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.537303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.537340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.537459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.537507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.537534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.537641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.542056Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.649178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.649387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.649602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.649835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.649891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.651971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.652108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.652271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.652323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.652355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.652398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.653990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.655555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.655596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.655631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.655682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.659462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.661150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.661291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.662219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.662338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.662380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.662690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.662739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.662882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.662959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.664810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.664854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.664996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.665033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.665186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.665219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.665285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.665323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.665391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.665437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.665496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.665545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.665570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.665594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.667121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.667200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.667230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SchemeShard: 72075186234409549 Status: StatusSuccess 2025-04-10T10:52:59.013449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvInitTenantSchemeShardResult operationId: 104:0 at schemeshard: 72057594046678944 2025-04-10T10:52:59.013516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186234409549 shardIdx: 72057594046678944:5 at schemeshard: 72057594046678944 2025-04-10T10:52:59.017187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409551, partId: 0 2025-04-10T10:52:59.017273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409551 2025-04-10T10:52:59.017307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-10T10:52:59.017354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409551 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-04-10T10:52:59.017382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-04-10T10:52:59.020461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.020981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.022971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.023237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.023294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.023346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.023402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.023544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:59.025771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T10:52:59.025886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-10T10:52:59.026283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:59.026415Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.026473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.026716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T10:52:59.026758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.026877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:59.026990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:619:2546], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T10:52:59.028860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:59.028898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:52:59.029100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:59.029158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:52:59.029393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.029448Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-10T10:52:59.029512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-10T10:52:59.030293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:52:59.030390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:52:59.030429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:52:59.030470Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-10T10:52:59.030524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-10T10:52:59.030622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T10:52:59.033783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.033838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T10:52:59.033930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:52:59.033973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.034022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:52:59.034072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.034118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T10:52:59.034182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.034234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:52:59.034274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:52:59.034476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:59.034909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:52:59.036851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:52:59.036913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:52:59.037463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:52:59.037595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:52:59.037654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:775:2655] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T10:52:59.040899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:59.041103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-04-10T10:52:59.041174Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-04-10T10:52:59.041332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, 
explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944
2025-04-10T10:52:59.041379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944
2025-04-10T10:52:59.043716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:52:59.043875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0
TestModificationResult got TxId: 105, wait until txId: 105
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:52:58.467540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:52:58.467626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.467680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:52:58.467722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:52:58.467767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:52:58.467795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:52:58.467869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.467955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:52:58.468297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:52:58.552277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:52:58.552335Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:52:58.567412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:52:58.567657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T10:52:58.567847Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.574626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.574914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.575620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.576024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.577993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.579251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.579308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.579484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.579533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.579588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.579809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.586498Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.710775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.710988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.711189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.711452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.711509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.714131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.714320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.714536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.714598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-10T10:52:58.714640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.714675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.716855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.716914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.716968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.718722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.718810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.718858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.718945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.722364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.724116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.724276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.725067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.725178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.725212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.725425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.725463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.725612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.725671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.727419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.727474Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.727610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.727637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.727785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.727834Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.727921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.727947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.727987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.728011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.728040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.728067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.728095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.728130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.728173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.728204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.728230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.729756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.729829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.729856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
and all the parts is done, operation id: 103:1 2025-04-10T10:52:59.175542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-10T10:52:59.175584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T10:52:59.175605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:52:59.175915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:52:59.175983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:52:59.176043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T10:52:59.176081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:52:59.176110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T10:52:59.178668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:52:59.178726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:502:2461] 2025-04-10T10:52:59.178826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-10T10:52:59.179379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.179559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 184us result status StatusPathDoesNotExist 2025-04-10T10:52:59.179738Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:52:59.180161Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.180353Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 
72057594046678944 describe path "/MyRoot/TableMove1" took 194us result status StatusSuccess 2025-04-10T10:52:59.180731Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.181396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.181566Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 147us result status StatusPathDoesNotExist 2025-04-10T10:52:59.181682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:52:59.182054Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.182230Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 164us result status StatusSuccess 2025-04-10T10:52:59.182575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.183119Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.183232Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 116us result status StatusSuccess 2025-04-10T10:52:59.183542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 
SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:52:58.401005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:52:58.401105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.401164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:52:58.401221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:52:58.401266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:52:58.401310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:52:58.401377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.401455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:52:58.401784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:52:58.487189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:52:58.487264Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.503785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.504004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.504155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.513015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.513253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.513965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.514313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.516306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.517658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.517718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.517916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.517961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.518003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.518181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.524660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.651924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.652071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.652213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.652419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.652469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.654370Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.654436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.654461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.655877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.655922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.655961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.657381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.657418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.657456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.657515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.660509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.662029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.662200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.662867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.662946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.662976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.663163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.663197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.663306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.663361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.664772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.664802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.664929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.664959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.665111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.665143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.665325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.665346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.665419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.665486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.665510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.665531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.665573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.665605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.665638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.667618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.667720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.667754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tate::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.084732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.084863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:59.086737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T10:52:59.086846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-10T10:52:59.087126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:59.087226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.087267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.087490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T10:52:59.087537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-10T10:52:59.087634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:59.087731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:619:2546], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T10:52:59.090161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:59.090199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:52:59.090356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:59.090400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at 
schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:52:59.090618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.090663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-10T10:52:59.090721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-10T10:52:59.091280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:52:59.091376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:52:59.091402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:52:59.091437Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-10T10:52:59.091483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-10T10:52:59.091576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T10:52:59.095047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.095126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T10:52:59.095256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:52:59.095302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.095369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:52:59.095448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.095506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T10:52:59.095556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:52:59.095602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:52:59.095647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:52:59.095871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:59.096526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:52:59.098480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:52:59.098530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:52:59.099060Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-04-10T10:52:59.099195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:52:59.099262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:775:2655] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T10:52:59.102582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:59.102735Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-04-10T10:52:59.102785Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-04-10T10:52:59.102958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-10T10:52:59.103023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-10T10:52:59.104956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.105073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-10T10:52:59.107675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:59.107806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-04-10T10:52:59.107836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-04-10T10:52:59.107978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-10T10:52:59.108017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-10T10:52:59.109957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.110110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.451602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.451708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.451747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.451775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.451812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.451841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.451893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.451951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.452256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.527197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.527237Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T10:52:58.538933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.539183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.539329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.545181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.545397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.546005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.546283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.547920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.548766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.548808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.548936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.548967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.549004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.549134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.554100Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.654484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.654635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.654980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.655021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.656725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.656890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.657070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.657124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.657165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.657191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.659127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.659209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.659270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.661104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.661162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.661222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.661286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.664239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.666054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.666227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.667393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.667538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.667598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.667897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.667945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.668119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.668239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.670066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.670112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.670305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.670343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.670607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.670702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.670790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.670825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.670864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.670914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.670949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.670982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.671011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.671036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.671091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.671131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.671159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.673032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.673141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.673179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tor: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T10:52:59.173377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:52:59.173457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:622:2548], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:52:59.173563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549 2025-04-10T10:52:59.173591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1] 2025-04-10T10:52:59.173683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-04-10T10:52:59.173719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:717:2617], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-04-10T10:52:59.174509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-04-10T10:52:59.174751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-04-10T10:52:59.174843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-10T10:52:59.174940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T10:52:59.174980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:52:59.175092Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-04-10T10:52:59.175170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-04-10T10:52:59.175197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-10T10:52:59.175287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-10T10:52:59.175332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-04-10T10:52:59.175362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 
2025-04-10T10:52:59.176571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.176711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.176747Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-10T10:52:59.176846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:52:59.176871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.177989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:52:59.178022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.178072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-10T10:52:59.178111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.178140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T10:52:59.178204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-10T10:52:59.178255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T10:52:59.179807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T10:52:59.179854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T10:52:59.180314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T10:52:59.180398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T10:52:59.180443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:853:2733] TestWaitNotification: OK eventTxId 106 2025-04-10T10:52:59.181169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.181364Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 219us result status StatusSuccess 2025-04-10T10:52:59.181717Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 
Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.182345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-04-10T10:52:59.182498Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 139us result status StatusSuccess 2025-04-10T10:52:59.182703Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-04-10T10:52:59.183224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:52:59.183354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 139us result status StatusSuccess 2025-04-10T10:52:59.183578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:59.183902Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> TSchemeShardMoveTest::Replace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 2283567404663483923 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === 
SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.287385Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.287512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.287571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.287618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.287664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.287699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.287771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.287867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.288230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.378830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.378891Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.401086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.401374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.401585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.410193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.410437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.411332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.411819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.415349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.416989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.417090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.417418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.417488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.417531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.417696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.437259Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.581526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.581771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.582042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.582300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.582356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.584535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.584672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.584864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.584919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.584966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.585006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.586893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.586942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.586977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.588458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.588499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.588542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.588594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.592075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.593698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.593865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.594782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.594893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.594935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.595175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.595221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.595397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.595520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.597378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.597589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.597898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597949Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.598041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.598076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.598114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.598178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.598228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.598271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.598305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.598338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-04-10T10:52:58.598403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.598440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.598477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.607103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.607236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.607277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.252262Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-10T10:52:59.252366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:52:59.252410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.252445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:52:59.252473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.252509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-10T10:52:59.252561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:52:59.252622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T10:52:59.252654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-10T10:52:59.252796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:59.254345Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2025-04-10T10:52:59.254595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-10T10:52:59.254870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:52:59.256166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-10T10:52:59.259036Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-04-10T10:52:59.289357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-10T10:52:59.289789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-10T10:52:59.291416Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547
Forgetting tablet 72075186234409548
2025-04-10T10:52:59.293311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-04-10T10:52:59.293578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
Forgetting tablet 72075186234409547
2025-04-10T10:52:59.297858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T10:52:59.297924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-10T10:52:59.298081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-10T10:52:59.299123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-04-10T10:52:59.299383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T10:52:59.299437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-10T10:52:59.299523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:52:59.299823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-04-10T10:52:59.303932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-04-10T10:52:59.304008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546
2025-04-10T10:52:59.304211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7
2025-04-10T10:52:59.304245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548
2025-04-10T10:52:59.304608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6
2025-04-10T10:52:59.304665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547
2025-04-10T10:52:59.309012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-10T10:52:59.309169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-04-10T10:52:59.309602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-04-10T10:52:59.309663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-04-10T10:52:59.310334Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-04-10T10:52:59.310497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-04-10T10:52:59.310551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:920:2783]
TestWaitNotification: OK eventTxId 106
2025-04-10T10:52:59.311452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.311747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 342us result status StatusPathDoesNotExist
2025-04-10T10:52:59.311981Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T10:52:59.312614Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.312874Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 244us result status StatusPathDoesNotExist
2025-04-10T10:52:59.313058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T10:52:59.313650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.313863Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 222us result status StatusSuccess
2025-04-10T10:52:59.314828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
wait until 72075186233409550 is deleted
wait until 72075186233409551 is deleted
wait until 72075186233409552 is deleted
wait until 72075186233409553 is deleted
2025-04-10T10:52:59.315616Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550
2025-04-10T10:52:59.315747Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551
2025-04-10T10:52:59.315793Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552
2025-04-10T10:52:59.315844Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553
Deleted tabletId 72075186233409550
Deleted tabletId 72075186233409551
Deleted tabletId 72075186233409552
Deleted tabletId 72075186233409553
>> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:52:58.474727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:52:58.474813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.474864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:52:58.474902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:52:58.474940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:52:58.474966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:52:58.475036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.475119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:52:58.475403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:52:58.558800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:52:58.558852Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:52:58.574111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:52:58.574411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T10:52:58.574560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T10:52:58.581401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T10:52:58.581655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:52:58.582343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.582683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T10:52:58.584410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.585676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.585744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.585920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T10:52:58.585962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.586003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T10:52:58.586138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.592217Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T10:52:58.722306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:52:58.722514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.722703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T10:52:58.722915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T10:52:58.722963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.725209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.725343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T10:52:58.725537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.725614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T10:52:58.725650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:52:58.725695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:52:58.727293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.727346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:52:58.727377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:52:58.728779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.728815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.728848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.728897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.732580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:52:58.734370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:52:58.734502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:52:58.735161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.735259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:52:58.735296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.735491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:52:58.735538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.735643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:52:58.735698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:52:58.737327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.737369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.737524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.737570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:52:58.737787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.737827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:52:58.737913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.737946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.737978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.738027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.738070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:52:58.738103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.738133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:52:58.738182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:52:58.738235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:52:58.738268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:52:58.738296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:52:58.740365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.740464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.740499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ecute, operationId: 106:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.470643Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState
2025-04-10T10:52:59.470745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-04-10T10:52:59.470801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-04-10T10:52:59.470865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-04-10T10:52:59.470900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-04-10T10:52:59.470941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true
2025-04-10T10:52:59.470983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-04-10T10:52:59.471026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0
2025-04-10T10:52:59.471065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0
2025-04-10T10:52:59.471225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-04-10T10:52:59.471961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-04-10T10:52:59.473424Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549
2025-04-10T10:52:59.490406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:52:59.494822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-04-10T10:52:59.495248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
Forgetting tablet 72075186234409549
2025-04-10T10:52:59.496969Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551
2025-04-10T10:52:59.497231Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550
2025-04-10T10:52:59.497531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944
2025-04-10T10:52:59.497808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-10T10:52:59.499021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-04-10T10:52:59.499215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
Forgetting tablet 72075186234409551
Forgetting tablet 72075186234409550
2025-04-10T10:52:59.499850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-04-10T10:52:59.500035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T10:52:59.500094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-10T10:52:59.500211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-10T10:52:59.500691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T10:52:59.500752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-10T10:52:59.500819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:52:59.503605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-04-10T10:52:59.503666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549
2025-04-10T10:52:59.506547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7
2025-04-10T10:52:59.506600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551
2025-04-10T10:52:59.506705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6
2025-04-10T10:52:59.506765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550
2025-04-10T10:52:59.506968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-10T10:52:59.507045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-04-10T10:52:59.507365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-04-10T10:52:59.507406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-04-10T10:52:59.507946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-04-10T10:52:59.508064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-04-10T10:52:59.508126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:945:2807]
TestWaitNotification: OK eventTxId 106
2025-04-10T10:52:59.508753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.508987Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 277us result status StatusPathDoesNotExist
2025-04-10T10:52:59.509183Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T10:52:59.509889Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.510092Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 192us result status StatusPathDoesNotExist
2025-04-10T10:52:59.510251Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T10:52:59.510741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T10:52:59.510891Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 162us result status StatusSuccess
2025-04-10T10:52:59.511325Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
wait until 72075186234409549 is deleted
wait until 72075186234409550 is deleted
wait until 72075186234409551 is deleted
wait until 72075186234409552 is deleted
2025-04-10T10:52:59.511908Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549
2025-04-10T10:52:59.511995Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550
2025-04-10T10:52:59.512035Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551
2025-04-10T10:52:59.512085Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552
Deleted tabletId 72075186234409549
Deleted tabletId 72075186234409550
Deleted tabletId 72075186234409551
Deleted tabletId 72075186234409552
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:52:58.517708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:52:58.517810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.517868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:52:58.517909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:52:58.517952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:52:58.517983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:52:58.518075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.518199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:52:58.518548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:52:58.605200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:52:58.605263Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:52:58.620758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:52:58.621038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T10:52:58.621190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T10:52:58.629095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T10:52:58.629368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:52:58.630024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.630456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T10:52:58.632304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.633558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.633619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.633828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T10:52:58.633882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.633923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T10:52:58.634115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.640994Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T10:52:58.765187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:52:58.765435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.765649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T10:52:58.765825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T10:52:58.765870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.767982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.768092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T10:52:58.768245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.768288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T10:52:58.768318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:52:58.768345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:52:58.769745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.769786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:52:58.769824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:52:58.771928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.772101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.772136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.772218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.775380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:52:58.777133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:52:58.777297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:52:58.778195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.778339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:52:58.778396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.778682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:52:58.778725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.778866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:52:58.778933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:52:58.780918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.780960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.781108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.781134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:52:58.781305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.781350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:52:58.781445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.781492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.781520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.781541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.781566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:52:58.781592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.781614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:52:58.781655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:52:58.781700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:52:58.781728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:52:58.781750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:52:58.783631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.783754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.783795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... RelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0
2025-04-10T10:52:59.600340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 156000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 278 } }
2025-04-10T10:52:59.600459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 156000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 278 } }
2025-04-10T10:52:59.600506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944
2025-04-10T10:52:59.600601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.600651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128
2025-04-10T10:52:59.602695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.602904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.602965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:52:59.603238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1
2025-04-10T10:52:59.603413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:52:59.605119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816
2025-04-10T10:52:59.605261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 105 at step: 5000004
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004
FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004
2025-04-10T10:52:59.605890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:59.606014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:52:59.606099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944
2025-04-10T10:52:59.606505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129
2025-04-10T10:52:59.606655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004
2025-04-10T10:52:59.614475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:59.614536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-10T10:52:59.614792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:59.614840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3
2025-04-10T10:52:59.615460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.615534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944
FAKE_COORDINATOR: Erasing txId 105
2025-04-10T10:52:59.617099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105
2025-04-10T10:52:59.617357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105
2025-04-10T10:52:59.617404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105
2025-04-10T10:52:59.617455Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8
2025-04-10T10:52:59.617536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-04-10T10:52:59.617635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true
2025-04-10T10:52:59.618630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } }
2025-04-10T10:52:59.618675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0
2025-04-10T10:52:59.618828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } }
2025-04-10T10:52:59.618961Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } }
2025-04-10T10:52:59.620149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 673 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2
2025-04-10T10:52:59.620201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0
2025-04-10T10:52:59.620369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 673 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2
2025-04-10T10:52:59.620421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-04-10T10:52:59.620516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 673 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2
2025-04-10T10:52:59.620589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:59.620633Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.620684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944
2025-04-10T10:52:59.620734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240
2025-04-10T10:52:59.623028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
2025-04-10T10:52:59.625330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.625462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.625790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944
2025-04-10T10:52:59.625857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState
2025-04-10T10:52:59.625986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-04-10T10:52:59.626029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-04-10T10:52:59.626079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-04-10T10:52:59.626114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-04-10T10:52:59.626173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true
2025-04-10T10:52:59.626259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 105
2025-04-10T10:52:59.626311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-04-10T10:52:59.626359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0
2025-04-10T10:52:59.626396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0
2025-04-10T10:52:59.626527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-10T10:52:59.628554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-04-10T10:52:59.628610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:840:2759]
TestWaitNotification: OK eventTxId 105
>> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD]
>> Cdc::KeysOnlyLog[PqRunner] [GOOD]
>> Cdc::KeysOnlyLog[YdsRunner]
>> TSchemeShardMoveTest::MoveTableWithSequence [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:52:58.323588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:52:58.323676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.323715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:52:58.323742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:52:58.323775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:52:58.323811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:52:58.324002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:52:58.324242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:52:58.325019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:52:58.393094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:52:58.393155Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:52:58.410491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:52:58.410836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T10:52:58.411037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T10:52:58.454863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T10:52:58.455111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:52:58.455929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.456593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T10:52:58.458561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.459831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.459918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.460193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T10:52:58.460265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.460320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T10:52:58.460519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.468215Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T10:52:58.588357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:52:58.588558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.588754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T10:52:58.588935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T10:52:58.588981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.591264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.591373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T10:52:58.591515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.591569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T10:52:58.591605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:52:58.591642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:52:58.593265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.593316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:52:58.593347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:52:58.594664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.594745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.594785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.594829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.597827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:52:58.599289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:52:58.599429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:52:58.600255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:58.600379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:52:58.600419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.600626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:52:58.600675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:52:58.600817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:52:58.600883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:52:58.602674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:52:58.602719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:52:58.602865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:52:58.602911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:52:58.603124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:52:58.603161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:52:58.603245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.603274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.603307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:52:58.603339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.603388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:52:58.603418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:52:58.603458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:52:58.603487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:52:58.603558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:52:58.603587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:52:58.603629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:52:58.605253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.605348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:52:58.605375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ly execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1001 } }
2025-04-10T10:52:59.781521Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1001 } }
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004
FAKE_COORDINATOR: Erasing txId 103
2025-04-10T10:52:59.782817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2
2025-04-10T10:52:59.782868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2
2025-04-10T10:52:59.782997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2
2025-04-10T10:52:59.783060Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-04-10T10:52:59.783160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2
2025-04-10T10:52:59.783227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-04-10T10:52:59.783278Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944
2025-04-10T10:52:59.783328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944
2025-04-10T10:52:59.783377Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240
2025-04-10T10:52:59.784159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2
2025-04-10T10:52:59.784194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0
2025-04-10T10:52:59.784318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2
2025-04-10T10:52:59.784359Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply
TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:52:59.784426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-10T10:52:59.784470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:59.784498Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.784527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:52:59.784556Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-10T10:52:59.788470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-10T10:52:59.788764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.791500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-10T10:52:59.791875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-10T10:52:59.791933Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:59.791986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T10:52:59.792110Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-04-10T10:52:59.792150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-10T10:52:59.792194Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-04-10T10:52:59.792230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-10T10:52:59.792270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-04-10T10:52:59.792612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.792739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:52:59.792780Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:59.792817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T10:52:59.792883Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-04-10T10:52:59.792910Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-10T10:52:59.792940Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-04-10T10:52:59.792966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-10T10:52:59.792991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-04-10T10:52:59.793033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-10T10:52:59.793079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:52:59.793117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T10:52:59.793241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T10:52:59.793290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:52:59.793332Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-10T10:52:59.793354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-10T10:52:59.793387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T10:52:59.793411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:52:59.793435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-10T10:52:59.793458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-10T10:52:59.793530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T10:52:59.793556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T10:52:59.794396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:52:59.794452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T10:52:59.794526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:52:59.794587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:52:59.794626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:52:59.794667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:52:59.794708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:59.800960Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T10:52:59.802112Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-04-10T10:52:59.849424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:52:59.849492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:52:59.849878Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:52:59.849972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:52:59.850011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:670:2555] TestWaitNotification: OK eventTxId 103 >> TSchemeShardMoveTest::Index [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.280971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.281044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.281100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.281130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.281162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.281181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.281231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.281300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.281549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.364596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.364775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.379889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-10T10:52:58.380171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.380334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.387091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.387348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.387945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.388315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.390127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.391334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.391389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.391546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.391594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.391630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.391818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.397595Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.543759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.544028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.544269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.544548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.544611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.546543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.546642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.546780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.546818Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.546841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.546866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.548225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.548263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.548298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.549403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.549497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.549537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.549579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.557761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.559354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.559516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.560432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.560539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.560574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.560774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.560812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.560945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.560996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.562594Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.562637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.562746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.562781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.562965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.563025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.563120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.563144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.563175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.563209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.563241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.563287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.563322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.563367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.563423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.563461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.563496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.565291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.565384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.565421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
208388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T10:53:00.208545Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-10T10:53:00.232600Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2025-04-10T10:53:00.256871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.257149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-10T10:53:00.257599Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2025-04-10T10:53:00.263801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T10:53:00.264008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 Forgetting tablet 72075186233409547 2025-04-10T10:53:00.264836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T10:53:00.264953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-10T10:53:00.265229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:53:00.265264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-10T10:53:00.265309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-04-10T10:53:00.265336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-04-10T10:53:00.265357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-04-10T10:53:00.265374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-04-10T10:53:00.265392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-10T10:53:00.265408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-10T10:53:00.265426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-04-10T10:53:00.265443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-04-10T10:53:00.265464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:53:00.267552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T10:53:00.267621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-04-10T10:53:00.269413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T10:53:00.269460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-04-10T10:53:00.269580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T10:53:00.269623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T10:53:00.269808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-10T10:53:00.270322Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T10:53:00.270400Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-10T10:53:00.270451Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-10T10:53:00.271071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.271269Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 246us result status StatusPathDoesNotExist 2025-04-10T10:53:00.271419Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:53:00.271995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-10T10:53:00.272222Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 244us result status StatusSuccess 2025-04-10T10:53:00.272733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.273695Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.273853Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2025-04-10T10:53:00.274276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.397730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.397836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.397876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.397906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.397946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.397981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.398033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.398099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.398393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-10T10:52:58.486478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.486534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.501367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.501638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.501785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.508038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.508262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.508823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.509180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.510968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.512123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.512183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.512338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.512381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.512413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.512595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.518909Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.632295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.632495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.632694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.632925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.632978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.635063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.635200Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.635360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.635419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.635452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.635485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.637172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.637242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.637288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.638831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.638924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.638977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.639030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.642851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.646812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.646981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.648005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.648134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.648184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.648430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.648482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.648656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-10T10:52:58.648730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.650698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.650730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.650836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.650870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.651060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.651109Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.651185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.651207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.651234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.651257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.651308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.651344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.651391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.651424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.651476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.651507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.651527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.653726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.653830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.653866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
thId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:447:2408], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:53:00.242290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-10T10:53:00.242404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:53:00.242577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-10T10:53:00.242612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-10T10:53:00.242648Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-10T10:53:00.242905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.242996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.243042Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-10T10:53:00.243089Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-10T10:53:00.244681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T10:53:00.244733Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T10:53:00.244829Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T10:53:00.244864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T10:53:00.244906Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T10:53:00.244937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 
2025-04-10T10:53:00.244982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-10T10:53:00.245048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:124:2150] message: TxId: 281474976710760 2025-04-10T10:53:00.245088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T10:53:00.245124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-10T10:53:00.245158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T10:53:00.245217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:53:00.246704Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T10:53:00.246761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T10:53:00.246823Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-10T10:53:00.246904Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:447:2408], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:53:00.248315Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T10:53:00.248401Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:447:2408], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 
}} 2025-04-10T10:53:00.248460Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T10:53:00.249972Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T10:53:00.250058Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:447:2408], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:53:00.250106Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T10:53:00.250295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:53:00.250348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:627:2576] TestWaitNotification: OK eventTxId 102 2025-04-10T10:53:00.250990Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.251265Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 317us result status StatusSuccess 2025-04-10T10:53:00.251770Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady 
KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.1%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> Donor::SlayAfterWiping >> Balancing::Balancing_OneTopic_TopicApi >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.462514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.462620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.462658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.462696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.462738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.462778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.462834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.462922Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.463285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.537746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.537807Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.551839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.552008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.552123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.558522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.558744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.559264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.559541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.560817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.561786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.561832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.561986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.562026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.562071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.562241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.567756Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.670569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.670812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.671038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.671274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.671333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T10:52:58.673922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.674044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.674251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.674307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.674347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.674379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.676366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.676463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.676499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.678398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.678516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.678569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.678626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.682392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.684256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.684458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.685456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.685587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.685630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.685890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-10T10:52:58.685951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.686124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.686216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.688273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.688324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.688446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.688478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.688667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.688804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.688877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.688905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.688932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.688962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.688993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.689021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.689059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.689086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.689137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.689162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.689184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.690852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.690927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.690955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
t: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.401749Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.401869Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 143us result status StatusSuccess 2025-04-10T10:53:00.402192Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.402575Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.402797Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 229us result status StatusSuccess 2025-04-10T10:53:00.403313Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-10T10:53:00.403768Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.403938Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 182us result status StatusSuccess 2025-04-10T10:53:00.404389Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.642450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.642573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.642609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.642640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.642683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.642721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.642765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.642852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.643123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.729535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.729582Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-04-10T10:52:58.745495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.745773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.745927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.755492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.755688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.756141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.756459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.757916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.759211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.759261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.759436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.759493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.759537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.759683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.764759Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.882448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.882622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.882791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.882953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.883005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.884832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.884953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.885086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.885128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.885155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.885177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.886823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.886891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.886935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.888540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.888629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.888678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.888729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.892421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.894103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.894339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.895330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.895454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.895499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.895760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.895815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.895989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.896084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.897907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.897949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.898122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.898191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.898430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.898472Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.898561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.898604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.898641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.898670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.898706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.898742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.898790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.898827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.898886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.898921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.898954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.908055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.908188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.908241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2025-04-10T10:53:00.532537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:53:00.532603Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-04-10T10:53:00.535140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.535332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.535426Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:53:00.535488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T10:53:00.535604Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-10T10:53:00.535636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T10:53:00.535675Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-10T10:53:00.535704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T10:53:00.535733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-10T10:53:00.535817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:373:2341] message: TxId: 102 2025-04-10T10:53:00.535879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T10:53:00.535931Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:53:00.535971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:53:00.536126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T10:53:00.536170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:53:00.536207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T10:53:00.536250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T10:53:00.536303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T10:53:00.536329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:53:00.536804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:53:00.536860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:53:00.536941Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:53:00.536990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:53:00.537029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T10:53:00.543247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:53:00.543323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:473:2428] 2025-04-10T10:53:00.543785Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-10T10:53:00.548943Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.549239Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 349us result status StatusPathDoesNotExist 2025-04-10T10:53:00.549441Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:53:00.549933Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.550181Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 226us result status StatusPathDoesNotExist 2025-04-10T10:53:00.550342Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2025-04-10T10:53:00.550809Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.551059Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 282us result status StatusSuccess 2025-04-10T10:53:00.551538Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.552122Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.552316Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 220us result status StatusSuccess 2025-04-10T10:53:00.552640Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicAutoscaling::PartitionSplit_PQv1 >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> Donor::SkipBadDonor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.501994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.502088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.502129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.502186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.502231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.502277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.502348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.502433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.502725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.589815Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.589874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.605560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.605747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.605881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.611897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.612123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.612661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.613042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.614975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.616332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.616394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.616603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.616662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.616705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.616891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.624438Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.757822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.758084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.758333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.758612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.758671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.760938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.761066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.761230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.761288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.761327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.761368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.763345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.763417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.763466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.765031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.765258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.765305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.765356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.769248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.770975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.771153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.772142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.772271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.772327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.772597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.772651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.772824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.772941Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.774958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.775004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.775185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.775224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.775471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.775519Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.775755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.775795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.775837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.775870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.775910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.775950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.776003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.776041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.776099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.776144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.776190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.778660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.778778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.778837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
p: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 711 } } 2025-04-10T10:53:00.744528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T10:53:00.744650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 711 } } 2025-04-10T10:53:00.744734Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 711 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:53:00.746636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.746690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-04-10T10:53:00.746837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.746886Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:53:00.746964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.747036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747077Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747160Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-04-10T10:53:00.747486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.747511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 
2025-04-10T10:53:00.747620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.747658Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:53:00.747715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:53:00.747759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747788Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:53:00.747842Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:53:00.749252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T10:53:00.749685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:53:00.753543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T10:53:00.753933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T10:53:00.754186Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:53:00.754230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T10:53:00.754325Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-10T10:53:00.754355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-10T10:53:00.754386Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-10T10:53:00.754419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-10T10:53:00.754465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-04-10T10:53:00.754684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:53:00.754882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:53:00.754905Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:53:00.754931Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T10:53:00.754972Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-10T10:53:00.754994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-10T10:53:00.755022Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-10T10:53:00.755046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-10T10:53:00.755071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-04-10T10:53:00.755140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:376:2344] message: TxId: 102 2025-04-10T10:53:00.755183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-10T10:53:00.755225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:53:00.755254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:53:00.755369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T10:53:00.755408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:53:00.755445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T10:53:00.755470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T10:53:00.755501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T10:53:00.755518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:53:00.755534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-10T10:53:00.755549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-10T10:53:00.755579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T10:53:00.755598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T10:53:00.756081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:53:00.756140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T10:53:00.756209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:53:00.756242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-04-10T10:53:00.756266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:53:00.756291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:53:00.756313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:53:00.759044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:53:00.759098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:473:2434] 2025-04-10T10:53:00.759579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> Donor::MultipleEvicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.241764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.241905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.241988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.242043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.242112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.242193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.242321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.242448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.242813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.327954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.328035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.341890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.342109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.342265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-10T10:52:58.365611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.365920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.366681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.367072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.370589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.371598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.371655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.371837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.371889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.371929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.372077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.376954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.504479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.504672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.504872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.505050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.505088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.506950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.507060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.507196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.507243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.507280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.507318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.508653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.508704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.508732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.510079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.510190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.510230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.510285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.513090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.514499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.514644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.515426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.515556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.515626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.515912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.515981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.516146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.516222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.517985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.518036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.518188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.518220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.518407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.518465Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.518566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.518597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.518631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.518658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.518685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.518715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.518752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.518779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.518831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.518860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.518885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.520596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.520672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.520702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.696594Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.696715Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 131us result status StatusSuccess 2025-04-10T10:53:00.696972Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:53:00.697410Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.697604Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 201us result status StatusSuccess 2025-04-10T10:53:00.698117Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-10T10:53:00.698606Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:53:00.698759Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 173us result status StatusSuccess 2025-04-10T10:53:00.699189Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Donor::ContinueWithFaultyDonor |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TSchemeShardMoveTest::OneTable [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.697558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.697631Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.697671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.697700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.697739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.697766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.697814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.697872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.698074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.765126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.765208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.781820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.782104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.782320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.789915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.790138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.790913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.791297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.793331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.794600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.794666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.794875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.794930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.794984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.795192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.801848Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.941851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.942055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.942237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.942438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.942489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.944433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.944552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.944720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.944824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.944864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.944898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.946599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.946671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.946708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.948231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.948317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.948379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.948438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.952183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.953955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.954124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.955090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.955204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.955250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.955624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.955688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.955845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.955919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.957894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.957936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.958087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.958120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.958345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.958410Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.958500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.958526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.958568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.958596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.958622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.958650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.958689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.958719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.958765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.958793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-10T10:52:58.958821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.960451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.960531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.960558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0T10:53:01.909964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T10:53:01.909995Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-04-10T10:53:01.910059Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T10:53:01.910185Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-04-10T10:53:01.910288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:53:01.910338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:53:01.911163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:53:01.912349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:53:01.912496Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:53:01.912535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:53:01.912661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T10:53:01.912764Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:53:01.912796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-10T10:53:01.912839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-04-10T10:53:01.913144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:53:01.913188Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-10T10:53:01.913256Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:53:01.913290Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:53:01.913324Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-10T10:53:01.913886Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T10:53:01.913966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T10:53:01.913996Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-10T10:53:01.914034Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-04-10T10:53:01.914072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:53:01.915013Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T10:53:01.915082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T10:53:01.915104Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-10T10:53:01.915128Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T10:53:01.915152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T10:53:01.915223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-10T10:53:01.916844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:53:01.916900Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:53:01.917111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:53:01.917219Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T10:53:01.917253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T10:53:01.917292Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T10:53:01.917321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T10:53:01.917353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-10T10:53:01.917411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 108 2025-04-10T10:53:01.917457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T10:53:01.917523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-10T10:53:01.917560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-10T10:53:01.917662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T10:53:01.919371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-10T10:53:01.920402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-10T10:53:01.921348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T10:53:01.921398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:835:2793] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-10T10:53:01.921961Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T10:53:01.922020Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-04-10T10:53:01.939207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2025-04-10T10:53:01.939335Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-10T10:53:01.942110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T10:53:01.942647Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T10:53:01.944826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T10:53:01.945148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T10:53:01.945891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:53:01.945953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T10:53:01.946034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:53:01.949507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T10:53:01.949610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T10:53:01.950132Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-04-10T10:53:01.950796Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:53:01.951001Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 236us result status StatusSuccess 2025-04-10T10:53:01.951297Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> ResourcePoolsDdl::TestCreateResourcePool >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> KqpWorkloadServiceDistributed::TestDistributedQueue >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::MultiRestore >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> KqpWorkloadService::TestQueueSizeSimple >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> KqpWorkloadServiceActors::TestPoolFetcher >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> Donor::ContinueWithFaultyDonor [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> Donor::SlayAfterWiping [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> Donor::SkipBadDonor [GOOD] >> Donor::MultipleEvicts [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 6169145628409365401 2025-04-10T10:53:03.583617Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 
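The BS_SYNCER DECISION/FINISH pair around this point records the guid-recovery verdict for a donor disk: the replica's data is declared lost and it restarts empty under a fresh guid, after which the Osiris pass finds nothing to resurrect. A minimal sketch of that decision shape, with hypothetical types that only model the trace and are not YDB's actual recovery actors:

#include <cstdint>
#include <cstdio>
#include <optional>

// Hypothetical model of the guid-recovery outcome seen in the trace:
// a replica whose persisted guid is missing or stale cannot trust its
// data, so it declares LostData and adopts the quorum guid.
enum class EDecision { KeepData, LostData };

struct TRecoveryResult {
    EDecision Decision;
    uint64_t Guid;           // guid the replica runs with afterwards
    bool SubsequentFailure;  // second failure in a row?
};

TRecoveryResult DecideRecovery(std::optional<uint64_t> localGuid,
                               uint64_t quorumGuid,
                               bool failedBefore) {
    if (localGuid && *localGuid == quorumGuid) {
        return {EDecision::KeepData, quorumGuid, failedBefore};
    }
    // Local replica disagrees with the quorum: restart empty ("LostData").
    return {EDecision::LostData, quorumGuid, failedBefore};
}

int main() {
    auto r = DecideRecovery(std::nullopt, 10488153830867750627ull, false);
    std::printf("DECISION: [Decision# %s SubsequentFailure# %d] Guid# %llu\n",
                r.Decision == EDecision::LostData ? "LostData" : "KeepData",
                r.SubsequentFailure ? 1 : 0,
                static_cast<unsigned long long>(r.Guid));
}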
2025-04-10T10:53:03.585528Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10488153830867750627] 2025-04-10T10:53:03.603741Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 16396507804748311516 0 donors: 2025-04-10T10:53:03.601033Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.603582Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:03.624521Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-10T10:53:03.717673Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.720257Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:03.739322Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-10T10:53:03.825052Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.827543Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:03.841018Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-10T10:53:03.916354Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.919154Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:03.933225Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-10T10:53:04.009669Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:04.012460Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:04.026476Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 
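The MultipleEvicts output above bounces one VDisk slot between two nodes; after every move the previous replica is kept as exactly one read-only donor ("1 donors: 2:1000" / "1 donors: 4:1000") until the new replica syncs. A toy loop expressing that invariant, illustrative only and not the real test harness:

#include <cassert>
#include <cstdio>
#include <vector>

// Toy model: a slot that moves between nodes 2 and 4. After each move
// the old location survives as a single donor holding the old data.
struct TSlot {
    int Node = 2;             // current owner
    std::vector<int> Donors;  // previous owners still holding data
};

void Evict(TSlot& slot, int targetNode) {
    slot.Donors = {slot.Node};  // old replica becomes the only donor
    slot.Node = targetNode;
}

int main() {
    TSlot slot;
    for (int cycle = 0; cycle < 9; ++cycle) {
        int target = (slot.Node == 2) ? 4 : 2;  // bounce 2 <-> 4
        Evict(slot, target);
        assert(slot.Donors.size() == 1);        // invariant from the log
        std::printf("%d donors: %d:1000\n",
                    static_cast<int>(slot.Donors.size()), slot.Donors[0]);
    }
}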
2025-04-10T10:53:04.105023Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:04.107393Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:04.120854Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-10T10:53:04.199632Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:04.202015Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:04.215540Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-10T10:53:04.293128Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:04.295672Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:04.308783Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-10T10:53:04.385893Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:04.388410Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11812726877114346412] 2025-04-10T10:53:04.401448Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 5705732921051010918 2025-04-10T10:53:03.283512Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.285743Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17672026644689674396] 2025-04-10T10:53:03.306237Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 4885677509947950428 2025-04-10T10:53:03.592019Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.594115Z 1 
00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13542675785452646077] 2025-04-10T10:53:03.613464Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |65.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |65.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-04-10T10:53:00.446710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:00.446960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:53:00.447182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001064/r3tmp/tmptEfWn2/pdisk_1.dat 2025-04-10T10:53:00.843975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.887839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.926574Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:53:00.927761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.927899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.928050Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:53:00.939740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:01.020120Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:53:01.020196Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:53:01.020364Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:53:01.154263Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:53:01.154360Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:53:01.155000Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:53:01.155122Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:53:01.155474Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:53:01.155653Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:53:01.155746Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:53:01.157658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:01.158124Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:53:01.158877Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:53:01.158968Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:53:01.190777Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:53:01.192086Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:53:01.192608Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:53:01.192910Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:01.241841Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:53:01.242788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:01.242940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:01.245009Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:01.245122Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:01.245204Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:01.245692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:01.245856Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:53:01.245962Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:53:01.258055Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:53:01.290992Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:53:01.291229Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:53:01.291392Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:53:01.291442Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:53:01.291488Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:53:01.291538Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:01.291799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
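The TRACE lines here walk a proposed scheme transaction through the datashard's execution-unit pipeline: CheckSchemeTx, then StoreSchemeTx, FinishPropose, and WaitForPlan, with each unit reporting Executed, DelayComplete, or a park-and-wait status. A simplified model of that loop, with hypothetical names standing in for the real units under ydb/core/tx/datashard:

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// Each unit reports whether the pipeline may advance now (Executed),
// must finish some work at commit time (DelayComplete), or has to park
// the operation until an external event arrives (Continue).
enum class EStatus { Executed, DelayComplete, Continue };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::Continue; }},  // awaits coordinator step
    };
    for (const auto& unit : plan) {
        EStatus st = unit.Run();
        std::printf("unit %s -> %s\n", unit.Name.c_str(),
                    st == EStatus::Executed      ? "Executed"
                    : st == EStatus::DelayComplete ? "DelayComplete"
                                                   : "Continue");
        if (st == EStatus::Continue) {
            break;  // operation parked; resumes when the plan step arrives
        }
    }
}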
2025-04-10T10:53:01.291853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:01.292256Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:53:01.292386Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:53:01.292450Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:01.292497Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:01.292555Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:53:01.292598Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:01.292639Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:01.292694Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:53:01.292745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:01.292882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.292933Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.292981Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:53:01.293451Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:53:01.293519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:53:01.293648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:53:01.293893Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:53:01.293955Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:53:01.294067Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:53:01.294140Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:53:01.294216Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:53:01.294257Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:53:01.294295Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:53:01.294626Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:53:01.294776Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:53:01.294817Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:01.294857Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:53:01.294932Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:53:01.294972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:01.295005Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:53:01.295039Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:53:01.295065Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:53:01.296674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:53:01.296752Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:53:01.307640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-04-10T10:53:07.315785Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-10T10:53:07.315805Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-10T10:53:07.315837Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-10T10:53:07.315894Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:745:2626] for [0:281474976715659] at 72075186224037888 2025-04-10T10:53:07.315925Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-10T10:53:07.315967Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:53:07.316117Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-04-10T10:53:07.316157Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.316219Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:666:2570] ShardId# 72075186224037888 2025-04-10T10:53:07.316397Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-04-10T10:53:07.316439Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-10T10:53:07.316565Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-04-10T10:53:07.316603Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-10T10:53:07.316706Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient 
[2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.316734Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.316779Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:07.316813Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.316858Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-04-10T10:53:07.316898Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-10T10:53:07.316956Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-04-10T10:53:07.317006Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-10T10:53:07.317038Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-10T10:53:07.317070Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-04-10T10:53:07.317100Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-10T10:53:07.317327Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-10T10:53:07.317359Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.317400Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T10:53:07.317452Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:07.317497Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:07.317556Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:07.318065Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:779:2647], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.318108Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.318228Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-04-10T10:53:07.318277Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.318632Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:744:2626], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-10T10:53:07.318682Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.318726Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-10T10:53:07.318794Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-10T10:53:07.318953Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-04-10T10:53:07.319017Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-10T10:53:07.319204Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T10:53:07.319249Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-04-10T10:53:07.319360Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-04-10T10:53:07.319399Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-10T10:53:07.319440Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-10T10:53:07.319632Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.319688Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.319747Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:07.319788Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.319832Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-04-10T10:53:07.319873Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-10T10:53:07.319922Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-04-10T10:53:07.320001Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-10T10:53:07.320055Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-04-10T10:53:07.320095Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:07.320130Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-10T10:53:07.320165Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-04-10T10:53:07.320200Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:07.320235Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T10:53:07.320274Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:53:07.320334Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-10T10:53:07.320361Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
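The CorruptedDyNumber trace around this point shows how a fatal scan error surfaces: the scan aborts ("Got scan fatal error"), FinishPropose converts it into an EXEC_ERROR propose result with a PROGRAM_ERROR reason, and the read-table proxy finally answers ExecError to the client. A condensed sketch of that propagation, using hypothetical types:

#include <cstdio>
#include <optional>
#include <string>

// A fatal error raised inside the scan is carried back through the
// operation's FinishPropose step and becomes the proposer's status.
struct TScanOutcome {
    std::optional<std::string> FatalError;  // e.g. a bad value encoding
};

struct TProposeResult {
    std::string Status;  // COMPLETE or EXEC_ERROR
    std::string Reason;
};

TProposeResult FinishPropose(const TScanOutcome& scan) {
    if (scan.FatalError) {
        return {"EXEC_ERROR", "PROGRAM_ERROR (" + *scan.FatalError + ")"};
    }
    return {"COMPLETE", ""};
}

int main() {
    TScanOutcome scan{std::string("Invalid DyNumber binary representation")};
    TProposeResult r = FinishPropose(scan);
    std::printf("status: %s errors: %s\n", r.Status.c_str(), r.Reason.c_str());
    // The proxy then maps EXEC_ERROR to the client-visible ExecError response.
}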
2025-04-10T10:53:07.320394Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-04-10T10:53:07.320433Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:07.320472Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T10:53:07.320506Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:07.320536Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:07.320596Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:07.320637Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-10T10:53:07.320682Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-04-10T10:53:07.320735Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-04-10T10:53:07.320832Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:07.321129Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 505 } } 2025-04-10T10:53:07.321173Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-04-10T10:53:07.321263Z node 2 :TX_PROXY ERROR: [ReadTable [2:745:2626] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-04-10T10:53:07.321599Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 >> ReadOnlyVDisk::TestSync [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-04-10T10:53:00.096351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:00.096595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:53:00.096846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001093/r3tmp/tmpaPjJal/pdisk_1.dat 2025-04-10T10:53:00.435374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.476979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.517111Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:53:00.518299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.518438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.518598Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:53:00.531042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.612805Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:53:00.612882Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:53:00.613054Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:53:00.772263Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:53:00.772377Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:53:00.773003Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:53:00.773109Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:53:00.773509Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:53:00.773716Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:53:00.773823Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:53:00.775796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.776324Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:53:00.777048Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:53:00.777142Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:53:00.814945Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:53:00.815960Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:53:00.816414Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:53:00.816684Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:00.852669Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:53:00.853296Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:00.853403Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:00.854902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:00.854967Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:00.855010Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:00.855362Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:00.855478Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:53:00.855584Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:53:00.866379Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:53:00.891457Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:53:00.891724Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:53:00.891906Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:53:00.891957Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:53:00.892021Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:53:00.892072Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:00.892341Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
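Further down, ReadTableMaxRows exercises the stream-quota handshake: the shard may emit a response message only after the proxy reserves quota for it (TEvStreamQuotaRequest/Response, "Got quota ... MessageQuota: 1"), which is also how the proxy enforces its row limit. A minimal sketch of that flow control, illustrative rather than the real actors:

#include <cstdio>
#include <vector>

// Proxy side: grants message quota one unit at a time until its row
// limit is reached; shard side: streams only while quota remains.
struct TProxy {
    int RowLimit;
    int RowsSeen = 0;
    int GrantQuota() { return RowsSeen < RowLimit ? 1 : 0; }
};

int main() {
    TProxy proxy{2};                        // "observed row limit of 2 rows"
    std::vector<int> shardRows = {4, 5, 6}; // rows the shard could stream
    for (int row : shardRows) {
        int quota = proxy.GrantQuota();     // quota request/response round trip
        if (quota == 0) {
            break;                          // no quota: scan stops early
        }
        std::printf("send row key=%d, quota left: %d\n", row, quota - 1);
        ++proxy.RowsSeen;                   // stream-data ack consumes the grant
    }
    std::printf("rows delivered: %d (limit %d)\n", proxy.RowsSeen, proxy.RowLimit);
}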
2025-04-10T10:53:00.892410Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:00.892813Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:53:00.892927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:53:00.892991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:00.893039Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:00.893109Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:53:00.893164Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:00.893205Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:00.893265Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:53:00.893322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:00.893495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.893564Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.893616Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:53:00.894129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:53:00.894212Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:53:00.894355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:53:00.894581Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:53:00.894652Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:53:00.894765Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:53:00.894834Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:53:00.894878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:53:00.894917Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:53:00.894954Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:53:00.895253Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:53:00.895304Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:53:00.895367Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:00.895407Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:53:00.895492Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:53:00.895532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:00.895570Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:53:00.895607Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:53:00.895651Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:53:00.897265Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:53:00.897336Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:53:00.908129Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... planned 0 immediate 1 planned 0 2025-04-10T10:53:07.560514Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-04-10T10:53:07.560531Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-04-10T10:53:07.560555Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-04-10T10:53:07.560586Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-10T10:53:07.560603Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-10T10:53:07.560620Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-04-10T10:53:07.560650Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-10T10:53:07.560803Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-04-10T10:53:07.560821Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.560839Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-10T10:53:07.560865Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T10:53:07.560891Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T10:53:07.560931Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:53:07.561285Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1004:2803], Recipient [2:889:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.561317Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.561388Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-10T10:53:07.561409Z node 2 :TX_PROXY DEBUG: 
[ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-10T10:53:07.561441Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1004:2803] 2025-04-10T10:53:07.561538Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-10T10:53:07.561754Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.561869Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-10T10:53:07.561896Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-10T10:53:07.561915Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-10T10:53:07.561962Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-10T10:53:07.562028Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-10T10:53:07.562047Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-10T10:53:07.562293Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:973:2775], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-10T10:53:07.562313Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.562329Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1004:2803] 2025-04-10T10:53:07.562360Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-10T10:53:07.562402Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.562501Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-04-10T10:53:07.562519Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-10T10:53:07.562535Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-04-10T10:53:07.562596Z node 2 :TX_PROXY INFO: [ReadTable [2:974:2775] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.015555s execute time: 0.169332s total time: 0.184887s 2025-04-10T10:53:07.562735Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-10T10:53:07.562762Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-04-10T10:53:07.562903Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-10T10:53:07.562920Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-04-10T10:53:07.563150Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:884:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-10T10:53:07.563360Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:889:2711], Recipient [2:889:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.563385Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.563412Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:53:07.563430Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.563455Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-04-10T10:53:07.563480Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-10T10:53:07.563505Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-04-10T10:53:07.563535Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-10T10:53:07.563594Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-04-10T10:53:07.563621Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-04-10T10:53:07.563648Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-10T10:53:07.563679Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-04-10T10:53:07.563701Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-04-10T10:53:07.563725Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T10:53:07.563747Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-10T10:53:07.563779Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-10T10:53:07.563804Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T10:53:07.563822Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-04-10T10:53:07.563838Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:07.563852Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-10T10:53:07.563870Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T10:53:07.563887Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T10:53:07.563918Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:53:07.563937Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-10T10:53:07.563958Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T10:53:07.564009Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:53:07.564152Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-04-10T10:53:07.564175Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-10T10:53:07.564210Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-04-10T10:53:07.564264Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-04-10T10:53:07.564367Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-04-10T10:53:07.564388Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-10T10:53:07.564443Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: 2025-04-10T10:53:00.248890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:00.249054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:53:00.249196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010bd/r3tmp/tmpmVseXL/pdisk_1.dat 2025-04-10T10:53:00.584870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.626872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.664386Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:53:00.665369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.665510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.665645Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:53:00.677177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.757223Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:53:00.757300Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:53:00.757538Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:53:00.887470Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:53:00.887592Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:53:00.888221Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:53:00.888354Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:53:00.888751Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:53:00.888953Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:53:00.889072Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:53:00.890655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.891043Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:53:00.891645Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:53:00.891732Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:53:00.919123Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:53:00.920184Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:53:00.920645Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:53:00.920859Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:00.968543Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:53:00.969408Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:00.969571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:00.971442Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:00.971518Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:00.971581Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:00.972062Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:00.972215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:53:00.972304Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:53:00.983057Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:53:01.008707Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:53:01.008958Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:53:01.009108Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:53:01.009149Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:53:01.009189Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:53:01.009230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:01.009501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T10:53:01.009565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:01.009966Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:53:01.010079Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:53:01.010140Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:01.010207Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:01.010265Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:53:01.010305Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:01.010343Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:01.010412Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:53:01.010465Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:01.010621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.010683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.010734Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:53:01.011215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:53:01.011269Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:53:01.011384Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:53:01.011621Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:53:01.011686Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:53:01.011795Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:53:01.011862Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:53:01.011904Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:53:01.011940Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:53:01.011973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:53:01.012279Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:53:01.012316Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:53:01.012355Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:01.012400Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:53:01.012476Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:53:01.012516Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:01.012553Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:53:01.012585Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:53:01.012622Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:53:01.014226Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:53:01.014301Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:53:01.025186Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-10T10:53:07.708374Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-04-10T10:53:07.708408Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-10T10:53:07.708486Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.708517Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.708565Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:07.708599Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.708647Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-10T10:53:07.708688Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-04-10T10:53:07.708735Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-04-10T10:53:07.708770Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-10T10:53:07.708804Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-10T10:53:07.708833Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-04-10T10:53:07.708867Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-10T10:53:07.709077Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-04-10T10:53:07.709108Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.709141Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-04-10T10:53:07.709173Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:07.709202Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:07.709260Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:07.709710Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:876:2704], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.709751Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.709812Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.710143Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.710214Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-10T10:53:07.710276Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.710473Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.710562Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-10T10:53:07.710632Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-10T10:53:07.710695Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.711113Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.711147Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-10T10:53:07.711202Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.711280Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.711348Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-10T10:53:07.711398Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-10T10:53:07.711433Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.711697Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.711729Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-10T10:53:07.711776Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.711832Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.711887Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-10T10:53:07.711937Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-10T10:53:07.711972Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-10T10:53:07.712214Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.712248Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-10T10:53:07.712288Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.712349Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.712516Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T10:53:07.712555Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-10T10:53:07.712632Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-10T10:53:07.712678Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-10T10:53:07.712828Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.712883Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.712951Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:07.713015Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.713073Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-10T10:53:07.713112Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-10T10:53:07.713158Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-04-10T10:53:07.713210Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-10T10:53:07.713257Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-04-10T10:53:07.713299Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:07.713336Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-10T10:53:07.713375Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-04-10T10:53:07.713413Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:07.713451Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T10:53:07.713504Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:53:07.713557Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-10T10:53:07.713582Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T10:53:07.713615Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-10T10:53:07.713657Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:07.713697Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T10:53:07.713739Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:07.713787Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:07.713864Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:07.713911Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-10T10:53:07.713966Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T10:53:07.714056Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:07.714295Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-04-10T10:53:07.714383Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013560s execute time: 0.112471s total time: 0.126031s 2025-04-10T10:53:07.714785Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-04-10T10:53:00.443647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:00.443867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:53:00.444050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ad/r3tmp/tmpq92UET/pdisk_1.dat 2025-04-10T10:53:00.810132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.856685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.895489Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:53:00.896443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.896539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.896660Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:53:00.908060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.988367Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:53:00.988448Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:53:00.988625Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:53:01.154732Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:53:01.154836Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:53:01.155403Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:53:01.155518Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:53:01.155897Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:53:01.156089Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:53:01.156188Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:53:01.157985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:01.158488Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:53:01.159140Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:53:01.159230Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:53:01.192451Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:53:01.193784Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:53:01.194375Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:53:01.194672Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:01.248378Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:53:01.249357Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:01.249532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:01.251811Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:01.251913Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:01.251980Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:01.252448Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:01.252621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:53:01.252822Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:53:01.263822Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:53:01.319165Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:53:01.319493Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:53:01.319699Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:53:01.319750Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:53:01.319796Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:53:01.319845Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:01.320186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T10:53:01.320270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:01.320793Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:53:01.320946Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:53:01.321031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:01.321086Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:01.321150Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:53:01.321194Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:01.321231Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:01.321275Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:53:01.321319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:01.321436Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.321499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.321556Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:53:01.321960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:53:01.321998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:53:01.322104Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:53:01.322361Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:53:01.322416Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:53:01.322524Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:53:01.322594Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:53:01.322659Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:53:01.322694Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:53:01.322724Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:53:01.323003Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:53:01.323050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:53:01.323085Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:01.323117Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:53:01.323193Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:53:01.323223Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:01.323253Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:53:01.323279Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:53:01.323299Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:53:01.325019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:53:01.325109Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:53:01.336026Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 90 has no attached operations 2025-04-10T10:53:07.781708Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T10:53:07.781754Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:53:07.782173Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-10T10:53:07.782207Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-10T10:53:07.782237Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-10T10:53:07.782275Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.782428Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1001:2799], Recipient [2:903:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.782449Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:07.782561Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.782661Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-04-10T10:53:07.782689Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-10T10:53:07.782714Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 
2025-04-10T10:53:07.782760Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-10T10:53:07.782815Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-10T10:53:07.782839Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-10T10:53:07.783178Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-10T10:53:07.783213Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.783239Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-10T10:53:07.783278Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.783358Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:07.783466Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-10T10:53:07.783492Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-10T10:53:07.783517Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1001:2799] ShardId# 72075186224037890 2025-04-10T10:53:07.783568Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-10T10:53:07.783624Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-10T10:53:07.783654Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-10T10:53:07.783878Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-10T10:53:07.783906Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:07.783931Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-10T10:53:07.783968Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-04-10T10:53:07.784002Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-10T10:53:07.784091Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-10T10:53:07.784107Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-04-10T10:53:07.784183Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1001:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-04-10T10:53:07.784207Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-04-10T10:53:07.784224Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-04-10T10:53:07.784270Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:903:2724], Recipient [2:903:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.784289Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:07.784327Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:53:07.784356Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:07.784376Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-04-10T10:53:07.784392Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-04-10T10:53:07.784411Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-04-10T10:53:07.784435Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-10T10:53:07.784454Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-04-10T10:53:07.784471Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-04-10T10:53:07.784488Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-10T10:53:07.784510Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-04-10T10:53:07.784527Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-04-10T10:53:07.784543Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T10:53:07.784558Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-04-10T10:53:07.784585Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-10T10:53:07.784599Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T10:53:07.784614Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-04-10T10:53:07.784629Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:07.784643Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-10T10:53:07.784658Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T10:53:07.784673Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T10:53:07.784702Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:53:07.784719Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-10T10:53:07.784745Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T10:53:07.784782Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:53:07.784922Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:903:2724], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 213 } } 2025-04-10T10:53:07.784948Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-04-10T10:53:07.784987Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014240s execute time: 0.322089s total time: 0.336329s 2025-04-10T10:53:07.785253Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-10T10:53:07.785345Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:901:2722]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-10T10:53:07.785525Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:903:2724]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 11968460529934371393 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-04-10T10:52:50.733134Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8808:940] 2025-04-10T10:52:50.733676Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk 
[82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-10T10:52:53.288136Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8822:954] 2025-04-10T10:52:53.288306Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-10T10:52:58.110345Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8836:968] 2025-04-10T10:52:58.110452Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8829:961] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-10T10:53:00.630532Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8843:975] 2025-04-10T10:53:00.630609Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8836:968] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key 
[1:1:5:0:0:32768:0] 2025-04-10T10:53:03.620528Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8850:982] 2025-04-10T10:53:03.620644Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8843:975] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-04-10T10:53:06.833115Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8850:982] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Read all 7 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD]
Test command err: 2025-04-10T10:53:00.373755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:00.373938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:53:00.374094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001081/r3tmp/tmpJjfjEA/pdisk_1.dat 2025-04-10T10:53:00.743953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.779424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.819973Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:53:00.821001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.821123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.821292Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:53:00.832762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.912953Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:53:00.913040Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:53:00.913225Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:53:01.054029Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:53:01.054181Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:53:01.054855Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:53:01.054985Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:53:01.055427Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:53:01.055676Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:53:01.055789Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:53:01.057935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:01.058539Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:53:01.059399Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:53:01.059512Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:53:01.094675Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:53:01.096035Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:53:01.096628Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:53:01.096913Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:01.153887Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:53:01.154812Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:01.154956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:01.156966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:01.157047Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:01.157107Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:01.157558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:01.157750Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:53:01.157862Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:53:01.170819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:53:01.209764Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:53:01.209938Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:53:01.210059Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:53:01.210096Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:53:01.210135Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:53:01.210186Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:01.210497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T10:53:01.210550Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:01.210837Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:53:01.210931Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:53:01.210972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:53:01.210999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:01.211033Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:53:01.211062Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:53:01.211086Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:53:01.211123Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:53:01.211156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:01.211243Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.211277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:01.211313Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:53:01.211674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:53:01.211720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:53:01.211863Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:53:01.212100Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:53:01.212160Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:53:01.212262Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:53:01.212345Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:53:01.212389Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:53:01.212427Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:53:01.212464Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:53:01.212724Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:53:01.212759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:53:01.212793Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:53:01.212833Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:53:01.212904Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:53:01.212941Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:53:01.212970Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:53:01.212995Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:53:01.213015Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:53:01.214245Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:53:01.214302Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:53:01.225063Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:08.168986Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-04-10T10:53:08.169051Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-10T10:53:08.169100Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-10T10:53:08.169150Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-10T10:53:08.169574Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1366:3078], Recipient [2:1264:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:08.169608Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T10:53:08.169764Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1366:3078], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-10T10:53:08.169809Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-10T10:53:08.169855Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-10T10:53:08.169977Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-10T10:53:08.173520Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T10:53:08.173730Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1366:3078], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-10T10:53:08.173786Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-04-10T10:53:08.173839Z node 2 :TX_PROXY TRACE: [ReadTable [2:1084:2857] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1366:3078] ShardId# 72075186224037896 2025-04-10T10:53:08.173971Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1366:3078], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-10T10:53:08.174009Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-10T10:53:08.174053Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-04-10T10:53:08.174474Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1083:2857], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-10T10:53:08.174518Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-10T10:53:08.174552Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-10T10:53:08.174607Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-10T10:53:08.174666Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-10T10:53:08.174800Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-04-10T10:53:08.174830Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-04-10T10:53:08.175028Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1366:3078], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-04-10T10:53:08.175060Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-04-10T10:53:08.175086Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-04-10T10:53:08.175168Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1264:2998], Recipient [2:1264:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:08.175217Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:53:08.175275Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-10T10:53:08.175305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-04-10T10:53:08.175354Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-04-10T10:53:08.175384Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-04-10T10:53:08.175414Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-04-10T10:53:08.175466Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-04-10T10:53:08.175497Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-04-10T10:53:08.175527Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-04-10T10:53:08.175561Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-10T10:53:08.175598Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-04-10T10:53:08.175631Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-04-10T10:53:08.175658Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-04-10T10:53:08.175685Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-04-10T10:53:08.175728Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-04-10T10:53:08.175751Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-04-10T10:53:08.175776Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-04-10T10:53:08.175804Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:53:08.175827Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-04-10T10:53:08.175853Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-10T10:53:08.175878Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-10T10:53:08.175937Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-10T10:53:08.175991Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-10T10:53:08.176036Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T10:53:08.176118Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-10T10:53:08.176345Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1264:2998], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 299 } } 2025-04-10T10:53:08.176379Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-04-10T10:53:08.182334Z node 2 :TX_PROXY INFO: [ReadTable [2:1084:2857] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017564s execute time: 0.612296s total time: 0.629860s 2025-04-10T10:53:08.183025Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:884:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-04-10T10:53:08.183316Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:994:2791]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-10T10:53:08.183545Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:999:2793]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-10T10:53:08.186119Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1260:2996]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-10T10:53:08.186534Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1264:2998]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-10T10:53:08.186810Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1369:3081], Recipient [2:1150:2914]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.186852Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.186904Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1367:3079], serverId# [2:1369:3081], sessionId# [0:0:0] 2025-04-10T10:53:08.195075Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1370:3082], Recipient [2:1152:2916]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.195201Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.195262Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1368:3080], serverId# [2:1370:3082], sessionId# [0:0:0] 2025-04-10T10:53:08.195668Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1150:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-10T10:53:08.195973Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1152:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663
>> ResourcePoolsDdl::TestCreateResourcePool [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePoolOnServerless
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD]
Test command err: 2025-04-10T10:52:59.625815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:52:59.626034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:52:59.626249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001770/r3tmp/tmpHxfE0K/pdisk_1.dat 2025-04-10T10:53:00.011093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.011183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.011222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.011374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-10T10:53:00.011403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:00.132885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T10:53:00.133149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.133397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:53:00.133649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:53:00.133736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.133837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.134648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.134841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:53:00.134923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.134958Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.135146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.135192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.135264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.135310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:53:00.135359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:53:00.135401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:53:00.135507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.136165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.136203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.136363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.136401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.136466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.136516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:53:00.136580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:53:00.136663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.137011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.137039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.137134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.137171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.137220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.137249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.137298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:53:00.137341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.137381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:53:00.141167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:53:00.141658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.141701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-10T10:53:00.141950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:53:00.143219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:53:00.143274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:53:00.143318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-10T10:53:00.143461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-10T10:53:00.143894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.143945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.143978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.144129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-10T10:53:00.144168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T10:53:00.144229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.144267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T10:53:00.144305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.181632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-10T10:53:00.181740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T10:53:00.181783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.182503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T10:53:00.182592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-10T10:53:00.222008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.222137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.233647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.308375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-10T10:53:00.309141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.309184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.309234Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.309406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-10T10:53:00.309445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:53:00.309565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.309719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... eration: 1 2025-04-10T10:53:08.365729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:08.365779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-10T10:53:08.365822Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:08.366099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.366132Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:08.366475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.366527Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:08.377431Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:08.377554Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-10T10:53:08.377641Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:08.377741Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:08.378034Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:1007:2796], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.378079Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:08.378106Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:08.378338Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:739:2610], Recipient [2:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 739 RawX2: 
8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:08.378380Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-10T10:53:08.378444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:08.378478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 0 2025-04-10T10:53:08.378640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:08.378695Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-10T10:53:08.378766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:08.378814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:08.378863Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.378903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-10T10:53:08.378948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-10T10:53:08.378988Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 129 -> 240 2025-04-10T10:53:08.379131Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:08.379667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.379710Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:08.379748Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-10T10:53:08.379874Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:953:2752] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:08.379937Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:739:2610] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:08.380081Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-10T10:53:08.380173Z node 2 :TX_DATASHARD 
DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:53:08.380376Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-10T10:53:08.380418Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:53:08.380581Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:08.380624Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:08.380679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.380743Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0ProgressState, operation type TxCopyTable 2025-04-10T10:53:08.380806Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:08.380865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T10:53:08.380926Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 0, blocked: 1 2025-04-10T10:53:08.381017Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-10T10:53:08.381060Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 240 -> 240 2025-04-10T10:53:08.381601Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:08.381637Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-10T10:53:08.381742Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:08.381771Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:08.381817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:53:08.381858Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-04-10T10:53:08.381974Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:08.382010Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-10T10:53:08.382052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T10:53:08.382102Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-10T10:53:08.382267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T10:53:08.382319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-04-10T10:53:08.382395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:934:2736] message: TxId: 281474976715662 2025-04-10T10:53:08.382449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T10:53:08.382499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-10T10:53:08.382539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-10T10:53:08.382700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-10T10:53:08.382751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-10T10:53:08.383218Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:08.383318Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:934:2736] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:08.383703Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [2:942:2743], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:08.383738Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:08.383763Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:53:08.518231Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:1034:2813], serverId# [2:1035:2814], sessionId# [0:0:0] 2025-04-10T10:53:08.518446Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfna1x69xydcwqqmmz4javr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmNkYjljMjgtOTZmNWNiZjMtY2I4ZDVhOWUtYzNiMmYyNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } }
>> IncrementalBackup::MultiRestore [GOOD]
>> IncrementalBackup::E2EBackupCollection
>> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD]
>> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState
>> KqpWorkloadService::TestQueueSizeSimple [GOOD]
>> KqpWorkloadService::TestQueueSizeManyQueries
|65.2%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log}
>> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool
|65.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log}
>> Cdc::UpdatesLog[PqRunner] [GOOD]
>> Cdc::UpdatesLog[YdsRunner]
|65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD]
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD]
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks
>> Cdc::DocApi[YdsRunner] [GOOD]
>> Cdc::DocApi[TopicRunner]
>> Cdc::KeysOnlyLogDebezium [GOOD]
>> Cdc::NewAndOldImagesLog[PqRunner]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice
>> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD]
>> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::Simple_AutoscaleAwareSDK
>> TInterconnectTest::TestCrossConnect [GOOD]
>> TInterconnectTest::TestManyEventsWithReconnect
|65.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut
>> TInterconnectTest::TestManyEventsWithReconnect [GOOD]
>> TInterconnectTest::TestEventWithPayloadSerialization
|65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut
>> TBsLocalRecovery::WriteRestartRead [GOOD]
|65.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut
>> TBsLocalRecovery::MultiPutWriteRestartRead
>> TInterconnectTest::TestEventWithPayloadSerialization [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD]
>> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions
>> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD]
>> KqpWorkloadServiceDistributed::TestNodeDisconnect
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD]
Test command err: 2025-04-10T10:52:59.585765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:52:59.585975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:52:59.586179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017a2/r3tmp/tmpU8YznB/pdisk_1.dat 2025-04-10T10:52:59.908763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:52:59.908863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:52:59.908918Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:52:59.909097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-10T10:52:59.909131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:00.013902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T10:53:00.014090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.014277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:53:00.014434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:53:00.014505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.014581Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.015113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.015334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:53:00.015393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.015429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.015586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.015625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.015671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.015719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:53:00.015760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:53:00.015784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:53:00.015857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.016167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.016198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.016301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.016327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.016375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.016405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:53:00.016445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:53:00.016507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.016718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.016738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.016798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.016824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.016850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.016868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.016909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:53:00.016935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.016967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:53:00.019458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:53:00.019797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.019831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-10T10:53:00.019981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:53:00.020909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:53:00.020941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:53:00.020973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-10T10:53:00.021065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-10T10:53:00.021375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.021405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.021430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.021544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-10T10:53:00.021566Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T10:53:00.021612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.021640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T10:53:00.021669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.055081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-10T10:53:00.055179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T10:53:00.055211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.055867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T10:53:00.055950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-10T10:53:00.094858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.094955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.106582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.181118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-10T10:53:00.181865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.181912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.181944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.182116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-10T10:53:00.182172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:53:00.182286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.182435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.661148Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:15.661295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.661339Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:15.673224Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:15.673360Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-10T10:53:15.673454Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:53:15.673595Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:53:15.673919Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:998:2793], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:15.673964Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:15.673995Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:15.674231Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:666:2570], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:15.674285Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-10T10:53:15.674364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:15.674418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-04-10T10:53:15.674550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 
281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:15.674592Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-10T10:53:15.674659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-10T10:53:15.674718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.674761Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.674822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-10T10:53:15.674879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-10T10:53:15.674915Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-04-10T10:53:15.675055Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:15.675628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.675668Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:15.675707Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-10T10:53:15.675825Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:939:2743] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:15.675914Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:666:2570] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:15.676034Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-10T10:53:15.676134Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:53:15.676336Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-10T10:53:15.676387Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:53:15.676573Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:15.676615Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:15.676685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.676741Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-04-10T10:53:15.676791Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:15.676845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-04-10T10:53:15.676889Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-04-10T10:53:15.676989Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-10T10:53:15.677049Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-04-10T10:53:15.677681Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:15.677726Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-10T10:53:15.677848Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:15.677883Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:15.677937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-10T10:53:15.677984Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-04-10T10:53:15.678134Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:15.678820Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-10T10:53:15.678902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-10T10:53:15.678967Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-10T10:53:15.679024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-10T10:53:15.679097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-04-10T10:53:15.679212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:909:2723] message: TxId: 281474976715662 2025-04-10T10:53:15.679287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-10T10:53:15.679359Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-10T10:53:15.679417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-10T10:53:15.679500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-04-10T10:53:15.679543Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 
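Note: the numeric state codes in the trace above line up with named suboperation phases via the adjacent ProgressState messages. A minimal sketch condensing only what the "Change state for txid" lines themselves show — names and values are inferred from this trace, not quoted from schemeshard source:

    // Assumed mapping, reconstructed from the trace: each transition below is
    // printed verbatim as "Change state for txid ... N -> M" next to the named
    // ProgressState handler.
    enum class ESubOpState : int {
        CreateParts       = 2,   // TCreateParts ProgressState; trace shows "2 -> 3"
        ConfigureParts    = 3,   // TConfigureParts; trace shows "3 -> 128"
        Propose           = 128, // TPropose waits for the coordinator plan step
        ProposedWaitParts = 129, // waits for TEvSchemaChanged; trace shows "129 -> 240"
        Done              = 240, // TDone / barrier release; "240 -> 240" at CopyTableBarrier
    };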
2025-04-10T10:53:15.679566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-04-10T10:53:15.679683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-10T10:53:15.679725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T10:53:15.680306Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:15.680420Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:909:2723] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-10T10:53:15.680830Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:924:2731], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:15.680869Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:15.680898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:53:15.863026Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:1025:2810], serverId# [3:1026:2811], sessionId# [0:0:0] 2025-04-10T10:53:15.863247Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfna91c72bms68h0s2eh30r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjAwNjZiZDAtZmFhZGNkZjItOGVlNGQxYjItNmYxMmUyOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-04-10T10:53:16.042809Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfna9724hvhe4e2ejrqdsj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzY0YzliZDctYjIyM2U5NTktNTA3NmNhLWViOGM1ZmJi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> TBsVDiskRepl3::ReplPerf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 
32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2025-04-10T10:52:44.664375Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:52:44.793131Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8022774740285133405] 2025-04-10T10:52:45.219951Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-04-10T10:52:55.814616Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:52:55.862674Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17363375057191431842] 2025-04-10T10:52:56.904666Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:3:0]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-04-10T10:53:12.193172Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:12.295971Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4929255286637286242] 2025-04-10T10:53:12.467160Z :BS_SYNCER ERROR: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline >> TNodeBrokerTest::TestListNodes >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] >> TLocalTests::TestAddTenantWhileResolving >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> TLocalTests::TestRemoveTenantWhileResolving |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TSlotIndexesPoolTest::Ranges [GOOD] |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-04-10T10:53:21.168193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:21.168318Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ... 
unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR >> TNodeBrokerTest::NodeNameReuseRestart |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-04-10T10:53:21.539899Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T10:53:21.540461Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T10:53:21.550311Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10898884318161745923 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T10:53:21.556716Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T10:53:21.557146Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T10:53:21.557305Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/00113a/r3tmp/tmpHBmTXZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6962368710226180513 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 >> TSlotIndexesPoolTest::Basic [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] Test command err: 2025-04-10T10:52:59.676521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:52:59.676665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:52:59.676787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00177a/r3tmp/tmp4IOHtI/pdisk_1.dat 2025-04-10T10:53:00.027488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.027586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.027634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.027798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-10T10:53:00.027848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:00.157519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T10:53:00.157767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.157986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:53:00.158212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:53:00.158274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.158372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.159032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.159194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:53:00.159249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.159282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.159451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.159497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.159554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.159603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:53:00.159641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:53:00.159673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:53:00.159771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.160120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.160154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.160261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.160309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.160362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.160399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:53:00.160448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:53:00.160545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.160840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.160864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T10:53:00.160956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.160993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:00.161030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.161074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:00.161115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:53:00.161141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:00.161174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:53:00.164552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:53:00.164958Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:00.164999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-10T10:53:00.165166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:53:00.166351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:53:00.166407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:53:00.166447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-10T10:53:00.166576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-10T10:53:00.166976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.167016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.167050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.167170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-10T10:53:00.167197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T10:53:00.167254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.167287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T10:53:00.167324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.213232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-10T10:53:00.213313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T10:53:00.213346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:00.213858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T10:53:00.213906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-10T10:53:00.256329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:00.256421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:00.267926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:00.342119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-10T10:53:00.342797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.342839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:00.342873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:00.343043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-10T10:53:00.343080Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:53:00.343167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:00.343299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... ode 3 :TX_DATASHARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715668 state Ready TxInFly 0 2025-04-10T10:53:20.007264Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T10:53:20.007607Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1565:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:20.007651Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:20.007678Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:20.007885Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1223:2934], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1223 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-10T10:53:20.007925Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-10T10:53:20.007992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1223 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-10T10:53:20.008030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715668, tablet: 72075186224037892, partId: 1 2025-04-10T10:53:20.008157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1223 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-10T10:53:20.008199Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-10T10:53:20.008267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1223 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 
2025-04-10T10:53:20.008321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715668:1, shardIdx: 72057594046644480:5, datashard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T10:53:20.008352Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T10:53:20.008381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715668:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-04-10T10:53:20.008418Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 129 -> 240 2025-04-10T10:53:20.008627Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 15 } 2025-04-10T10:53:20.008799Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:20.009356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T10:53:20.009392Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:20.009424Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:1 2025-04-10T10:53:20.009508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1223:2934] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-10T10:53:20.009657Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:20.009692Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:20.009747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T10:53:20.009795Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 ProgressState 2025-04-10T10:53:20.009957Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:20.010000Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-04-10T10:53:20.010055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-10T10:53:20.010107Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 1 2025-04-10T10:53:20.010249Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715668:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-10T10:53:20.010293Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 240 -> 240 2025-04-10T10:53:20.010512Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 
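Note: the completion accounting above ("progress is N/M", "ready parts: N/M, is published: true") reduces to a single readiness predicate. A hedged sketch assuming nothing beyond what the trace prints — the struct and function names are hypothetical, not schemeshard API:

    #include <cstdint>

    struct TOperationProgress {
        uint32_t DoneParts   = 0;     // parts that reached TDone ("progress is N/M")
        uint32_t TotalParts  = 0;     // M in "ready parts: N/M"
        bool     IsPublished = false; // "is published: true" in the trace
    };

    // Assumed condensation of the IsReadyToNotify check seen above:
    // TEvNotifyTxCompletionResult is sent only once every part is done
    // and the scheme changes are published.
    bool IsReadyToNotify(const TOperationProgress& op) {
        return op.DoneParts == op.TotalParts && op.IsPublished;
    }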
2025-04-10T10:53:20.010564Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-10T10:53:20.010621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 1/2, is published: true 2025-04-10T10:53:20.010890Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715668 datashard 72075186224037892 state Ready 2025-04-10T10:53:20.010947Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-04-10T10:53:20.011345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:20.011383Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:0 2025-04-10T10:53:20.011519Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:53:20.011556Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T10:53:20.011603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-10T10:53:20.011658Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2025-04-10T10:53:20.011797Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:53:20.011830Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-10T10:53:20.011858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-10T10:53:20.011896Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-10T10:53:20.011921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-10T10:53:20.011950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 2/2, is published: true 2025-04-10T10:53:20.012015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1441:3110] message: TxId: 281474976715668 2025-04-10T10:53:20.012076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-10T10:53:20.012138Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-04-10T10:53:20.012185Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-04-10T10:53:20.012348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 4 2025-04-10T10:53:20.012401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-04-10T10:53:20.012446Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-04-10T10:53:20.012468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-04-10T10:53:20.012525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-04-10T10:53:20.012553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-04-10T10:53:20.013091Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:53:20.013185Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1441:3110] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-10T10:53:20.013618Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1449:3117], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:20.013657Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:20.013688Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:53:20.058111Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1565:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:20.058231Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:53:20.058281Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:53:20.158503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:53:20.158605Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:53:20.158723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:53:20.158758Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:53:20.605379Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfnadjb8znb0q62pwtkhz57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWUzZGRjNjgtOWY2ODU3NTQtNDZkY2E4MzYtMmJiZDhhZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TNodeBrokerTest::FixedNodeId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-04-10T10:53:21.738869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:21.738937Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:21.771664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T10:53:21.849704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> TNodeBrokerTest::RegistrationPipelining >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] Test command err: 2025-04-10T10:53:23.973411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:23.973508Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 |65.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |65.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-04-10T10:53:23.938665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:23.938738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:23.959103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> StreamCreator::Basic >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> TNodeBrokerTest::FixedNodeId [GOOD] |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] Test command err: 2025-04-10T10:53:25.504469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:25.504550Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) >> TNodeBrokerTest::RegistrationPipelining [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 11800745267087102649 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-10T10:52:55.676241Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.678625Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.682097Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.688463Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.689248Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.705336Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.720625Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.850361Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:55.861465Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.060727Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.076722Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.269414Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.271942Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.288444Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.490708Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.652371Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.718691Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.733345Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.844099Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.871677Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.872445Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.938232Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.949695Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.963613Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.976956Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.987801Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:56.997500Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.108106Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.120442Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.133217Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.249274Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.249447Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.477267Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.492617Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.525705Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.540322Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.617323Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.632749Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.665319Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.665976Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 
2025-04-10T10:52:57.955922Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:57.972700Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.020702Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.036388Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.201511Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.236491Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.259579Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.261950Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.275085Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.392636Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.534697Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.600961Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.624685Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.647726Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.794795Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.805684Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.870756Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.916425Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.934042Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.949555Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:58.997342Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.018736Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.145454Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.281306Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.356173Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.471819Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.488144Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.634655Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.647813Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.672032Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.726585Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5311:700] 2025-04-10T10:52:59.882452Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [ ... k [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-04-10T10:53:12.378426Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.382115Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.387702Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.394372Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.396507Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.440058Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.612592Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.649310Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.945950Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:12.977944Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.097320Z 8 00h20m55.212560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.311107Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.313913Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.339607Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.375903Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.392843Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.459068Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.483651Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.556425Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.819228Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.820710Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.912846Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:13.994731Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.345029Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.520253Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.537365Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.553618Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.575313Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.601342Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.720479Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.724016Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.827368Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5360:749] 2025-04-10T10:53:14.857719Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:14.896472Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.182348Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.414751Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.432213Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.453254Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.454763Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.638303Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.659546Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.893307Z 8 00h20m58.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:15.929928Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.041812Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.054957Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.074176Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.075730Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.110930Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.243659Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.319065Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.339978Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.376184Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.459885Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.689555Z 8 00h21m00.412560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.691029Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.717946Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:16.735626Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.015072Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.040494Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.063826Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.103102Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.118328Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.214474Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.239187Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.259834Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.294755Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.393845Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.435547Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.451059Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.494397Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.494874Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] 2025-04-10T10:53:17.498476Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5360:749] |65.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |65.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |65.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TNodeBrokerTest::ConfigPipelining >> TNodeBrokerTest::TestListNodes [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] |65.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-04-10T10:53:25.459763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:25.459908Z node 1 :IMPORT WARN: Table profiles were not loaded |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TDynamicNameserverTest::CacheMissSameDeadline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2025-04-10T10:53:25.636288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:25.636369Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-04-10T10:53:21.368957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:21.369050Z node 1 :IMPORT WARN: Table profiles were not loaded >> TEnumerationTest::TestPublish [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-04-10T10:53:26.151115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:26.151219Z node 1 :IMPORT WARN: Table profiles were not loaded |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |65.5%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] Test command err: 2025-04-10T10:53:28.889268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:28.889352Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) >> TNodeBrokerTest::ConfigPipelining [GOOD] |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> VDiskRestart::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-04-10T10:53:28.625717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:28.625781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:28.673235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2025-04-10T10:53:28.267969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:28.268035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:28.383277Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-04-10T10:53:26.304043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:26.304115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:26.327678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] >> TMonitoringTests::ValidActorId >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> StreamCreator::Basic [GOOD] >> TMonitoringTests::InvalidActorId >> TMonitoringTests::ValidActorId [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] |65.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality >> TMonitoringTests::InvalidActorId [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> TNodeBrokerTest::UpdateEpochPipelining >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-04-10T10:53:26.551385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635890807668039:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:26.551424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011b6/r3tmp/tmpABwUTA/pdisk_1.dat 2025-04-10T10:53:27.177893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:27.177999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:27.180267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:27.199489Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22111 TServer::EnableGrpc on GrpcPort 23120, node 1 2025-04-10T10:53:27.539101Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:27.539133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:27.539144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:27.539300Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:53:27.904438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:27.939919Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:53:27.960365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408092 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744282407980 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408092 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-10T10:53:28.128709Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T10:53:28.128898Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T10:53:28.128919Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T10:53:28.129852Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T10:53:30.112887Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282408092, tx_id: 281474976710658 } } } 2025-04-10T10:53:30.113346Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T10:53:30.115402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:53:30.117025Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T10:53:30.117039Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T10:53:30.159497Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T10:53:30.159529Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-10T10:53:30.160075Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-10T10:53:30.258373Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7491635907987538069:2348] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T10:53:30.284265Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-10T10:53:30.284294Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-10T10:53:30.299145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table 2025-04-10T10:53:30.321294Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-10T10:53:30.321322Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408092 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> TNodeBrokerTest::SingleDomainModeBannedIds >> TDynamicNameserverTest::BasicFunctionality |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> TTenantPoolTests::TestStateStatic >> GracefulShutdown::TTxGracefulShutdown >> TNodeBrokerTest::LoadStateMoveEpoch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-04-10T10:53:27.110391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635894950170140:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:27.118841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119d/r3tmp/tmpJVmttd/pdisk_1.dat 2025-04-10T10:53:27.606461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:27.608755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:27.608838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:27.614622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:22166 TServer::EnableGrpc on GrpcPort 11696, node 1 2025-04-10T10:53:27.874307Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:27.874325Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:27.874331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:27.874422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:28.373275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:28.392688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408498 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744282408428 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408498 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-10T10:53:28.531235Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T10:53:28.531395Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T10:53:28.531406Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T10:53:28.534769Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T10:53:30.490677Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282408498, tx_id: 281474976710658 } } } 2025-04-10T10:53:30.491070Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T10:53:30.492521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:53:30.493568Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T10:53:30.493577Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T10:53:30.522874Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T10:53:30.522902Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-10T10:53:30.523374Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-10T10:53:30.627142Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7491635907835072863:2349] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T10:53:30.646450Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-10T10:53:30.646478Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-10T10:53:30.660815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table 2025-04-10T10:53:30.676657Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-10T10:53:30.676700Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282408498 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> TNodeBrokerTest::ExtendLeasePipelining |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId |65.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} >> TDynamicNameserverTest::CacheMissDifferentDeadline >> TTenantPoolTests::TestStateStatic [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |65.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TSlotIndexesPoolTest::Expansion [GOOD] |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TNodeBrokerTest::RegistrationPipeliningNodeName >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-04-10T10:53:32.701425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:32.701507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:32.839494Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-04-10T10:53:32.874915Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-10T10:53:32.895217Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-04-10T10:53:33.862678Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-10T10:53:33.885587Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> TNodeBrokerTest::ExtendLeaseRestartRace >> TSlotIndexesPoolTest::Init [GOOD] |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> TNodeBrokerTest::NodeNameExpiration >> TDynamicNameserverTest::CacheMissPipeDisconnect >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] Test command err: 2025-04-10T10:53:34.260215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:34.260301Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-04-10T10:53:33.583438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:33.583504Z node 1 :IMPORT WARN: Table profiles were not loaded |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2025-04-10T10:53:33.238511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:33.238593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:33.307783Z node 1 :NODE_BROKER ERROR: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-04-10T10:53:33.320775Z node 1 :NODE_BROKER ERROR: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] Test command err: 2025-04-10T10:53:35.777406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:35.777502Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-04-10T10:53:32.970759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:32.970845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:34.299302Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TLocalTests::TestAlterTenant >> TLocalTests::TestAddTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-04-10T10:53:33.571582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:33.571671Z node 1 :IMPORT WARN: Table profiles were not loaded |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-04-10T10:53:35.971242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:35.971326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:36.055322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T10:53:36.103513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-04-10T10:53:33.597585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:33.597652Z node 1 :IMPORT WARN: Table profiles were not loaded ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-04-10T10:53:32.613430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:32.613535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:34.008392Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001 2025-04-10T10:53:34.030539Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-10T10:53:34.031154Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-10T10:53:34.031648Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired >> TNodeBrokerTest::TestListNodesEpochDeltas >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] >> TLocalTests::TestAddTenant [GOOD] |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2025-04-10T10:53:35.698441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:35.698515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:35.725319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T10:53:35.772419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TLocalTests::TestAlterTenant [GOOD] >> TNodeBrokerTest::NoEffectBeforeCommit |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] Test command err: 2025-04-10T10:53:37.349944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:37.350052Z node 1 :IMPORT WARN: Table profiles were not loaded >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-04-10T10:53:37.764769Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-04-10T10:53:37.765091Z node 1 :LOCAL ERROR: Unknown domain dc-3 |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-04-10T10:53:33.829657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:33.829743Z node 1 :IMPORT WARN: Table profiles were not loaded >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-04-10T10:53:35.981859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:35.981968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:36.003298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] >> TDynamicNameserverTest::TestCacheUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-04-10T10:53:35.891570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:35.891689Z node 1 :IMPORT WARN: Table profiles were not 
loaded ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... waiting for epoch update ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2025-04-10T10:53:39.063162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:39.063259Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-04-10T10:53:39.509158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:39.509246Z node 1 :IMPORT WARN: Table profiles were not loaded >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-04-10T10:53:38.490625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:38.490691Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TSchemeShardServerLess::StorageBilling [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.545830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.545909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.545962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.546001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.546041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.546067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.546122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.546198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.546457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.623818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.623876Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.643303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.643570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.643748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.650679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.650891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.651518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.651840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.653568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.654988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.655030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.655065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.655313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.662118Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.795769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.796015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.796219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.796485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.796539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.798611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.798737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.798919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.798973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.799008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.799053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.800618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.800679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.800713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.802105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.802161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.802203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.802253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.806085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-04-10T10:52:58.807748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.807920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.808933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.809055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.809107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.809376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.809433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.809603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.809694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.811718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.811756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.811924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.811965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.812196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.812238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.812328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.812368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.812399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.812506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.812546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.812597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.812631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-04-10T10:52:58.812659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.812709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.812741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.812772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.814857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.814964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.814997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T10:53:44.813360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-10T10:53:44.813449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-04-10T10:53:44.813604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-10T10:53:44.813760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-10T10:53:44.813830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-10T10:53:44.815106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-10T10:53:44.816060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-10T10:53:44.817277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-10T10:53:44.817327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-10T10:53:44.817455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-04-10T10:53:44.817637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-10T10:53:44.817681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:662:2574], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-04-10T10:53:44.817726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:662:2574], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-04-10T10:53:44.819009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-10T10:53:44.819071Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-04-10T10:53:44.819178Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-10T10:53:44.819223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-04-10T10:53:44.819268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-10T10:53:44.820048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-10T10:53:44.820134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-10T10:53:44.820165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-10T10:53:44.820201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-04-10T10:53:44.820231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-04-10T10:53:44.821390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-10T10:53:44.821498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-10T10:53:44.821526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-10T10:53:44.821546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-04-10T10:53:44.821571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-04-10T10:53:44.821639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-10T10:53:44.823027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-10T10:53:44.823072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-04-10T10:53:44.823369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-10T10:53:44.823502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-10T10:53:44.823528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T10:53:44.823555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-10T10:53:44.823579Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T10:53:44.823605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-10T10:53:44.823667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:809:2690] message: TxId: 107 2025-04-10T10:53:44.823711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T10:53:44.823754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-10T10:53:44.823788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-10T10:53:44.823871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-04-10T10:53:44.824905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-04-10T10:53:44.826021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-04-10T10:53:44.826210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T10:53:44.826257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2196:4041] TestWaitNotification: OK eventTxId 107 2025-04-10T10:53:44.843367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 781 RawX2: 4294969966 } TabletId: 72075186233409552 State: 4 2025-04-10T10:53:44.843471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-04-10T10:53:44.845818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-04-10T10:53:44.846266Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-04-10T10:53:44.848325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-04-10T10:53:44.848559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-04-10T10:53:44.849095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-04-10T10:53:44.849142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-04-10T10:53:44.849186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-10T10:53:44.851454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-04-10T10:53:44.851568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-04-10T10:53:44.852351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, 
left 0 candidates, at schemeshard: 72075186233409549
2025-04-10T10:53:44.991646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549
2025-04-10T10:53:44.991774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549
2025-04-10T10:53:44.991851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549
2025-04-10T10:53:44.991934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546
2025-04-10T10:53:44.991972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546
2025-04-10T10:53:44.992024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546
2025-04-10T10:53:44.992074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944
2025-04-10T10:53:44.992118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-04-10T10:53:44.992156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-04-10T10:53:45.059634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T10:53:45.060037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.027500Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z
2025-04-10T10:53:45.062480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
grabMeteringMessage has happened
2025-04-10T10:53:45.062665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD]
>> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition
>> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD]
>> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild
>> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK
>> Cdc::Write[PqRunner] [GOOD]
>> Cdc::Write[YdsRunner]
>> Cdc::HugeKey[TopicRunner] [GOOD]
>> Cdc::HugeKeyDebezium
>> ResourcePoolsDdl::TestDropResourcePool [GOOD]
>> Cdc::NewImageLogDebezium [GOOD]
>> Cdc::NaN[PqRunner]
>> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD]
Test command err:
2025-04-10T10:53:04.210708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635797829854195:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:53:04.210776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fe9/r3tmp/tmpcR5bEJ/pdisk_1.dat
2025-04-10T10:53:04.599050Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:53:04.607317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:53:04.607408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:53:04.610201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1429, node 1
2025-04-10T10:53:04.746752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:53:04.746773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:53:04.746779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:53:04.746895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5007
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:53:05.059703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
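The TTxServerlessStorageBilling record printed above is the one piece of structured output in this stretch of the log, so its fields can be cross-checked mechanically. Below is a minimal standalone sketch (Python, not part of the test output); the invariants it asserts are inferred from this single record, not from any documented metering schema:

    import json

    # Metering record copied verbatim from the TTxServerlessStorageBilling
    # line in the StorageBilling test output above.
    record = json.loads(
        '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,'
        '"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},'
        '"id":"72057594046678944-3-1600452180-1600452239-0",'
        '"cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,'
        '"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL",'
        '"schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"}'
    )

    usage = record["usage"]
    # In this record the billed quantity equals the window length in seconds
    # (1600452239 - 1600452180 = 59), matching the toBill interval
    # 18:03:00--18:03:59 logged next to the record.
    assert usage["finish"] - usage["start"] == usage["quantity"] == 59
    assert usage["type"] == "delta" and usage["unit"] == "byte*second"
    assert record["schema"] == "ydb.serverless.v1"
    # The record id appears to embed the schemeshard owner id, the local path
    # id (3, matching the logged domainId) and the window bounds; this is an
    # observation about this one record, not a stated format.
    owner, path_id, start, finish, _ = record["id"].split("-")
    assert (owner, path_id) == ("72057594046678944", "3")
    assert (int(start), int(finish)) == (usage["start"], usage["finish"])
    print("metering record is self-consistent:", record["id"])

For this record the quantity is simply the length in seconds of the toBill window; whether that still holds with a nonzero ydb_size is not visible from this log.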
2025-04-10T10:53:07.174565Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization
2025-04-10T10:53:07.178361Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=
2025-04-10T10:53:07.178957Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810714756726:2328], Start check tables existence, number paths: 2
2025-04-10T10:53:07.179043Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [1:7491635810714756727:2329], ActorState: unknown state, session actor bootstrapped
2025-04-10T10:53:07.179656Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1
2025-04-10T10:53:07.179703Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes
2025-04-10T10:53:07.179740Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled
2025-04-10T10:53:07.182640Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810714756726:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown
2025-04-10T10:53:07.182719Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810714756726:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown
2025-04-10T10:53:07.182781Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810714756726:2328], Successfully finished
2025-04-10T10:53:07.182843Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0
2025-04-10T10:53:07.192282Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating
2025-04-10T10:53:07.196771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T10:53:07.198122Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658
2025-04-10T10:53:07.198367Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected
2025-04-10T10:53:07.207384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T10:53:07.260059Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating
2025-04-10T10:53:07.264670Z node 1 :TX_PROXY ERROR: Actor# [1:7491635810714756797:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:53:07.264779Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810714756744:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created
2025-04-10T10:53:07.274085Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default
2025-04-10T10:53:07.274117Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root
2025-04-10T10:53:07.274203Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810714756806:2332], DatabaseId: /Root, PoolId: default, Start pool fetching
2025-04-10T10:53:07.274285Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [1:7491635810714756727:2329], ActorState: ReadyState, TraceId: 01jrfna0t94pk8f6qz68v080b9, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL my_pool WITH ( CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default
2025-04-10T10:53:07.276070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810714756806:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:53:07.276218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:53:07.472102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T10:53:07.476843Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [1:7491635810714756727:2329], ActorState: ExecuteState, TraceId: 01jrfna0t94pk8f6qz68v080b9, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7491635810714756815:2329] WorkloadServiceCleanup: 0
2025-04-10T10:53:07.478401Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [1:7491635810714756727:2329], ActorState: CleanupState, TraceId: 01jrfna0t94pk8f6qz68v080b9, EndCleanup, isFinal: 0
2025-04-10T10:53:07.478482Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2FiZmU2MDEtZTUwYzUwZWMtYzgxYzM4MmUtNGYwNDY0YWU=, ActorId: [1:7491635810714756727:2329], ActorState: CleanupState, TraceId: 01jrfna0t94pk8f6qz68v080b9, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7491635797829854444:2277]
2025-04-10T10:53:07.482658Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjJkNGFkZjItMWY0MGFlYzAtYWUxMzA4YjUtNWQ2YzQ4Y2M=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjJkNGFkZjItMWY0MGFlYzAtYWUxMzA4YjUtNWQ2YzQ4Y2M=
2025-04-10T10:53:07.482790Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjJkNGFkZjItMWY0MGFlYzAtYWUxMzA4YjUtNWQ2YzQ4Y2M=, ActorId: [1:7491635810714756837:2333], ActorState: unknown state, session actor bootstrapped
2025-04-10T10:53:07.482947Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool
2025-04-10T10:53:07.483023Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810714756839:2334], DatabaseId: /Root, PoolId: my_pool, Start pool fetching
2025-04-10T10:53:07.483083Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjJkNGFkZjItMWY0MGFlYzAtYWUxMzA4YjUtNWQ2YzQ4Y2M=, ActorId: [1:7491635810714756837:2333], ActorState: ReadyState, TraceId: 01jrfna10v5g4ggn97bpzw4vvf, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635810714756836:2361] database: Root databaseId: /Root pool id: my_pool
2025-04-10T10:53:07.483126Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491635810714756837:2333], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=1&id=MjJkNGFkZjItMWY0MGFlYzAtYWUxMzA4YjUtNWQ2YzQ4Y2M=
2025-04-10T10:53:07.483231Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810714756840:2335], Database: /Root, Start database fetching
2025-04-10T10:53:07.483450Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810714756840:2335], Database: /Root, Database info successfully fetched, serverless: 0
2025-04-10T10:53:07.483544Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0
2025-04-10T10:53:07.483638Z node 1 :KQP_WORKLOAD_SERVICE DEBUG:
[WorkloadService] [TPoolResolverActor] ActorId: [1:7491635810714756849:2336], DatabaseId: /Root, PoolId: my_pool, SessionId: y ... ror: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.761135Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ExecuteState, TraceId: 01jrfnb9ah8brex3328t6czpgw, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:48.761177Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.761181Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ExecuteState, TraceId: 01jrfnb9ah8brex3328t6czpgw, EndCleanup, isFinal: 0 2025-04-10T10:53:48.761252Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ExecuteState, TraceId: 01jrfnb9ah8brex3328t6czpgw, Sent query response back to proxy, proxyRequestId: 18, proxyId: [7:7491635956479831508:2270] 2025-04-10T10:53:48.761600Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, TxId: 2025-04-10T10:53:48.761705Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, TxId: 2025-04-10T10:53:48.761820Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7491635973659701236:2339], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-10T10:53:48.761888Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:48.761927Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:48.761958Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:48.761985Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:48.762056Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTcyNmFiMjAtZDEzZGU4NWUtODFmY2FiYzQtYTllYzMzZTg=, ActorId: [7:7491635986544603591:2442], ActorState: unknown state, Session actor destroyed 2025-04-10T10:53:48.765744Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: ExecuteState, TraceId: 01jrfnb9a7es2cx96ynvtk6ak0, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [7:7491635986544603615:2330] WorkloadServiceCleanup: 0 2025-04-10T10:53:48.767806Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: CleanupState, TraceId: 01jrfnb9a7es2cx96ynvtk6ak0, EndCleanup, isFinal: 0 2025-04-10T10:53:48.767881Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: CleanupState, TraceId: 01jrfnb9a7es2cx96ynvtk6ak0, Sent query response back to proxy, proxyRequestId: 17, proxyId: [7:7491635956479831508:2270] 2025-04-10T10:53:48.772336Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA== 2025-04-10T10:53:48.772466Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:48.772708Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-10T10:53:48.772768Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491635986544603666:2458], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-10T10:53:48.772809Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: ReadyState, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, received request, proxyRequestId: 19 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [7:7491635986544603663:2648] database: Root databaseId: /Root pool id: my_pool 2025-04-10T10:53:48.772860Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [7:7491635986544603664:2457], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA== 2025-04-10T10:53:48.772919Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [7:7491635986544603667:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, Start pool fetching 2025-04-10T10:53:48.772957Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491635986544603668:2460], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-10T10:53:48.773061Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491635986544603666:2458], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.773112Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491635986544603668:2460], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.773139Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.773221Z node 7 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [7:7491635986544603667:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:53:48.773339Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [7:7491635986544603667:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-04-10T10:53:48.773477Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [7:7491635986544603664:2457]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-04-10T10:53:48.773571Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: ExecuteState, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-10T10:53:48.773708Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: ExecuteState, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-10T10:53:48.773738Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [7:7491635986544603664:2457], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA== 2025-04-10T10:53:48.773802Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: CleanupState, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, EndCleanup, isFinal: 1 2025-04-10T10:53:48.773913Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: CleanupState, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, Sent query response back to proxy, proxyRequestId: 19, proxyId: [7:7491635956479831508:2270] 2025-04-10T10:53:48.773947Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: unknown state, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, Cleanup temp tables: 0 2025-04-10T10:53:48.774047Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODgxNTRjY2YtNjk5OWYwNzItOWEzYjYwMy05NTUxYjIyZA==, ActorId: [7:7491635986544603664:2457], ActorState: unknown state, TraceId: 01jrfnb9b46hdkz09gh0btcbt7, Session actor destroyed 2025-04-10T10:53:48.780570Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:48.780618Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:48.780647Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:48.780674Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:48.780743Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTIxYWIyM2QtM2ZlOWM3MzctN2IzMTBhNWUtYWIwZTkyZmE=, ActorId: [7:7491635973659701111:2330], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-04-10T10:53:05.117141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635802760715322:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:05.117498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fad/r3tmp/tmpw3mgLy/pdisk_1.dat 2025-04-10T10:53:05.503952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:05.517799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:05.517895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:05.521430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8359, node 1 2025-04-10T10:53:05.647610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.647641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.647657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.647789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:06.014787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
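For reference, the probe statement each session in this test runs (quoted verbatim in the trace below as QUERY_TYPE_SQL_GENERIC_QUERY) is a trivial generic query; with resource pools disabled, the workload service is expected to pass it through without pool resolution:

    -- Probe query executed by the test sessions below.
    SELECT 42;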
2025-04-10T10:53:07.863349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.863592Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-04-10T10:53:07.866443Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk= 2025-04-10T10:53:07.866959Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.881196Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM= 2025-04-10T10:53:07.881298Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.881650Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ReadyState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635811350650421:2294] database: Root databaseId: /Root pool id: 2025-04-10T10:53:07.881795Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Sending CompileQuery request 2025-04-10T10:53:08.145764Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, ExecutePhyTx, tx: 0x000050C000243358 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:53:08.145831Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Sending to Executer TraceId: 0 8 2025-04-10T10:53:08.145989Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Created new KQP executer: [1:7491635815645617725:2327] isRollback: 0 2025-04-10T10:53:08.176564Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Forwarded TEvStreamData to [1:7491635811350650421:2294] 2025-04-10T10:53:08.180556Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 
2025-04-10T10:53:08.180697Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, txInfo Status: Committed Kind: Pure TotalDuration: 35.085 ServerDuration: 34.991 QueriesCount: 2 2025-04-10T10:53:08.180745Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:53:08.180896Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:08.180949Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, EndCleanup, isFinal: 1 2025-04-10T10:53:08.180982Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: ExecuteState, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7491635802760715435:2277] 2025-04-10T10:53:08.181028Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: unknown state, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Cleanup temp tables: 0 2025-04-10T10:53:08.181332Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmY0M2NlZDEtOGYyNTQxMDMtMTI2NzNhNWQtOGU5NmYxYzM=, ActorId: [1:7491635811350650422:2327], ActorState: unknown state, TraceId: 01jrfna1d96pxwwgq7t5hcvsj7, Session actor destroyed 2025-04-10T10:53:08.189649Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:08.189708Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:08.189730Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:08.189766Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:08.189878Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODlmZTBjMTktZTk1NjM1ZmMtYjE0NmNkZmUtNTY3YjU1Zjk=, ActorId: [1:7491635811350650420:2326], ActorState: unknown state, Session actor destroyed 2025-04-10T10:53:09.305889Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491635816265712574:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:09.306386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fad/r3tmp/tmp3AkWoO/pdisk_1.dat 2025-04-10T10:53:09.495749Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7361, node 2 2025-04-10T10:53:09.643380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:09.643476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:09.646948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:09.646986Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:09.646997Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:09.647009Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:09.647162Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersi ... 
[8:7491635982832566468:2360], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-10T10:53:47.052935Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491635982832566466:2358], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-10T10:53:47.052956Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491635982832566468:2360], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-10T10:53:47.052992Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-04-10T10:53:47.053014Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7491635982832566467:2359], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, Pool info successfully resolved 2025-04-10T10:53:47.053049Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc= 2025-04-10T10:53:47.053096Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7491635978537599129:2346], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7491635982832566463:2356], session id: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc= 2025-04-10T10:53:47.053121Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc= 2025-04-10T10:53:47.053142Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7491635978537599129:2346], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7491635982832566463:2356], session id: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, local in flight: 1 2025-04-10T10:53:47.053182Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, continue request, pool id: default 2025-04-10T10:53:47.053424Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-04-10T10:53:47.383853Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7491635961357728909:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:47.383915Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:47.390528Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491635960635460526:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:47.390606Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:47.395152Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491635959989844756:2072];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T10:53:47.395284Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:47.400304Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491635959282952061:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:47.400463Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:47.406384Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491635958256399024:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:47.406455Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:47.513794Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, ExecutePhyTx, tx: 0x000050C0001AF518 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-10T10:53:47.513839Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, Sending to Executer TraceId: 0 8 2025-04-10T10:53:47.513907Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, Created new KQP executer: [8:7491635982832566493:2356] isRollback: 0 2025-04-10T10:53:47.517795Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T10:53:47.517875Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, ExecutePhyTx, tx: 0x000050C000392BD8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:53:47.518418Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T10:53:47.518558Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, txInfo Status: Committed Kind: ReadOnly TotalDuration: 4.815 ServerDuration: 4.771 QueriesCount: 2 2025-04-10T10:53:47.518654Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 
2025-04-10T10:53:47.518724Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ExecuteState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-10T10:53:47.518761Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7491635978537599129:2346], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7491635982832566463:2356], session id: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, duration: 0.465972s, cpu consumed: 0.001384s 2025-04-10T10:53:47.518794Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7491635978537599129:2346], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7491635982832566463:2356], session id: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, local in flight: 0 2025-04-10T10:53:47.518850Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.465972s, CpuConsumed: 0.001384s, AdjustCpuQuota: 0 2025-04-10T10:53:47.518971Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: CleanupState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, EndCleanup, isFinal: 0 2025-04-10T10:53:47.519033Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: CleanupState, TraceId: 01jrfnb7ncefg3cxbgm6kycbap, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7491635961357729127:2279] 2025-04-10T10:53:47.519313Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, TxId: 2025-04-10T10:53:47.519434Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, TxId: 2025-04-10T10:53:47.519641Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:47.519700Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:47.519731Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:47.519757Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:47.519848Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=MTYyYjQ5NzEtNjA2NGRlZjEtYzc4ZjkzZDktOWM2YWJmMDc=, ActorId: [8:7491635982832566463:2356], ActorState: unknown state, Session actor destroyed 2025-04-10T10:53:47.530087Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2Y0OTkyZjgtNTA3NTI3NWUtYmM3YWY2NDQtMjcwMGJkODg=, ActorId: [8:7491635978537598976:2335], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:47.530137Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2Y0OTkyZjgtNTA3NTI3NWUtYmM3YWY2NDQtMjcwMGJkODg=, ActorId: [8:7491635978537598976:2335], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:47.530187Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2Y0OTkyZjgtNTA3NTI3NWUtYmM3YWY2NDQtMjcwMGJkODg=, ActorId: [8:7491635978537598976:2335], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:47.530220Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2Y0OTkyZjgtNTA3NTI3NWUtYmM3YWY2NDQtMjcwMGJkODg=, ActorId: [8:7491635978537598976:2335], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:47.530305Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2Y0OTkyZjgtNTA3NTI3NWUtYmM3YWY2NDQtMjcwMGJkODg=, ActorId: [8:7491635978537598976:2335], ActorState: unknown state, Session actor destroyed >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2025-04-10T10:53:24.652628Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652666Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652771Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652796Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652820Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652843Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652875Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:871:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652901Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652935Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.652958Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653627Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653683Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:6:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653713Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653742Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653765Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653795Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:862:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653821Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653847Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653871Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.653903Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.654891Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.654931Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.654960Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.654981Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655003Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655030Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655052Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655074Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655093Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:234:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655114Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655655Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:31:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655684Z :BS_VDISK_PUT CRIT: PDiskId# 1 
VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655709Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655732Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655755Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655779Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:958:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655801Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:546:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655823Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:619:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655846Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.655868Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656419Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656454Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656480Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656501Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656525Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656547Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656572Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656591Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656621Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.656640Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657096Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657122Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657142Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657163Z 
:BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657188Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657220Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657240Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657264Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657287Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657306Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657885Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657920Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657943Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:711:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657965Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.657988Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658012Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658034Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658053Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658070Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658099Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658664Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658689Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658709Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658728Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658749Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 
2025-04-10T10:53:24.658771Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658795Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658813Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658833Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.658851Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:720:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659210Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659232Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659256Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:69:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659283Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659306Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659325Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659347Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659370Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659394Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659416Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659890Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659919Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659940Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659962Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.659984Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660003Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660029Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# 
[5000:1:229:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660047Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660070Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660088Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660656Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660685Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:613:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660711Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:103:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660741Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660765Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660783Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660806Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:580:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660832Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660856Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.660887Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661474Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661503Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661524Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661546Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661565Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661590Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661618Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661642Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661665Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.661694Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662296Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:905:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662339Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662369Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662397Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662434Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662461Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662489Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662513Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662538Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.662567Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.663216Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.663248Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2025-04-10T10:53:24.663273Z :BS_VDISK_PUT CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2025-04-10T10:53:04.233625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635797481851697:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.233719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001019/r3tmp/tmp2K4tHw/pdisk_1.dat 2025-04-10T10:53:04.785371Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:04.785497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61947, node 1 2025-04-10T10:53:04.787294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:04.814294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:04.851841Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:53:04.851900Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:53:04.982623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:04.982647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:04.982653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:04.982753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.388458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:53:07.321245Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.321378Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810366754233:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.324627Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmYxYmFhZmYtZjNhNTJiNDMtNjAwM2JjMTQtOWIwMGM0NWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmYxYmFhZmYtZjNhNTJiNDMtNjAwM2JjMTQtOWIwMGM0NWI= 2025-04-10T10:53:07.325319Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-10T10:53:07.325354Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.325376Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-10T10:53:07.325460Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810366754233:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.325545Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810366754233:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.325582Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810366754233:2329], Successfully finished 2025-04-10T10:53:07.325637Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmYxYmFhZmYtZjNhNTJiNDMtNjAwM2JjMTQtOWIwMGM0NWI=, ActorId: [1:7491635810366754249:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.330502Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exist: 0 2025-04-10T10:53:07.337699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.340432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.341235Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.341391Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.348219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.441115Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.444863Z node 1 :TX_PROXY ERROR: Actor# [1:7491635810366754302:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.444952Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810366754251:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.446892Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU= 2025-04-10T10:53:07.446980Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU=, ActorId: [1:7491635810366754309:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.447194Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:07.447230Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.447298Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810366754311:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.447310Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU=, ActorId: [1:7491635810366754309:2331], ActorState: ReadyState, TraceId: 01jrfna0zqb8rpfhqvt8v2xprc, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635810366754308:2339] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:07.447347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7491635810366754309:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU= 2025-04-10T10:53:07.447401Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810366754312:2333], Database: /Root, Start database fetching 2025-04-10T10:53:07.447572Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810366754312:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:53:07.447622Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:53:07.447745Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635810366754318:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU=, Start pool fetching 2025-04-10T10:53:07.447789Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService]
[TPoolFetcherActor] ActorId: [1:7491635810366754321:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.448430Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810366754321:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.448453Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810366754311:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.448488Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T10:53:07.448509Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:53:07.448536Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635810366754318:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU=, Pool info successfully resolved 2025-04-10T10:53:07.448713Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU= 2025-04-10T10:53:07.448753Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635810366754325:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:53:07.448812Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU= 2025-04-10T10:53:07.448827Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635810366754325:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7491635810366754309:2331], session id: ydb://session/3?node_id=1&id=MzY0MDdmNGMtNGMzZmNmOTctYjM0Yjk0NDYtOTY4NjdlYWU= 2025-04-10T10:53:07.448873Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [Wo ... 
ault 2025-04-10T10:53:51.472156Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Sending CompileQuery request 2025-04-10T10:53:51.580629Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491635976630634657:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:51.580702Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:53:51.581370Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, ExecutePhyTx, tx: 0x000050C000075E98 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:53:51.581427Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Sending to Executer TraceId: 0 8 2025-04-10T10:53:51.581520Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Created new KQP executer: [8:7491635996433117720:2404] isRollback: 0 2025-04-10T10:53:51.585863Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Forwarded TEvStreamData to [7:7491635999333469139:3353] 2025-04-10T10:53:51.586695Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T10:53:51.586838Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, txInfo Status: Committed Kind: ReadOnly TotalDuration: 5.574 ServerDuration: 5.499 QueriesCount: 2 2025-04-10T10:53:51.586907Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:53:51.587337Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:51.587381Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, EndCleanup, isFinal: 1 
2025-04-10T10:53:51.587440Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: ExecuteState, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Sent query response back to proxy, proxyRequestId: 5, proxyId: [8:7491635974958280164:2277] 2025-04-10T10:53:51.587471Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: unknown state, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Cleanup temp tables: 0 2025-04-10T10:53:51.587837Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZTIxMDRhOGQtODllNjFhNWYtODhkNGE2ZTMtZmY2MjhiOWQ=, ActorId: [8:7491635996433117710:2404], ActorState: unknown state, TraceId: 01jrfnbbzg5cfmm2zh5vp9kzn3, Session actor destroyed 2025-04-10T10:53:51.590855Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI= 2025-04-10T10:53:51.590932Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:51.591222Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ReadyState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpcActor: [7:7491635999333469147:3357] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-04-10T10:53:51.591247Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ReadyState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, request placed into pool from cache: default 2025-04-10T10:53:51.591313Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Sending CompileQuery request 2025-04-10T10:53:51.676664Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, ExecutePhyTx, tx: 0x000050C000078E98 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:53:51.676724Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Sending to Executer TraceId: 0 8 2025-04-10T10:53:51.676797Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Created new KQP executer: [8:7491635996433117740:2412] isRollback: 0 2025-04-10T10:53:51.681643Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Forwarded TEvStreamData to [7:7491635999333469147:3357] 2025-04-10T10:53:51.682361Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T10:53:51.682548Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, txInfo Status: Committed Kind: ReadOnly TotalDuration: 5.958 ServerDuration: 5.861 QueriesCount: 2 2025-04-10T10:53:51.682647Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:53:51.683041Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:51.683079Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, EndCleanup, isFinal: 1 2025-04-10T10:53:51.683130Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: ExecuteState, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7491635974958280164:2277] 2025-04-10T10:53:51.683156Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: unknown state, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Cleanup temp tables: 0 2025-04-10T10:53:51.683558Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2Y5ODQ0ZjEtYjdjMjE0MzEtMjQ5ZWZlOTAtNzgzYjQ1MTI=, ActorId: [8:7491635996433117734:2412], ActorState: unknown state, TraceId: 01jrfnbc37e2cz5svzpnn8rx6p, Session actor destroyed 2025-04-10T10:53:51.686342Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-10T10:53:51.687067Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T10:53:51.687296Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-10T10:53:51.687723Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T10:53:51.695536Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTFmMjExODItZDRiYjNkODMtNGI5ZWE1NDEtMWU4ODVmMGU=, ActorId: [7:7491635977858631582:2335], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:51.695593Z node 7 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=7&id=NTFmMjExODItZDRiYjNkODMtNGI5ZWE1NDEtMWU4ODVmMGU=, ActorId: [7:7491635977858631582:2335], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:51.695660Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTFmMjExODItZDRiYjNkODMtNGI5ZWE1NDEtMWU4ODVmMGU=, ActorId: [7:7491635977858631582:2335], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:51.695693Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTFmMjExODItZDRiYjNkODMtNGI5ZWE1NDEtMWU4ODVmMGU=, ActorId: [7:7491635977858631582:2335], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:51.695764Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTFmMjExODItZDRiYjNkODMtNGI5ZWE1NDEtMWU4ODVmMGU=, ActorId: [7:7491635977858631582:2335], ActorState: unknown state, Session actor destroyed 2025-04-10T10:53:51.891480Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7491635974958279942:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:51.891556Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable |65.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] |65.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |65.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |65.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] |65.9%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] Test command err: 2025-04-10T10:53:58.990047Z :BS_VDISK_GET CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 
sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# [5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 
94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# [5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# 
[5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# [5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191:0:0:100000:1] sh# ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> 
RetryPolicy::TWriteSession_TestPolicy >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> CompressExecutor::TestReorderedExecutor >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-10T10:54:00.901280Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.901325Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.901368Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.901871Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:00.902664Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.914018Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.914501Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.918787Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.918817Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.918847Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.919214Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.919832Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.920027Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.920216Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.920531Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T10:54:00.921442Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.921481Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.921498Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.921796Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.922453Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.922557Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.922741Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.923459Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.923731Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:00.923821Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.923860Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T10:54:00.925136Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.925165Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.925192Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.925527Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:00.925939Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.926037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.926258Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-10T10:54:00.927256Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.927476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T10:54:00.927804Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-10T10:54:00.927995Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T10:54:00.928104Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.928139Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.928174Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.928337Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-10T10:54:00.928435Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:00.928459Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T10:54:00.928479Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.928619Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-10T10:54:00.928730Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T10:54:00.928757Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2025-04-10T10:54:00.928775Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.928864Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-10T10:54:00.928924Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T10:54:00.928944Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T10:54:00.928979Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.929099Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-10T10:54:00.930783Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.930823Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.930846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.931236Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.931676Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.931833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.932045Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-10T10:54:00.934685Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.934997Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T10:54:00.935384Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2025-04-10T10:54:00.935572Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T10:54:00.935689Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.935727Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.935751Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:00.935769Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T10:54:00.935803Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.936052Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-04-10T10:54:00.936135Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T10:54:00.936179Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-10T10:54:00.936200Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T10:54:00.936223Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T10:54:00.936260Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.936427Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-04-10T10:54:00.937740Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.937815Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.937837Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.938135Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.938879Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.939056Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.939316Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.940457Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.941349Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.941630Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-10T10:54:00.941728Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T10:54:00.941839Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.941884Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.941919Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-10T10:54:00.941944Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-10T10:54:00.941988Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-10T10:54:00.942021Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-10T10:54:00.942257Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-04-10T10:54:00.942419Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-10T10:54:00.734691Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.734727Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.734752Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.735304Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T10:54:00.735391Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.735439Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.736867Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007424s 2025-04-10T10:54:00.737678Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.738244Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.738367Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.741122Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.741154Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.741177Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.741592Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T10:54:00.741627Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.741656Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.741729Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005505s 2025-04-10T10:54:00.742171Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:00.742587Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.742673Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.743615Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.743638Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.743661Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.743980Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T10:54:00.744016Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.744035Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.744098Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.167474s 2025-04-10T10:54:00.744469Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.744844Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.744920Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.745805Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.745828Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.745853Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.746187Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T10:54:00.746225Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.746247Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.746292Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.237836s 2025-04-10T10:54:00.746621Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.747157Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.747240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.748247Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.748314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.748358Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.748626Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.749098Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.796478Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.796955Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-10T10:54:00.797000Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.797027Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.797103Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.262944s 2025-04-10T10:54:00.797299Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T10:54:00.799514Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.799549Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.799572Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.799917Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.800442Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.800597Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.801788Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.906733Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.907066Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T10:54:00.907152Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.907208Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T10:54:00.907291Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T10:54:01.007902Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T10:54:01.008100Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T10:54:01.009708Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.009734Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.009763Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.010174Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.010653Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.010803Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.011289Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.112297Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.112542Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T10:54:01.112610Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.112651Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-10T10:54:01.112732Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-10T10:54:01.112845Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T10:54:01.112921Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T10:54:01.113025Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T10:54:01.113729Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-10T10:54:00.905557Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.905599Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.905623Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.906037Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.906485Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.906581Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.908505Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.908526Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.908546Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.908894Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.909148Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T10:54:00.909199Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.909872Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.909915Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.909948Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.910293Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T10:54:00.910340Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.910370Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.910487Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-10T10:54:00.911320Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.911382Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.911406Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.911710Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T10:54:00.911739Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.911752Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.911817Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-10T10:54:00.912704Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T10:54:00.912761Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T10:54:00.912789Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.913047Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.913604Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.925334Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T10:54:00.925794Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.926170Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-10T10:54:00.929818Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-10T10:54:00.930143Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.930217Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.930253Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:00.930275Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-10T10:54:00.930299Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-10T10:54:00.930318Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-10T10:54:00.930336Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-10T10:54:00.930356Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-10T10:54:00.930419Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-10T10:54:00.930439Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-10T10:54:00.930457Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-10T10:54:00.930474Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-10T10:54:00.930493Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-10T10:54:00.930523Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-10T10:54:00.930607Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-10T10:54:00.930629Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-10T10:54:00.930675Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-10T10:54:00.930692Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-10T10:54:00.930709Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-10T10:54:00.930725Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-10T10:54:00.930742Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-10T10:54:00.930758Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-10T10:54:00.930775Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-10T10:54:00.930811Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-10T10:54:00.930833Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-10T10:54:00.930852Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-10T10:54:00.930867Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-10T10:54:00.930885Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-10T10:54:00.930900Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-10T10:54:00.930919Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-10T10:54:00.930948Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-10T10:54:00.930969Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-10T10:54:00.931047Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-10T10:54:00.931066Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-10T10:54:00.931082Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-10T10:54:00.931101Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-10T10:54:00.931122Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-10T10:54:00.931140Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-10T10:54:00.931158Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-10T10:54:00.931193Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-10T10:54:00.931216Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-10T10:54:00.931234Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-10T10:54:00.931251Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-10T10:54:00.931268Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-10T10:54:00.931283Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-10T10:54:00.931315Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-10T10:54:00.931346Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-10T10:54:00.931368Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-10T10:54:00.931384Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-10T10:54:00.931408Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-10T10:54:00.931458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-10T10:54:00.933618Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-10T10:54:00.933962Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-10T10:54:00.933995Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-10T10:54:00.934012Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-10T10:54:00.934025Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-10T10:54:00.934053Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-10T10:54:00.934064Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-10T10:54:00.934075Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-10T10:54:00.934086Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-10T10:54:00.934115Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-10T10:54:00.934129Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-10T10:54:00.934139Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-10T10:54:00.934174Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-10T10:54:00.934189Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-10T10:54:00.934199Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-10T10:54:00.934209Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-10T10:54:00.934226Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-10T10:54:00.934259Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-10T10:54:00.934270Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-10T10:54:00.934281Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-10T10:54:00.934291Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-10T10:54:00.934310Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-10T10:54:00.934319Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-10T10:54:00.934329Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-10T10:54:00.934338Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-10T10:54:00.934346Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-10T10:54:00.934356Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-10T10:54:00.934365Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-10T10:54:00.934383Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-10T10:54:00.934396Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-10T10:54:00.934416Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-10T10:54:00.934428Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-10T10:54:00.934440Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-10T10:54:00.934481Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-10T10:54:00.934494Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-10T10:54:00.934503Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-10T10:54:00.934511Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-10T10:54:00.934530Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-10T10:54:00.934540Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-10T10:54:00.934550Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-10T10:54:00.934558Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-10T10:54:00.934567Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-10T10:54:00.934578Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-10T10:54:00.934587Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-10T10:54:00.934606Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-10T10:54:00.934660Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-10T10:54:00.934673Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-10T10:54:00.934683Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-10T10:54:00.934693Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-10T10:54:00.934706Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-10T10:54:00.934715Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-10T10:54:00.934760Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-10T10:54:00.934860Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T10:54:00.935822Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.935841Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.935855Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.936154Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:00.936588Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.936723Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.937129Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.038206Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.038439Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T10:54:01.038501Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.038538Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T10:54:01.038594Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T10:54:01.241415Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-10T10:54:01.352232Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T10:54:01.354324Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T10:54:01.354503Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T10:54:01.355635Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.355701Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.355729Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.356041Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.356592Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.356699Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.357066Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.463017Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.463243Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T10:54:01.463305Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.463340Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T10:54:01.463418Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-10T10:54:01.463516Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T10:54:01.463874Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T10:54:01.463971Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-10T10:54:01.466092Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |66.0%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-10T10:54:00.928650Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.928738Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.928761Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.929264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.929968Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.942432Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.942909Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.943761Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.944193Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.944442Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-10T10:54:00.944579Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:00.944668Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.944716Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-10T10:54:00.944750Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T10:54:00.944780Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T10:54:00.945953Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.945981Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.946019Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.946462Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:00.947087Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.947272Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.947478Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-10T10:54:00.949575Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.949886Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T10:54:00.950249Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-10T10:54:00.950511Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T10:54:00.951166Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.951208Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.951237Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.951384Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-10T10:54:00.951468Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:00.951481Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T10:54:00.951496Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.951585Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-10T10:54:00.951616Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T10:54:00.951627Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2025-04-10T10:54:00.951640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.951683Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-10T10:54:00.951731Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T10:54:00.951752Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T10:54:00.951775Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:00.951872Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-10T10:54:00.953672Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.953698Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.953731Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.954086Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.954604Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.955639Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.955894Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-10T10:54:00.956830Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:00.957043Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T10:54:00.957407Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2025-04-10T10:54:00.957681Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T10:54:00.957795Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.957836Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.957945Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-04-10T10:54:00.958026Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:00.958059Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.958132Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-04-10T10:54:00.958208Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:00.958235Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T10:54:00.958327Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-04-10T10:54:00.958381Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T10:54:00.958401Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:02.915157Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2025-04-10T10:54:02.967235Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T10:54:02.967674Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T10:54:02.967736Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:02.968100Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:02.968701Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:02.969047Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T10:54:02.969785Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-10T10:54:03.055384Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-10T10:54:03.056461Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:03.058366Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:03.061187Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T10:54:03.062044Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-10T10:54:03.067055Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-10T10:54:03.067870Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-10T10:54:03.068686Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-10T10:54:03.069472Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-10T10:54:03.076810Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-10T10:54:03.077565Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-10T10:54:03.077642Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-10T10:54:03.081700Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T10:54:03.088860Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-04-10T10:54:03.103696Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.103728Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.103816Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:03.104177Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:03.104601Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:03.107470Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.107858Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-10T10:54:03.108446Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-10T10:54:03.109914Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.110029Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.110079Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:03.110442Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:03.110833Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:03.110951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.111506Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:03.111644Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:03.111804Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:03.111851Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T10:54:03.112048Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] |66.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |66.0%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.0%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-04-10T10:53:04.369510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635797804672495:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.369569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00104d/r3tmp/tmpY6NBTo/pdisk_1.dat 2025-04-10T10:53:04.966824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:04.989394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:04.989505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:04.992304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22617, node 1 2025-04-10T10:53:05.206767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.206786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.206792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.206902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.561723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
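The unittest trace that follows is the raw output of KqpWorkloadService::TestZeroConcurrentQueryLimit: the workload service bootstraps, creates the sample_pool_id resource pool (the ESchemeOpCreateResourcePool operation), and then routes a "SELECT 42;" request through that pool. A minimal client-side sketch of the same flow is below; it assumes the in-tree ydb-cpp-sdk query client, and the ResourcePool() request setting in particular is an assumed name, not something shown in this log — the test itself drives this path through its own fixtures.

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_query/client.h>

using namespace NYdb;
using namespace NYdb::NQuery;

int main() {
    TDriver driver(TDriverConfig()
        .SetEndpoint("localhost:2136")   // assumed endpoint; the test allocates its own GrpcPort
        .SetDatabase("/Root"));
    TQueryClient client(driver);

    // Mirrors the ESchemeOpCreateResourcePool operation in the trace;
    // the limit value is implied by the test name, not printed in the log.
    client.ExecuteQuery(R"(
        CREATE RESOURCE POOL sample_pool_id WITH (
            CONCURRENT_QUERY_LIMIT = 0
        );
    )", TTxControl::NoTx()).GetValueSync();

    // The proxy log shows the request arriving with "pool id: sample_pool_id";
    // ResourcePool() is an assumed setting name for pinning a pool per request.
    auto result = client.ExecuteQuery(
        "SELECT 42;",
        TTxControl::BeginTx().CommitTx(),
        TExecuteQuerySettings().ResourcePool("sample_pool_id")
    ).GetValueSync();
    return result.IsSuccess() ? 0 : 1;
}
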
2025-04-10T10:53:05.597975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:53:07.407633Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.410950Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2UwNDE2MTUtNzRiODQxZjUtNDYyNTkzMmYtMzI4ZWE5Y2U=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2UwNDE2MTUtNzRiODQxZjUtNDYyNTkzMmYtMzI4ZWE5Y2U= 2025-04-10T10:53:07.411927Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810689575028:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.412038Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:53:07.412067Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.412088Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:07.412157Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2UwNDE2MTUtNzRiODQxZjUtNDYyNTkzMmYtMzI4ZWE5Y2U=, ActorId: [1:7491635810689575029:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.417340Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810689575028:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.417411Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810689575028:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.417440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810689575028:2329], Successfully finished 2025-04-10T10:53:07.417547Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.430894Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.433914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.435075Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.435260Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.443936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.530529Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.534726Z node 1 :TX_PROXY ERROR: Actor# [1:7491635810689575097:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.534872Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810689575046:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.539439Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI= 2025-04-10T10:53:07.539554Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI=, ActorId: [1:7491635810689575105:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.539745Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:07.539805Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.539931Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810689575107:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.539948Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI=, ActorId: [1:7491635810689575105:2331], ActorState: ReadyState, TraceId: 01jrfna12k32070fw18cvyth9w, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635810689575104:2342] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:07.540004Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491635810689575105:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI= 2025-04-10T10:53:07.540072Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810689575108:2333], Database: /Root, Start database fetching 2025-04-10T10:53:07.543146Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810689575108:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:53:07.543267Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:53:07.543386Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635810689575117:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI=, Start pool fetching 2025-04-10T10:53:07.543438Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7491635810689575118:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.543976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810689575107:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.544047Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810689575118:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.544051Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T10:53:07.544063Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:53:07.544135Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635810689575117:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI=, Pool info successfully resolved 2025-04-10T10:53:07.544342Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI= 2025-04-10T10:53:07.544415Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635810689575121:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:53:07.544526Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635810689575121:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7491635810689575105:2331], session id: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI= 2025-04-10T10:53:07.544527Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjBiYWQyZmUtN2ZjYjc2YjYtYzcxNmExZDQtOWY1MjIxNTI= 2025-04-10T10:53:07.544669Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491636051175244732:2328], Start check tables existence, number paths: 2 2025-04-10T10:54:03.296910Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE= 2025-04-10T10:54:03.297676Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:54:03.297710Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:54:03.297736Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:54:03.297832Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491636051175244732:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:54:03.297892Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491636051175244732:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:54:03.297932Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491636051175244732:2328], Successfully finished 2025-04-10T10:54:03.297990Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:54:03.298860Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:54:03.299848Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:54:03.303861Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:03.305070Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-10T10:54:03.305214Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:54:03.315339Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:54:03.368277Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:54:03.371705Z node 6 :TX_PROXY ERROR: Actor# [6:7491636051175244810:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:03.371819Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636051175244759:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:54:03.376749Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc= 2025-04-10T10:54:03.376917Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:54:03.377245Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:54:03.377280Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:54:03.377366Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636051175244819:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:54:03.377385Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: ReadyState, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7491636051175244816:2338] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:54:03.377477Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7491636051175244817:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc= 2025-04-10T10:54:03.377570Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7491636051175244820:2333], Database: /Root, Start database fetching 2025-04-10T10:54:03.377757Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7491636051175244820:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:54:03.377838Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:54:03.377922Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7491636051175244826:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, Start pool fetching 2025-04-10T10:54:03.377979Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [6:7491636051175244828:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:54:03.378835Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636051175244828:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:54:03.378842Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636051175244819:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:54:03.378903Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T10:54:03.378922Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:54:03.378932Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7491636051175244826:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, Pool info successfully resolved 2025-04-10T10:54:03.379132Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc= 2025-04-10T10:54:03.379168Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636051175244833:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:54:03.379264Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc= 2025-04-10T10:54:03.379360Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: ExecuteState, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-04-10T10:54:03.379523Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: ExecuteState, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-10T10:54:03.379606Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7491636051175244817:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc= 2025-04-10T10:54:03.379664Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: CleanupState, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, EndCleanup, isFinal: 1 2025-04-10T10:54:03.379788Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: CleanupState, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7491636033995374943:2085] 
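The KQP_SESSION WARN entry just above ("Create QueryResponse for error on request ... Query failed during adding/waiting in workload pool sample_pool_id") is the behavior under test: with a concurrent-query limit of zero the pool admits nothing, so the request is rejected rather than queued. Continuing the sketch above, the client-side expectation would look roughly like the fragment below; the status check is an assumption, since the actual test asserts through its own helpers.

// Fragment continuing the previous sketch; reuses `client` and the pool.
auto rejected = client.ExecuteQuery(
    "SELECT 42;",
    TTxControl::BeginTx().CommitTx(),
    TExecuteQuerySettings().ResourcePool("sample_pool_id")
).GetValueSync();

Y_ABORT_UNLESS(!rejected.IsSuccess());
// The returned issue text should correspond to the WARN entry above.
Cerr << rejected.GetIssues().ToString() << Endl;
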
2025-04-10T10:54:03.379833Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: unknown state, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, Cleanup temp tables: 0 2025-04-10T10:54:03.379993Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTg0NzY4OWItODNiNTQyZDctYWU1NTdhMTgtNDc2ZDJiZTc=, ActorId: [6:7491636051175244817:2331], ActorState: unknown state, TraceId: 01jrfnbqkhctk5yxf5m4h3xd84, Session actor destroyed 2025-04-10T10:54:03.380054Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636051175244833:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-10T10:54:03.387867Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:03.387921Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:03.387951Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:54:03.387983Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:54:03.388066Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDY1ZjQ4ZGItODY1NzdmYWEtYTJkYThmZGEtNDgzYTFmODE=, ActorId: [6:7491636051175244757:2330], ActorState: unknown state, Session actor destroyed >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TResourceBroker::TestOverusage >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TTabletPipeTest::TestConnectReject >> TTabletPipeTest::TestSendAfterOpen >> TTabletPipeTest::TestOpen >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TTabletPipeTest::TestTwoNodes >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TResourceBroker::TestErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-04-10T10:53:01.659424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635781883746081:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.659562Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:01.820300Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a1d/r3tmp/tmpsaezLU/pdisk_1.dat 2025-04-10T10:53:02.010293Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31625, node 1 2025-04-10T10:53:02.051917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.052002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.081373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.152861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001a1d/r3tmp/yandexxbKlt4.tmp 2025-04-10T10:53:02.152884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001a1d/r3tmp/yandexxbKlt4.tmp 2025-04-10T10:53:02.242529Z INFO: TTestServer started on Port 21363 GrpcPort 31625 2025-04-10T10:53:02.290445Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001a1d/r3tmp/yandexxbKlt4.tmp 2025-04-10T10:53:02.290674Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21363 PQClient connected to localhost:31625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.492659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.531812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:04.791198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635794768648794:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.791198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635794768648780:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.791310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.797666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.818758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635794768648809:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.115510Z node 1 :TX_PROXY ERROR: Actor# [1:7491635794768648873:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.157231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.219562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.285872Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635799063616178:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.286332Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJmYWYyNDItNDM0NmVjOWEtYjBlMDYzYzQtNzgzZGYxNjU=, ActorId: [1:7491635794768648777:2337], ActorState: ExecuteState, TraceId: 01jrfn9ybxb29rxhc75b925rtq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.292460Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.351275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635799063616473:2632] 2025-04-10T10:53:06.659557Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635781883746081:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.659628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.819191Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:11.835245Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:11.836372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635824833420566:2813], Recipient [1:7491635781883746509:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.836407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.836423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:11.836461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635824833420562:2810], Recipient [1:7491635781883746509:2191]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:11.836479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:11.900081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:11.900421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:11.900659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:11.900708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:11.900758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:11.900794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:53:11.900824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-10T10:53:11.900886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, ... parts is done, operation id: 281474976715673:1 2025-04-10T10:54:04.801624Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715673:1 2025-04-10T10:54:04.801696Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-10T10:54:04.801716Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:2 2025-04-10T10:54:04.801724Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715673:2 2025-04-10T10:54:04.801772Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 5 2025-04-10T10:54:04.801788Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715673, publications: 2, subscribers: 1 2025-04-10T10:54:04.801803Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715673, [OwnerId: 72057594046644480, LocalPathId: 14], 4 2025-04-10T10:54:04.801813Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715673, [OwnerId: 72057594046644480, LocalPathId: 15], 2 2025-04-10T10:54:04.802314Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7491636054230266988:2485], Recipient [5:7491636054230266868:2475]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-10T10:54:04.802963Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7491636015575560145:2239], Recipient [5:7491636015575560006:2162]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Version: 4 } 2025-04-10T10:54:04.802997Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T10:54:04.803067Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 14 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.803147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 14 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.803146Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7491636054230266868:2475], Recipient [5:7491636054230266868:2475]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-10T10:54:04.803164Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-10T10:54:04.803168Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-10T10:54:04.803185Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 14], version: 4 2025-04-10T10:54:04.803186Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] 
Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T10:54:04.803202Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 14] was 3 2025-04-10T10:54:04.803217Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-04-10T10:54:04.803237Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State DELETING 2025-04-10T10:54:04.803275Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715673 2025-04-10T10:54:04.803281Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:54:04.803386Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7491636054230266868:2475], Recipient [5:7491636054230266868:2475]: NKikimr::TEvKeyValue::TEvCollect 2025-04-10T10:54:04.803449Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7491636015575560145:2239], Recipient [5:7491636015575560006:2162]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 2 } 2025-04-10T10:54:04.803470Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T10:54:04.803528Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.803602Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.803613Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-10T10:54:04.803630Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-04-10T10:54:04.803646Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-04-10T10:54:04.803706Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-04-10T10:54:04.803727Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7491636054230266841:2473] 2025-04-10T10:54:04.803750Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:54:04.803866Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:04.804111Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.804124Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:04.804166Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:04.804174Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
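For reference, the CreatePersQueueGroup request earlier in this test's log (MinPartitionCount: 2, MaxPartitionCount: 100, PartitionStrategyType: CAN_SPLIT, consumer "test-consumer") describes an autoscaling-capable topic. A rough SDK-side equivalent is sketched below; the partitioning builder methods, MaxActivePartitions() especially, are assumptions about the NYdb::NTopic surface rather than code taken from the test.

#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

using namespace NYdb::NTopic;

void CreateAutoscalingTopic(NYdb::TDriver& driver) {
    TTopicClient client(driver);
    TCreateTopicSettings settings;
    settings.BeginConfigurePartitioningSettings()
        .MinActivePartitions(2)          // MinPartitionCount in the request
        .MaxActivePartitions(100)        // assumed setter for MaxPartitionCount
        .EndConfigurePartitioningSettings();
    // The CAN_SPLIT strategy itself is part of the server-side request in
    // the test; no client-side setter is assumed here.
    settings.BeginAddConsumer("test-consumer").EndAddConsumer();
    client.CreateTopic("/Root/test-topic", settings).GetValueSync();
}
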
2025-04-10T10:54:04.804225Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7491636054230266993:2486], Recipient [5:7491636054230266868:2475]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-10T10:54:04.804236Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7491636054230266841:2473] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-10T10:54:04.804758Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7491636054230266851:2835], Recipient [5:7491636015575560006:2162]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:04.804782Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:04.804797Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:54:04.817988Z node 5 :PQ_READ_PROXY DEBUG: new alter topic request 2025-04-10T10:54:04.829977Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7491636015575560006:2162]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:04.830034Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:04.830093Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7491636015575560006:2162], Recipient [5:7491636015575560006:2162]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:04.830118Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:04.892402Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7491636054230266868:2475], Partition 0, Sender [0:0:0], Recipient [5:7491636054230266966:2481], Cookie: 0 2025-04-10T10:54:04.892491Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7491636054230266966:2481]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:04.892526Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:04.892582Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:04.892659Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:04.892691Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:04.892726Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:54:04.993080Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7491636054230266868:2475], Partition 0, Sender [0:0:0], Recipient [5:7491636054230266966:2481], Cookie: 0 2025-04-10T10:54:04.993163Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7491636054230266966:2481]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:04.993197Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:04.993251Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:04.993329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:04.993361Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:04.993395Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:54:05.094118Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7491636054230266868:2475], Partition 0, Sender [0:0:0], Recipient [5:7491636054230266966:2481], Cookie: 0 2025-04-10T10:54:05.094227Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7491636054230266966:2481]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:05.094263Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:05.094319Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:05.094396Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:05.094428Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:05.094466Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TResourceBroker::TestResubmitTask >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> TTabletPipeTest::TestSendAfterReboot >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> BootstrapperTest::KeepExistingTablet >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TTabletPipeTest::TestShutdown >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletPipeTest::TestOpen [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: 
[1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:119:2147] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:119:2147] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:119:2147] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:119:2147] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:119:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TPipeTrackerTest::TestSimpleAdd [GOOD] |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] |66.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> TResourceBroker::TestAutoTaskId |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] |66.1%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2025-04-10T10:54:08.421059Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |66.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |66.1%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> BootstrapperTest::LoneBootstrapper >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TTabletPipeTest::TestPipeWithVersionInfo |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TResourceBroker::TestCounters |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TResourceBroker::TestRealUsage >> TTabletPipeTest::TestRewriteSameNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test 
command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2025-04-10T10:54:09.998953Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:09.999066Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:09.999879Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-10T10:54:09.999935Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-04-10T10:54:10.000199Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-10T10:54:10.000232Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-10T10:54:10.001208Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2025-04-10T10:54:10.001299Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-10T10:54:10.001883Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-10T10:54:10.001928Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TPipeCacheTest::TestAutoConnect >> TTabletPipeTest::TestConsumerSidePipeReset >> TTabletResolver::TabletResolvePriority [GOOD] >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TResourceBrokerInstant::TestErrors |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation >> BootstrapperTest::RestartUnavailableTablet >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TTabletPipeTest::TestRewriteSameNode [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] >> 
TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TResourceBroker::TestRandomQueue [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> TResourceBrokerInstant::TestMerge [GOOD] |66.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |66.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-04-10T10:54:13.000447Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-12 (12 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.000856Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-31 (31 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001019Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-38 (38 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001163Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-44 (44 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001244Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-47 (47 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001380Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-52 (52 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001450Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-54 (54 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001527Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-56 (56 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001601Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-59 (59 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001696Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-61 (61 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001898Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-69 (69 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.001971Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-71 (71 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002009Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-72 (72 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002129Z node 2 :RESOURCE_BROKER ERROR: 
Assigning waiting task 'task-76 (76 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002209Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-77 (77 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002383Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-78 (78 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002459Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-81 (81 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002498Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-82 (82 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002581Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-85 (85 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002740Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-91 (91 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002815Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-94 (94 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.002977Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-100 (100 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003273Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-111 (111 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003684Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-132 (132 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003747Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-134 (134 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003820Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-137 (137 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003860Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-138 (138 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.003997Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-144 (144 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004055Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-146 (146 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004201Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-153 (153 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004348Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-159 (159 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004391Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-160 (160 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004431Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-161 (161 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004522Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-165 (165 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004641Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-170 (170 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004713Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004873Z node 2 :RESOURCE_BROKER ERROR: 
Assigning waiting task 'task-179 (179 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.004949Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-182 (182 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005061Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-187 (187 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005120Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-189 (189 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005213Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-193 (193 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005270Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-194 (194 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005361Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-198 (198 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005586Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-208 (208 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005643Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-210 (210 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005720Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-213 (213 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.005948Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-222 (222 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006136Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-231 (231 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006395Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-242 (242 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006440Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-243 (243 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006518Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-246 (246 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006570Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-247 (247 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006630Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-249 (249 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006675Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-250 (250 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.006757Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-253 (253 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007027Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-267 (267 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007123Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-270 (270 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007187Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-272 (272 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007252Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-274 (274 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007291Z node 2 
:RESOURCE_BROKER ERROR: Assigning waiting task 'task-275 (275 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007340Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-276 (276 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007392Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-277 (277 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007472Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-280 (280 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007605Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-286 (286 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007644Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-287 (287 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.007919Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-301 (301 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008294Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-318 (318 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008352Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-319 (319 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008404Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-320 (320 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008519Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-325 (325 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008594Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-328 (328 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008652Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-330 (330 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008717Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-332 (332 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.008838Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-337 (337 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.009168Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-346 (346 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.009392Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-355 (355 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.009472Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-357 (357 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.009518Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-358 (358 by [2:99:2 ... 
10T10:54:13.054180Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-144 (144 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054242Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-159 (159 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054279Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-161 (161 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054391Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-179 (179 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054434Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-194 (194 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054488Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-208 (208 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054538Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-222 (222 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054594Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-231 (231 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054675Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-247 (247 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054709Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-253 (253 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054765Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-267 (267 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054796Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-272 (272 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054847Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-277 (277 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054902Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-287 (287 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.054961Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-319 (319 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055000Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-330 (330 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055030Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-337 (337 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055094Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-391 (391 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055183Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-492 (492 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055267Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-518 (518 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055320Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-573 (573 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055365Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-587 (587 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055503Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-670 (670 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-10T10:54:13.055526Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-680 (680 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055593Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-702 (702 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055652Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-718 (718 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055691Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-719 (719 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055729Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-725 (725 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055805Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-752 (752 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055845Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-753 (753 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055885Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-758 (758 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.055934Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-774 (774 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056046Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-855 (855 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056082Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-862 (862 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056113Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-863 (863 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056142Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-872 (872 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056187Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-891 (891 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056228Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-898 (898 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056313Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-940 (940 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056380Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-971 (971 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056516Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-72 (72 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056605Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-94 (94 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056652Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-111 (111 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056697Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-137 (137 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056812Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-198 (198 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.056881Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-213 (213 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-10T10:54:13.056961Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-250 (250 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057118Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-318 (318 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057195Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-346 (346 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057327Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-363 (363 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057492Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-411 (411 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057544Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-412 (412 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057651Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-480 (480 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057719Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-482 (482 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057819Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-502 (502 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.057949Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-551 (551 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058064Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-576 (576 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058118Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-591 (591 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058234Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-601 (601 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058278Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-606 (606 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058412Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-623 (623 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058459Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-632 (632 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058502Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-640 (640 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058542Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-655 (655 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058697Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-704 (704 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058755Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-722 (722 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058877Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-801 (801 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058921Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-803 (803 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.058951Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-826 (826 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-04-10T10:54:13.059015Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-831 (831 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059093Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-843 (843 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059136Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-861 (861 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059195Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-896 (896 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059237Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-897 (897 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059304Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-933 (933 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059347Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-952 (952 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059434Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-974 (974 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-04-10T10:54:13.059486Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-975 (975 by [2:99:2134])' of unknown type 'wrong' to default queue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2025-04-10T10:54:13.091593Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2025-04-10T10:54:13.108879Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-10T10:54:13.116687Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-04-10T10:54:13.120482Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:128:2154] 2025-04-10T10:54:13.120571Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:128:2154] 2025-04-10T10:54:13.120923Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:128:2154] 2025-04-10T10:54:13.120994Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:128:2154] 2025-04-10T10:54:13.121072Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:128:2154] 2025-04-10T10:54:13.121137Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:128:2154] 2025-04-10T10:54:13.121231Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:128:2154] 2025-04-10T10:54:13.121405Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:128:2154] Type# 269877249 Reason# ActorUnknown 2025-04-10T10:54:13.121549Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2025-04-10T10:54:13.121590Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2025-04-10T10:54:13.121671Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2025-04-10T10:54:13.121717Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 
2025-04-10T10:54:13.121773Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2025-04-10T10:54:13.121802Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2025-04-10T10:54:13.121857Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2025-04-10T10:54:13.121957Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown 2025-04-10T10:54:13.122136Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:133:2158] 2025-04-10T10:54:13.122183Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:133:2158] 2025-04-10T10:54:13.122242Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:133:2158] 2025-04-10T10:54:13.122275Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:133:2158] 2025-04-10T10:54:13.122316Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:133:2158] 2025-04-10T10:54:13.122339Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:133:2158] 2025-04-10T10:54:13.122418Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:133:2158] 2025-04-10T10:54:13.122540Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:133:2158] Type# 269877249 Reason# ActorUnknown |66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool |66.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> TPipeCacheTest::TestAutoConnect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-04-10T10:54:12.908143Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> YdbSdkSessions::SessionsServerLimit >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> TestDataErasure::DataErasureWithCopyTable |66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |66.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_kqp.cpp |66.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TestDataErasure::DataErasureWithMerge >> TestDataErasure::SimpleDataErasureTest >> YdbSdkSessions::MultiThreadSync |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpUserConstraint::KqpReadNull-UploadNull >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TestDataErasure::DataErasureManualLaunch3Cycles |66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.3%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-04-10T10:54:14.491283Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-04-10T10:54:14.492038Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-04-10T10:54:14.492096Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-04-10T10:54:14.627803Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... 
waiting for multiple state storage lookup attempts (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:52:58.326647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:52:58.326764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.326814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:52:58.326852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:52:58.326887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:52:58.326916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:52:58.326991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:52:58.327071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:52:58.327362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:58.411492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:52:58.411567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:58.429596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:58.430014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:52:58.430239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:52:58.443801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:52:58.444146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:52:58.444993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.445517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:52:58.447999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.449361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.449426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.449682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T10:52:58.449820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.449883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:52:58.450053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.456227Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:52:58.594600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:52:58.594811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.594993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:52:58.595209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:52:58.595261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:52:58.597551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.597617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:52:58.597658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:52:58.597694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:52:58.599447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.599501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:52:58.599538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:52:58.601102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.601150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.601184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T10:52:58.601232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.604660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:52:58.606397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:52:58.606551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:52:58.607535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:52:58.607661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:52:58.607710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.607963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:52:58.608013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:52:58.608165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:52:58.608267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:52:58.610135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.610214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:52:58.610430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.610476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:52:58.610705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:52:58.610746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:52:58.610845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:52:58.610879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.610917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T10:52:58.610970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.611023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:52:58.611059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:52:58.611088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:52:58.611114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:52:58.611167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:52:58.611202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:52:58.611231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:52:58.613287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.613401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:52:58.613437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 2025-04-10T10:52:58.931661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-10T10:52:58.931684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-10T10:52:58.931716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-04-10T10:52:58.933242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:52:58.933276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:52:58.933400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:52:58.933438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 FAKE_COORDINATOR: Erasing txId 105 2025-04-10T10:52:58.933811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409549, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "cloud_id" Value: "CLOUD_ID_VAL" } UserAttributes { Key: "database_id" Value: "DATABASE_ID_VAL" } UserAttributes { Key: "folder_id" Value: "FOLDER_ID_VAL" } UserAttributes { Key: "label_k" Value: "v" } UserAttributes { Key: "not_a_label_x" Value: "y" } UserAttributesVersion: 2 2025-04-10T10:52:58.933927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "cloud_id" Value: "CLOUD_ID_VAL" } UserAttributes { Key: "database_id" Value: "DATABASE_ID_VAL" } UserAttributes { Key: "folder_id" Value: "FOLDER_ID_VAL" } UserAttributes { Key: 
"label_k" Value: "v" } UserAttributes { Key: "not_a_label_x" Value: "y" } UserAttributesVersion: 2, at schemeshard: 72075186233409549 2025-04-10T10:52:58.934107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-10T10:52:58.934501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:52:58.934581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:52:58.934614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:52:58.934647Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-10T10:52:58.934678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:52:58.934768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T10:52:58.936743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-10T10:52:58.936771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 0, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-10T10:52:58.936894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-10T10:52:58.936917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:669:2581], at schemeshard: 72075186233409549, txId: 0, path id: 1 2025-04-10T10:52:58.937011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 2 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T10:52:58.937059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:52:58.937120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:571:2510], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:52:58.937576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 
72075186233409549 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409549, cookie: 0 2025-04-10T10:52:58.937675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T10:52:58.937716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T10:52:58.937866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:52:58.937896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:52:58.938182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:52:58.938260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:52:58.938283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:747:2639] TestWaitNotification: OK eventTxId 105 ... waiting for metering 2025-04-10T10:53:03.595481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:53:03.595569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:03.652825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:53:03.652914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:03.700257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:53:03.700324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:23.147217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:53:23.147365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: initiate at first time, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:01:00.000000Z, set LastBillTime: 1970-01-01T00:01:00.000000Z, next retry at: 1970-01-01T00:02:00.000000Z 2025-04-10T10:53:23.155099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:53:23.264341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T10:53:23.264625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T10:53:23.264782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T10:53:23.369512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-10T10:53:23.369637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-10T10:53:23.369730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-10T10:53:23.413154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-10T10:53:23.413263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-10T10:53:23.413341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-10T10:53:48.794310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T10:53:48.794452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2025-04-10T10:53:48.794525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:53:48.899495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T10:53:48.899605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T10:53:48.899649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T10:53:48.972672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-10T10:53:48.972763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-10T10:53:48.972807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-10T10:53:49.035778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-10T10:53:49.035869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-10T10:53:49.035928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-10T10:54:14.547444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:14.547993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-120-179-0","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2025-04-10T10:54:14.555507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering ... 
waiting for metering (done) |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles >> TestDataErasure::DataErasureWithSplit >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation |66.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> StatisticsSaveLoad::ForbidAccess |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2025-04-10T10:54:12.425732Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:12.425835Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:12.425913Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:12.427432Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-10T10:54:12.427514Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-10T10:54:12.427685Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-10T10:54:12.427720Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-04-10T10:54:12.427960Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-10T10:54:12.427991Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-04-10T10:54:12.429059Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2025-04-10T10:54:12.429299Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-10T10:54:12.429396Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-10T10:54:12.429562Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-10T10:54:12.429592Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-04-10T10:54:12.429655Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-10T10:54:12.429722Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-10T10:54:12.429748Z node 3 :BOOTSTRAPPER NOTICE: tablet: 
9437184, type: Dummy, boot 2025-04-10T10:54:12.429948Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-10T10:54:12.429979Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-04-10T10:54:12.626624Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:12.627379Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:12.627957Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-10T10:54:12.628013Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-10T10:54:12.675176Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:12.676102Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:12.676669Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-10T10:54:12.676722Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 3 (idx 1) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2025-04-10T10:54:13.524236Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-10T10:54:13.524303Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-10T10:54:13.524649Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-10T10:54:13.524701Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:13.524747Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-10T10:54:13.524777Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:13.536589Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:13.536791Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:13.543315Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-10T10:54:13.543385Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-10T10:54:13.543483Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-10T10:54:13.543516Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... 
sleeping for 2 seconds (tablet expected to survive) 2025-04-10T10:54:14.359429Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-04-10T10:54:14.359495Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-10T10:54:14.359578Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-10T10:54:14.359648Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:14.359902Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-10T10:54:14.359930Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:14.361738Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:14.362736Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-10T10:54:14.364180Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-10T10:54:14.364231Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2025-04-10T10:54:14.364543Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-10T10:54:14.364570Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-04-10T10:54:14.365248Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-04-10T10:54:14.365289Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) 2025-04-10T10:54:14.365370Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-10T10:54:14.365594Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-04-10T10:54:14.365620Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-04-10T10:54:15.187336Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-04-10T10:54:15.187425Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:15.187561Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-04-10T10:54:15.187599Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:15.189384Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-04-10T10:54:15.189811Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-10T10:54:15.198990Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-10T10:54:15.199064Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-10T10:54:15.199716Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-10T10:54:15.199751Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 ... disconnecting nodes 1 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-10T10:54:15.200175Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2025-04-10T10:54:15.200632Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-04-10T10:54:15.200682Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2025-04-10T10:54:15.200727Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot ... disconnecting nodes 1 <-> 3 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-10T10:54:15.201584Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-04-10T10:54:15.201636Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2025-04-10T10:54:15.201703Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-10T10:54:15.201732Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-04-10T10:54:15.210452Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-04-10T10:54:15.210550Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-10T10:54:15.223539Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:393:2096] 2025-04-10T10:54:15.256710Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-10T10:54:15.256774Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 1 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey
got response node 10 [1:8:2055] 2025-04-10T10:54:08.818421Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:8:2055] 2025-04-10T10:54:08.867654Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:7:2054] 2025-04-10T10:54:08.893364Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:9:2056] 2025-04-10T10:54:08.895106Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:9:2056] 2025-04-10T10:54:08.948111Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:7:2054] 2025-04-10T10:54:08.994528Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:10:2057] 2025-04-10T10:54:08.995667Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:10:2057] 2025-04-10T10:54:09.022971Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:7:2054] 2025-04-10T10:54:09.048125Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:11:2058] 2025-04-10T10:54:09.049570Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:11:2058] 2025-04-10T10:54:09.091502Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:7:2054] 2025-04-10T10:54:09.117702Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:12:2059] 2025-04-10T10:54:09.119593Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:12:2059] 2025-04-10T10:54:09.170800Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:7:2054] 2025-04-10T10:54:09.196116Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:13:2060] 2025-04-10T10:54:09.197516Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:13:2060] 2025-04-10T10:54:09.234320Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:7:2054] 2025-04-10T10:54:09.260564Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:14:2061] 2025-04-10T10:54:09.261879Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:14:2061] 2025-04-10T10:54:09.297001Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:7:2054] 2025-04-10T10:54:09.332030Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:15:2062] 2025-04-10T10:54:09.333655Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:15:2062] 2025-04-10T10:54:09.370285Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:7:2054] 2025-04-10T10:54:09.396296Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:16:2063] 2025-04-10T10:54:09.397941Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:16:2063] 2025-04-10T10:54:09.434694Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:7:2054] 2025-04-10T10:54:09.461279Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:17:2064] 2025-04-10T10:54:09.462847Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:17:2064] 2025-04-10T10:54:09.501735Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:17:2064] Initiator [1:7:2054] 2025-04-10T10:54:09.528329Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:7:2054] 2025-04-10T10:54:09.528470Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 
0 [1:7:2054] 2025-04-10T10:54:09.534088Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:7:2054] 2025-04-10T10:54:09.534264Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:7:2054] 2025-04-10T10:54:09.540524Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:7:2054] 2025-04-10T10:54:09.540693Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:7:2054] 2025-04-10T10:54:09.546260Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:7:2054] 2025-04-10T10:54:09.546393Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:7:2054] 2025-04-10T10:54:09.552971Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:7:2054] 2025-04-10T10:54:09.553102Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:7:2054] 2025-04-10T10:54:09.559144Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:7:2054] 2025-04-10T10:54:09.559295Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:7:2054] 2025-04-10T10:54:09.565367Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:7:2054] 2025-04-10T10:54:09.565548Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:7:2054] 2025-04-10T10:54:09.573803Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:7:2054] 2025-04-10T10:54:09.573943Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:7:2054] 2025-04-10T10:54:09.579978Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:7:2054] 2025-04-10T10:54:09.580130Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:7:2054] 2025-04-10T10:54:09.586017Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:7:2054] 2025-04-10T10:54:09.586291Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:7:2054] 2025-04-10T10:54:09.592289Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] TEST 2 10 duration 1.947489s 2025-04-10T10:54:09.987437Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:7:2054] 2025-04-10T10:54:09.987913Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:8:2055] worker 0 2025-04-10T10:54:09.987965Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:9:2056] worker 1 2025-04-10T10:54:09.987994Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:10:2057] worker 2 2025-04-10T10:54:09.988023Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:11:2058] worker 3 2025-04-10T10:54:09.988049Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:12:2059] worker 4 2025-04-10T10:54:09.988074Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:13:2060] worker 5 2025-04-10T10:54:09.988098Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:14:2061] worker 6 2025-04-10T10:54:09.988123Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:15:2062] worker 7 2025-04-10T10:54:09.988161Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:16:2063] worker 8 2025-04-10T10:54:09.988199Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:17:2064] 
worker 9 2025-04-10T10:54:09.988229Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:18:2065] worker 10 2025-04-10T10:54:09.988253Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:19:2066] worker 11 2025-04-10T10:54:09.988279Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:20:2067] worker 12 2025-04-10T10:54:09.988303Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:21:2068] worker 13 2025-04-10T10:54:09.988327Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:22:2069] worker 14 2025-04-10T10:54:09.988374Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:23:2070] worker 15 2025-04-10T10:54:09.988414Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:24:2071] worker 16 2025-04-10T10:54:09.988438Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:25:2072] worker 17 2025-04-10T10:54:09.988462Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:26:2073] worker 18 2025-04-10T10:54:09.988483Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:27:2074] worker 19 Sending message to [2:9:2056] from [2:7:2054] id 1 Sending message to [2:10:2057] from [2:7:2054] id 2 Sending message to [2:11:2058] from [2:7:2054] id 3 Sending message to [2:12:2059] from [2:7:2054] id 4 Sending message to [2:13:2060] from [2:7:2054] id 5 Sending message to [2:14:2061] from [2:7:2054] id 6 Sending message to [2:15:2062] from [2:7:2054] id 7 Sending message to [2:16:2063] from [2:7:2054] id 8 Sending message to [2:17:2064] from [2:7: ... 
sponse node 14 [2:7:2054] 2025-04-10T10:54:12.019578Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 15 [2:7:2054] 2025-04-10T10:54:12.019612Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 15 [2:7:2054] 2025-04-10T10:54:12.019649Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 16 [2:7:2054] 2025-04-10T10:54:12.019695Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 16 [2:7:2054] 2025-04-10T10:54:12.019763Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 17 [2:7:2054] 2025-04-10T10:54:12.019789Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 17 [2:7:2054] 2025-04-10T10:54:12.019841Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:7:2054] 2025-04-10T10:54:12.019863Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:7:2054] 2025-04-10T10:54:12.019906Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:7:2054] 2025-04-10T10:54:12.019925Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:7:2054] 2025-04-10T10:54:12.019948Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:7:2054] 2025-04-10T10:54:12.020065Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:7:2054] 2025-04-10T10:54:12.025443Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:7:2054] 2025-04-10T10:54:12.025610Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:7:2054] 2025-04-10T10:54:12.034854Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:7:2054] 2025-04-10T10:54:12.035010Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:7:2054] 2025-04-10T10:54:12.046754Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:7:2054] 2025-04-10T10:54:12.046944Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:7:2054] 2025-04-10T10:54:12.055170Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:7:2054] 2025-04-10T10:54:12.055341Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:7:2054] 2025-04-10T10:54:12.060884Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:7:2054] 2025-04-10T10:54:12.061029Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:7:2054] 2025-04-10T10:54:12.066486Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:7:2054] 2025-04-10T10:54:12.066628Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:7:2054] 2025-04-10T10:54:12.075613Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:7:2054] 2025-04-10T10:54:12.075783Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:7:2054] 2025-04-10T10:54:12.081507Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:7:2054] 2025-04-10T10:54:12.081657Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:7:2054] 2025-04-10T10:54:12.087869Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:7:2054] 2025-04-10T10:54:12.088006Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:7:2054] 2025-04-10T10:54:12.093504Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:7:2054] Initiator [2:6:2053] TEST 2 20 duration 2.299866s 2025-04-10T10:54:12.351163Z node 3 :TABLET_AGGREGATOR INFO: aggregator new 
request V2 [3:7:2054] 2025-04-10T10:54:12.351303Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:7:2054] self [3:8:2055] worker 0 Sending message to [3:8:2055] from [3:7:2054] id 1 Sending message to [3:8:2055] from [3:7:2054] id 2 Sending message to [3:8:2055] from [3:7:2054] id 3 Sending message to [3:8:2055] from [3:7:2054] id 4 Sending message to [3:8:2055] from [3:7:2054] id 5 Sending message to [3:8:2055] from [3:7:2054] id 6 Sending message to [3:8:2055] from [3:7:2054] id 7 Sending message to [3:8:2055] from [3:7:2054] id 8 Sending message to [3:8:2055] from [3:7:2054] id 9 Sending message to [3:8:2055] from [3:7:2054] id 10 2025-04-10T10:54:13.252562Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:8:2055] 2025-04-10T10:54:13.252641Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:8:2055] 2025-04-10T10:54:13.252680Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:8:2055] 2025-04-10T10:54:13.252765Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:8:2055] 2025-04-10T10:54:13.252809Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:8:2055] 2025-04-10T10:54:13.252846Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:8:2055] 2025-04-10T10:54:13.252881Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:8:2055] 2025-04-10T10:54:13.252935Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:8:2055] 2025-04-10T10:54:13.252984Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:8:2055] 2025-04-10T10:54:13.253036Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:8:2055] 2025-04-10T10:54:13.253422Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:8:2055] 2025-04-10T10:54:13.254781Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:8:2055] 2025-04-10T10:54:13.287322Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:8:2055] 2025-04-10T10:54:13.288791Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:8:2055] 2025-04-10T10:54:13.323536Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:8:2055] 2025-04-10T10:54:13.324951Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:8:2055] 2025-04-10T10:54:13.362062Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:8:2055] 2025-04-10T10:54:13.363545Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:8:2055] 2025-04-10T10:54:13.396876Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:8:2055] 2025-04-10T10:54:13.398276Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:8:2055] 2025-04-10T10:54:13.441585Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:8:2055] 2025-04-10T10:54:13.443070Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:8:2055] 2025-04-10T10:54:13.497141Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:8:2055] 2025-04-10T10:54:13.498772Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:8:2055] 2025-04-10T10:54:13.533920Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:8:2055] 2025-04-10T10:54:13.535440Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:8:2055] 2025-04-10T10:54:13.571466Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 9 [3:8:2055] 2025-04-10T10:54:13.573101Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:8:2055] 2025-04-10T10:54:13.609104Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:8:2055] 2025-04-10T10:54:13.610706Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:8:2055] 2025-04-10T10:54:13.675356Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:8:2055] Initiator [3:7:2054] 2025-04-10T10:54:14.049968Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:7:2054] 2025-04-10T10:54:14.050948Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:7:2054] 2025-04-10T10:54:14.118452Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] TEST 2 1 duration 1.997171s 2025-04-10T10:54:14.508324Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:6:2053] self [4:7:2054] worker 0 Sending message to [4:7:2054] from [4:7:2054] id 1 Sending message to [4:7:2054] from [4:7:2054] id 2 Sending message to [4:7:2054] from [4:7:2054] id 3 Sending message to [4:7:2054] from [4:7:2054] id 4 Sending message to [4:7:2054] from [4:7:2054] id 5 Sending message to [4:7:2054] from [4:7:2054] id 6 Sending message to [4:7:2054] from [4:7:2054] id 7 Sending message to [4:7:2054] from [4:7:2054] id 8 Sending message to [4:7:2054] from [4:7:2054] id 9 Sending message to [4:7:2054] from [4:7:2054] id 10 2025-04-10T10:54:15.431763Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:7:2054] 2025-04-10T10:54:15.431864Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:7:2054] 2025-04-10T10:54:15.431899Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:7:2054] 2025-04-10T10:54:15.431933Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:7:2054] 2025-04-10T10:54:15.432063Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:7:2054] 2025-04-10T10:54:15.432115Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:7:2054] 2025-04-10T10:54:15.432164Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:7:2054] 2025-04-10T10:54:15.432214Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:7:2054] 2025-04-10T10:54:15.432263Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:7:2054] 2025-04-10T10:54:15.432312Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:7:2054] 2025-04-10T10:54:15.432783Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:7:2054] 2025-04-10T10:54:15.435489Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:7:2054] 2025-04-10T10:54:15.482796Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:7:2054] 2025-04-10T10:54:15.485363Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:7:2054] 2025-04-10T10:54:15.545917Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:7:2054] 2025-04-10T10:54:15.547870Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:7:2054] 2025-04-10T10:54:15.607303Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:7:2054] 2025-04-10T10:54:15.609545Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:7:2054] 2025-04-10T10:54:15.663156Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:7:2054] 
2025-04-10T10:54:15.664942Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:7:2054] 2025-04-10T10:54:15.738563Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:7:2054] 2025-04-10T10:54:15.740592Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:7:2054] 2025-04-10T10:54:15.787225Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:7:2054] 2025-04-10T10:54:15.789325Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:7:2054] 2025-04-10T10:54:15.838282Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:7:2054] 2025-04-10T10:54:15.840117Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:7:2054] 2025-04-10T10:54:15.883212Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:7:2054] 2025-04-10T10:54:15.885605Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:7:2054] 2025-04-10T10:54:15.932359Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:7:2054] 2025-04-10T10:54:15.934418Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:7:2054] 2025-04-10T10:54:16.012198Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:7:2054] Initiator [4:6:2053] TEST 2 1 duration 2.038124s |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> KqpStreamLookup::ReadTableDuringSplit >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> WithSDK::DescribeConsumer >> TestDataErasure::SimpleDataErasureTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] Test command err: 2025-04-10T10:53:02.077442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635786908138157:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:02.077511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:02.307918Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00198c/r3tmp/tmp2n4Mti/pdisk_1.dat 2025-04-10T10:53:02.481114Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8202, node 1 2025-04-10T10:53:02.548076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.548167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.549900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.566701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00198c/r3tmp/yandextnndwQ.tmp 2025-04-10T10:53:02.566725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/2gt4/00198c/r3tmp/yandextnndwQ.tmp 2025-04-10T10:53:02.593486Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00198c/r3tmp/yandextnndwQ.tmp 2025-04-10T10:53:02.593731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.618615Z INFO: TTestServer started on Port 4319 GrpcPort 8202 TClient is connected to server localhost:4319 PQClient connected to localhost:8202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.911068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.936460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.975684Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:03.046611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:05.143326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635799793040854:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.143496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.143790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635799793040881:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.152616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:05.171455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635799793040883:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.507694Z node 1 :TX_PROXY ERROR: Actor# [1:7491635799793040947:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.552685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.639452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.642971Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635799793040955:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.643177Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDdlNDk3ZS1kNzk1OWNmNy1mYWFmNzkzYS0yNzUwNzg0Ng==, ActorId: [1:7491635799793040851:2337], ActorState: ExecuteState, TraceId: 01jrfn9yqaf118dzrsvxdxjw49, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.645440Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.729095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635799793041247:2628] 2025-04-10T10:53:07.077564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635786908138157:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:07.077692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.999635Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:12.016605Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:12.017682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635829857812624:2801], Recipient [1:7491635786908138581:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.017708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.017725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:12.017765Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635829857812620:2798], Recipient [1:7491635786908138581:2188]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:12.017782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:12.168804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:12.169290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:12.169581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:12.169618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:12.169651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:12.169694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was ... 5-04-10T10:54:16.689742Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715673 2025-04-10T10:54:16.689784Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:16.689844Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:16.689855Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-10T10:54:16.689869Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-04-10T10:54:16.689882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-04-10T10:54:16.689927Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-04-10T10:54:16.689946Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [6:7491636106438487918:2481] 2025-04-10T10:54:16.689969Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:54:16.690056Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:16.690509Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [6:7491636106438487945:2483], Recipient [6:7491636106438487945:2483]: NKikimr::TEvKeyValue::TEvCollect 2025-04-10T10:54:16.691368Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [6:7491636106438488074:2494], Recipient [6:7491636106438487945:2483]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-10T10:54:16.696673Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:16.696709Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:16.696774Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-10T10:54:16.696785Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:16.696894Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:7491636106438487918:2481] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-10T10:54:16.698065Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7491636106438487933:2850], Recipient [6:7491636054898879145:2145]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:16.698104Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:16.698131Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:54:16.718598Z node 6 :PQ_READ_PROXY DEBUG: new alter topic 
request 2025-04-10T10:54:16.723835Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:7491636106438488083:2932], Recipient [6:7491636054898879145:2145]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:16.723885Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:16.723905Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:54:16.723960Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:7491636106438488080:2930], Recipient [6:7491636054898879145:2145]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-04-10T10:54:16.723983Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:54:16.727405Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:54:16.727814Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T10:54:16.727998Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-04-10T10:54:16.728252Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T10:54:16.728924Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:54:16.729153Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-04-10T10:54:16.729198Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:54:16.729448Z node 6 :TX_PROXY ERROR: Actor# [6:7491636106438488080:2930] txid# 281474976715674, issues: { message: "Can`t disable auto partitioning." 
severity: 1 } 2025-04-10T10:54:16.729729Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7491636106438488083:2932], Recipient [6:7491636054898879145:2145]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:16.729759Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:54:16.729779Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-10T10:54:16.780841Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636106438487945:2483], Partition 0, Sender [0:0:0], Recipient [6:7491636106438488045:2489], Cookie: 0 2025-04-10T10:54:16.780911Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636106438488045:2489]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.780938Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.780982Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:16.781048Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:16.781075Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:16.781104Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:54:16.882856Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636106438487945:2483], Partition 0, Sender [0:0:0], Recipient [6:7491636106438488045:2489], Cookie: 0 2025-04-10T10:54:16.882983Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636106438488045:2489]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.883018Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.883088Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:16.883179Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:16.883218Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:16.883265Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:54:16.988892Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636106438487945:2483], Partition 0, Sender [0:0:0], Recipient [6:7491636106438488045:2489], Cookie: 0 2025-04-10T10:54:16.988997Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636106438488045:2489]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.989034Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:16.989093Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:16.989180Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:16.989216Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:16.989253Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:54:17.092179Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636106438487945:2483], Partition 0, Sender [0:0:0], Recipient [6:7491636106438488045:2489], Cookie: 0 2025-04-10T10:54:17.092305Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636106438488045:2489]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:17.092343Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:17.092434Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:17.092539Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:17.092577Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:17.092614Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> TestDataErasure::DataErasureManualLaunch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:19.563141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:19.563257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:19.563303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:19.563364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:19.563416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:19.563444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:19.563530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:19.563598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:19.563984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:19.652618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:19.652692Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:19.684523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:19.684926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:19.685150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:19.699405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:19.699720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:19.700435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:19.700887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:19.705423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:19.707051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:19.707122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-10T10:54:19.707345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:19.707406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:19.707455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:19.707623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:19.739414Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:20.283351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:20.283637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.283895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:20.284151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:20.284203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.291380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:20.291578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:20.291813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.291866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:20.291907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:20.291943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:20.303362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.303464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:20.303504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:20.306043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.306112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.306203Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:20.306267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:20.314453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:20.317247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:20.317581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:20.318674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:20.318860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:20.318921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:20.319238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:20.319295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:20.319502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:20.319596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:20.329036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:20.329108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:20.329335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:20.329382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:20.329711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.329764Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:20.329869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:20.329906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-10T10:54:20.329944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:20.329982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:20.330039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:20.330085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:20.330125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:20.330190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:20.330278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:20.330322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:20.330355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:20.332985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:20.333125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:20.333168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.440154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.440184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-10T10:54:20.440215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-04-10T10:54:20.440246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:54:20.440330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-10T10:54:20.448169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.448251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-04-10T10:54:20.448302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-10T10:54:20.448809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T10:54:20.449228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T10:54:20.459020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, 
at schemeshard: 72057594046678944 2025-04-10T10:54:20.459135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:20.459215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-10T10:54:20.459390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:20.468883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T10:54:20.469092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T10:54:20.469519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:20.469660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:20.469737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-04-10T10:54:20.469871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T10:54:20.470082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:20.470172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T10:54:20.483201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:20.483283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:20.483506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:54:20.483625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:20.483664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T10:54:20.483698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T10:54:20.484167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T10:54:20.484218Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T10:54:20.484364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T10:54:20.484403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T10:54:20.484445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T10:54:20.484479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T10:54:20.484519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T10:54:20.484568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T10:54:20.484618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T10:54:20.484655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T10:54:20.484749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:54:20.484788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T10:54:20.484825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T10:54:20.484855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T10:54:20.485653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.485750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.485794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T10:54:20.485847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T10:54:20.485892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:20.486770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.486863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T10:54:20.486898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T10:54:20.486925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T10:54:20.486955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:54:20.487019Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T10:54:20.497633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T10:54:20.497785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T10:54:20.498074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T10:54:20.498120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-10T10:54:20.498706Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T10:54:20.498834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T10:54:20.498873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 100 2025-04-10T10:54:20.499428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:54:20.499637Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 253us result status StatusSuccess 2025-04-10T10:54:20.500772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:15.975796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:15.975865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.975914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:15.975940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:15.975969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:15.975998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:15.976047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.976108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:15.976422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:16.050325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:16.050387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:16.065993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:16.066324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:16.066494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:16.072726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:16.072909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:16.073349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.073634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:16.075018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.076002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:16.076040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.076156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:16.076186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:16.076211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-10T10:54:16.076326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.081275Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:16.198055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:16.198291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.198493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:16.198715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:16.198779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.200954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.201075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:16.201228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.201278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:16.201310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:16.201349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:16.203224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.203274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:16.203306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:16.204884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.204929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.204964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.205015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.208700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:16.210483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:16.210639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:16.211626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.211757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:16.211832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.212137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:16.212191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.212337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:16.212440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:16.214593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:16.214644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:16.214799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.214837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:16.215049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.215092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:16.215213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:16.215263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.215335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:16.215384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.215430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:16.215473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-10T10:54:16.215511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:16.215554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:16.215628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:16.215665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:16.215700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:16.223758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:16.223896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:16.223943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... , processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:19.592583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:19.592702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1987:3655], Recipient [1:460:2412]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1988:3656] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-10T10:54:19.592732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:54:19.592764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-04-10T10:54:19.592894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:460:2412], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-04-10T10:54:19.592926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-10T10:54:19.592991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-10T10:54:19.593075Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 62 ms, next wakeup# 599.938000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-10T10:54:19.593163Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-10T10:54:19.601045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-10T10:54:19.601140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:19.601827Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1992:3660], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1993:3661] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:54:19.601882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:54:19.601920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-10T10:54:19.602188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-10T10:54:19.602246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:19.602316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:19.602441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:19.602492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-10T10:54:19.602590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:19.602679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:19.958531Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:19.958975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:19.959022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:19.959115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:19.959144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:19.959208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:19.959235Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.006612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.006713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.006782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:20.007064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-10T10:54:20.007106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:20.007142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:20.007243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:20.007297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-10T10:54:20.007369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:20.007423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:20.338654Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.338756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.338859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.338894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.339059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.339119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:20.339231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.339275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.339381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.339417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.339496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.339530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.386459Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.386559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.386620Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:20.386863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-10T10:54:20.386903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:20.386940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:20.387028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:20.387074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:20.387150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.937000s, Timestamp# 1970-01-01T00:00:05.108000Z 2025-04-10T10:54:20.387198Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-04-10T10:54:20.389499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:20.390203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:2014:3682], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:20.390271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:20.390316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:20.390483Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:274:2265], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:20.390521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:20.390585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: 
[1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:17.116462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:17.116558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.116615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:17.116651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:17.116695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:17.116720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:17.116790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.116856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:17.117215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:17.212093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:17.212151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:17.234022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:17.234400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:17.234578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:17.241941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:17.242177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:17.242931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.243414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:17.245615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.247006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.247072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.247264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:17.247315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.247356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:17.247564Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.255732Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:17.402085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:17.402342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.402580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:17.402839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:17.402902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.407250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.407413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:17.407619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.407683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:17.407724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:17.407762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:17.413324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.413402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:17.413450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:17.419193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.419261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.419308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.419401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.427250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:17.430983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:17.431201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:17.432235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.432407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:17.432476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.432763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:17.432821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.433024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:17.433151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:17.439617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.439672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.439877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.439929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:17.440219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.440275Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:17.440399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.440447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.440511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.440556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.440600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:17.440646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.440699Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:17.440739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:17.440818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:17.440882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:17.440921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:17.445099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.445232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.445272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... LAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1891:3559], Recipient [1:833:2716]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1892:3560] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-10T10:54:19.492045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:54:19.492096Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-04-10T10:54:19.498960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-04-10T10:54:19.527988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:639:2555], Recipient [1:460:2412]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409549 Status: OK 2025-04-10T10:54:19.528040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-10T10:54:19.528086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-04-10T10:54:19.528136Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 60 ms, next wakeup in# 14.940000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-04-10T10:54:19.529767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# false 2025-04-10T10:54:19.544441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:642:2556], Recipient [1:460:2412]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-04-10T10:54:19.544535Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-10T10:54:19.544604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-04-10T10:54:19.544678Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 62 ms, next wakeup in# 14.938000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-10T10:54:19.544746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-10T10:54:19.544770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 1 2025-04-10T10:54:19.547144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-04-10T10:54:19.547539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1907:3575], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:19.547595Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:19.547622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:19.547740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1906:3574], Recipient [1:460:2412]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1907:3575] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-10T10:54:19.547769Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:54:19.547796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-04-10T10:54:19.547925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:460:2412], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-04-10T10:54:19.547957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-10T10:54:19.548016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-10T10:54:19.548075Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 63 ms, next wakeup# 599.937000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-10T10:54:19.548143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-10T10:54:19.554926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-10T10:54:19.554994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:19.555517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1911:3579], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1912:3580] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:54:19.555559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:54:19.555593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-10T10:54:19.555743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-10T10:54:19.555778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:19.555817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:19.555882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:19.555920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-10T10:54:19.555976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:19.556044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:20.687665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.687761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:20.687801Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:20.688031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.688065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:20.688217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-10T10:54:20.688247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:20.688276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:20.688341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:20.688386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 
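The TEvControllerShredResponse entries around this point (Progress10k 0, then 5000, and finally 10000 with Completed: true just below) show the root data-erasure manager polling the BlobStorage controller roughly once per second until the shred finishes. The following is a minimal C++ sketch of that polling contract only; ShredResponse and PollShredProgress are hypothetical names introduced here for illustration, not the real YDB types, and the actual schemeshard drives this cycle with actor events rather than a blocking loop.

#include <chrono>
#include <cstdint>
#include <functional>
#include <thread>

// Illustrative stand-in for the fields visible in the logged
// NKikimrBlobStorage.TEvControllerShredResponse; not the real protobuf type.
struct ShredResponse {
    uint64_t CurrentGeneration = 0;
    bool Completed = false;
    uint32_t Progress10k = 0;  // 0..10000, where 10000 means the shred finished
};

// Poll-until-complete loop matching the logged sequence:
//   SendRequestToBSC -> TEvControllerShredResponse -> ScheduleRequestToBSC (1s)
void PollShredProgress(uint64_t generation,
                       const std::function<ShredResponse(uint64_t)>& sendRequestToBsc) {
    for (;;) {
        const ShredResponse resp = sendRequestToBsc(generation);
        if (resp.Completed && resp.Progress10k == 10000) {
            break;  // log: "TTxCompleteDataErasureBSC: Data shred in BSC is completed"
        }
        // log: "[RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s"
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
}

In the trace itself no thread blocks: the interval is re-armed via ScheduleRequestToBSC and the next poll is triggered by a TEvWakeupToRunDataErasureBSC event, which is why the wakeups appear as separate StateWork entries.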
2025-04-10T10:54:20.688459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:20.688506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:21.171118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:21.171216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:21.171361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:21.171392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:21.171434Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:21.171576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:21.171606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:21.171755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-10T10:54:21.171784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:21.171812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:21.171887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:21.171921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:21.171959Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-04-10T10:54:21.175141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:21.175876Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1993:3661], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:21.175927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:21.175965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:21.176128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:274:2265], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:21.176162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:21.176199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-04-10T10:54:18.977607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:18.977912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:18.978160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d0/r3tmp/tmpACder6/pdisk_1.dat 2025-04-10T10:54:19.387494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:19.447093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:19.504144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:19.504289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:19.519699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:19.623756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:20.215125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:866:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.215247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:876:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.215666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.221199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:20.425678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:880:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:54:20.523639Z node 1 :TX_PROXY ERROR: Actor# [1:962:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:21.305755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnc81n9dgqa3cj2epb1xb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM5ZWNkOTctZDAxYTc0YzEtNjVlMDQ5YjEtYmM1NjhjMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |66.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |66.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |66.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] >> TestDataErasure::DataErasureWithMerge [GOOD] >> TestDataErasure::DataErasureWithCopyTable [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad |66.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:16.102162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:16.102293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:16.102358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:16.102398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:16.102449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:16.102479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2025-04-10T10:54:16.102548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:16.102621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:16.103018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:16.207416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:16.207476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:16.224130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:16.224381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:16.224518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:16.233089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:16.233287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:16.233803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.234055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:16.235647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.236587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:16.236639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.236769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:16.236811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:16.236840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:16.236962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.242414Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:16.407959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:16.408193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.408422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:16.408673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-04-10T10:54:16.408734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.415185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.415345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:16.415550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.415632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:16.415677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:16.415743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:16.418910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.419001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:16.419041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:16.427341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.427419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.427483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.427548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.431504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:16.443101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:16.443304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:16.444375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:16.444537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:16.444606Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.444937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:16.445001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:16.445186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:16.445304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:16.448702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:16.448760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:16.448938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:16.448987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:16.449250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:16.449303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:16.449415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:16.449473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.449556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:16.449599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.449657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:16.449717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:16.449768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:16.449801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:16.449876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:16.449918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:16.449958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:16.452533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:16.452658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:16.452696Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, co ... ning# 1 shards at schemeshard 72075186233409546 2025-04-10T10:54:23.306010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:642:2556], Recipient [1:460:2412]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409550 Status: OK 2025-04-10T10:54:23.306044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-10T10:54:23.306079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-04-10T10:54:23.306123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 65 ms, next wakeup in# 10.802000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-10T10:54:23.306182Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-10T10:54:23.306216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-10T10:54:23.316830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# false 2025-04-10T10:54:23.317389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-04-10T10:54:23.317652Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:460:2412], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 3 Status: COMPLETED 2025-04-10T10:54:23.317694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-10T10:54:23.317760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-10T10:54:23.317820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 67 ms, next wakeup# 595.800000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-10T10:54:23.317894Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-10T10:54:23.323626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-10T10:54:23.323719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:23.324034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-10T10:54:23.324078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:23.324127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:23.324198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:23.324239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-10T10:54:23.324305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:23.324367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:23.846251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.846331Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.846425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.846460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.856941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.857021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.857109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.857137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:23.857200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.857230Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.857307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.857334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:23.906576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:23.906674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:23.906733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:23.907030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-10T10:54:23.907065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:23.907093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:23.907166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:23.907201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-10T10:54:23.907272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:23.907330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:24.322505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.322584Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.322674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.322707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.338513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.338603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.338679Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.338710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.338779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.338813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.338889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.338917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.395108Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:24.395184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:24.395220Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:24.395493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-10T10:54:24.395532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:24.395562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:24.395623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:24.395657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:24.395706Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-10T10:54:24.403208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:24.403970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3576:4932], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:24.404029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:24.404070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:24.404229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:2783:4295], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:24.404264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:24.404301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:74:2058] recipient: [1:59:2100] 2025-04-10T10:54:15.665280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:15.665382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.665424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
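Aside: the TEvControllerShredResponse sequence traced above (Progress10k: 0 -> 5000 -> 10000, with "ScheduleRequestToBSC: Interval# 1.000000s" between polls) shows the root schemeshard polling the BlobStorage controller until the shred reports Completed. A minimal, self-contained C++ sketch of that loop follows; the types and the 5000-per-poll progression are assumptions modeled on this log, not actual YDB code.

#include <algorithm>
#include <cstdint>
#include <iostream>

struct ShredResponse {             // fields mirror TEvControllerShredResponse in the log
    uint64_t CurrentGeneration;
    bool Completed;
    uint32_t Progress10k;          // 10000 == 100%
};

// Stand-in for the BS controller: reports 0, then 5000, then 10000,
// matching the three responses traced above (an assumption for the demo).
ShredResponse PollBsc(uint64_t generation, uint32_t& progress) {
    ShredResponse r{generation, progress >= 10000, progress};
    progress = std::min<uint32_t>(progress + 5000, 10000);
    return r;
}

int main() {
    uint32_t bscProgress = 0;
    for (;;) {
        ShredResponse r = PollBsc(/*generation=*/3, bscProgress);
        std::cout << "Progress data shred in BSC " << r.Progress10k << "\n";
        if (r.Completed) {
            std::cout << "Data shred in BSC is completed\n";
            break;                 // NeedScheduleRequestToBSC# false
        }
        // The real schemeshard re-arms a timer here instead of looping:
        // "ScheduleRequestToBSC: Interval# 1.000000s".
    }
    return 0;
}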
2025-04-10T10:54:15.665477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:15.665530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:15.665587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:15.665645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.665714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:15.666092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:15.758095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:15.758202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:15.760247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:15.760482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:15.760663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:15.767491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:15.767779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:15.768475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:15.768774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:15.769719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.771985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:15.772055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.772229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:15.772292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:15.772334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:15.772460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.775696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-10T10:54:15.938392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:15.938670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.938900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:15.939139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:15.939199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.940259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:15.940420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:15.940613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.940669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:15.940718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:15.940768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:15.941270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.941316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:15.941347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:15.941709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.941752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.941786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.941852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.946975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:15.947620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:15.947821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:15.948758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-10T10:54:15.948883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:15.948947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.949216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:15.949277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.949500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:15.949582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:15.963096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:15.963189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:15.963407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.963448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:15.963783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.963832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:15.963932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:15.964015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.964052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:15.964092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.964144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:15.964187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.964234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:15.964263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:15.964353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:15.964397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:15.964433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:15.970508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:15.970651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:15.970688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... .806591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.806664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.806747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.806777Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.818192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.818283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.818375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.818406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.854582Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.854652Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.854729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.854760Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.866493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.866572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.866657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.866684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.902636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.902723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.902838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.902869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.914548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.914634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.914741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.914773Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.950664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.950738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.951608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.951655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.962911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1221:3033], Recipient [1:279:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5382 Memory: 94331 Storage: 10148063 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-10T10:54:24.962982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:54:24.963036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.5382 2025-04-10T10:54:24.963142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:54:24.963181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T10:54:24.974520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.974608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.974710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.974742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.002606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:25.002692Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:25.002729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:25.002825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:25.002859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:25.002891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 1 2025-04-10T10:54:25.002964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T10:54:25.002997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T10:54:25.003242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:184:2177], Recipient [1:185:2178]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-10T10:54:25.003276Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:25.003306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:25.003362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:25.003443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:25.003505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-10T10:54:25.003541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-10T10:54:25.003822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 
data size 10141461 row count 99 2025-04-10T10:54:25.003906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 99, DataSize 10141461 2025-04-10T10:54:25.003987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 99, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-10T10:54:25.004107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-10T10:54:25.004575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:25.007902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1397:3186], Recipient [1:185:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:25.007973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:25.008011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:25.008195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:185:2178]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:25.008232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:25.008272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> TestDataErasure::DataErasureWithSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithCopyTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:74:2058] recipient: [1:59:2100] 2025-04-10T10:54:15.436358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:15.436464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.436501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:15.436537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:15.436574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:15.436611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:15.436686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:15.436769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:15.437116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:15.524340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:15.524392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:15.525845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:15.526002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:15.526216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:15.530163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:15.530356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:15.530974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:15.531226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:15.531850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.532842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:15.532887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.533036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:15.533074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:15.533128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:15.533232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.535986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-10T10:54:15.684104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:15.684339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.684545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:15.684815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:15.684874Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.686477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:15.686653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:15.686883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.686945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:15.686999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:15.687037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:15.687613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.687661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:15.687698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:15.688136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.688174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.688224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.688301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.692059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:15.692591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:15.692748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:15.693717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:15.693836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:15.693898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.694169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:15.694235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:15.694423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:15.694524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:15.695199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:15.695258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:15.695413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:15.695462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:15.695749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:15.695793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:15.695882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:15.695919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.695965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:15.696031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.696077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:15.696122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:15.696157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:15.696187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:15.696259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:15.696298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:15.696329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:15.698303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:15.698416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:15.698458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... 
CompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:54:24.878861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:24.878953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:24.878989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-10T10:54:24.879062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-10T10:54:24.879100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-10T10:54:24.879232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:6 data size 5019511 row count 49 2025-04-10T10:54:24.879315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 49, DataSize 5019511 2025-04-10T10:54:24.879395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-10T10:54:24.879482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:7 data size 5121950 row count 50 2025-04-10T10:54:24.879524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-04-10T10:54:24.879567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-10T10:54:24.879653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-10T10:54:24.890592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:24.890680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:24.890715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-04-10T10:54:24.915172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.915251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.915363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, 
Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.915400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.927271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.927358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.927458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.927493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.962643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.962723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.962842Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.962874Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.974573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.974664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:24.974810Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:24.974845Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.014944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.015029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.015148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.015184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.026594Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.026688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.026826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient 
[1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.026865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.067266Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.067346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.067454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.067487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.078731Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.078826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.078959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.078998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.115114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.115201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:25.115380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.115418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:25.130880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:25.130972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:25.131013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:25.131285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:184:2177], Recipient [1:185:2178]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-10T10:54:25.131330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:25.131364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:25.131442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:25.131482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 
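Aside: the [TenantDataErasureManager]/[RootDataErasureManager] lines earlier in the log trace a two-level completion protocol: each tenant schemeshard counts its datashards ("in queue# 0 shards, running# 0 shards") and, once both counters hit zero, sends TEvTenantDataErasureResponse with Status: COMPLETED to the root, which counts tenants the same way before querying the BS controller. A hypothetical C++ sketch of that bookkeeping, modeled on the log's counters rather than YDB's real types:

#include <cstdio>

struct TenantErasure {
    int InQueue = 0;   // shards waiting ("in queue# N shards")
    int Running = 0;   // shards erasing ("running# N shards")

    // Invoked per TEvForceDataCleanupResult; true means the tenant should
    // "Send response to root schemeshard" (Status: COMPLETED).
    bool OnShardDone() {
        --Running;
        if (InQueue > 0) { --InQueue; ++Running; }   // start the next queued shard
        return InQueue == 0 && Running == 0;
    }
};

struct RootErasure {
    int TenantsLeft = 0;
    // Invoked per TEvTenantDataErasureResponse; true means it is time to
    // "Send request to BS controller".
    bool OnTenantDone() { return --TenantsLeft == 0; }
};

int main() {
    TenantErasure tenant;
    tenant.Running = 1;            // one shard still erasing, as in the trace
    RootErasure root;
    root.TenantsLeft = 1;
    if (tenant.OnShardDone() && root.OnTenantDone())
        std::puts("Data erasure in tenants is completed. Send request to BS controller");
    return 0;
}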
2025-04-10T10:54:25.131558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-04-10T10:54:25.131601Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 35 s 2025-04-10T10:54:25.132311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:25.136409Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1760:3464], Recipient [1:185:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:25.136488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:25.136530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:25.136649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:185:2178]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:25.136693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:25.136739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] |66.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> TestDataErasure::DataErasureRun3Cycles [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:65:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:74:2058] recipient: [1:59:2100] 2025-04-10T10:54:17.152305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:17.152403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.152444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:17.152477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:17.152517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:17.152562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:17.152631Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.152699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:17.153099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:17.314610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:17.314701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:17.316888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:17.317120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:17.317286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:17.323005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:17.323275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:17.324238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.324555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:17.325431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.326821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.326880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.327067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:17.327114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.327162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:17.327272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.330499Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:69:2104] sender: [1:148:2058] recipient: [1:16:2063] 2025-04-10T10:54:17.481072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:17.481322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.481552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:17.481793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
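Aside: the two "ScheduleDataErasureWakeup" lines earlier in the log (Interval# 19.899500s at Timestamp# 1970-01-01T00:01:20.100500Z, and Interval# 14.999500s at 00:01:25.000500Z) each sum to exactly 100 s, which suggests the manager re-arms at a fixed cadence measured from the start of the erasure cycle rather than from its completion. A hedged reconstruction of that arithmetic; the 100 s cycle length is inferred from these two samples, not a documented constant:

#include <cstdio>

// next wakeup = cycle_start + cycle_length - now   (all values in seconds)
double NextWakeupIn(double cycleStart, double cycleLength, double now) {
    return cycleStart + cycleLength - now;
}

int main() {
    const double cycleLength = 100.0;   // inferred from the log, see above
    std::printf("%.6fs\n", NextWakeupIn(0.0, cycleLength, 80.1005));  // 19.899500
    std::printf("%.6fs\n", NextWakeupIn(0.0, cycleLength, 85.0005));  // 14.999500
    return 0;
}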
2025-04-10T10:54:17.481857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.482608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.482751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:17.482977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.483039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:17.483107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:17.483160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:17.483684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.483741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:17.483777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:17.484254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.484298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.484344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.484421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.488011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:17.488489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:17.488672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:17.489582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.489700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:17.489760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.490018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:17.490071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.490318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:17.490424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:17.491109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.491174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.491360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.491403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:124:2135], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:17.491732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.491776Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:17.491875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.491912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.491953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.491998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.492049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:17.492093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.492142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:17.492177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:17.492245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:17.492291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:17.492324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:17.494400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.494521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.494557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 
1, a ... Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.421822Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.421974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.422010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.434791Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.434892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.435045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.435081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.470859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.470937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.472315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:185:2178], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.472366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.484818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1009:2872], Recipient [1:279:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1338 Memory: 93107 Storage: 5022813 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-10T10:54:26.484893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:54:26.484951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.1338 2025-04-10T10:54:26.485070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 
72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:54:26.485114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T10:54:26.485397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1013:2874], Recipient [1:279:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1750 Memory: 93131 Storage: 5125318 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-10T10:54:26.485433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:54:26.485484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.175 2025-04-10T10:54:26.485583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:54:26.497040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.497118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.497217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:279:2238], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.497248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.522721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:185:2178]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:26.522805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:26.522848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-10T10:54:26.522997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:279:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:26.523036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:54:26.523104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-10T10:54:26.523178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-10T10:54:26.523211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-10T10:54:26.523413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:184:2177], Recipient [1:185:2178]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-10T10:54:26.523450Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:26.523484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:26.523542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:26.523573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:26.523641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-10T10:54:26.523681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-10T10:54:26.523974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:5 data size 5019511 row count 49 2025-04-10T10:54:26.524055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72075186233409546:5 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 49, DataSize 5019511 2025-04-10T10:54:26.524176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:5 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-10T10:54:26.524283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 5121950 row count 50 2025-04-10T10:54:26.524332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-04-10T10:54:26.524377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
[BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-10T10:54:26.524445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-10T10:54:26.524951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:26.528512Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1194:3028], Recipient [1:185:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:26.528594Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:26.528638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:26.528818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:169:2169], Recipient [1:185:2178]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:26.528853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:26.528894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
>> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:54:17.503822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:17.503918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.503983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:17.504027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:17.504073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:17.504097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:17.504156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:17.504225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:17.504583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:17.691057Z node
1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:17.691126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:17.721146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:17.721421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:17.721583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:17.737206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:17.737451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:17.738101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.738511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:17.744733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.746127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.746208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.746389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:17.746434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.746472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:17.746646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.753853Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:17.940326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:17.940548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.940768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:17.941028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:17.941088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.943264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.943413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:17.943639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.943697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:17.943730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:17.943762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:17.945856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.945919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:17.945953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:17.947889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.947944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.947985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.948046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.951906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:17.954203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:17.954400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:17.955491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:17.955627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:17.955688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.955999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:17.956055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:17.956249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:17.956403Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:17.958752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:17.958800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:17.958979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:17.959013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:17.959285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:17.959332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:17.959419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.959450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.959504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:17.959548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.959581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:17.959616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:17.959647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:17.959673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:17.959736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:17.959773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:17.959802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:17.961942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.962040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:17.962077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ipient [1:460:2412]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409550 Status: OK 2025-04-10T10:54:26.417741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-10T10:54:26.417773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-04-10T10:54:26.417812Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 64 ms, next wakeup in# 8.936000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-10T10:54:26.417849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-10T10:54:26.417873Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-10T10:54:26.422761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-04-10T10:54:26.423257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# true 2025-04-10T10:54:26.423474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:460:2412], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 3 Status: COMPLETED 2025-04-10T10:54:26.423512Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-10T10:54:26.423589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-10T10:54:26.423641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 66 ms, next wakeup# 593.934000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-10T10:54:26.423702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed.
Send request to BS controller 2025-04-10T10:54:26.431104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-10T10:54:26.431172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:26.431437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-10T10:54:26.431474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:26.431501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:26.431559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:26.431587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-10T10:54:26.431643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:26.431688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:26.974798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.974892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.974976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.975005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.975058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.975082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:26.975146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.975176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.975252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.975279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.975342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:26.975369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.013903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:27.013997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:27.014032Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:27.014340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-10T10:54:27.014380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:27.014411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:27.014490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:27.014523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-10T10:54:27.014595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-10T10:54:27.014638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-10T10:54:27.490547Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490647Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:54:27.490945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:460:2412], Recipient [1:460:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.490982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.491068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:833:2716], Recipient [1:833:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.491097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.491159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.491193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:27.526072Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:27.526178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-10T10:54:27.526216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-10T10:54:27.526462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-10T10:54:27.526500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-10T10:54:27.526530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-10T10:54:27.526604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-10T10:54:27.526635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-10T10:54:27.526685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.933000s, Timestamp# 1970-01-01T00:00:11.112000Z 2025-04-10T10:54:27.526716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-10T10:54:27.535075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-10T10:54:27.535773Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3595:4951], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:27.535832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:54:27.535871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:54:27.536044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:274:2265], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-10T10:54:27.536076Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-10T10:54:27.536114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
|66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD]
Test command err:
2025-04-10T10:53:04.449624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635798225077041:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.452150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001032/r3tmp/tmpX0ftZx/pdisk_1.dat 2025-04-10T10:53:04.895270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:04.918046Z node 1 :HIVE WARN:
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:04.918191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:04.920269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25089, node 1 2025-04-10T10:53:05.174775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.174803Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.174812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.174945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.581535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:53:07.633462Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.633851Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.633907Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-10T10:53:07.633934Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-10T10:53:07.637248Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjU4NTgwZjgtM2I0NzFmMGEtZDQ4ZmZiMS04MGI4Y2U3Yg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjU4NTgwZjgtM2I0NzFmMGEtZDQ4ZmZiMS04MGI4Y2U3Yg== 2025-04-10T10:53:07.642823Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811109979584:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.642952Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjU4NTgwZjgtM2I0NzFmMGEtZDQ4ZmZiMS04MGI4Y2U3Yg==, ActorId: [1:7491635811109979585:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.647070Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811109979584:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.647150Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811109979584:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.647204Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811109979584:2329], Successfully finished 2025-04-10T10:53:07.647261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.663467Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.669764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.670893Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.671053Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.679236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.777083Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.780828Z node 1 :TX_PROXY ERROR: Actor# [1:7491635811109979653:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.780930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811109979602:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.783540Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU= 2025-04-10T10:53:07.783659Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU=, ActorId: [1:7491635811109979660:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.783856Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:07.783878Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.783934Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811109979662:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.784006Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU=, ActorId: [1:7491635811109979660:2331], ActorState: ReadyState, TraceId: 01jrfna1a7djzd3a10bx0vfdxz, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635811109979659:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:07.784050Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7491635811109979660:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU= 2025-04-10T10:53:07.784341Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635811109979664:2333], Database: /Root, Start database fetching 2025-04-10T10:53:07.784495Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635811109979664:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:53:07.784544Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:53:07.784630Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635811109979672:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU=, Start pool fetching 2025-04-10T10:53:07.784673Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService]
[TPoolFetcherActor] ActorId: [1:7491635811109979673:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.785236Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811109979662:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.785238Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811109979673:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.785277Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T10:53:07.785292Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:53:07.785307Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635811109979672:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU=, Pool info successfully resolved 2025-04-10T10:53:07.785463Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU= 2025-04-10T10:53:07.785531Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811109979676:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:53:07.785613Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811109979676:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7491635811109979660:2331], session id: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU= 2025-04-10T10:53:07.785615Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDNlNDRjMGYtOTZkODAxMGUtMjM0OGM4ZTEtYzA2OGY4ZTU= 2025-04-10T10:53:07.785663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811109979676:2336], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7491635811109979660:2331 ... /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-10T10:54:24.905336Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, ExecutePhyTx, tx: 0x000050C00020A4D8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:54:24.905571Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Sending to Executer TraceId: 0 8 2025-04-10T10:54:24.905689Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Created new KQP executer: [8:7491636140751274407:2690] isRollback: 0 2025-04-10T10:54:24.907783Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Forwarded TEvStreamData to [8:7491636140751274400:3079] 2025-04-10T10:54:24.908711Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T10:54:24.908903Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, txInfo Status: Committed Kind: Pure TotalDuration: 3.676 ServerDuration: 3.594 QueriesCount: 2 2025-04-10T10:54:24.908986Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:54:24.909224Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.909262Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, EndCleanup, isFinal: 1 2025-04-10T10:54:24.909336Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: ExecuteState, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Sent query response back to proxy, proxyRequestId: 60, proxyId: [8:7491636063441861099:2269] 2025-04-10T10:54:24.909375Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: unknown state, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Cleanup temp tables: 0 2025-04-10T10:54:24.909733Z 
node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2UxMjRiMjktMTQxOWVjNWItY2I2ZTY5ZTYtNGFhZTk2YTI=, ActorId: [8:7491636140751274401:2690], ActorState: unknown state, TraceId: 01jrfncch2bzf8y1cg2phmsn1r, Session actor destroyed 2025-04-10T10:54:24.980410Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MzMxZGQwYzYtY2ZlN2ViNDYtMzM4OWVlMDItZGZlZmZiYTI=, ActorId: [8:7491636089211665298:2333], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:24.980471Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MzMxZGQwYzYtY2ZlN2ViNDYtMzM4OWVlMDItZGZlZmZiYTI=, ActorId: [8:7491636089211665298:2333], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.980509Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzMxZGQwYzYtY2ZlN2ViNDYtMzM4OWVlMDItZGZlZmZiYTI=, ActorId: [8:7491636089211665298:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:54:24.980549Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzMxZGQwYzYtY2ZlN2ViNDYtMzM4OWVlMDItZGZlZmZiYTI=, ActorId: [8:7491636089211665298:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:54:24.980643Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzMxZGQwYzYtY2ZlN2ViNDYtMzM4OWVlMDItZGZlZmZiYTI=, ActorId: [8:7491636089211665298:2333], ActorState: unknown state, Session actor destroyed 2025-04-10T10:54:25.094028Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc= 2025-04-10T10:54:25.094429Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:54:25.095234Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, TraceId: 01jrfncct77wagxcxxsf1hmkph, received request, proxyRequestId: 62 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [8:7491636145046241716:2696] database: /Root databaseId: /Root pool id: default 2025-04-10T10:54:25.095285Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, TraceId: 01jrfncct77wagxcxxsf1hmkph, request placed into pool from cache: default 2025-04-10T10:54:25.095400Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Sending CompileQuery request 2025-04-10T10:54:25.321979Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, acquire mvcc snapshot 2025-04-10T10:54:25.327830Z node 8 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, 
ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, read snapshot result: UNAVAILABLE, step: 1744282465000, tx id: 18446744073709551615 2025-04-10T10:54:25.327925Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, ExecutePhyTx, tx: 0x000050C00045E518 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-10T10:54:25.329966Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Sending to Executer TraceId: 0 8 2025-04-10T10:54:25.330117Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Created new KQP executer: [8:7491636145046241735:2695] isRollback: 0 2025-04-10T10:54:25.335885Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T10:54:25.336082Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 14.11 QueriesCount: 2 2025-04-10T10:54:25.336266Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:54:25.336694Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:25.336728Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, EndCleanup, isFinal: 0 2025-04-10T10:54:25.336802Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ExecuteState, TraceId: 01jrfncct77wagxcxxsf1hmkph, Sent query response back to proxy, proxyRequestId: 62, proxyId: [8:7491636063441861099:2269] 2025-04-10T10:54:25.337717Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:25.337813Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, Sending to Executer TraceId: 0 8 
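The KQP_SESSION trace above is one complete session round-trip: the session actor bootstraps, picks the request up in ReadyState, compiles the query, acquires an mvcc snapshot, runs ExecutePhyTx, receives TEvTxResponse, then cleans up and is destroyed on close. Below is a minimal client-side sketch of the call that drives such a round-trip through the C++ SDK's table client; the endpoint, database value and in-tree include path are assumptions for illustration, not taken from this log.

```cpp
// Minimal sketch: one query through a pooled session, which maps to one
// ReadyState -> ExecuteState -> Cleanup cycle of a server-side KQP session
// actor as seen in the trace above. Include path follows the in-tree SDK
// layout and may differ in the standalone SDK.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")  // assumption: local single node
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    // One session corresponds to one KQP session actor on the server.
    auto sessionResult = client.GetSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        return 1;
    }
    auto session = sessionResult.GetSession();

    // BeginTx + CommitTx in a single call corresponds to the
    // "ExecutePhyTx ... commit: 1" line in the log.
    auto txControl = NYdb::NTable::TTxControl::BeginTx(
        NYdb::NTable::TTxSettings::SerializableRW()).CommitTx();
    auto result = session.ExecuteDataQuery("SELECT 1;", txControl).GetValueSync();

    driver.Stop(true);
    return result.IsSuccess() ? 0 : 1;
}
```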
2025-04-10T10:54:25.337901Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, Created new KQP executer: [8:7491636145046241745:2695] isRollback: 1 2025-04-10T10:54:25.337958Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:25.339619Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-10T10:54:25.339695Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:54:25.339888Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=M2QxNWI4NjItMmFhNjhhNjktZWZmMTU0MjUtYmYyMDk3Mzc=, ActorId: [8:7491636145046241715:2695], ActorState: unknown state, Session actor destroyed >> Cdc::DecimalKey [GOOD] >> Cdc::DropColumn |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-04-10T10:53:04.811663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635797479262603:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.811704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fcc/r3tmp/tmp8qjBDd/pdisk_1.dat 2025-04-10T10:53:05.294863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:05.312934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:05.313058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:05.316420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18061, node 1 2025-04-10T10:53:05.398450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.398475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.398489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.398608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.688016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:07.863702Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.863885Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810364165139:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.867890Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE= 2025-04-10T10:53:07.868363Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.868633Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:53:07.868652Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.868682Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:07.868760Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810364165139:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.868800Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810364165139:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.868824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810364165139:2329], Successfully finished 2025-04-10T10:53:07.875569Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.887966Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.891463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
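The workload-service block above shows the pool bootstrap protocol: TPoolCreatorActor proposes ESchemeOpCreateResourcePool, schedules a retry on "Transaction ... completed, doublechecking", and on the retry treats "path exist, request accepts it" as success before TPoolFetcherActor picks the pool up. A sketch of that idempotent-create pattern is below; the pool setting names (CONCURRENT_QUERY_LIMIT, QUEUE_SIZE) and the use of ExecuteSchemeQuery for the DDL are assumptions for illustration.

```cpp
// Sketch of idempotent resource-pool creation matching the retry pattern
// in the log: create, retry transient failures, and accept "already
// exists" as success. Setting names are assumed, not taken from the log.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

static const char* kCreatePool = R"(
    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 1,
        QUEUE_SIZE = 0
    );
)";

bool EnsurePoolExists(NYdb::NTable::TTableClient& client) {
    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
        return session.ExecuteSchemeQuery(kCreatePool).GetValueSync();
    });
    // "path exist, request accepts it" surfaces as success or
    // ALREADY_EXISTS depending on version; both mean the pool is usable.
    return status.IsSuccess()
        || status.GetStatus() == NYdb::EStatus::ALREADY_EXISTS;
}
```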
2025-04-10T10:53:07.892747Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.892914Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.901943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.976261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.980293Z node 1 :TX_PROXY ERROR: Actor# [1:7491635810364165208:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.980388Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810364165157:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.980668Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810364165215:2339], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.981618Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810364165215:2339], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.990759Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:07.990844Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:07.990869Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:07.990903Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:53:07.990986Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIyZWE5NmQtMmIxNmM2ZWMtY2EzZTM4OGMtY2RmYWUwZmE=, ActorId: [1:7491635810364165155:2330], ActorState: unknown state, Session actor destroyed 2025-04-10T10:53:08.682007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491635815318100626:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:08.682046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fcc/r3tmp/tmp5PXbAY/pdisk_1.dat 2025-04-10T10:53:08.817322Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62322, node 2 2025-04-10T10:53:08.864672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:08.864768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:08.870110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-10T10:53:08.889775Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:08.889805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:08.889812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:08.889917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:09.115274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:09.122002Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:53:11.492654Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:11.494863Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=ODAwMmQwMy0yYzE1ZjJjZC1jMmZjZWFmNi0zNjM0ZTZkMQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODAwMmQwMy0yYzE1ZjJjZC1jMmZjZWFmNi0zNjM0ZTZkMQ== 2025-04-10T10:53:11.495598Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [ ... 
State: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, Created new KQP executer: [8:7491636140370321654:4870] isRollback: 0 2025-04-10T10:54:24.134088Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NzIyZGM3NTYtM2U5NjEyNmUtM2UwYmYxNzctMTUxZDU4ZGQ=, ActorId: [6:7491635933665656528:2336], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:24.134168Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NzIyZGM3NTYtM2U5NjEyNmUtM2UwYmYxNzctMTUxZDU4ZGQ=, ActorId: [6:7491635933665656528:2336], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.134206Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NzIyZGM3NTYtM2U5NjEyNmUtM2UwYmYxNzctMTUxZDU4ZGQ=, ActorId: [6:7491635933665656528:2336], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:54:24.134245Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NzIyZGM3NTYtM2U5NjEyNmUtM2UwYmYxNzctMTUxZDU4ZGQ=, ActorId: [6:7491635933665656528:2336], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:54:24.134346Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NzIyZGM3NTYtM2U5NjEyNmUtM2UwYmYxNzctMTUxZDU4ZGQ=, ActorId: [6:7491635933665656528:2336], ActorState: unknown state, Session actor destroyed 2025-04-10T10:54:24.136202Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T10:54:24.136305Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, ExecutePhyTx, tx: 0x000050C000448A98 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:54:24.142815Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T10:54:24.143074Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, txInfo Status: Committed Kind: ReadOnly TotalDuration: 27.676 ServerDuration: 27.594 QueriesCount: 2 2025-04-10T10:54:24.143216Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:54:24.143289Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.143317Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: 
[8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, EndCleanup, isFinal: 0 2025-04-10T10:54:24.143380Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ExecuteState, TraceId: 01jrfncbvjfc35skmqra746a6j, Sent query response back to proxy, proxyRequestId: 520, proxyId: [8:7491635908442078467:2085] 2025-04-10T10:54:24.143895Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, TxId: 2025-04-10T10:54:24.143990Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, TxId: 2025-04-10T10:54:24.144110Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7491635934211882563:2319], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-10T10:54:24.144228Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:24.144277Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.145047Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:54:24.145084Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:54:24.145152Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzJiOTE2M2EtOTZiZGMwMWEtYmU1YjMzZmQtZTI1NWFjNWI=, ActorId: [8:7491636140370321623:4870], ActorState: unknown state, Session actor destroyed 2025-04-10T10:54:24.154778Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T10:54:24.154902Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, ExecutePhyTx, tx: 0x000050C000450958 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:54:24.163247Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, TEvTxResponse, CurrentTx: 2/2 response.status: 
SUCCESS 2025-04-10T10:54:24.163493Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, txInfo Status: Committed Kind: ReadOnly TotalDuration: 51.421 ServerDuration: 51.308 QueriesCount: 2 2025-04-10T10:54:24.163642Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:54:24.163722Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.163752Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, EndCleanup, isFinal: 0 2025-04-10T10:54:24.163814Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ExecuteState, TraceId: 01jrfncbvedghc2dv5xm90c8sb, Sent query response back to proxy, proxyRequestId: 548, proxyId: [7:7491635906773375728:2209] 2025-04-10T10:54:24.164967Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, TxId: 2025-04-10T10:54:24.165066Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, TxId: 2025-04-10T10:54:24.165287Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7491635932543179699:2318], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-10T10:54:24.165337Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:54:24.165379Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:24.165422Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:54:24.165495Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: unknown state, 
Cleanup temp tables: 0 2025-04-10T10:54:24.165597Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTFjYzY1NjYtZmI5MGZkOWItMTZjNjI3MjYtOGU4MzcxMDY=, ActorId: [7:7491636138701620060:5322], ActorState: unknown state, Session actor destroyed 2025-04-10T10:54:24.487696Z node 8 :BS_PROXY_PUT ERROR: [ef141cfa5ecbf131] Result# TEvPutResult {Id# [72075186224037889:1:765:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:765:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T10:54:24.487676Z node 7 :BS_PROXY_PUT ERROR: [78381863306dd81e] Result# TEvPutResult {Id# [72075186224037888:1:844:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:844:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |66.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |66.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |66.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2025-04-10T10:53:57.598850Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2835} PDiskId# 1 ownerId# 4 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 4 ownerRound# 101 lsn# 12 PDiskId# 1 2025-04-10T10:53:59.153564Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2835} PDiskId# 1 ownerId# 7 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 7 ownerRound# 101 lsn# 11 PDiskId# 1 >> TargetDiscoverer::Dirs >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> KqpWorkloadService::TestCpuLoadThresholdRefresh [FAIL] >> KqpWorkloadService::TestHandlerActorCleanup ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-04-10T10:54:15.685945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636100323069722:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:15.686021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020a6/r3tmp/tmpJQUwpx/pdisk_1.dat 2025-04-10T10:54:16.480131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:16.480256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:16.486971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:16.493540Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64098, node 1 2025-04-10T10:54:16.769830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:16.769855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:16.769862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:16.770000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:17.214845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:20.188925Z node 1 :KQP_PROXY WARN: TraceId: "01jrfnc80w8dad4vf1tvj91h2n", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:20.211473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636121797907055:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.211588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.212185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636121797907067:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:20.216784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:20.248316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636121797907069:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:54:20.323616Z node 1 :TX_PROXY ERROR: Actor# [1:7491636121797907140:2693] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:20.690329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636100323069722:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:20.690403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:20.928442Z node 1 :KQP_PROXY WARN: TraceId: "01jrfnc8r0akqvztxc77pxfms5", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:23.654682Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636137536633973:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:23.658609Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020a6/r3tmp/tmpZCWyRp/pdisk_1.dat 2025-04-10T10:54:24.102476Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:24.216970Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:24.217097Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:24.225561Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31786, node 4 2025-04-10T10:54:24.366914Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:24.366950Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:24.366960Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:24.367121Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:24.963668Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
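The repeated "Active sessions limit exceeded, maximum allowed: 2" warnings above come from KQP_PROXY enforcing the server-side per-node session cap while the test keeps requesting more sessions than allowed. A client can stay under such a cap by bounding its own session pool, so surplus requests queue locally instead of failing on the server. A sketch is below, assuming the SDK's session-pool settings keep their usual names (TSessionPoolSettings, MaxActiveSessions, TClientSettings); the cap of 2 mirrors the test's server limit.

```cpp
// Sketch: bound the client-side session pool at 2 active sessions so a
// third concurrent GetSession() waits in the local queue rather than
// triggering the server's "Active sessions limit exceeded" path.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

NYdb::NTable::TTableClient MakeBoundedClient(const NYdb::TDriver& driver) {
    auto poolSettings = NYdb::NTable::TSessionPoolSettings()
        .MaxActiveSessions(2);  // assumption: match the server cap from the test
    auto clientSettings = NYdb::NTable::TClientSettings()
        .SessionPoolSettings(poolSettings);
    return NYdb::NTable::TTableClient(driver, clientSettings);
}
```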
2025-04-10T10:54:28.610284Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636137536633973:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:28.610387Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:28.895859Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncggz4tpneskm5va4a438", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.904761Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgh89mb8kta2r44yh2qq", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.917636Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncghnehgzw7ykwvbhbrwg", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.928351Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgj0dbnxmjmehg6vca52", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.935433Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgj7d9za49fddm768jdg", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.944306Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgjffat2pn13scjp436y", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:28.967708Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636159011471575:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:28.967795Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:28.968169Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636159011471587:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:28.972753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:29.016396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636159011471589:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:54:29.126773Z node 4 :TX_PROXY ERROR: Actor# [4:7491636163306438956:2690] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.297055Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgxg2zdw4zmy5aknhxxh", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.308320Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgxv8cr15jc2ntkhgjxt", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.317825Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgy51bmgwt1wyxc6v63a", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.332443Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgymaea12pxtkm7t10cz", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.366591Z node 4 :KQP_PROXY WARN: TraceId: "01jrfncgzj3gky3ee9zabv1nqq", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.390965Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch0e0dkgkzrsd04vrpnx", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.406495Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch0x2yd0tvj2tkyf2afh", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.421407Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch1detwt5dmhedtb6nr7", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.431410Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch1q47bp1q86pkq2praz", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.443617Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch2357gj8rntdbrv0byv", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.462775Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch2p9zf76p2chkcekhr2", Active sessions limit exceeded, maximum allowed: 2 2025-04-10T10:54:29.480647Z node 4 :KQP_PROXY WARN: TraceId: "01jrfnch38d0q66nvxhgnb9pqc", Active sessions limit exceeded, maximum allowed: 2 >> TargetDiscoverer::Negative >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] |66.5%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> Viewer::Cluster10000Tablets >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |66.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> TargetDiscoverer::IndexedTable >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> Viewer::TabletMergingPacked >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-04-10T10:54:00.984577Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.984626Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.984655Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.985155Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:00.998121Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:00.998391Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.998683Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:00.999127Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.999527Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:00.999657Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:00.999713Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T10:54:01.000445Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.000469Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.000488Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.000898Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.001684Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.001805Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.001999Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-10T10:54:01.002484Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.002623Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.002755Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.002798Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T10:54:01.003785Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.003805Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.003848Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.004179Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.004687Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.004792Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.004954Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.005776Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.005945Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.006059Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.006107Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T10:54:01.007179Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.007206Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.007228Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.007594Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.008262Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.009051Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.009265Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.010934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.011469Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.011590Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.011637Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-04-10T10:54:01.013307Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.013331Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.013375Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.013762Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.017218Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.017408Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.017657Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.018128Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.018283Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.018427Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.018482Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T10:54:01.019238Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.019283Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.019310Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.019654Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.022036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.022203Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.022459Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.022888Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.023009Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.023150Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.023199Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T10:54:01.024957Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.024979Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.025006Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.025370Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T10:54:01.026064Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.026251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.028518Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.029379Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.029626Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.029771Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.029820Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T10:54:01.030974Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.031029Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.031058Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:01.031445Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:01.032056Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:01.032189Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.032445Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:01.034102Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:01.034527Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:01.034610Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:01.034653Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T10:54:01.074247Z :ReadSession INFO: Random seed for debugging is 1744282441074205 2025-04-10T10:54:01.458829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636041117213370:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.463472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.540883Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636042402187996:2187];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001523/r3tmp/tmppi4JS8/pdisk_1.dat 2025-04-10T10:54:01.758910Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created ... 
03d-e1087d1b-214e32b3, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2025-04-10T10:54:20.169704Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 response to read: guid# 746e2628-2367503d-e1087d1b-214e32b3 2025-04-10T10:54:20.169917Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 Process answer. Aval parts: 0 2025-04-10T10:54:20.175004Z :DEBUG: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:20.176654Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 grpc read done: success# 1, data# { read { } } 2025-04-10T10:54:20.176747Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 got read request: guid# 6a70b9a-7f18416f-4c744980-6d2d9773 2025-04-10T10:54:20.178271Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-10T10:54:20.178373Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-10T10:54:20.150000Z WriteTime: 2025-04-10T10:54:20.154000Z Ip: "ipv6:[::1]:35608" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:35608" } } } 2025-04-10T10:54:20.182373Z :DEBUG: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-04-10T10:54:20.182739Z :DEBUG: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-10T10:54:20.184077Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:54:20.184122Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:54:20.184229Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_8857985408870582267_v1 2025-04-10T10:54:20.184343Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:54:20.184359Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:54:20.184376Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:54:20.184395Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:54:20.184408Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T10:54:20.184424Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T10:54:20.184441Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:54:20.184461Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:54:20.184488Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:54:20.183342Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-04-10T10:54:20.183523Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-04-10T10:54:20.198465Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T10:54:20.198552Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:54:20.198625Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-10T10:54:20.202527Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-04-10T10:54:20.202591Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-04-10T10:54:20.202646Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 
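The exchange above is a complete offset-commit round trip: the client asks to commit offsets [2, 3) for partition 0, the PERSQUEUE tablet persists the new consumer offset (the `=== DumpKeyValueRequest ===` write of the `m0000000000cuser` / `m0000000000uuser` keys), and the read proxy acknowledges the commit with cookie 4. Below is a minimal client-side sketch of the flow that produces these records, using the YDB C++ topic SDK. It is an illustration, not the test's actual code: the consumer name "user" and topic "test-topic" are taken from the log, the include path varies by SDK version, and error handling is elided.

```cpp
// Minimal sketch (not the test's code): read one message batch and commit
// its offsets, producing the "commit { offset_ranges { ... } }" request and
// the "Committed response" seen in the log above.
#include <ydb-cpp-sdk/client/topic/client.h>  // include path varies by SDK version

void ReadAndCommitOnce(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TReadSessionSettings()
        .ConsumerName("user")                      // consumer from the log
        .AppendTopics(std::string("test-topic"));  // topic from the log

    std::shared_ptr<NYdb::NTopic::IReadSession> session =
        client.CreateReadSession(settings);

    // Block until the server pushes an event (partition start, data, ...).
    while (auto event = session->GetEvent(/*block=*/true)) {
        using namespace NYdb::NTopic;
        if (auto* start = std::get_if<TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
            // Corresponds to "Confirm partition stream create" in the log.
            start->Confirm();
        } else if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
            for (auto& msg : data->GetMessages()) {
                // ... handle msg.GetData() ...
                msg.Commit();  // commits [offset, offset + 1) for this message
            }
            break;  // one batch is enough for the sketch
        }
    }
}
```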
2025-04-10T10:54:20.203513Z :DEBUG: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-04-10T10:54:20.254226Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0] Write session will now close 2025-04-10T10:54:20.254317Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0] Write session: aborting 2025-04-10T10:54:20.254888Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:54:20.254936Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0] Write session: destroy 2025-04-10T10:54:20.258960Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0 grpc read done: success: 0 data: 2025-04-10T10:54:20.258990Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0 grpc read failed 2025-04-10T10:54:20.259020Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0 grpc closed 2025-04-10T10:54:20.259036Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|90f853-bba3eaae-c2aa99aa-7a6024f7_0 is DEAD 2025-04-10T10:54:20.259635Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:54:20.266583Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491636122721594887:2617] destroyed 2025-04-10T10:54:20.266647Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:54:22.722386Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-10T10:54:30.178292Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-10T10:54:30.270320Z :INFO: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] Closing read session. Close timeout: 0.000000s 2025-04-10T10:54:30.270430Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-10T10:54:30.270502Z :INFO: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16687 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:54:30.270667Z :NOTICE: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T10:54:30.270725Z :DEBUG: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] [dc1] Abort session to cluster 2025-04-10T10:54:30.271383Z :NOTICE: [/Root] [/Root] [79904904-eba95d49-1ce92ed7-3a7c73b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:54:30.278344Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_8857985408870582267_v1 2025-04-10T10:54:30.278416Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491636092656823294:2530] destroyed 2025-04-10T10:54:30.274957Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 grpc read done: success# 0, data# { } 2025-04-10T10:54:30.274992Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 grpc read failed 2025-04-10T10:54:30.275031Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 closed 2025-04-10T10:54:30.275132Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8857985408870582267_v1 is DEAD 2025-04-10T10:54:30.278497Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_8857985408870582267_v1 2025-04-10T10:54:30.280226Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7491636092656823291:2525] disconnected; active server actors: 1 2025-04-10T10:54:30.280264Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7491636092656823291:2525] client user disconnected session shared/user_1_1_8857985408870582267_v1 2025-04-10T10:54:30.915551Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:54:30.952800Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:54:31.018312Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:54:31.106257Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:54:31.219224Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:54:31.368187Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:54:31.662773Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710718, task: 1, CA Id [1:7491636165671268433:2721]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TargetDiscoverer::Transfer >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> Viewer::JsonAutocompleteStartOfDatabaseName |66.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] |66.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging >> TargetDiscoverer::Basic >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> Viewer::PDiskMerging >> Viewer::SelectStringWithNoBase64Encoding >> Viewer::PDiskMerging [GOOD] >> Viewer::SelectStringWithBase64Encoding >> StatisticsSaveLoad::ForbidAccess [GOOD] >> Viewer::JsonAutocompleteEmpty >> TAsyncIndexTests::CreateTable >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-04-10T10:54:15.862749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636101033206125:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:15.862823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00209e/r3tmp/tmpA5B4sv/pdisk_1.dat 2025-04-10T10:54:16.540301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:16.545284Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:16.546252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:16.557883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7831, node 1 2025-04-10T10:54:16.852897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:16.852919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:16.852926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:16.853033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:17.258757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
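At this point in the report, node 1 of the test server is up and the root subdomain has been altered; the test (YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient) then exercises a table client whose session pool is capped. A hedged sketch of that client setup follows; the database path and the limit value are illustrative placeholders rather than values from the test source, and only the gRPC port is taken from the log above.

```cpp
// Hypothetical setup sketch: a TTableClient with a bounded session pool.
// When more threads call GetSession() than MaxActiveSessions allows, the
// extra requests wait for an idle session instead of opening new
// server-side sessions.
#include <ydb-cpp-sdk/client/table/table.h>  // include path varies by SDK version

NYdb::NTable::TTableClient MakeLimitedClient() {
    NYdb::TDriver driver(
        NYdb::TDriverConfig()
            .SetEndpoint("localhost:7831")   // GrpcPort from the log, for illustration
            .SetDatabase("/Root"));

    auto settings = NYdb::NTable::TClientSettings()
        .SessionPoolSettings(
            NYdb::NTable::TSessionPoolSettings()
                .MaxActiveSessions(50));     // the pool limit under test (value assumed)

    return NYdb::NTable::TTableClient(driver, settings);
}
```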
2025-04-10T10:54:20.868556Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636101033206125:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:20.868624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:24.082640Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636138967619632:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:24.082691Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00209e/r3tmp/tmpIghDyF/pdisk_1.dat 2025-04-10T10:54:24.274858Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:24.330861Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:24.330966Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:24.339741Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2019, node 4 2025-04-10T10:54:24.603031Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:24.603054Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:24.603062Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:24.603207Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:24.980568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
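The burst of KQP_WORKLOAD_SERVICE warnings that follows node 4's startup is expected noise from this test: many concurrent queries each ask the workload service for the `default` resource pool before any of them has finished creating it, so every TPoolFetcherActor reports NOT_FOUND, and the racing create attempts later surface as the TX_PROXY "path exist, request accepts it" errors (the create is treated as idempotent). A hedged sketch of the kind of concurrent load that produces this pattern is below; the thread count and query text are illustrative assumptions, not taken from the test source.

```cpp
// Hypothetical sketch of the multi-threaded load in this test: N threads
// each draw a session from the shared bounded pool and run a trivial query.
#include <stdexcept>
#include <thread>
#include <vector>

void HammerSessionPool(NYdb::NTable::TTableClient& client, int threads) {
    std::vector<std::thread> workers;
    workers.reserve(threads);
    for (int i = 0; i < threads; ++i) {
        workers.emplace_back([&client] {
            // Draws from the bounded pool; at the limit this waits for an
            // idle session rather than creating a new server-side session.
            auto sessionResult = client.GetSession().GetValueSync();
            if (!sessionResult.IsSuccess()) {
                throw std::runtime_error("GetSession failed");
            }
            auto session = sessionResult.GetSession();
            // Each such query triggers the workload-service pool lookup
            // that logs NOT_FOUND above until the default pool exists.
            auto result = session.ExecuteDataQuery(
                "SELECT 1;",
                NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
            if (!result.IsSuccess()) {
                throw std::runtime_error("query failed");
            }
        });
    }
    for (auto& w : workers) {
        w.join();
    }
}
```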
2025-04-10T10:54:29.083056Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636138967619632:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:29.083132Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:29.149846Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457529:2585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.149994Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457521:2577], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150051Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457515:2571], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457493:2549], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150171Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457506:2562], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150214Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457488:2544], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457498:2554], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150289Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457512:2568], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150332Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457513:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150372Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457525:2581], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150419Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457517:2573], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457501:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150526Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457500:2556], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457502:2558], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150602Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457489:2545], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150671Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457497:2553], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150716Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457522:2578], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:29.150759Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636160442457492:2548], DatabaseId: /Root, PoolId: de ... 0:54:29.541451Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458150:3141] txid# 281474976715711, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.541499Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458186:3169] txid# 281474976715715, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.550545Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458220:3196] txid# 281474976715718, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.550788Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458221:3197] txid# 281474976715719, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.557404Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458234:3210] txid# 281474976715720, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.558601Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458216:3194] txid# 281474976715717, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.558701Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458237:3213] txid# 281474976715723, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.558914Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458235:3211] txid# 281474976715721, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.559104Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458236:3212] txid# 281474976715722, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.567018Z node 4 
:TX_PROXY ERROR: Actor# [4:7491636160442458270:3238] txid# 281474976715724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.567365Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458278:3246] txid# 281474976715732, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.567635Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458275:3243] txid# 281474976715729, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.567757Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458271:3239] txid# 281474976715725, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.567893Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458277:3245] txid# 281474976715731, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568005Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458273:3241] txid# 281474976715727, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568198Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458279:3247] txid# 281474976715733, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568337Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458280:3248] txid# 281474976715734, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568446Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458276:3244] txid# 281474976715730, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568585Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458272:3240] txid# 281474976715726, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: 
EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568726Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458274:3242] txid# 281474976715728, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568826Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458292:3258] txid# 281474976715735, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.568968Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458293:3259] txid# 281474976715736, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569089Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458294:3260] txid# 281474976715737, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569244Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458312:3278] txid# 281474976715738, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569362Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458320:3281] txid# 281474976715740, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569520Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458313:3279] txid# 281474976715739, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569663Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458387:3330] txid# 281474976715744, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569774Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458386:3329] txid# 281474976715743, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.569805Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458390:3333] txid# 281474976715747, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: 
path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.575198Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458384:3327] txid# 281474976715741, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.578338Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458388:3331] txid# 281474976715745, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.578557Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458385:3328] txid# 281474976715742, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:29.578682Z node 4 :TX_PROXY ERROR: Actor# [4:7491636160442458389:3332] txid# 281474976715746, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> Viewer::TabletMerging |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TargetDiscoverer::Dirs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-10T10:54:00.962427Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1744282440962377 2025-04-10T10:54:01.272194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636041314128559:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.272276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.342935Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636039320980931:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.556503Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.556615Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:54:01.585840Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001530/r3tmp/tmpsyIlXd/pdisk_1.dat 2025-04-10T10:54:01.856086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:01.860252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.860379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.861624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.861692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.866514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.866929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:01.867599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27711, node 1 2025-04-10T10:54:01.968700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001530/r3tmp/yandexrtOBvN.tmp 2025-04-10T10:54:01.968737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001530/r3tmp/yandexrtOBvN.tmp 2025-04-10T10:54:01.968916Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001530/r3tmp/yandexrtOBvN.tmp 2025-04-10T10:54:01.969092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:02.018133Z INFO: TTestServer started on Port 26697 GrpcPort 27711 TClient is connected to server localhost:26697 PQClient connected to localhost:27711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:02.278938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:54:04.503784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636054199031469:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.504485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636054199031460:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.504576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.508672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:04.514097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636054199031506:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.514176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.537672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636054199031474:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:04.769774Z node 1 :TX_PROXY ERROR: Actor# [1:7491636054199031561:2693] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.804010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.814034Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636054199031572:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.814132Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636052205883062:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.814478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBlYjMzZDQtZDU0YTExMS03OTQ0ODY0OC05NjM4M2VhNA==, ActorId: [1:7491636054199031457:2336], ActorState: ExecuteState, TraceId: 01jrfnbrp1ck04prnch8p2dxfv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.814514Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzA4NmNiNDAtZmU4OTFkMjItNDFiMjFiMGYtNDIwMDM5N2Y=, ActorId: [2:7491636052205883028:2308], ActorState: ExecuteState, TraceId: 01jrfnbrs87w86c8jy45pa443v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.817041Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.817713Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:05.014474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:54:05.152161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27711", true, true, 1000); 2025-04-10T10:54:05.372107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnbse22d2sn95tkdsmvxq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI5ZDM3YzUtMmQ2YzFlZWQtMmMxN2ZlYjktOTBmMjQ4Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491636058493999281:2990] 2025-04-10T10:54:06.274286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636041314128559:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.274381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:06.332334Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636039320980931:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.332420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T10:54:10.525655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhos ... 04-10T10:54:31.062312Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_4311956447357826455_v1 grpc read done: success# 1, data# { read { } } 2025-04-10T10:54:31.062383Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_4311956447357826455_v1 grpc closed 2025-04-10T10:54:31.062419Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_4311956447357826455_v1 is DEAD 2025-04-10T10:54:31.123000Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_4311956447357826455_v1 2025-04-10T10:54:31.123060Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491636160675389761:2537] destroyed 2025-04-10T10:54:31.123118Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_4311956447357826455_v1 2025-04-10T10:54:31.120473Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491636160675389745:2528] disconnected; active server actors: 1 2025-04-10T10:54:31.120515Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491636160675389745:2528] client user disconnected session shared/user_3_1_4311956447357826455_v1 2025-04-10T10:54:31.640092Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491636169265324562:2556] TxId: 281474976710695. Ctx: { TraceId: 01jrfnchxv2v162tbracamxazj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGY4NmJkMGEtYjdiZmQ4OWMtZGM0NTVjZDMtMjliMzRhNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-10T10:54:31.640309Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491636169265324572:2569], TxId: 281474976710695, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZGY4NmJkMGEtYjdiZmQ4OWMtZGM0NTVjZDMtMjliMzRhNjg=. TraceId : 01jrfnchxv2v162tbracamxazj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491636169265324562:2556], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:54:31.640620Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491636169265324573:2570], TxId: 281474976710695, task: 4. Ctx: { TraceId : 01jrfnchxv2v162tbracamxazj. SessionId : ydb://session/3?node_id=3&id=ZGY4NmJkMGEtYjdiZmQ4OWMtZGM0NTVjZDMtMjliMzRhNjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7491636169265324562:2556], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:54:32.642657Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGY4NmJkMGEtYjdiZmQ4OWMtZGM0NTVjZDMtMjliMzRhNjg=, ActorId: [3:7491636164970357196:2556], ActorState: ExecuteState, TraceId: 01jrfnchxv2v162tbracamxazj, Create QueryResponse for error on request, msg: 2025-04-10T10:54:32.647151Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } TxMeta { id: "01jrfnck62182cmyx70m09rgg7" } } YdbStatus: UNAVAILABLE ConsumedRu: 845 } 2025-04-10T10:54:35.312935Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.312979Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.313019Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:35.322494Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:35.323150Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:35.323909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.326624Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-10T10:54:35.331831Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.331871Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.331908Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:35.350307Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:35.352625Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:35.352826Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.354424Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:35.356837Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T10:54:35.357504Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-10T10:54:35.357626Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-10T10:54:35.362298Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:35.362369Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T10:54:35.362406Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2025-04-10T10:54:35.362471Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-10T10:54:35.365689Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.365727Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.365769Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:35.367198Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:35.368370Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:35.369111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.370418Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T10:54:35.371359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.371572Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T10:54:35.378306Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T10:54:35.378417Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T10:54:35.378554Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-04-10T10:54:35.380692Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.380743Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.380777Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:35.390525Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T10:54:35.391081Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T10:54:35.391260Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:35.394448Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL)
2025-04-10T10:54:35.395412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-04-10T10:54:35.396003Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-04-10T10:54:35.396222Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2)
2025-04-10T10:54:35.396311Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1)
2025-04-10T10:54:35.396381Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1)
2025-04-10T10:54:35.396429Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2)
2025-04-10T10:54:35.396605Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
2025-04-10T10:54:35.396647Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
2025-04-10T10:54:37.400542Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-10T10:54:37.400576Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-10T10:54:37.400613Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-10T10:54:37.401721Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-04-10T10:54:37.404130Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-04-10T10:54:37.404331Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-10T10:54:37.407767Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-10T10:54:37.408424Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1)
2025-04-10T10:54:37.408527Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1)
2025-04-10T10:54:37.408642Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
>> TargetDiscoverer::Negative [GOOD]
>> Viewer::VDiskMerging [GOOD]
>> Viewer::TenantInfo5kkTablets
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD]
Test command err:
2025-04-10T10:54:22.167900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:22.174963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:22.175103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c5d/r3tmp/tmpPjnOPi/pdisk_1.dat 2025-04-10T10:54:22.729709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24329, node 1 2025-04-10T10:54:23.068987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:23.069069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:23.069113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:23.069714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:23.078347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:23.211942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:23.212141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:23.235620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1572 2025-04-10T10:54:24.058911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:28.631768Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T10:54:28.695159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:28.695323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:28.732450Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:28.739133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:29.066613Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.067388Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.068157Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.068328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.068469Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.068814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.068943Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.069051Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.069168Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:54:29.284590Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:29.284719Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:29.300678Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:29.558366Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:29.868436Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T10:54:29.868609Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T10:54:29.989349Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T10:54:29.990856Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T10:54:29.991127Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T10:54:29.991208Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T10:54:29.991275Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T10:54:29.991334Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T10:54:29.991396Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T10:54:29.992135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T10:54:29.992823Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T10:54:30.031874Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:54:30.032043Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:54:30.061209Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T10:54:30.069561Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T10:54:30.070333Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T10:54:30.090740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T10:54:30.128553Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T10:54:30.128668Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T10:54:30.128801Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T10:54:30.159156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T10:54:30.177557Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-04-10T10:54:30.177778Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-04-10T10:54:30.421990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-04-10T10:54:30.764958Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-04-10T10:54:30.859323Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-04-10T10:54:32.821039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:32.821233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:32.884204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-04-10T10:54:33.605734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:33.605972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:33.609833Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-04-10T10:54:33.610099Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-04-10T10:54:33.610236Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133]
2025-04-10T10:54:33.610355Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133]
2025-04-10T10:54:33.611110Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994]
2025-04-10T10:54:33.611467Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK
2025-04-10T10:54:33.611684Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1
2025-04-10T10:54:33.611758Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1
2025-04-10T10:54:33.612008Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-04-10T10:54:33.612105Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1
2025-04-10T10:54:33.636396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:33.636542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:33.637001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:54:33.645688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-10T10:54:33.846667Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck
2025-04-10T10:54:33.846746Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0
2025-04-10T10:54:33.978571Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1
2025-04-10T10:54:34.428464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2569:3144], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-04-10T10:54:34.627066Z node 1 :TX_PROXY ERROR: Actor# [1:2686:3216] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:54:34.649933Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2709:3232]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-04-10T10:54:34.650126Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-04-10T10:54:34.650194Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2709:3232], StatRequests.size() = 1
2025-04-10T10:54:34.747740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfncn271vkbe01qdc9qcv32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2VjN2YyZjQtZDFhNDE4NGMtZDBjZDQyZDgtYjA3YzZhNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:54:35.051386Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2790:3262], for# user@builtin, access# DescribeSchema
2025-04-10T10:54:35.051477Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2790:3262], for# user@builtin, access# DescribeSchema
2025-04-10T10:54:35.065723Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2780:3258], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T10:54:35.067970Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZlMjM2OTctMzg2NTZiZjAtYzQyMzIzMWEtYWYzMDU5M2E=, ActorId: [1:2769:3250], ActorState: ExecuteState, TraceId: 01jrfncpfy7a10s363wef81hwm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
>> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD]
Test command err:
2025-04-10T10:54:24.840055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:24.840297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:24.840668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017ef/r3tmp/tmpeLYp3E/pdisk_1.dat 2025-04-10T10:54:25.416384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.424839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:54:25.435197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.436241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:54:25.438783Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-10T10:54:25.438847Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-10T10:54:25.484519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:25.494721Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-10T10:54:25.544047Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:334:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-10T10:54:25.544237Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-10T10:54:25.544414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:25.544477Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:25.544509Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:25.544549Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:25.544582Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:25.544702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T10:54:25.545025Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-10T10:54:25.545072Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-10T10:54:25.545110Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-10T10:54:25.545159Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T10:54:25.545298Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-10T10:54:25.558900Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-10T10:54:25.559017Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:334:2374]) 2025-04-10T10:54:25.559105Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T10:54:25.559660Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-10T10:54:25.559744Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Execute 2025-04-10T10:54:25.559788Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:25.559868Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Complete 2025-04-10T10:54:25.560038Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443319296 } 2025-04-10T10:54:25.560100Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-10T10:54:25.560149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:25.560314Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-10T10:54:25.560380Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:25.560415Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:25.560584Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-10T10:54:25.560619Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-10T10:54:25.560648Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-10T10:54:25.560682Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T10:54:25.574945Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-10T10:54:25.575045Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T10:54:25.654615Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-10T10:54:25.654722Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T10:54:25.655082Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-10T10:54:25.655499Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-10T10:54:25.655570Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-10T10:54:25.656515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-10T10:54:25.658107Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T10:54:25.658324Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-10T10:54:25.658369Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-10T10:54:25.658416Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-10T10:54:25.659226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:25.659305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-10T10:54:25.660289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-10T10:54:25.667432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.668735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:54:25.668802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.672812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-10T10:54:25.677519Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-10T10:54:25.729145Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:54:25.729260Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-10T10:54:25.729556Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-10T10:54:25.729655Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-10T10:54:25.729731Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-10T10:54:25.729943Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-10T10:54:25.730566Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-10T10:54:25.730729Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-10T10:54:25.731635Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-10T10:54:25.731844Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-10T10:54:25.731954Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-10T10:54:25.732028Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-10T10:54:25.732188Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-10T10:54:25.732291Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... ecute(72075186224037888 OK) 2025-04-10T10:54:37.632794Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-04-10T10:54:37.632927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T10:54:37.633109Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-10T10:54:37.633170Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-10T10:54:37.633262Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-04-10T10:54:37.633873Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-04-10T10:54:37.649360Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T10:54:37.652568Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-04-10T10:54:37.652673Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 3500 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-04-10T10:54:37.665231Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-04-10T10:54:37.753993Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-04-10T10:54:37.754129Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T10:54:37.754207Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-04-10T10:54:37.754499Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-04-10T10:54:37.755010Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715665 marker# C2 2025-04-10T10:54:37.755114Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 3500 Status# 17 SEND 
EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-04-10T10:54:37.755937Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:54:37.756023Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:54:37.756249Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:54:37.756830Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:54:37.756894Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:54:37.756960Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-10T10:54:37.757204Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-10T10:54:37.757384Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:54:37.757588Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:54:37.757686Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-10T10:54:37.758541Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:54:37.760551Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-10T10:54:37.760644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:37.761375Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T10:54:37.761519Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-10T10:54:37.761573Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-10T10:54:37.761601Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-04-10T10:54:37.761644Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 acknowledged 2025-04-10T10:54:37.761976Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:37.762054Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:54:37.762124Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-04-10T10:54:37.762260Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:54:37.763065Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-04-10T10:54:37.766100Z node 2 :TX_DATASHARD DEBUG: 
Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-04-10T10:54:37.766213Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:54:37.766890Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-10T10:54:37.767018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2025-04-10T10:54:37.767906Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-10T10:54:37.768429Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:54:37.769626Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.769 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-10T10:54:37.769754Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.769 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-10T10:54:37.769843Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.769 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-10T10:54:37.769903Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.769 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-10T10:54:37.769964Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.769 TRACE ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-10T10:54:37.770046Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-10T10:54:37.770124Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-10T10:54:37.770202Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-10T10:54:37.770270Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 INFO ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43
2025-04-10T10:54:37.770447Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 NOTE ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060
2025-04-10T10:54:37.770520Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 NOTE ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE
2025-04-10T10:54:37.770567Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZThlYWU5MjYtMzJhNTE3MjItMjhhMTRmMDYtOWIyYTgyNjE= 2025-04-10 10:54:37.770 NOTE ydb-core-tx-datashard-ut_minstep(pid=1089974, tid=0x00007F6C1AF0CD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4
2025-04-10T10:54:37.786984Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-04-10T10:54:37.787285Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-04-10T10:54:37.789268Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T10:54:37.790353Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-04-10T10:54:37.790823Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889
2025-04-10T10:54:37.790890Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889
2025-04-10T10:54:37.791023Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2)
2025-04-10T10:54:37.791163Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2
2025-04-10T10:54:37.791301Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2
>> TAsyncIndexTests::CreateTable [GOOD]
>> TargetDiscoverer::Transfer [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:54:33.837116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:54:33.837227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:54:33.837274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:54:33.837322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:54:33.837361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:54:33.837386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:54:33.837442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:54:33.837556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:54:33.837886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:33.932289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:33.932369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:33.949794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:33.950087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:33.950279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:33.959938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:33.960213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:33.960876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:33.961258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:33.963341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:33.964868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:33.964946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:33.965160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:33.965209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:33.965259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:33.965417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:33.975765Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:34.172491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:34.172798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.173031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:34.173250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:34.173299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.183242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T10:54:34.183442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:34.183654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.183712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:34.183768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:34.183820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:34.186233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.186307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:34.186347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:34.188540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.188601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.188653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.188721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.209275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:34.217278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:34.217605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:34.220781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.221204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:34.221260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.221582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:34.221653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.222109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:34.222231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:34.225983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:34.226046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:34.226285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:34.226332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:34.226609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.226671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:34.226771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.226862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.226909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.226938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.226981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:34.227033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.227065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:34.227093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:34.227167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:34.227218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:34.227251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:34.229724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.229844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.229878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:54:39.252768Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.252920Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:39.252962Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-10T10:54:39.253009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-10T10:54:39.253632Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:39.253732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:39.253770Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:54:39.253810Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T10:54:39.253851Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:54:39.254412Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:39.254495Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:39.254522Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:54:39.254547Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-10T10:54:39.254573Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:39.254637Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T10:54:39.257169Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T10:54:39.257603Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-10T10:54:39.258130Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-10T10:54:39.258378Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 261us result status StatusSuccess 2025-04-10T10:54:39.258698Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-10T10:54:39.261575Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:39.261762Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.261790Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.261849Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.261882Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:54:39.262084Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:39.262249Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:54:39.262291Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:39.262346Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:54:39.262383Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:39.262462Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:39.262531Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-10T10:54:39.262571Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:39.262609Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T10:54:39.262644Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-10T10:54:39.262677Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T10:54:39.265104Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:39.265218Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-10T10:54:39.265473Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:39.265518Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.265695Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:39.265748Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-10T10:54:39.266294Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T10:54:39.266403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T10:54:39.266448Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-10T10:54:39.266517Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T10:54:39.266562Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:39.266672Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-10T10:54:39.268712Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-10T10:54:39.269374Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:54:39.269595Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 236us result status StatusSuccess 2025-04-10T10:54:39.270004Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-04-10T10:54:23.644424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:23.644621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:23.644791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c00/r3tmp/tmpv6KkjW/pdisk_1.dat 2025-04-10T10:54:24.402666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:24.446868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:24.492749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:24.492890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:24.507656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:24.616524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.333878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.334064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.334159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.343827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:25.576034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:54:25.690801Z node 1 :TX_PROXY ERROR: Actor# [1:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:37.141504Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfncd1c56mefwvapsmytskr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzNzU2OGUtYWY2MzhjMzctNWFjYzcwNDktYTYzZjNkMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:54:37.199495Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfncd1c56mefwvapsmytskr", SessionId: ydb://session/3?node_id=1&id=NzIzNzU2OGUtYWY2MzhjMzctNWFjYzcwNDktYTYzZjNkMjM=, Slow query, duration: 11.874893s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), 
(186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), 
(423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), 
(660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), 
(897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-04-10T10:54:37.604034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfncrmmamcncybe9ts3myxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA0NjZjN2MtYzFkNjJjZTEtMjJkOWVkMTctN2M5NmY5OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-04-10T10:54:37.625958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfncrmmamcncybe9ts3myxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA0NjZjN2MtYzFkNjJjZTEtMjJkOWVkMTctN2M5NmY5OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:38.681975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:38.682071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:38.682113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:38.682176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:38.682219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:38.682247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:38.682300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:38.682393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:38.682732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:38.831068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:38.831130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:38.863831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:38.864118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:38.864301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:38.877572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:38.877786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:38.878421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:38.878819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:38.886837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:38.888214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:38.888287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:38.888461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:38.888522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:38.888566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:38.888713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:38.905596Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:39.067601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:39.067867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.068100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:39.068376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:39.068442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.075361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:39.075534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:39.075847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.075934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:39.076015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:39.076086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:39.083055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.083140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:39.083192Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:39.091231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.091312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.091405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:39.091476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:39.095487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:39.102882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:39.103130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:39.104395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:39.104577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:39.104638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:39.105031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:39.105119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:39.105343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:39.105488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:39.108049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:39.108101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:39.108286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:39.108351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:39.108604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.108649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:39.108770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:39.108804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:39.108838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:39.108885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:39.108924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:39.108965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:39.109006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:39.109034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:39.109097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:39.109133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:39.109164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:39.111476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:39.111581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:39.111626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:39.793614Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:54:39.793647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:54:39.793687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-10T10:54:39.794783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:54:39.794824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T10:54:39.794983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:54:39.795020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:54:39.795078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:54:39.795130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:39.795171Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.795201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:54:39.795234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T10:54:39.808313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:54:39.808842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:54:39.815304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:54:39.817994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:54:39.818135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:54:39.822853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.823174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:54:39.891740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:54:39.891811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-10T10:54:39.891930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T10:54:39.891969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T10:54:39.892002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T10:54:39.892036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T10:54:39.892070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-10T10:54:39.892600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.893036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:54:39.893088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T10:54:39.893150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T10:54:39.893190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:54:39.893228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T10:54:39.893255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:54:39.893280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-10T10:54:39.893353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-04-10T10:54:39.893397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:54:39.893437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T10:54:39.893506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T10:54:39.893634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:54:39.893677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-10T10:54:39.893696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-10T10:54:39.893721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T10:54:39.893759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-10T10:54:39.893786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-10T10:54:39.893831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:54:39.903469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T10:54:39.903548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-04-10T10:54:39.904145Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:54:39.904479Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 309us result status StatusSuccess 2025-04-10T10:54:39.905319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-04-10T10:54:15.009589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636101286972347:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:15.009695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f51/r3tmp/tmpVEGoIU/pdisk_1.dat 2025-04-10T10:54:15.699605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:15.699749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:15.708191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:15.803415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18241, node 1 2025-04-10T10:54:15.915186Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:54:15.922536Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:54:16.042664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:16.042685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:16.042703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:16.042834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15724 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:16.409427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:20.010881Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636101286972347:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:20.010961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:23.609896Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636136135260860:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:23.609939Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f51/r3tmp/tmpZbdGsP/pdisk_1.dat 2025-04-10T10:54:24.057944Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:24.091982Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:24.092107Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:24.099732Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13324, node 4 2025-04-10T10:54:24.275186Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:24.275222Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:24.275231Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:24.275381Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:24.783891Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:28.618272Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636136135260860:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:28.618360Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Viewer::JsonAutocompleteSimilarDatabaseName >> TargetDiscoverer::IndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-04-10T10:54:34.063872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636183121813727:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:34.063911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ecb/r3tmp/tmpBHUwWT/pdisk_1.dat 2025-04-10T10:54:34.989406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:34.989533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:34.993120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:35.244017Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7376 TServer::EnableGrpc on GrpcPort 31811, node 1 2025-04-10T10:54:35.726942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:35.726979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:35.726988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:35.727138Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:36.534613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:36.665532Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
<main>: Error: Path not found } } } 2025-04-10T10:54:36.665582Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
<main>: Error: Path not found } >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> Cache::Test5 >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-04-10T10:54:36.406756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636190543139076:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:36.428709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e9a/r3tmp/tmpkCE8YH/pdisk_1.dat 2025-04-10T10:54:37.094902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:37.095001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:37.100198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:37.124318Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26795 TServer::EnableGrpc on GrpcPort 27115, node 1 2025-04-10T10:54:37.515006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:37.515036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:37.515046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:37.515209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:37.915041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:37.938364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting...
2025-04-10T10:54:38.158661Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1744282478064, tx_id: 281474976710658 } } } 2025-04-10T10:54:38.158692Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-04-10T10:54:38.195755Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-10T10:54:38.195801Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-04-10T10:54:38.195860Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-04-10T10:54:33.266125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636176873923397:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:33.270917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea0/r3tmp/tmpsBD3MS/pdisk_1.dat 2025-04-10T10:54:34.118344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:34.163972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:34.164056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:34.171980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14198 TServer::EnableGrpc on GrpcPort 23441, node 1 2025-04-10T10:54:34.654691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:34.654718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:34.654725Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:34.654816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14198 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:35.329836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:35.355640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:35.379454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:35.384924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:35.631951Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282475397, tx_id: 1 } } } 2025-04-10T10:54:35.631993Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-10T10:54:35.642337Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282475411, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-10T10:54:35.642390Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-10T10:54:35.651119Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282475509, tx_id: 281474976710659 } }] } } 2025-04-10T10:54:35.651142Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-04-10T10:54:38.242362Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636176873923397:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:38.242425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:38.663547Z 
node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282475509, tx_id: 281474976710659 } } } 2025-04-10T10:54:38.663571Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-04-10T10:54:38.663604Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:34.082251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:34.082387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:34.082458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:34.082504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:34.082554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:34.082587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:34.082660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:34.082776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:34.083192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:34.177520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:34.177607Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:34.197638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:34.197965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:34.198120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:34.237656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:34.238052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:34.238834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.239349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:34.247671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:34.249332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:34.249424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:34.249644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:34.249705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:34.249750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:34.249892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.263319Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:34.412410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:34.412676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.412905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:34.413197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:34.413262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.435497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.435677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:34.435924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.435998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:34.436043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:34.436107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:34.438525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.438589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-10T10:54:34.438630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:34.440644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.440697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.440761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.440833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.445049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:34.447341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:34.447545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:34.448624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.448775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:34.448833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.449156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:34.449217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.449409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:34.449542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:34.451957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:34.452013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:34.452248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:34.452301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:34.452579Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.452632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:34.452737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.452795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.452839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.452876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.452920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:34.452971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.453016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:34.453052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:34.453130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:34.453173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:34.453210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:34.455848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.455983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.456026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:54:41.217854Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:41.218006Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:41.218052Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-10T10:54:41.218103Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-10T10:54:41.218828Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:41.218949Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:41.218991Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:54:41.219038Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T10:54:41.219089Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:54:41.219723Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:41.219803Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:54:41.219832Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:54:41.219862Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-10T10:54:41.219893Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:41.219961Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T10:54:41.222900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T10:54:41.223324Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-10T10:54:41.223850Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-10T10:54:41.224065Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 247us result status StatusSuccess 2025-04-10T10:54:41.224413Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-10T10:54:41.227617Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:41.227839Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:41.227876Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:41.227930Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:41.227964Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:54:41.228192Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:41.228305Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:54:41.228348Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:41.228397Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T10:54:41.228434Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:41.228502Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:41.228569Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-10T10:54:41.228613Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T10:54:41.228657Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T10:54:41.228699Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-10T10:54:41.228741Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T10:54:41.231644Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:41.231779Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-10T10:54:41.232041Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:41.232093Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:41.232301Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:41.232353Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-10T10:54:41.232938Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T10:54:41.233048Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T10:54:41.233097Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-10T10:54:41.233142Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T10:54:41.233193Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:41.233314Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-10T10:54:41.235502Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-10T10:54:41.236122Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:54:41.236318Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-04-10T10:54:41.236801Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-04-10T10:54:35.590865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636188821992496:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:35.591692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eb3/r3tmp/tmpKOUGzO/pdisk_1.dat 2025-04-10T10:54:36.403209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:36.409158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:36.409236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:36.412470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17855 TServer::EnableGrpc on GrpcPort 4626, node 1 2025-04-10T10:54:36.875891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:36.875913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:36.875925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-10T10:54:36.876041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:37.577284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:37.613695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:38.206408Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282477651, tx_id: 1 } } } 2025-04-10T10:54:38.206439Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-10T10:54:38.223552Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282478050, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-10T10:54:38.223583Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-10T10:54:40.574259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636188821992496:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:40.574324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:40.797004Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282478050, tx_id: 281474976710658 } } } 2025-04-10T10:54:40.797031Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-10T10:54:40.797059Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 
2025-04-10T10:54:40.797126Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberCombinationsTest::MigratedPathRecreation |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |66.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |66.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot |66.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |66.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> TSubscriberTest::Boot [GOOD] |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TargetDiscoverer::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:33.674966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:33.675095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:33.675144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:33.675197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:33.675248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:33.675278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:33.675332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:33.675428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:33.675791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:33.851383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:33.851484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:33.891993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:33.892330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:33.892499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:33.932718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:33.933008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:33.933734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:33.934229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:33.938041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:33.939659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:33.939737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:33.939969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:33.940036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:33.940078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:33.940236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:33.949212Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:34.256660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:34.256913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.257143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:34.257379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:34.257430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.266788Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.270102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:34.270391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.270454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:34.270492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:34.270542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:34.275519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.275581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:54:34.275621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:34.277517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.277564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.277616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.277691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.281088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:34.289581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:34.289857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:34.290953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:34.291120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:34.291175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.291464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:34.291541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:34.291746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:34.291830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:34.294485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:34.294534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:34.294775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:34.294823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:34.295077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:34.295126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:34.295283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.295331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.295387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:34.295421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.295458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:34.295502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:34.295541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:34.295573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:34.295971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:34.296036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:34.296071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:34.299473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.299668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:34.299709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
DbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:42.776484Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T10:54:42.776536Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:54:42.776584Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:54:42.776637Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-10T10:54:42.776677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-04-10T10:54:42.795376Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:42.795556Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-04-10T10:54:42.801596Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:42.801681Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:42.801953Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:42.802008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:357:2333], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T10:54:42.802739Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:54:42.802878Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:54:42.802948Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:54:42.803009Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-10T10:54:42.803152Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:42.803290Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T10:54:42.814962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:54:42.816649Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [4:310:2297] sender: [4:403:2058] recipient: [4:102:2137] Leader for TabletID 72057594046678944 is [4:310:2297] sender: [4:406:2058] recipient: [4:15:2062] Leader for TabletID 72057594046678944 is [4:310:2297] sender: [4:407:2058] recipient: [4:405:2376] Leader for TabletID 72057594046678944 is [4:408:2377] sender: [4:409:2058] recipient: 
[4:405:2376] 2025-04-10T10:54:42.936823Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:42.936964Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:42.937035Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:42.937098Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:42.937152Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:42.937194Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:42.937280Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:42.937398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:42.937924Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:43.003827Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:43.005934Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:43.006214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:43.006504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:43.006562Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:43.006724Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:43.007803Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:43.007961Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.008061Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.008575Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.008690Z node 4 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T10:54:43.008987Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.009129Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.009253Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.009397Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.009518Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at 
schemeshard: 72057594046678944 2025-04-10T10:54:43.009728Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.010122Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.014566Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.015819Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.015977Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.016240Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.016373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.016500Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.016874Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017000Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017203Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017540Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017778Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017854Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.017946Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T10:54:43.026730Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:43.026833Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:43.028002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:43.028082Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:43.028137Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:43.028584Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:408:2377] sender: [4:465:2058] recipient: [4:15:2062] 2025-04-10T10:54:43.075754Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T10:54:43.075834Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-10T10:54:43.188148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI1NjgzLCJpYXQiOjE3NDQyODI0ODMsInN1YiI6InVzZXIxIn0.ofDlXYNAWLokVwfjOG4Iqve6sizCUAl2T3BG6JvkYgxq4i5V3vLQ0iHJvsqRPgQAuOwNQ0ddyfkLBQ4iXiyH10QVZBjJUnGYN8A6L8uikLrtW5GhNzxxxAMUyRFaIoIKMy_04Elbd0wPmv0N82Nw3VWPqZM8b22UEJzST6onpc2V1s1nMedZslioXj4sbEJniGQ_CbPwSbbA--iB9H8BIy2Xpge-UbokXN2DavmnaLAWL37-nvCMk-zJXs7Ao3B0N6k3SB7U0UxR_FqxXtEY_CNmWVT0-vHWFdA4Sufh0DaNScGXGIxKcL3fxrRBmmRZeA46xWsdL1ijezH-oTe2Lw" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI1NjgzLCJpYXQiOjE3NDQyODI0ODMsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-10T10:54:43.188396Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:43.188474Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:43.188739Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:43.188802Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:458:2416], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T10:54:43.189558Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-04-10T10:54:43.787243Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-10T10:54:43.787335Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-10T10:54:43.794460Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-10T10:54:43.794544Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-10T10:54:43.794620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-10T10:54:43.794659Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-10T10:54:43.794941Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-10T10:54:43.794978Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-04-10T10:54:43.795108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:54:43.795644Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-10T10:54:43.795687Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 
2025-04-10T10:54:43.795839Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.795985Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-10T10:54:43.796005Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-04-10T10:54:43.796038Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.796191Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-10T10:54:43.796216Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-04-10T10:54:43.796261Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.796350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-04-10T10:54:43.796417Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-10T10:54:43.796487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-04-10T10:54:43.796519Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-10T10:54:43.796548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-04-10T10:54:43.796580Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-10T10:54:43.796666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-04-10T10:54:43.796785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-04-10T10:54:43.796837Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:54:43.796892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-04-10T10:54:43.796943Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-10T10:54:43.797182Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-04-10T10:54:43.797231Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-10T10:54:43.815624Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-10T10:54:43.815915Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-04-10T10:54:43.816004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-10T10:54:43.816074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-04-10T10:54:43.816160Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-04-10T10:54:43.816433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-04-10T10:54:43.816473Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-10T10:54:43.816527Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-10T10:54:43.816682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-04-10T10:54:43.816770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 2025-04-10T10:54:43.816837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068] 2025-04-10T10:54:43.816910Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) 
DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:54:44.365523Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:54:44.370946Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-04-10T10:54:44.371048Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-04-10T10:54:44.371111Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-04-10T10:54:44.371196Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-04-10T10:54:44.371271Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-04-10T10:54:44.371317Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:54:44.371373Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-04-10T10:54:44.371440Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD]
Test command err: 2025-04-10T10:54:24.501104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:24.501369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:24.501601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c1d/r3tmp/tmpT9J7hE/pdisk_1.dat 2025-04-10T10:54:24.998821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.088001Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:25.144210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:25.144409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:25.157527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:25.278064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:25.961310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:790:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.961442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.961840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:25.967464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:26.166287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:804:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:54:26.343153Z node 1 :TX_PROXY ERROR: Actor# [1:882:2715] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:41.423597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfncdn624n2x4sgf33t36pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzMTlhMTctMzg0ZjQ3MzEtYzM3MDFhM2ItZGVmMjFlNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:54:41.490381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfncdn624n2x4sgf33t36pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzMTlhMTctMzg0ZjQ3MzEtYzM3MDFhM2ItZGVmMjFlNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:54:41.607398Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfncdn624n2x4sgf33t36pb", SessionId: ydb://session/3?node_id=1&id=NWUzMTlhMTctMzg0ZjQ3MzEtYzM3MDFhM2ItZGVmMjFlNDY=, Slow query, duration: 15.648293s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), 
(164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), 
(401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), 
(638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), 
(875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-04-10T10:54:41.845474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfncwyc02d0jevwh9mag7td, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTlkYWQ0MWMtOGQwNDFhYS03NmQ5NGI5Ny1kZDI5YzA1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |66.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |66.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |66.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |66.8%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |66.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TFlatTest::SelectRangeForbidNullArgs2
>> TLocksTest::Range_BrokenLock0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD]
Test command err: 2025-04-10T10:54:37.787494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636194501691465:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:37.787674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e96/r3tmp/tmprwYghR/pdisk_1.dat 2025-04-10T10:54:38.499778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:38.520361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:38.520451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:38.531740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20747 TServer::EnableGrpc on GrpcPort 23134, node 1 2025-04-10T10:54:39.050940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:39.050959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:39.050966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:39.051079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:39.897773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:39.929494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:40.325075Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282479961, tx_id: 1 } } } 2025-04-10T10:54:40.325104Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-10T10:54:40.336186Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282480101, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-10T10:54:40.336208Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-10T10:54:42.754253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636194501691465:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:42.754330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:43.582313Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282480101, tx_id: 281474976710658 } } } 2025-04-10T10:54:43.582343Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-10T10:54:43.582377Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table
>> TLocksTest::UpdateLockedKey
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD]
Test command err: 2025-04-10T10:54:15.786672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636103437849795:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:15.786724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020ad/r3tmp/tmpXvQSXS/pdisk_1.dat 2025-04-10T10:54:16.719772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:16.719865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:16.728563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:16.807084Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:16.825814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 3404, node 1 2025-04-10T10:54:16.848448Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-10T10:54:16.848465Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:54:17.026656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:17.026680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:17.026690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:17.026791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:17.701639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:20.786689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636103437849795:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:20.786776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:23.119855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589325:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.122591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589313:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.136231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.183314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589361:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.188433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:54:23.190348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589359:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.190538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.199052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589374:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.201850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589372:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.202029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.535201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589440:2517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.535361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589441:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.555587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589442:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.555816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589427:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.555861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589436:2513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.559954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589437:2514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.560089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589438:2515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.560136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589439:2516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.568373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589443:2520], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.568544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636137797589417:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.595636Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589364:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:23.595800Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589378:2677] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:23.650839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:23.846246Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589481:2724] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:23.846425Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589485:2728] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:23.846524Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589482:2725] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:23.846632Z node 1 :TX_PROXY ERROR: Actor# [1:7491636137797589480:2723] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, Local ... uestId: 473 2025-04-10T10:54:43.245699Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 478 2025-04-10T10:54:43.245735Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2025-04-10T10:54:43.245766Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 497 2025-04-10T10:54:43.245804Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 498 2025-04-10T10:54:43.245894Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 502 2025-04-10T10:54:43.245923Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2025-04-10T10:54:43.245956Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 508 2025-04-10T10:54:43.245990Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2NmNzVlMDUtZWYzYmJkYWYtOGE1OTZmNmMtNDI3ODg5MzM=, ActorId: [4:7491636192337689508:2345], ActorState: ExecuteState, TraceId: 01jrfncy5zat4c80nmv9z3x2pa, Reply query error, msg: Pending previous query completion proxyRequestId: 469 2025-04-10T10:54:43.246047Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2NmNzVlMDUtZWYzYmJkYWYtOGE1OTZmNmMtNDI3ODg5MzM=, ActorId: [4:7491636192337689508:2345], ActorState: ExecuteState, TraceId: 01jrfncy5zat4c80nmv9z3x2pa, Reply query error, msg: Pending previous query completion proxyRequestId: 482 2025-04-10T10:54:43.254838Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 475 2025-04-10T10:54:43.254908Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 476 2025-04-10T10:54:43.254942Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 479 2025-04-10T10:54:43.255002Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 480 2025-04-10T10:54:43.255094Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2025-04-10T10:54:43.255124Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2025-04-10T10:54:43.255172Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWRkNWExZDgtYTUyODc4NjctNDUwNmJiYzgtNGZhYTUzNGM=, ActorId: [4:7491636192337689509:2346], ActorState: ExecuteState, TraceId: 01jrfncybj9z1hxmgjtsm5es5y, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2025-04-10T10:54:43.255516Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2I4ZWY1NDMtNDBiNWVkODctZGM0NDg3NTgtOGQ1OTJmZjI=, ActorId: [4:7491636192337689510:2347], ActorState: ExecuteState, TraceId: 01jrfncy613vpk1btwhbk9e1be, Reply query error, msg: Pending previous query completion proxyRequestId: 471 2025-04-10T10:54:43.255552Z node 4 :KQP_SESSION WARN: 
SessionId: ydb://session/3?node_id=4&id=N2I4ZWY1NDMtNDBiNWVkODctZGM0NDg3NTgtOGQ1OTJmZjI=, ActorId: [4:7491636192337689510:2347], ActorState: ExecuteState, TraceId: 01jrfncy613vpk1btwhbk9e1be, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2025-04-10T10:54:43.255612Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2I4ZWY1NDMtNDBiNWVkODctZGM0NDg3NTgtOGQ1OTJmZjI=, ActorId: [4:7491636192337689510:2347], ActorState: ExecuteState, TraceId: 01jrfncy613vpk1btwhbk9e1be, Reply query error, msg: Pending previous query completion proxyRequestId: 499 2025-04-10T10:54:43.255889Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 474 2025-04-10T10:54:43.255933Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 477 2025-04-10T10:54:43.255966Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 483 2025-04-10T10:54:43.256028Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 488 2025-04-10T10:54:43.256077Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-04-10T10:54:43.256111Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ0ZTUyNDgtNzM5YWQyODMtODJlYjNkZWYtZmIxZGE2YmQ=, ActorId: [4:7491636192337689538:2352], ActorState: ExecuteState, TraceId: 01jrfncy5y1p54aggjfm6cpg23, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2025-04-10T10:54:43.256393Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzM5ZTQyMjEtM2E1Y2U4Mi00MWMwZDM5ZS01NDIzMjY5Ng==, ActorId: [4:7491636192337689512:2349], ActorState: ExecuteState, TraceId: 01jrfncy5y3egf034jawj17bca, Reply query error, msg: Pending previous query completion proxyRequestId: 481 2025-04-10T10:54:43.256428Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzM5ZTQyMjEtM2E1Y2U4Mi00MWMwZDM5ZS01NDIzMjY5Ng==, ActorId: [4:7491636192337689512:2349], ActorState: ExecuteState, TraceId: 01jrfncy5y3egf034jawj17bca, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2025-04-10T10:54:43.256658Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzJlMjllYjQtNThiNGJiYzgtMTMyMWRiZDgtOTYxYTE4ZA==, ActorId: [4:7491636192337689539:2353], ActorState: ExecuteState, TraceId: 01jrfncy5z93653yjbt3zxq4e7, Reply query error, msg: Pending previous query completion proxyRequestId: 484 2025-04-10T10:54:43.256693Z node 4 
:KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzJlMjllYjQtNThiNGJiYzgtMTMyMWRiZDgtOTYxYTE4ZA==, ActorId: [4:7491636192337689539:2353], ActorState: ExecuteState, TraceId: 01jrfncy5z93653yjbt3zxq4e7, Reply query error, msg: Pending previous query completion proxyRequestId: 490 2025-04-10T10:54:43.256748Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzJlMjllYjQtNThiNGJiYzgtMTMyMWRiZDgtOTYxYTE4ZA==, ActorId: [4:7491636192337689539:2353], ActorState: ExecuteState, TraceId: 01jrfncy5z93653yjbt3zxq4e7, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2025-04-10T10:54:43.256805Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzJlMjllYjQtNThiNGJiYzgtMTMyMWRiZDgtOTYxYTE4ZA==, ActorId: [4:7491636192337689539:2353], ActorState: ExecuteState, TraceId: 01jrfncy5z93653yjbt3zxq4e7, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2025-04-10T10:54:43.256869Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzJlMjllYjQtNThiNGJiYzgtMTMyMWRiZDgtOTYxYTE4ZA==, ActorId: [4:7491636192337689539:2353], ActorState: ExecuteState, TraceId: 01jrfncy5z93653yjbt3zxq4e7, Reply query error, msg: Pending previous query completion proxyRequestId: 501 2025-04-10T10:54:43.257154Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGZkOTZmMjUtYzdiMjUyZmYtYWM4OTVhN2MtN2I0MGM1MjM=, ActorId: [4:7491636192337689511:2348], ActorState: ExecuteState, TraceId: 01jrfncy5ydjk6yhdhg72t37x9, Reply query error, msg: Pending previous query completion proxyRequestId: 487 2025-04-10T10:54:43.257191Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGZkOTZmMjUtYzdiMjUyZmYtYWM4OTVhN2MtN2I0MGM1MjM=, ActorId: [4:7491636192337689511:2348], ActorState: ExecuteState, TraceId: 01jrfncy5ydjk6yhdhg72t37x9, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2025-04-10T10:54:43.257249Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2Y2MjZlNGYtMmNhN2RkYjktZGE5ZWFjMTMtZTU4MmUyNjI=, ActorId: [4:7491636192337689540:2354], ActorState: ExecuteState, TraceId: 01jrfncy9tfa53v3pmnm0ngpcj, Reply query error, msg: Pending previous query completion proxyRequestId: 504 2025-04-10T10:54:43.258106Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjkyM2QwZGUtMjdkY2I2MDYtM2Q4ODc5MDQtNTYzNDQ1ZA==, ActorId: [4:7491636192337689537:2351], ActorState: ExecuteState, TraceId: 01jrfncy5y8bw32mznq718a1b8, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2025-04-10T10:54:43.277106Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzM5ZTQyMjEtM2E1Y2U4Mi00MWMwZDM5ZS01NDIzMjY5Ng==, ActorId: [4:7491636192337689512:2349], ActorState: ExecuteState, TraceId: 01jrfncy5y3egf034jawj17bca, Reply query error, msg: Pending previous query completion proxyRequestId: 511 |66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |66.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log}
|66.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
>> TFlatTest::WriteSplitKillRead
>> TObjectStorageListingTest::Listing
>> TLocksTest::Range_IncorrectDot1
>> TLocksTest::Range_IncorrectNullDot1
>> TLocksTest::SetLockFail
>> TCancelTx::CrossShardReadOnly
>> TFlatTest::AutoSplitBySize
>> Cdc::Alter [GOOD]
>> Cdc::AddColumn
>> Viewer::JsonAutocompleteEmpty [GOOD]
>> Viewer::JsonAutocompleteEndOfDatabaseName
>> TFlatTest::SelectRangeItemsLimit
>> TFlatTest::SelectRangeBytesLimit
|66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|66.8%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
>> Viewer::JsonAutocompleteColumns [GOOD]
>> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD]
>> TFlatTest::ShardFreezeUnfreeze
>> TFlatTest::SelectRangeReverse
>> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD]
>> Viewer::JsonStorageListingV1
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD]
>> TLocksFatTest::RangeSetRemove
>> TFlatTest::SelectRangeNullArgs3
>> TFlatTest::SelectRangeForbidNullArgs2 [GOOD]
>> TFlatTest::SelectRangeForbidNullArgs3
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD]
Test command err:
2025-04-10T10:54:46.093038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T10:54:46.093496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:54:46.093588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 24070, node 1
TClient is connected to server localhost:24360
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD]
Test command err:
2025-04-10T10:54:32.494734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:32.495027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:32.495255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017e5/r3tmp/tmpbDLVCb/pdisk_1.dat 2025-04-10T10:54:32.964867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:32.972942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:54:32.988139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:54:32.989146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:54:33.000288Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-10T10:54:33.000371Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-10T10:54:33.037751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:33.047717Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-10T10:54:33.113428Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:334:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-10T10:54:33.113653Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-10T10:54:33.113811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:33.113871Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:33.113911Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:33.113960Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:33.113999Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:33.114117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
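[Editor's note] The HIVE records around this point walk a freshly registered node through the volatile-state progression that the log prints as "VolatileState: X -> Y": Unknown -> Disconnected -> Connecting -> Connected, reprocessing the boot/wait queues on each change. The sketch below is a minimal illustration of that observable progression only; the enum values are taken from the log lines, while Name/Next and the event comments are invented for illustration and do not come from Hive's actual sources.

#include <iostream>
#include <stdexcept>

// States exactly as they appear in the HIVE log lines above.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// The transitions visible in the trace (event names in comments are the
// log's own: TEvRegisterNode, TEvNodeInfo, TEvStatus).
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected; // TEvRegisterNode handled
        case EVolatileState::Disconnected: return EVolatileState::Connecting;   // TEvNodeInfo received
        case EVolatileState::Connecting:   return EVolatileState::Connected;    // TEvStatus accepted
        case EVolatileState::Connected:    return EVolatileState::Connected;    // steady state
    }
    throw std::logic_error("unreachable");
}

int main() {
    auto s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) {
        auto n = Next(s);
        std::cout << Name(s) << " -> " << Name(n) << '\n'; // mirrors "VolatileState: X -> Y"
        s = n;
    }
}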
2025-04-10T10:54:33.114473Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-10T10:54:33.114543Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-10T10:54:33.114583Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-10T10:54:33.114636Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T10:54:33.114812Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-10T10:54:33.127957Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-10T10:54:33.128093Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:334:2374]) 2025-04-10T10:54:33.128219Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T10:54:33.128794Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-10T10:54:33.128896Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Execute 2025-04-10T10:54:33.128949Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:33.129028Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Complete 2025-04-10T10:54:33.129216Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443319296 } 2025-04-10T10:54:33.129289Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-10T10:54:33.129343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:33.129529Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-10T10:54:33.129618Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T10:54:33.129660Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T10:54:33.129832Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-10T10:54:33.129879Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-10T10:54:33.129917Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-10T10:54:33.129956Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T10:54:33.142853Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-10T10:54:33.142955Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T10:54:33.218696Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-10T10:54:33.218857Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T10:54:33.219234Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-10T10:54:33.219666Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-10T10:54:33.219755Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-10T10:54:33.220796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
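[Editor's note] The TX_COORDINATOR records above and below trace one transaction through its lifecycle: proposed ("HANDLE EvProposeTransaction ... Status# 16"), planned into a time step, sent to the mediator for the affected tablets ("Send from# ... step# 500"), and finally acknowledged. The toy model below only illustrates the batching-into-steps idea recorded in the trace; ToyCoordinator, Propose, and SendStep are invented names, not YDB's coordinator API, and the step size 500 merely echoes the "step# 500" seen here.

#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

// Toy coordinator: proposed transactions are assigned to the next time-step
// boundary, then each planned step is flushed to a mediator.
struct ToyCoordinator {
    uint64_t stepSize;
    std::map<uint64_t, std::vector<uint64_t>> planned; // step -> txids

    // Accept a proposal and schedule it at the next step boundary.
    uint64_t Propose(uint64_t txId, uint64_t now) {
        uint64_t step = (now / stepSize + 1) * stepSize;
        planned[step].push_back(txId);
        return step;
    }

    // Hand one planned step to the mediator and drop it once acknowledged.
    void SendStep(uint64_t step) {
        for (uint64_t tx : planned[step])
            std::cout << "step " << step << " txid " << tx << " -> mediator\n";
        planned.erase(step);
    }
};

int main() {
    ToyCoordinator c{500, {}};
    uint64_t step = c.Propose(1, 450); // lands on step 500, as in the trace above
    c.SendStep(step);
}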
2025-04-10T10:54:33.222429Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T10:54:33.222546Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-10T10:54:33.222592Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-10T10:54:33.222637Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-10T10:54:33.223444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:33.223533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-10T10:54:33.224591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-10T10:54:33.227611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:33.228901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:54:33.228972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:54:33.229849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-10T10:54:33.233934Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-10T10:54:33.248482Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:54:33.248606Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-10T10:54:33.248872Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-10T10:54:33.248955Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-10T10:54:33.249043Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-10T10:54:33.249263Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-10T10:54:33.250018Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-10T10:54:33.250253Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-10T10:54:33.251197Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-10T10:54:33.251478Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-10T10:54:33.251610Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-10T10:54:33.251696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-10T10:54:33.251870Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-10T10:54:33.251936Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996544}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 49.367582Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-04-10T10:54:49.367632Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-04-10T10:54:49.367854Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:54:49.368042Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:54:49.368106Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:54:49.368135Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:54:49.368170Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-10T10:54:49.379072Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:49.379199Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:49.380731Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-04-10T10:54:49.380803Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 33000 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-04-10T10:54:49.452838Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-04-10T10:54:49.452958Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T10:54:49.453000Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-04-10T10:54:49.453299Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-04-10T10:54:49.453829Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715665 marker# C2 2025-04-10T10:54:49.453914Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 33000 Status# 17 SEND 
EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-04-10T10:54:49.454506Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:54:49.455236Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:54:49.455303Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:54:49.455601Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:54:49.455654Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:54:49.455704Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-10T10:54:49.455934Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715665 keys extracted: 0 2025-04-10T10:54:49.456107Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:54:49.456343Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:54:49.456417Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-10T10:54:49.456888Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:54:49.458931Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-04-10T10:54:49.459002Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:49.459491Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T10:54:49.459575Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-10T10:54:49.459617Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-10T10:54:49.459637Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-04-10T10:54:49.459669Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 acknowledged 2025-04-10T10:54:49.459939Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:54:49.460001Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:54:49.460052Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-04-10T10:54:49.460135Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:54:49.460778Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-04-10T10:54:49.497350Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-04-10T10:54:49.497476Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:54:49.498108Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-10T10:54:49.498710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1 2025-04-10T10:54:49.499717Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-04-10T10:54:49.500196Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:54:49.501299Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-10T10:54:49.501440Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-10T10:54:49.501550Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-10T10:54:49.501606Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-10T10:54:49.501665Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 TRACE ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-10T10:54:49.501752Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-10T10:54:49.501837Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-10T10:54:49.501899Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-10T10:54:49.501963Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.501 INFO ydb-core-tx-datashard-ut_minstep(pid=1091365, 
tid=0x00007F12CA337D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-10T10:54:49.502130Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.502 NOTE ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060
2025-04-10T10:54:49.503518Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.503 NOTE ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE
2025-04-10T10:54:49.503592Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTZmMDA1MzEtODgzODUxZDctNmViZThmNzMtY2UxZmI2MmU= 2025-04-10 10:54:49.503 NOTE ydb-core-tx-datashard-ut_minstep(pid=1091365, tid=0x00007F12CA337D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-10T10:54:49.517108Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T10:54:49.517440Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-10T10:54:49.519768Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T10:54:49.520929Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-10T10:54:49.521418Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-10T10:54:49.521508Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-10T10:54:49.521642Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-10T10:54:49.521783Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-10T10:54:49.521923Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet |66.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::DescribeConsumer Test command err: 2025-04-10T10:53:01.890596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635784715305308:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.890666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:02.072051Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00196d/r3tmp/tmp1swHSD/pdisk_1.dat 2025-04-10T10:53:02.276246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9511, node 1 2025-04-10T10:53:02.318534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.318803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.322825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.361181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00196d/r3tmp/yandexiQd8T1.tmp 2025-04-10T10:53:02.361225Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: /home/runner/.ya/build/build_root/2gt4/00196d/r3tmp/yandexiQd8T1.tmp 2025-04-10T10:53:02.361394Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00196d/r3tmp/yandexiQd8T1.tmp 2025-04-10T10:53:02.361540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.412626Z INFO: TTestServer started on Port 21282 GrpcPort 9511 TClient is connected to server localhost:21282 PQClient connected to localhost:9511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.708803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.792066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:03.072988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:53:04.751421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635797600207996:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.751622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.752862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635797600208031:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.756842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.768182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635797600208033:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.043973Z node 1 :TX_PROXY ERROR: Actor# [1:7491635797600208097:2447] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.074573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.139465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.211011Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635801895175406:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.212723Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTUxMDcyYzUtMjZiY2M5OTItNGM2NGQ5NWEtYjgyYmQ5YTM=, ActorId: [1:7491635797600207992:2335], ActorState: ExecuteState, TraceId: 01jrfn9yb08gfa9tjrxa1gtkba, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.218436Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.238022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635801895175699:2629] 2025-04-10T10:53:06.885280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635784715305308:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.885356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:10.484296Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:10.500122Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:10.501462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635823370012423:2765], Recipient [1:7491635789010273035:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:10.501523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:10.501550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:10.501581Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635823370012419:2762], Recipient [1:7491635789010273035:2193]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-10T10:53:10.501596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:10.655207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:10.655721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:53:10.655988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:10.656021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:10.656052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:10.656087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:53:10.656130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] w ... 5-04-10T10:54:50.031429Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7491636121509497035:2141], Recipient [6:7491636121509497035:2141]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:54:50.031450Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime #0 0x18eec551 in __asan_memcpy /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 #1 0x19172701 in MemCopy /-S/util/generic/mem_copy.h:18:9 #2 0x19172701 in TBuffer::Append(char const*, unsigned long) /-S/util/generic/buffer.cpp:54:5 #3 0x1fc4af46 in Acquire /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.h:208:18 #4 0x1fc4af46 in NYdb::Dev::NTopic::TWriteSessionImpl::FlushWriteIfRequiredImpl() /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:1366:64 #5 0x1fc4a4f0 in NYdb::Dev::NTopic::TWriteSessionImpl::WriteInternal(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:660:9 #6 0x1fc23871 in NYdb::Dev::NTopic::TWriteSession::Write(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:72:19 #7 0x1fc2581e in NYdb::Dev::NTopic::TSimpleBlockingWriteSession::Write(NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*, TDuration const&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:131:17 #8 0x18e500cc in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:52:13 #9 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #10 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #11 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #12 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #13 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #14 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #16 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #17 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #18 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #19 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #20 0x196b7d95 in NUnitTest::TTestFactory::Execute() 
/-S/library/cpp/testing/unittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7f8c4938ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7f8c4938ae3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/.ya/build/build_root/2gt4/00196d/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) 0x50300594f889 is located 9 bytes inside of 32-byte region [0x50300594f880,0x50300594f8a0) freed by thread T0 here: #0 0x18f22a3d in operator delete(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:143:3 #1 0x18e50034 in UnRef /-S/util/generic/string.h:100:13 #2 0x18e50034 in UnRef /-S/util/generic/string.h:53:16 #3 0x18e50034 in UnRef /-S/util/generic/ptr.h:625:13 #4 0x18e50034 in ~TIntrusivePtr /-S/util/generic/ptr.h:523:9 #5 0x18e50034 in ~TBasicString /-S/util/generic/fwd.h:8:7 #6 0x18e50034 in ~TStringBuilder /-S/util/string/builder.h:8:11 #7 0x18e50034 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:27 #8 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #9 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #10 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #11 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #12 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #13 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #14 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #16 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #17 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #18 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #19 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #20 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #21 0x7f8c4938ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) previously allocated by thread T0 here: #0 0x18f221dd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x165b3305 in Construct, std::__y1::allocator > > &> /-S/util/generic/string.h:207:17 #2 0x165b3305 in TBasicString>::Clone() /-S/util/generic/string.h:228:9 #3 0x191c3ecc in Detach /-S/util/generic/string.h:376:13 #4 0x191c3ecc in MutRef 
/-S/util/generic/string.h:249:9 #5 0x191c3ecc in append /-S/util/generic/string.h:784:9 #6 0x191c3ecc in TStringOutput::DoWrite(void const*, unsigned long) /-S/util/stream/str.cpp:37:9 #7 0x18e4fedd in Write /-S/util/stream/output.h:74:13 #8 0x18e4fedd in Write /-S/util/stream/output.h:84:9 #9 0x18e4fedd in Out /-S/util/stream/output.h:199:11 #10 0x18e4fedd in operator<< /-S/util/stream/output.h:218:5 #11 0x18e4fedd in operator<< /-S/util/string/builder.h:33:21 #12 0x18e4fedd in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:48 #13 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #14 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #15 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #16 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #17 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #18 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #19 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #20 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #21 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #22 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #23 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #24 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #25 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #26 0x7f8c4938ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x50300594f600: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fa fa 0x50300594f680: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300594f700: fa fa fa fa fa fa fd fd fd fd fa fa fa fa fa fa 0x50300594f780: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fd fd 0x50300594f800: fd fa fa fa fa fa fa fa fa fa fd fd fd fa fa fa =>0x50300594f880: fd[fd]fd fd fa fa fa fa fa fa fa fa fa fa fa fa 0x50300594f900: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fa fa 0x50300594f980: fa fa fa fa fd fd fd fd fa fa fa fa fa fa fa fa 0x50300594fa00: fa fa fa fa fa fa 00 00 00 00 fa fa fa fa fa fa 0x50300594fa80: fa fa fd fd fd fa fa fa fa fa fa fa fa fa fa fa 0x50300594fb00: fa fa fa fa fd fd fd fa fa fa fa fa fa fa fa fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: 
f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==1069560==ABORTING >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TLocksTest::GoodDupLock >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TFlatTest::SplitEmptyAndWrite >> Cdc::DropIndex [GOOD] >> Cdc::DisableStream |66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_OffsetCommit |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TReplicationTests::Create >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK |66.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |66.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-04-10T10:54:45.681498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636230207802156:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:45.681937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002047/r3tmp/tmp0x9hMl/pdisk_1.dat 2025-04-10T10:54:46.341318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:46.346405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:46.346521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:46.351300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28942 WaitRootIsUp 'dc-1'... 
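The AddressSanitizer report above decodes to a payload-lifetime bug in WithSDK::DescribeConsumer: the freeing stack ends in ~TStringBuilder at topic_ut.cpp:50, so the message string is a temporary destroyed at the end of that statement, while the access stack shows the Write() at topic_ut.cpp:52 still copying those bytes (TWriteSessionImpl::FlushWriteIfRequiredImpl -> TBuffer::Append -> __asan_memcpy). A minimal sketch of that pattern, with hypothetical names rather than the actual test code, assuming (as the traces imply) that TWriteMessage holds a non-owning view of its payload:

    #include <util/string/builder.h>
    #include <util/generic/strbuf.h>
    #include <ydb-cpp-sdk/client/topic/client.h>  // SDK topic client (include path assumed)

    void WriteOne(NYdb::NTopic::TSimpleBlockingWriteSession& session, size_t i) {
        // BUG (cf. topic_ut.cpp:50): the TStringBuilder temporary is freed at
        // the end of this statement, leaving 'payload' a dangling view.
        TStringBuf payload = TStringBuilder() << "message-" << i;

        // cf. topic_ut.cpp:52: the bytes are only copied now, inside
        // FlushWriteIfRequiredImpl via TBuffer::Append; ASan flags this
        // memcpy as heap-use-after-free, exactly as reported above.
        session.Write(NYdb::NTopic::TWriteMessage(payload));

        // Fix: keep an owning string alive across the Write() call:
        //     TString owned = TStringBuilder() << "message-" << i;
        //     session.Write(NYdb::NTopic::TWriteMessage(owned));
    }

The shadow-byte map agrees with this reading: the faulting address sits in an fd (freed) region, 9 bytes into a 32-byte allocation, which matches a small refcounted TBasicString buffer released by the builder's destructor.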
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:46.977329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:46.988769Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:46.998384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:54:47.167671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.187454Z node 1 :TX_PROXY ERROR: Actor# [1:7491636238797737350:2399] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-10T10:54:47.189752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.212513Z node 1 :TX_PROXY ERROR: Actor# [1:7491636238797737391:2434] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002047/r3tmp/tmpcHQEr8/pdisk_1.dat 2025-04-10T10:54:50.690355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:50.704772Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:50.728070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:50.728150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:50.732215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19799 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:51.024461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.034665Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:54:51.044206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.176909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:51.236256Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710660: 2025-04-10T10:54:51.236618Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884672:2396] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:51.236697Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884672:2396] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:51.236724Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884672:2396] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:51.250812Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710661: 2025-04-10T10:54:51.254259Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884680:2401] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:51.254348Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884680:2401] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:51.254367Z node 2 :TX_PROXY ERROR: Actor# [2:7491636257342884680:2401] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:51.268136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestamps >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> BasicUsage::BrokenCredentialsProvider [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-04-10T10:54:46.470697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636232973372411:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:46.475038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002045/r3tmp/tmp4AuZrW/pdisk_1.dat 2025-04-10T10:54:47.456900Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:47.461281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:47.461385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:47.478834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12023 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:48.041788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:48.115897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:52.050609Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636260577216214:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:52.051446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002045/r3tmp/tmpSn11yR/pdisk_1.dat 2025-04-10T10:54:52.346569Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:52.378984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:52.379088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:52.387293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64339 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T10:54:52.711507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:52.722606Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:52.747176Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:54:52.751470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning |66.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |66.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-04-10T10:54:50.014586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636252098399610:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:50.014632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202f/r3tmp/tmpjeR69n/pdisk_1.dat 2025-04-10T10:54:50.625334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:50.631082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:50.631172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:50.650046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32483 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
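Stepping back to the TFlatTest::ShardFreezeUnfreeze output above: the "Requested freeze state already set" TX_PROXY errors come from re-proposing the freeze state the table is already in, and the node-2 "cannot parse tx" ERRORs show that a frozen shard rejects data transactions before even parsing them. A sketch of that round-trip against the in-tree test client (NKikimr::Tests::TClient from ydb/core/testlib/test_client.h); the AlterTable proto text is an assumption modelled on NKikimrSchemeOp::TPartitionConfig::FreezeState, not copied from the test source:

    // Flip the table into the frozen state.
    client.AlterTable("/dc-1/Dir",
        "Name: \"TableOld\" PartitionConfig { FreezeState: Freeze }");

    // Re-proposing the same state is rejected with
    // "Requested freeze state already set" (Error 1 above), so a caller
    // may treat that reply as idempotent success rather than a failure.
    client.AlterTable("/dc-1/Dir",
        "Name: \"TableOld\" PartitionConfig { FreezeState: Freeze }");

    // While frozen, writes fail at the datashard ("cannot parse tx" in the
    // node-2 log above); unfreezing restores them.
    client.AlterTable("/dc-1/Dir",
        "Name: \"TableOld\" PartitionConfig { FreezeState: Unfreeze }");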
2025-04-10T10:54:51.032731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.084032Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.114060Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:51.126081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202f/r3tmp/tmpG342XE/pdisk_1.dat 2025-04-10T10:54:55.301234Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636275160943101:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:55.302275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:55.482967Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:55.489264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:55.489348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:55.491500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10971 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:55.805307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:55.815576Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:55.827856Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:55.832489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-04-10T10:54:49.673896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636246358623284:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202d/r3tmp/tmpq2Q6p7/pdisk_1.dat 2025-04-10T10:54:50.086817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:50.353014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:50.363331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:50.363412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:50.366887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23540 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:50.890505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:50.926935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:55.171794Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636272010543462:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:55.179630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202d/r3tmp/tmpvUdJ76/pdisk_1.dat 2025-04-10T10:54:55.408059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:55.408136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:55.414197Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:55.451126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19290 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:55.745331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:55.754749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:55.779223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-10T10:54:00.859388Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744282440859356 2025-04-10T10:54:01.217895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636042850898533:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.218003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.273170Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636039142794557:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.273507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.479747Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:54:01.480730Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153a/r3tmp/tmpzmAQUf/pdisk_1.dat 2025-04-10T10:54:01.758005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.758085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.758745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.758784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.763956Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:01.764091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.765939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.767986Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10737, node 1 2025-04-10T10:54:01.865867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00153a/r3tmp/yandexNcQFRW.tmp 2025-04-10T10:54:01.865894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00153a/r3tmp/yandexNcQFRW.tmp 2025-04-10T10:54:01.866046Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00153a/r3tmp/yandexNcQFRW.tmp 2025-04-10T10:54:01.866189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:01.929549Z INFO: TTestServer started on Port 29611 GrpcPort 10737 TClient is connected to server localhost:29611 PQClient connected to localhost:10737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:02.224014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:54:04.313401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636055735801439:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.313582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.313644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636055735801451:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.318097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:04.362443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636055735801453:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:04.581834Z node 1 :TX_PROXY ERROR: Actor# [1:7491636055735801543:2686] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.605324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.723786Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636052027696820:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.724101Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTdjN2ZiNjUtOGM5NTYxNjctZWJmYzVkNTItNjAxMDQxY2U=, ActorId: [2:7491636052027696777:2310], ActorState: ExecuteState, TraceId: 01jrfnbrvte16cye1fazam6km7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.721722Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636055735801563:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.722846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhkMjY0N2ItZGQ5ZGRkMTktOTllZmMxOTUtNGNjNjFiNjA=, ActorId: [1:7491636055735801435:2336], ActorState: ExecuteState, TraceId: 01jrfnbrg115n4jszw3tf1aabw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.725342Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.725535Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.854575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.994296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10737", true, true, 1000); 2025-04-10T10:54:05.185382Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnbs8m77jptf3c7tkq6s92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmODEyNzEtOWJlOGQ2ZGYtNTJmNTMzYTAtOTE5ZjVjNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491636060030769255:2979] 2025-04-10T10:54:06.216555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636042850898533:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.216650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:06.274900Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636039142794557:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.274996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T10:54:10.663153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:10737 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T10:54:10.837002Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:10737 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds ... redPartition=(NULL)) Update the table 2025-04-10T10:54:56.058836Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7491636276652120666:2537] disconnected; active server actors: 1 2025-04-10T10:54:56.058866Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7491636276652120666:2537] disconnected no session 2025-04-10T10:54:56.256988Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491636272357153320:2537] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T10:54:56.257035Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491636272357153320:2537] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-10T10:54:56.257055Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491636272357153320:2537] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T10:54:56.257087Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:54:56.265635Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-10T10:54:56.268527Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7491636276652120701:2537], now have 1 active actors on pipe 2025-04-10T10:54:56.268611Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:54:56.268642Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:54:56.268744Z node 6 :PERSQUEUE INFO: new Cookie src|981a39a0-c6595184-e3f335fd-c114bd6b_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T10:54:56.268855Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T10:54:56.271276Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|981a39a0-c6595184-e3f335fd-c114bd6b_0 2025-04-10T10:54:56.268909Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:54:56.270174Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:54:56.270202Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:54:56.270287Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:54:56.275430Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282496275 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:54:56.275565Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|981a39a0-c6595184-e3f335fd-c114bd6b_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T10:54:56.275756Z :INFO: [] MessageGroupId [src] SessionId [src|981a39a0-c6595184-e3f335fd-c114bd6b_0] Write session: close. 
Timeout = 0 ms 2025-04-10T10:54:56.275811Z :INFO: [] MessageGroupId [src] SessionId [src|981a39a0-c6595184-e3f335fd-c114bd6b_0] Write session will now close 2025-04-10T10:54:56.275858Z :DEBUG: [] MessageGroupId [src] SessionId [src|981a39a0-c6595184-e3f335fd-c114bd6b_0] Write session: aborting 2025-04-10T10:54:56.276324Z :INFO: [] MessageGroupId [src] SessionId [src|981a39a0-c6595184-e3f335fd-c114bd6b_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:54:56.276372Z :DEBUG: [] MessageGroupId [src] SessionId [src|981a39a0-c6595184-e3f335fd-c114bd6b_0] Write session: destroy 2025-04-10T10:54:56.280851Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|981a39a0-c6595184-e3f335fd-c114bd6b_0 grpc read done: success: 0 data: 2025-04-10T10:54:56.280877Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|981a39a0-c6595184-e3f335fd-c114bd6b_0 grpc read failed 2025-04-10T10:54:56.280909Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|981a39a0-c6595184-e3f335fd-c114bd6b_0 grpc closed 2025-04-10T10:54:56.280928Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|981a39a0-c6595184-e3f335fd-c114bd6b_0 is DEAD 2025-04-10T10:54:56.281645Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:54:56.286278Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7491636276652120701:2537] destroyed 2025-04-10T10:54:56.286376Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:54:56.329430Z :INFO: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Starting read session 2025-04-10T10:54:56.329521Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Starting session to cluster null (localhost:20706) 2025-04-10T10:54:56.331521Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:56.331568Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:56.331622Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] Reconnecting session to cluster null in 0.000000s 2025-04-10T10:54:56.333051Z :ERROR: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-10T10:54:56.333119Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:56.333160Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:56.333287Z :INFO: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-10T10:54:56.338525Z :NOTICE: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:54:56.338575Z :DEBUG: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-10T10:54:56.338684Z :INFO: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Closing read session. Close timeout: 0.000000s 2025-04-10T10:54:56.338751Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:54:56.338805Z :INFO: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Counters: { Errors: 1 CurrentSessionLifetimeMs: 9 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:54:56.338907Z :NOTICE: [/Root] [/Root] [3c56271e-2fc452d2-fea60ff9-3122c668] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:54:56.838385Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7491636276652120752:2550] TxId: 281474976710691. Ctx: { TraceId: 01jrfndb8wf2jnna9rx80vkf0y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=N2VkZjU0M2QtOWZkN2UzNjMtYjFlOTU2MDQtZWM0YTRjNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2025-04-10T10:54:56.838571Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7491636276652120760:2561], TxId: 281474976710691, task: 2. Ctx: { TraceId : 01jrfndb8wf2jnna9rx80vkf0y. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=N2VkZjU0M2QtOWZkN2UzNjMtYjFlOTU2MDQtZWM0YTRjNGQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7491636276652120752:2550], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:54:56.838729Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7491636276652120761:2562], TxId: 281474976710691, task: 4. Ctx: { TraceId : 01jrfndb8wf2jnna9rx80vkf0y. SessionId : ydb://session/3?node_id=5&id=N2VkZjU0M2QtOWZkN2UzNjMtYjFlOTU2MDQtZWM0YTRjNGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7491636276652120752:2550], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:54:57.785308Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710692. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:54:57.785467Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7491636280947088090:2565] TxId: 281474976710692. Ctx: { TraceId: 01jrfndchb6kz3sd288rhrj1at, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZmU4MzQwNC1hOGJmMjFhNC03ZWQwMjIyNi00ZWZkZGM2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:54:57.785704Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZmU4MzQwNC1hOGJmMjFhNC03ZWQwMjIyNi00ZWZkZGM2NA==, ActorId: [5:7491636280947088087:2565], ActorState: ExecuteState, TraceId: 01jrfndchb6kz3sd288rhrj1at, Create QueryResponse for error on request, msg: 2025-04-10T10:54:57.787415Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfndchnf0v7nf353ja3x7vt" } } YdbStatus: UNAVAILABLE ConsumedRu: 6 } 2025-04-10T10:54:57.837331Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=N2VkZjU0M2QtOWZkN2UzNjMtYjFlOTU2MDQtZWM0YTRjNGQ=, ActorId: [5:7491636276652120702:2550], ActorState: ExecuteState, TraceId: 01jrfndb8wf2jnna9rx80vkf0y, Create QueryResponse for error on request, msg: 2025-04-10T10:54:57.839515Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 6" severity: 1 } } TxMeta { id: "01jrfndbsm8z88y29d6w49ewvf" } } YdbStatus: UNAVAILABLE ConsumedRu: 348 } >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter >> TReplicationTests::CreateSequential >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TFlatTest::SelectRangeNullArgs4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-04-10T10:54:47.923705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636239950791343:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:47.924111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002037/r3tmp/tmpv6mnOO/pdisk_1.dat 2025-04-10T10:54:48.538044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:48.538142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:48.564371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:48.567401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13146 WaitRootIsUp 'dc-1'... 
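The BasicUsage::BrokenCredentialsProvider output above is driven by a credentials provider factory that throws while the read session is being created (the issue text embeds basic_usage_ut.cpp:451); the SDK converts the throw into "Can't get Authentication info from CredentialsProvider", closes the session with CLIENT_UNAUTHENTICATED after a single connection attempt, and the client aborts. A minimal sketch of such a factory; the interface follows the public SDK's NYdb::ICredentialsProviderFactory, with the exact virtual signatures and include path treated as assumptions:

    #include <memory>
    #include <util/generic/yexception.h>
    #include <ydb-cpp-sdk/client/types/credentials/credentials.h>  // include path assumed

    class TBrokenCredentialsProviderFactory final : public NYdb::ICredentialsProviderFactory {
    public:
        std::shared_ptr<NYdb::ICredentialsProvider> CreateProvider() const override {
            // ythrow prefixes the source location, which is where the
            // "...basic_usage_ut.cpp:451: exception during creation" text in
            // the SessionClosed issues above comes from.
            ythrow yexception() << "exception during creation";
        }
    };

Installed via the driver or read-session settings, every (re)connect then fails exactly as logged: CLIENT_UNAUTHENTICATED with "Failed to establish connection to server ... Attempts done: 1".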
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.121875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.168014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.194713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:49.203502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.765634Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.12, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.030s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T10:54:49.786564Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.12, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T10:54:49.861484Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.17, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T10:54:49.892537Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.17, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-04-10T10:54:50.001100Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3463 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744282489425 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false 
IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-10T10:54:50.518450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:54:50.518699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T10:54:50.519008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T10:54:50.519043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T10:54:50.519062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-10T10:54:50.519294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-10T10:54:50.519365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:54:50.520360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:54:50.520439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-10T10:54:50.520580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T10:54:50.520613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-10T10:54:50.520933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:54:50.521049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:54:50.521139Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:54:50.521656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-10T10:54:50.521741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-10T10:54:50.521765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-04-10T10:54:50.521815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-04-10T10:54:50.521835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-04-10T10:54:50.521848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-04-10T10:54:50.521855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 waiting... 2025-04-10T10:54:50.532006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710680, at schemeshard: 72057594046644480 2025-04-10T10:54:50.532038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710680, ready parts: 0/1, is published: true 2025-04-10T10:54:50.532066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710680, at schemeshard: 72057594046644480 2025-04-10T10:54:50.543360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-10T10:54:50.543402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-04-10T10:54:50.543535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-10T10:54:50.543559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-10T10:54:50.543607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 7205759403796889 ... 
731219Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T10:54:56.731626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-10T10:54:56.731762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.731807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.731878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-10T10:54:56.731903Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:54:56.732314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-04-10T10:54:56.732431Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710694:0 progress is 1/1 2025-04-10T10:54:56.732443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710694 ready parts: 1/1 2025-04-10T10:54:56.732466Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710694:0 progress is 1/1 2025-04-10T10:54:56.732477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710694 ready parts: 1/1 2025-04-10T10:54:56.732490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710694, ready parts: 1/1, is published: true 2025-04-10T10:54:56.732534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491636278929600551:2424] message: TxId: 281474976710694 2025-04-10T10:54:56.732555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710694 ready parts: 1/1 2025-04-10T10:54:56.732575Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710694:0 2025-04-10T10:54:56.732585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710694:0 2025-04-10T10:54:56.732736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-04-10T10:54:56.734084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T10:54:56.734386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-10T10:54:56.734574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T10:54:56.734728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-10T10:54:56.736692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T10:54:56.736713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 
72075186224037888 2025-04-10T10:54:56.736753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T10:54:56.736771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-10T10:54:56.740211Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-10T10:54:56.740246Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-10T10:54:56.778520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491636278929600372 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-04-10T10:54:56.778604Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-10T10:54:56.778868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491636278929600169 RawX2: 4503608217307456 } TabletId: 72075186224037892 State: 4 2025-04-10T10:54:56.778894Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-10T10:54:56.779106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491636274634632523 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-10T10:54:56.779127Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T10:54:56.779240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491636278929600364 RawX2: 4503608217307482 } TabletId: 72075186224037893 State: 4 2025-04-10T10:54:56.779259Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-04-10T10:54:56.779818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.779902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.779962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.780005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.786564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491636278929600196 RawX2: 4503608217307457 } TabletId: 72075186224037890 State: 4 2025-04-10T10:54:56.786640Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at 
schemeshard: 72057594046644480 2025-04-10T10:54:56.787303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:54:56.787783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-10T10:54:56.788015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-10T10:54:56.788192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T10:54:56.788331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T10:54:56.788429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T10:54:56.788562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T10:54:56.788664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-04-10T10:54:56.788824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T10:54:56.789278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-10T10:54:56.789298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-10T10:54:56.789338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T10:54:56.789346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-10T10:54:56.789411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T10:54:56.789599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T10:54:56.789726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T10:54:56.789736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T10:54:56.789771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-10T10:54:56.789780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-10T10:54:56.789851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T10:54:56.789869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T10:54:56.789909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T10:54:56.791372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T10:54:56.791403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T10:54:56.791454Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T10:54:56.791512Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-10T10:54:56.791540Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-10T10:54:56.791561Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-10T10:54:56.791582Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-10T10:54:56.791603Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found >> DataShardWrite::WriteImmediateBadRequest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-04-10T10:54:50.365167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636252916015335:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:50.365645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202b/r3tmp/tmpbP4Y3X/pdisk_1.dat 2025-04-10T10:54:51.227079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:51.229194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:51.229280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:51.241933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:51.895558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.947221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.982633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:56.038735Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636279320198898:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:56.038786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00202b/r3tmp/tmppYXS5R/pdisk_1.dat 2025-04-10T10:54:56.470258Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:56.508132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:56.508214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:56.512849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18335 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:56.851664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:56.864426Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:56.886776Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:54:56.896398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TLocksTest::SetEraseSet [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-04-10T10:54:52.114617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636258808894356:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:52.115180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002027/r3tmp/tmp1MqqzQ/pdisk_1.dat 2025-04-10T10:54:52.936250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:52.936352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:52.950381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:53.024613Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19880 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T10:54:53.593351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:53.685821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:57.719431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636280429915642:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:57.719594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002027/r3tmp/tmp8qXwkD/pdisk_1.dat 2025-04-10T10:54:58.179409Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:58.183062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:58.183165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:58.191315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14880 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:58.704209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.721919Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.739368Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:58.746372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
|66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2025-04-10T10:53:04.278722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635797636366689:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.278913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006d3/r3tmp/tmpaQgAIX/pdisk_1.dat 2025-04-10T10:53:04.738210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:04.741199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:04.741306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:04.744474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9831, node 1 2025-04-10T10:53:04.870855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:04.870886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:04.870902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:04.871049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.252279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:53:07.362935Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.366581Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY= 2025-04-10T10:53:07.367217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810521269060:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.367306Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:53:07.367328Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.367352Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:07.372308Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [1:7491635810521269061:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.372568Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810521269060:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.372624Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810521269060:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.372654Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635810521269060:2329], Successfully finished 2025-04-10T10:53:07.372758Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.379377Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.382962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.384119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.384285Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.392259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.470445Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.474479Z node 1 :TX_PROXY ERROR: Actor# [1:7491635810521269129:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.474597Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635810521269078:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.481203Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-10T10:53:07.481233Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.481320Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810521269138:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-10T10:53:07.481415Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [1:7491635810521269061:2330], ActorState: ReadyState, TraceId: 01jrfna10r4rx6jgzxhr20bb9n, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-10T10:53:07.483393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810521269138:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:07.483522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:07.741437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:53:07.746311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:53:07.748104Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [1:7491635810521269061:2330], ActorState: ExecuteState, TraceId: 01jrfna10r4rx6jgzxhr20bb9n, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7491635810521269147:2330] WorkloadServiceCleanup: 0 2025-04-10T10:53:07.749196Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [1:7491635810521269061:2330], ActorState: CleanupState, TraceId: 01jrfna10r4rx6jgzxhr20bb9n, EndCleanup, isFinal: 0 2025-04-10T10:53:07.749273Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWIxNzQzODktMjUyNWVhYjUtYmM0NWU4MDctNzQwYWQ0YTY=, ActorId: [1:7491635810521269061:2330], ActorState: CleanupState, TraceId: 01jrfna10r4rx6jgzxhr20bb9n, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7491635797636366776:2277] 2025-04-10T10:53:07.752787Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTUwYmNkNmEtZTc0ODgwYzktNjk5ZWE4ZGQtMzdkMDJlN2I=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTUwYmNkNmEtZTc0ODgwYzktNjk5ZWE4ZGQtMzdkMDJlN2I= 2025-04-10T10:53:07.752862Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTUwYmNkNmEtZTc0ODgwYzktNjk5ZWE4ZGQtMzdkMDJlN2I=, ActorId: [1:7491635810521269177:2335], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.753003Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:07.753058Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635810521269179:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.753091Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTUwYmNkNmEtZTc0ODgwYzktNjk5ZWE4ZGQtMzdkMDJlN2I=, ActorId: [1:7491635810521269177:2335], ActorState: ReadyState, TraceId: 01jrfna1993t6498rwanvw02jt, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7491635810521269176:2360] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:07.753118Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491635810521269177:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTUwYmNkNmEtZTc0ODgwYzktNjk5ZWE4ZGQtMzdkMDJlN2I= 2025-04-10T10:53:07.753171Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810521269180:2337], Database: /Root, Start database fetching 2025-04-10T10:53:07.753287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635810521269180:2337], Database: /Root, 
Database info successfully fetched, serverless: 0 2025-04-10T10:53:07.7533 ... e: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:02.459659Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MmFmN2EwYy1kZjkzNzhmMS03OTE2YzY3OS04YzU0ZGM2Mw==, ActorId: [9:7491636302741837297:3007], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:02.459695Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MmFmN2EwYy1kZjkzNzhmMS03OTE2YzY3OS04YzU0ZGM2Mw==, ActorId: [9:7491636302741837297:3007], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:02.459729Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MmFmN2EwYy1kZjkzNzhmMS03OTE2YzY3OS04YzU0ZGM2Mw==, ActorId: [9:7491636302741837297:3007], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:02.459816Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MmFmN2EwYy1kZjkzNzhmMS03OTE2YzY3OS04YzU0ZGM2Mw==, ActorId: [9:7491636302741837297:3007], ActorState: unknown state, Session actor destroyed 2025-04-10T10:55:02.466079Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7491636208252553784:2673][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 48, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T10:55:02.466195Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7491636208252553784:2673][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 49, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T10:55:02.468465Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7491636302741837308:3013], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-10T10:55:02.468818Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ExecuteState, TraceId: 01jrfndh9f0naya80rs2wbdmf3, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-10T10:55:02.468862Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ExecuteState, TraceId: 01jrfndh9f0naya80rs2wbdmf3, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:02.468893Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ExecuteState, TraceId: 01jrfndh9f0naya80rs2wbdmf3, EndCleanup, isFinal: 0 2025-04-10T10:55:02.469046Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ExecuteState, TraceId: 01jrfndh9f0naya80rs2wbdmf3, Sent query response back to proxy, proxyRequestId: 104, proxyId: [9:7491636169597847223:2277] 2025-04-10T10:55:02.469806Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-10T10:55:02.470117Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-10T10:55:02.470262Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:02.470302Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:02.470338Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:02.470377Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:02.470464Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=OGM5MGRjYzEtNDA3ZTc3ZDMtNTk0M2Q0YzgtNDAzNmYzNzY=, ActorId: [9:7491636302741837304:3011], ActorState: unknown state, Session actor destroyed 2025-04-10T10:55:02.793009Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA= 2025-04-10T10:55:02.793347Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:55:02.794286Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ReadyState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, received request, proxyRequestId: 106 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7491636302741837320:3020] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-04-10T10:55:02.794329Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ReadyState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, request placed into pool from cache: default 2025-04-10T10:55:02.794469Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ExecuteState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, Sending CompileQuery request 2025-04-10T10:55:02.822531Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7491636208252553784:2673][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 50, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T10:55:02.822645Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7491636208252553784:2673][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 51, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T10:55:02.824261Z node 9 :KQP_COMPILE_ACTOR 
ERROR: Compilation failed, self: [9:7491636302741837322:3021], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-10T10:55:02.826647Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ExecuteState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-04-10T10:55:02.826698Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ExecuteState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:02.826729Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ExecuteState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, EndCleanup, isFinal: 0 2025-04-10T10:55:02.826890Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ExecuteState, TraceId: 01jrfndhmaar6frc8qz5bfgzge, Sent query response back to proxy, proxyRequestId: 106, proxyId: [9:7491636169597847223:2277] 2025-04-10T10:55:02.827692Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-04-10T10:55:02.828105Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-04-10T10:55:02.828179Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:02.828224Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:02.828268Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:02.828307Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:02.828396Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MWI4ZGI4MzQtMWFlYzQxMTgtMjk2NDc3YzctNzQ5MjEwZjA=, ActorId: [9:7491636302741837319:3019], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2025-04-10T10:53:04.630132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635798284927261:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.632107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000703/r3tmp/tmp5Jl1DV/pdisk_1.dat 2025-04-10T10:53:05.057095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:05.057180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:05.079088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:05.084279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9843, node 1 2025-04-10T10:53:05.216949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.216975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.216984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.217127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15110 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.509231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:05.532635Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:53:07.744620Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.744867Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811169829783:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.748177Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh 2025-04-10T10:53:07.748824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.748853Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-10T10:53:07.748886Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-10T10:53:07.748955Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811169829783:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.749029Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811169829783:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.749090Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635811169829783:2329], Successfully finished 2025-04-10T10:53:07.750419Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.750627Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.760410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.763585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but
propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.764797Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.764960Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.773216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.860236Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.864170Z node 1 :TX_PROXY ERROR: Actor# [1:7491635811169829852:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.864242Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635811169829801:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.865792Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-10T10:53:07.865842Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-10T10:53:07.866007Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811169829859:2331], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.867233Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811169829859:2331], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.867317Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-10T10:53:07.867369Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:53:07.867713Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811169829868:2332], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:53:07.868811Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811169829868:2332], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-10T10:53:07.874286Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-04-10T10:53:07.874313Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.874349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811169829880:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-10T10:53:07.874667Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: ReadyState, TraceId: 01jrfna1d1azxmfmq4tmshra31, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-10T10:53:07.876384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635811169829880:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:07.876500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:08.118630Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635811169829868:2332], DatabaseId: Root, PoolId: sample_pool_id, Got delete notification 2025-04-10T10:53:08.121505Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: ExecuteState, TraceId: 01jrfna1d1azxmfmq4tmshra31, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7491635811169829889:2330] WorkloadServiceCleanup: 0 2025-04-10T10:53:08.122912Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: CleanupState, TraceId: 01jrfna1d1azxmfmq4tmshra31, EndCleanup, isFinal: 0 2025-04-10T10:53:08.122998Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: CleanupState, TraceId: 01jrfna1d1azxmfmq4tmshra31, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7491635798284927493:2276] 2025-04-10T10:53:08.129751Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:53:08.129809Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:53:08.129829Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzA2OWMzYy1iODU1MzFjLTNkNjNlMzQ5LTEwOWRiM2Nh, ActorId: [1:7491635811169829799:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:53:08.129851Z node 1 :KQP_SESSI ... 
de_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, Sending to Executer TraceId: 0 8 2025-04-10T10:55:03.239006Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, Created new KQP executer: [10:7491636308701856315:2562] isRollback: 0 2025-04-10T10:55:03.261065Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T10:55:03.261281Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, txInfo Status: Committed Kind: ReadWrite TotalDuration: 23.289 ServerDuration: 22.667 QueriesCount: 2 2025-04-10T10:55:03.261437Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:55:03.261534Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:03.261568Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, EndCleanup, isFinal: 0 2025-04-10T10:55:03.261631Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj2494xkyr64aww7mj4q, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7491636171262901397:2276] 2025-04-10T10:55:03.263749Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, TxId: 2025-04-10T10:55:03.263919Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 
2025-04-10T10:55:03.265006Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ReadyState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7491636308701856338:2568] database: /Root databaseId: /Root pool id: default 2025-04-10T10:55:03.265050Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ReadyState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, request placed into pool from cache: default 2025-04-10T10:55:03.265830Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, ExecutePhyTx, tx: 0x000050C00032AAD8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-10T10:55:03.265909Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, Sending to Executer TraceId: 0 8 2025-04-10T10:55:03.266023Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, Created new KQP executer: [10:7491636308701856343:2562] isRollback: 0 2025-04-10T10:55:03.279018Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T10:55:03.279122Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, ExecutePhyTx, tx: 0x000050C00032AA18 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T10:55:03.280263Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T10:55:03.280449Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, txInfo Status: Committed Kind: ReadOnly TotalDuration: 14.748 ServerDuration: 14.62 QueriesCount: 2 
2025-04-10T10:55:03.280595Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T10:55:03.280664Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:03.280699Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, EndCleanup, isFinal: 0 2025-04-10T10:55:03.280764Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ExecuteState, TraceId: 01jrfndj307vr5c5hmefj7r0b3, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7491636171262901397:2276] 2025-04-10T10:55:03.282333Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, TxId: 2025-04-10T10:55:03.282468Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, TxId: 2025-04-10T10:55:03.282866Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:03.282916Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:03.282950Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:03.282978Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:03.283059Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ZGVmMTIzNTUtODAyYmJmOWMtNGIzODZiMzItYjhhOGU2ZTA=, ActorId: [10:7491636308701856310:2562], ActorState: unknown state, Session actor destroyed 2025-04-10T10:55:03.298059Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MjhhNGQzNDYtNDI1ZmMxNDQtMTNlMTAyMTctNjY4ZGM2NmE=, ActorId: [10:7491636209917607499:2340], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:03.298121Z node 10 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=10&id=MjhhNGQzNDYtNDI1ZmMxNDQtMTNlMTAyMTctNjY4ZGM2NmE=, ActorId: [10:7491636209917607499:2340], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:03.298197Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MjhhNGQzNDYtNDI1ZmMxNDQtMTNlMTAyMTctNjY4ZGM2NmE=, ActorId: [10:7491636209917607499:2340], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:03.298235Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MjhhNGQzNDYtNDI1ZmMxNDQtMTNlMTAyMTctNjY4ZGM2NmE=, ActorId: [10:7491636209917607499:2340], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:03.298359Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MjhhNGQzNDYtNDI1ZmMxNDQtMTNlMTAyMTctNjY4ZGM2NmE=, ActorId: [10:7491636209917607499:2340], ActorState: unknown state, Session actor destroyed >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-04-10T10:54:48.241783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636242903606061:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:48.242138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002039/r3tmp/tmpvotMjF/pdisk_1.dat 2025-04-10T10:54:48.982730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:48.982819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:48.983101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:49.015132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19762 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.683000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:54:49.726422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:49.731423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.995118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.060198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.109246Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-04-10T10:54:50.109601Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541415:2504] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:50.109670Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541415:2504] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:50.109727Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541415:2504] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:50.112971Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-10T10:54:50.113202Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541436:2511] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:50.113254Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541436:2511] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:50.113291Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541436:2511] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:50.116510Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-04-10T10:54:50.116714Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541444:2515] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:50.116758Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541444:2515] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:50.116777Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541444:2515] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:50.120013Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 
281474976710665: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-10T10:54:50.120238Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541450:2518] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-10T10:54:50.120294Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541450:2518] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-10T10:54:50.120324Z node 1 :TX_PROXY ERROR: Actor# [1:7491636251493541450:2518] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-10T10:54:53.763466Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636263139470787:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002039/r3tmp/tmpL4KTV3/pdisk_1.dat 2025-04-10T10:54:53.955718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:54.122959Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:54.163930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:54.164025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:54.171599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13230 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:54.492309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.506903Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.527474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:54.643332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.797645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:59.468188Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636289146057151:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:59.484518Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002039/r3tmp/tmpcyS065/pdisk_1.dat 2025-04-10T10:54:59.636360Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:59.668233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:59.668346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:59.675163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30713 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:59.991797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:00.010894Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:55:00.015891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.114800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:00.197868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-04-10T10:54:57.027971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636279829641268:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:57.028368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002023/r3tmp/tmp6J7EP2/pdisk_1.dat 2025-04-10T10:54:57.829368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:57.839044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:57.839141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:57.844532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29214 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:58.409970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.466356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.502332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:58.519002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:58.874020Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.009s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T10:54:58.890957Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.012s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T10:54:58.973278Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1589 647 6413)b }, ecr=1.000 2025-04-10T10:54:58.991587Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.012s,wait=0.008s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2342 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744282498714 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 2025-04-10T10:54:59.228473Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:54:59.229539Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:54:59.229948Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:54:59.230249Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:54:59.233492Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.004s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-04-10T10:54:59.243643Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.34, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3298 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744282498714 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 
33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-10T10:54:59.457200Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-10T10:54:59.457241Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T10:54:59.457252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T10:54:59.457264Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T10:54:59.467483Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:55:02.270105Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636301733763692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:02.270211Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002023/r3tmp/tmpINXYJY/pdisk_1.dat 2025-04-10T10:55:02.406486Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:02.431686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:02.431760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:02.436193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63215 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:02.815516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:02.838707Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:02.886129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:03.102070Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T10:55:03.112328Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T10:55:03.194170Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T10:55:03.218877Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.026s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 Sc ... et: 72057594046644480 2025-04-10T10:55:03.493026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-04-10T10:55:03.493728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-04-10T10:55:03.493845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-04-10T10:55:03.493917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2025-04-10T10:55:03.493927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2025-04-10T10:55:03.514948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715678, at schemeshard: 72057594046644480 2025-04-10T10:55:03.514980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-04-10T10:55:03.514999Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715678, at schemeshard: 72057594046644480 2025-04-10T10:55:03.550747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-04-10T10:55:03.550814Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-04-10T10:55:03.551087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-04-10T10:55:03.551117Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-04-10T10:55:03.551141Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 3 -> 131 2025-04-10T10:55:03.551510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.551632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.551682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.551707Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:55:03.551731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-04-10T10:55:03.551982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-04-10T10:55:03.552051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-04-10T10:55:03.557572Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:55:03.557859Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:55:03.558052Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:55:03.558288Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.30, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:55:03.558291Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-10T10:55:03.570996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-10T10:55:03.571063Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-10T10:55:03.571394Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 131 -> 132 2025-04-10T10:55:03.571511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-10T10:55:03.571962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, 
at schemeshard: 72057594046644480 2025-04-10T10:55:03.572090Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:55:03.572113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-10T10:55:03.572328Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:55:03.572346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7491636301733764185:2231], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-04-10T10:55:03.572378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.572407Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:55:03.572431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-04-10T10:55:03.576078Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-04-10T10:55:03.576171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-04-10T10:55:03.576186Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-04-10T10:55:03.576209Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-10T10:55:03.576230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-10T10:55:03.576297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-04-10T10:55:03.576703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-04-10T10:55:03.576938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-04-10T10:55:03.578930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-04-10T10:55:03.578962Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-10T10:55:03.579019Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-04-10T10:55:03.579032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-10T10:55:03.579049Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#281474976715678:0 progress is 1/1 2025-04-10T10:55:03.579059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-10T10:55:03.579073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-04-10T10:55:03.579107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491636306028731920:2360] message: TxId: 281474976715678 2025-04-10T10:55:03.579133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-04-10T10:55:03.579153Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715678:0 2025-04-10T10:55:03.579161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715678:0 2025-04-10T10:55:03.579281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-10T10:55:03.579601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.579612Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282503033 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> SystemView::ShowCreateTablePartitionAtKeys >> SystemView::AuthOwners_Access |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar >> TCancelTx::ImmediateReadOnly [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |67.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-04-10T10:54:48.248999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636242187480623:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:48.252607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002033/r3tmp/tmpwGlNDn/pdisk_1.dat 2025-04-10T10:54:48.859047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:48.859132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T10:54:48.870423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:48.924799Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28887 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.573432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.615589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28887 2025-04-10T10:54:50.202285Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415828:2394] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:54:50.202358Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415828:2394] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:54:50.237851Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415842:2405] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:54:50.237928Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415842:2405] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:54:50.277223Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415857:2416] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:54:50.277322Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415857:2416] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:54:50.312157Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415885:2438] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:54:50.312273Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415885:2438] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:54:50.334045Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415898:2448] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:54:50.334098Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415898:2448] txid# 281474976710665 RESPONSE Status# 
ExecCancelled marker# P13c 2025-04-10T10:54:50.353602Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415911:2458] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:54:50.353660Z node 1 :TX_PROXY ERROR: Actor# [1:7491636250777415911:2458] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:54:54.632348Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636268709806379:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:54.632415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002033/r3tmp/tmptsNRv5/pdisk_1.dat 2025-04-10T10:54:54.942337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:54.942419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:54.954610Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:54.986004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18829 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:55.251372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:55.258605Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:54:55.262045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:18829 2025-04-10T10:54:59.966553Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636289805817196:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:59.966715Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002033/r3tmp/tmp5I0nxu/pdisk_1.dat 2025-04-10T10:55:00.322581Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:00.334974Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:00.335070Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:00.343079Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1606 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T10:55:00.647461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:55:00.670343Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:00.680219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1606 2025-04-10T10:55:01.151509Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752524:2388] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:55:01.151583Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752524:2388] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:01.175533Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752541:2402] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:55:01.175602Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752541:2402] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:01.194540Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752555:2413] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-10T10:55:01.194619Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752555:2413] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:01.247048Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752583:2435] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:55:01.247117Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752583:2435] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:01.287072Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752597:2446] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:55:01.287145Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752597:2446] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:01.310674Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752612:2458] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-10T10:55:01.310731Z node 3 :TX_PROXY ERROR: Actor# [3:7491636298395752612:2458] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:04.701055Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636311173582145:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002033/r3tmp/tmpddNW3m/pdisk_1.dat 2025-04-10T10:55:04.878767Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:05.093305Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:05.129994Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:05.130086Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:05.132558Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2307 
WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:05.418342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:05.433143Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:05.444417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2307 2025-04-10T10:55:05.873267Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-10T10:55:05.873585Z node 4 :TX_PROXY ERROR: Actor# [4:7491636315468550001:2385] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-10T10:55:05.891060Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-10T10:55:05.891354Z node 4 :TX_PROXY ERROR: Actor# [4:7491636315468550026:2397] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TestProgram::YqlKernelStartsWith [GOOD] >> TestProgram::JsonExistsBinary >> TestProgram::JsonExistsBinary [GOOD] >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } 
FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> TReplicationTests::CopyReplicatedTable [GOOD] >> TColumnEngineTestLogs::IndexWriteOverload >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/engines/ut/unittest |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] >> TestProgram::JsonExists >> TestProgram::JsonExists [GOOD] >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> TestProgram::YqlKernelEquals ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:54:58.221106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:54:58.221238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:58.221280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:54:58.221328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:54:58.221372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:54:58.221415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:54:58.221510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:54:58.221588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:54:58.221930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:54:58.318681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:54:58.318752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:58.360995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:54:58.361353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:54:58.361552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:54:58.382684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:54:58.383100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:54:58.383860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:58.384401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:54:58.391670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:58.393385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:58.393489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:58.393688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:54:58.393746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:58.393792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:54:58.393986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.424626Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:54:58.634052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:54:58.634406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.634694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:54:58.634967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:54:58.635029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.639370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:58.639584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:54:58.639820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.639877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:54:58.639918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:54:58.639966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:54:58.647169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.647258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-04-10T10:54:58.647306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:54:58.655245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.655340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.655424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:58.655522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:54:58.659703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:54:58.671164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:54:58.671450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:54:58.672516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:54:58.672680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:54:58.672726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:58.673028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:54:58.673077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:54:58.673292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:54:58.673378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:54:58.680181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:54:58.680272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:54:58.680566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:54:58.680639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:54:58.680969Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:54:58.681043Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:54:58.681183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:58.681250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:58.681316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:54:58.681376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:58.681427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:54:58.681506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:54:58.681555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:54:58.681609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:54:58.681726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:54:58.681785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:54:58.681833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:54:58.689264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:58.689463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:54:58.689504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
BUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:55:12.450341Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:55:12.458725Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:55:12.458852Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:55:12.461433Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1827 } } 2025-04-10T10:55:12.461496Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T10:55:12.461641Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1827 } } 2025-04-10T10:55:12.461770Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1827 } } 2025-04-10T10:55:12.462763Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 407 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.462825Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T10:55:12.462989Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 407 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.463080Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:12.463203Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 407 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.463283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:12.463344Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-10T10:55:12.465813Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.466320Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.483828Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.483900Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:12.484054Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.484097Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:12.484174Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:12.484243Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:12.484293Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.484350Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:55:12.484407Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:55:12.484442Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:55:12.486570Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.486753Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.486811Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-10T10:55:12.486877Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T10:55:12.486954Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-10T10:55:12.487059Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-10T10:55:12.487113Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 102:0 240 -> 240 2025-04-10T10:55:12.489804Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:12.489880Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:55:12.490051Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:12.490119Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:12.490217Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:12.490275Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:12.490341Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:55:12.490458Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:333:2312] message: TxId: 102 2025-04-10T10:55:12.490532Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:12.490594Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:55:12.490649Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:55:12.490899Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T10:55:12.490961Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:55:12.493296Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:55:12.493378Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:436:2397] TestWaitNotification: OK eventTxId 102 2025-04-10T10:55:12.494142Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:12.494494Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 383us result status StatusSuccess 2025-04-10T10:55:12.495032Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestProgram::YqlKernelEquals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; 
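The FALLBACK_ACTOR_LOGGING lines in the column-engine traces above are flat, semicolon-separated key=value records (priority, component, fline, then event-specific fields such as event, size, columns). A minimal parsing sketch in Python for pulling such a record into a dictionary, e.g. to aggregate serialize sizes; this is illustrative only, and the naive split would need refinement for nested fields like portion=(...) whose values embed semicolons:

    def parse_fallback_record(line: str) -> dict:
        # e.g. "FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5;"
        body = line.removeprefix("FALLBACK_ACTOR_LOGGING;")
        fields = {}
        for part in body.split(";"):  # naive: breaks on values that themselves contain ';'
            if "=" in part:
                key, _, value = part.partition("=")
                fields[key.strip()] = value.strip()
        return fields

    record = parse_fallback_record(
        "FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;"
        "fline=native.cpp:110;event=serialize;size=3912;columns=5;"
    )
    assert record["event"] == "serialize" and record["size"] == "3912"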
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); >> BsControllerConfig::PDiskCreate >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> BsControllerConfig::SelectAllGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign 
{ Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; 
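The YqlKernelEquals trace above runs a block Equals kernel over an Int16 column (i16, id 10) and a Float column (float, id 11), assembling both inputs and projecting a UInt8 mask (column 15); the NOTICE lines name the Arrow types involved. A minimal sketch of the same elementwise comparison with Apache Arrow in Python, assuming the pyarrow package is available; the explicit casts stand in for whatever type promotion the actual kernel applies, which this sketch does not claim to reproduce:

    import pyarrow as pa
    import pyarrow.compute as pc

    # Inputs mirror the types named in the trace: arrow::Int16Type and arrow::FloatType.
    lhs = pa.array([-2, -1, 0, 1, 2], type=pa.int16())
    rhs = pa.array([-2.0, -1.5, 0.0, 2.0, 2.0], type=pa.float32())

    # Promote both sides to a common type, then compare elementwise;
    # the result is one boolean per row, analogous to the UInt8 mask column.
    mask = pc.equal(pc.cast(lhs, pa.float64()), pc.cast(rhs, pa.float64()))
    print(mask.to_pylist())  # [True, False, True, False, True]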
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; digraph program {N0[shape=box, label="N3(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::Basic >> BsControllerConfig::MergeIntersectingBoxes >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::SelectAllGroups [GOOD] >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> BsControllerConfig::OverlayMap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-04-10T10:55:14.737968Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:14.742879Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:14.743281Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 
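The graph_constructed payloads in the TestProgram traces above (digraph program { ... }) are plain Graphviz DOT, so the program graphs can be rendered directly for inspection. A small extraction-and-render sketch; the graphviz Python package (plus the dot binary) and the log file name are assumptions, and extract_dot is a hypothetical helper tuned to these dumps, which terminate with "};":

    import re
    from graphviz import Source  # pip install graphviz; also needs the dot binary

    def extract_dot(log_line: str) -> str:
        # Hypothetical helper: grab the first digraph up to its closing "};",
        # then drop the trailing semicolon the log form carries.
        match = re.search(r"digraph\s+\w+\s*\{.*?\};", log_line, re.DOTALL)
        if not match:
            raise ValueError("no DOT graph found in line")
        return match.group(0).rstrip(";")

    with open("ya_test_output.log") as f:  # assumed file name
        for line in f:
            if "graph_constructed=digraph" in line:
                Source(extract_dot(line)).render("program_graph", format="svg")
                break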
2025-04-10T10:55:14.745705Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:14.746251Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:14.746711Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:14.746744Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:14.746979Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:14.756753Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:14.756877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:14.757015Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:14.757109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:14.757266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:14.757333Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:14.966991Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.159329s 2025-04-10T10:55:14.967136Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.159502s >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript >> BsControllerConfig::OverlayMap [GOOD] >> BsControllerConfig::PDiskCreate [GOOD] >> TLocksFatTest::ShardLocks [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-10T10:55:14.330412Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:14.334489Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:14.334829Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 
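The SelectAllGroups trace above ends with BS_CONTROLLER complaining that event processing took too much time (Duration# 0.159s and similar). A quick way to surface such spans from a captured log, as a sketch; the 0.1s threshold and the file name are assumptions, and only the message format is taken from the output above:

    import re

    THRESHOLD_SECONDS = 0.1  # assumed cut-off, not part of the test
    PATTERN = re.compile(r"took too much time Type# (\d+) Duration# ([0-9.]+)s")

    with open("ya_test_output.log") as f:  # assumed file name
        for line in f:
            for event_type, duration in PATTERN.findall(line):
                if float(duration) > THRESHOLD_SECONDS:
                    print(f"slow event Type# {event_type}: {duration}s")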
2025-04-10T10:55:14.336556Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:14.337006Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:14.337527Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:14.337561Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:14.337773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:14.347419Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:14.347537Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:14.347678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:14.347769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:14.347914Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:14.347994Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:242:2066] recipient: [1:20:2067] 2025-04-10T10:55:14.362875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:14.363030Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:14.373773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:14.373891Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:14.373971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:14.374058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:14.374185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:14.374247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:14.374280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:14.374328Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:14.386647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:14.386782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:14.398701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:14.398854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:14.400009Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:14.400061Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:14.400267Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:14.400320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:14.428879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-04-10T10:55:14.429433Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-10T10:55:14.429501Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-10T10:55:14.429543Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-10T10:55:14.429572Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-10T10:55:14.429634Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-10T10:55:14.429668Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-10T10:55:14.429691Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-10T10:55:14.429714Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-10T10:55:14.429737Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 
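The DefineBox request above enumerates ten hosts, all pointing at a host config with three drives, so each node should produce exactly three "Create new pdisk" records (the PDiskCreate run does this twice, for nodes 1-10 and 11-20). A hypothetical per-node consistency check over a captured log; the file name is an assumption, and the regex is tied to the exact message format shown here:

    import re
    from collections import Counter

    DRIVES_PER_HOST = 3  # from the DefineHostConfig above
    pdisk_re = re.compile(r"Create new pdisk PDiskId# (\d+):\d+")

    per_node = Counter()
    with open("ya_test_output.log") as f:  # assumed file name
        for line in f:
            per_node.update(pdisk_re.findall(line))

    # Every host uses the same three-drive host config, so each node id
    # should account for exactly three pdisk-creation records.
    bad = {node: n for node, n in per_node.items() if n != DRIVES_PER_HOST}
    assert not bad, bad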
2025-04-10T10:55:14.429760Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-10T10:55:14.429784Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-10T10:55:14.429811Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-10T10:55:14.429852Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-10T10:55:14.429877Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-10T10:55:14.429899Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-10T10:55:14.429920Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-10T10:55:14.429942Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-10T10:55:14.429965Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-10T10:55:14.429987Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-10T10:55:14.430010Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-10T10:55:14.430057Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-10T10:55:14.430085Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-10T10:55:14.430111Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-10T10:55:14.430138Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-10T10:55:14.430188Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-10T10:55:14.430211Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-10T10:55:14.430249Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-10T10:55:14.430274Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-10T10:55:14.430306Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-10T10:55:14.430346Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:199:2076] 2025-04-10T10:55:16.438896Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:16.439731Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} 
StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:16.439942Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:16.441284Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:16.441525Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:16.442082Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:16.442108Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:16.442327Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:16.451641Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:16.451784Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:16.451883Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:16.451963Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:16.452078Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:16.452146Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:242:2066] recipient: [11:20:2067] 2025-04-10T10:55:16.465910Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:16.466074Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:16.478781Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:16.478923Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:16.479019Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:16.479114Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:16.479237Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:16.479287Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:16.479344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} 
Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:16.479408Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:16.490702Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:16.490841Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:16.501662Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:16.501789Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:16.503000Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:16.503066Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:16.503247Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:16.503289Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:16.504079Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-04-10T10:55:16.504515Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-10T10:55:16.504568Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-10T10:55:16.504617Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-04-10T10:55:16.504646Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-04-10T10:55:16.504671Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2 2025-04-10T10:55:16.504694Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-04-10T10:55:16.504716Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-04-10T10:55:16.504740Z node 11 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-04-10T10:55:16.504764Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-04-10T10:55:16.504786Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-04-10T10:55:16.504827Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-04-10T10:55:16.504853Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-04-10T10:55:16.504876Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-04-10T10:55:16.504901Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-04-10T10:55:16.504985Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-04-10T10:55:16.505024Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-04-10T10:55:16.505050Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-04-10T10:55:16.505072Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-04-10T10:55:16.505102Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 2025-04-10T10:55:16.505136Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-04-10T10:55:16.505162Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 2025-04-10T10:55:16.505186Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-04-10T10:55:16.505209Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-04-10T10:55:16.505245Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-04-10T10:55:16.505273Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-04-10T10:55:16.505301Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-04-10T10:55:16.505338Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-04-10T10:55:16.505364Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-04-10T10:55:16.505385Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-04-10T10:55:16.505407Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> 
TReplicationTests::DropReplicationWithUnknownSecret >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> BsControllerConfig::ReassignGroupDisk |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TestProgram::JsonValueBinary >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-04-10T10:54:51.605168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636257779294069:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:51.605737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002029/r3tmp/tmp6EnpvJ/pdisk_1.dat 2025-04-10T10:54:52.513562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:52.530950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:52.531023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:52.533285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10047 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:53.207636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.250433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.280148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:53.612831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.736384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:56.566538Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636257779294069:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:56.566708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:59.697232Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636290971939202:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:59.698406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002029/r3tmp/tmpnYuL29/pdisk_1.dat 2025-04-10T10:54:59.890114Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:59.943183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:59.943284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:59.944289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23827 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:00.320797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.328604Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:00.350406Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:55:00.355170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.461380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.536137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:04.614664Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636290971939202:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:04.614722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:07.885321Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636326132637647:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:07.886582Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002029/r3tmp/tmprfMZvL/pdisk_1.dat 2025-04-10T10:55:08.066977Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:08.068928Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:08.069007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:08.070491Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63504 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:08.299402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:55:08.317344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:08.388959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:08.459739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:13.193292Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636349587147574:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:13.193342Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002029/r3tmp/tmpYAil46/pdisk_1.dat 2025-04-10T10:55:13.622468Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:13.661632Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:13.666356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:13.675303Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7527 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:13.987934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:14.002848Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:14.030794Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:14.040552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:55:14.130418Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:14.241182Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:03.475612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:03.475735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:03.475778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:03.475819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:03.475873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:03.475915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:03.475984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:03.476075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:03.476406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:03.585415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:03.585496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:03.606567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:03.606833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:03.606982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:03.623181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-10T10:55:03.623459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:03.624101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:03.624456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:03.626609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:03.627855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:03.627921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:03.628079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:03.628133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:03.628171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:03.628323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.636057Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:03.777939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:03.778180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.778414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:03.778644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:03.778698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.785604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:03.785767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:03.785964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.786030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:03.786070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:03.786114Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:03.794682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.794770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:03.794812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:03.798646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.798709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.798762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:03.798833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:03.813505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:03.831115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:03.831318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:03.832347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:03.832483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:03.832527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:03.832800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:03.832851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:03.833018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:03.833092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:03.843525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:03.843599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
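[editor's note] The records above trace one schemeshard operation through numeric internal states (2 -> 3 -> 128 -> 240 for txid 1:0). A small sketch to pull that progression out of a log; it relies only on the "Change state for txid" lines visible here and makes no claim about what the state numbers mean:

```python
# Minimal sketch (editor's addition): collect observed schemeshard state
# transitions per operation from "Change state for txid X:Y A -> B" records.
# Assumes Python 3.9+ for builtin generic annotations.
import re
from collections import defaultdict

TRANSITION = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

def transitions(text: str) -> dict[str, list[tuple[int, int]]]:
    chains: dict[str, list[tuple[int, int]]] = defaultdict(list)
    for op, src, dst in TRANSITION.findall(text):
        chains[op].append((int(src), int(dst)))
    return dict(chains)

# For the fragment above this note, transitions(log)["1:0"] would be
# [(2, 3), (3, 128), (128, 240)].
```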
2025-04-10T10:55:03.843801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:03.843844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:03.844083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:03.844129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:03.844216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:03.844270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:03.844310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:03.844342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:03.844374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:03.844410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:03.844443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:03.844469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:03.844534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:03.844568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:03.844601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:03.852783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:03.852930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:03.852963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
E: StateWork, received event# 2146435072, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T10:55:20.121940Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:55:20.122029Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:20.122059Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:20.122251Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:55:20.122419Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:20.122453Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T10:55:20.122491Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:55:20.123025Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:20.123074Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:55:20.123227Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T10:55:20.123289Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:20.123349Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:20.123403Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:20.123456Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:20.123509Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T10:55:20.123571Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:20.123628Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:55:20.123667Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:55:20.123908Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:55:20.123967Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T10:55:20.124019Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T10:55:20.124075Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T10:55:20.124872Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:204:2206], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-04-10T10:55:20.124912Z node 
9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T10:55:20.124981Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:20.125052Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:20.125087Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:55:20.125152Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T10:55:20.125222Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:20.125310Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T10:55:20.126304Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:204:2206], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-04-10T10:55:20.126345Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T10:55:20.126407Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:20.126493Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:20.126530Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:55:20.126583Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T10:55:20.126617Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:55:20.126705Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T10:55:20.126754Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T10:55:20.127326Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435084, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-10T10:55:20.127376Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-10T10:55:20.127445Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:55:20.127499Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for 
PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:55:20.127582Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:20.129692Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T10:55:20.132809Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:55:20.132848Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T10:55:20.133965Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:55:20.134002Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T10:55:20.134106Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T10:55:20.134407Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T10:55:20.134467Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T10:55:20.134896Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:446:2401], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:20.134962Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:20.135002Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T10:55:20.135172Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:362:2341], Recipient [9:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-04-10T10:55:20.135219Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T10:55:20.135309Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T10:55:20.135415Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:55:20.135466Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:444:2399] 2025-04-10T10:55:20.135664Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:446:2401], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:55:20.135700Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T10:55:20.135744Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-10T10:55:20.136151Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:447:2402], Recipient [9:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T10:55:20.136206Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing 
event TEvSchemeShard::TEvDescribeScheme 2025-04-10T10:55:20.136315Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:20.136535Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 206us result status StatusPathDoesNotExist 2025-04-10T10:55:20.136705Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 
16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 
7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; 
FALLBACK_ACTOR_LOGGING;prio ... 2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
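[editor's note] Each `graph_constructed=digraph program {...}` record above is a complete Graphviz graph of the compiled column program, dumped on one line. A minimal sketch for extracting these dumps so they can be rendered with the standard Graphviz `dot` tool; the end-of-graph match is keyed to the red execution-order edge that closes every dump in this log, because the node labels themselves contain nested braces that defeat naive brace matching:

```python
# Minimal sketch (editor's addition): pull the "digraph program { ... }" dumps
# out of this log and write each to a .dot file. The non-greedy match is
# anchored on the closing execution-order edge ("[color=red]; }"), which ends
# every dump observed in this log.
import re

GRAPH = re.compile(r"digraph program \{.*?\[color=red\]; \}", re.S)

def dump_graphs(text: str, prefix: str = "program") -> list[str]:
    paths = []
    for i, graph in enumerate(GRAPH.findall(text)):
        path = f"{prefix}_{i}.dot"
        with open(path, "w") as f:
            f.write(graph + "\n")
        paths.append(path)
    return paths

# Render afterwards with Graphviz, e.g.: dot -Tsvg program_0.dot -o program_0.svg
```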
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
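[editor's note] The `json_binary:` arrays printed by this test come in pairs: the first array is plain hex of the UTF-8 JSON text, the second is hex of YDB's internal binary JsonDocument encoding of the same values. The first form decodes with nothing but the standard library, as in this sketch (output verified against the values printed above); decoding the second form would need YDB's own JsonDocument serializer and is left alone here:

```python
# Minimal sketch (editor's addition): decode the first json_binary array, which
# is hex-encoded UTF-8 JSON text. Values copied from the test output above.
hex_values = [
    "7B226B6579223A2276616C7565227D",
    "7B226B6579223A31307D",
    "7B226B6579223A302E317D",
    "7B226B6579223A66616C73657D",
    "7B22616E6F74686572223A2276616C7565227D",
    "5B5D",
]
for h in hex_values:
    print(bytes.fromhex(h).decode("utf-8"))
# Prints: {"key":"value"}  {"key":10}  {"key":0.1}  {"key":false}
#         {"another":"value"}  []
```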
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> TestProgram::NumRowsWithNulls >> TestProgram::NumRowsWithNulls [GOOD] >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> BsControllerConfig::AddDriveSerialMassive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { 
Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N4(8):{\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N5(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"p":{"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; |67.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::ShouldCheckQuotas >> BsControllerConfig::ReassignGroupDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-10T10:55:15.593954Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:15.598326Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:15.598722Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:15.600348Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:15.600733Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:15.601241Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:15.601267Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:15.601429Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:15.610789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:15.610934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:15.611176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:15.611300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:15.611413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:15.611480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-10T10:55:15.624303Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:15.624472Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:15.640130Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:15.640276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:15.640359Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:15.640421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:15.640561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:15.640621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:15.640653Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:15.640708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:15.651546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:15.651701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:15.662471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:15.664394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:15.665629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:15.665679Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:15.665885Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:15.665954Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:15.681126Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-10T10:55:15.682884Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-10T10:55:15.683551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:199:2076] 2025-04-10T10:55:17.537928Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:17.542133Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:17.542458Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 
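[editor's note] The BsControllerConfig test above drives the controller with AddDriveSerial/RemoveDriveSerial config commands, and the log records every request. A minimal sketch for replaying those records into a running inventory of serials, handy when checking which drives a test expects to remain registered; the pattern is keyed to the request shape visible in this log:

```python
# Minimal sketch (editor's addition): replay AddDriveSerial / RemoveDriveSerial
# commands from TEvControllerConfigRequest records into a set of live serials.
import re

COMMAND = re.compile(r'(AddDriveSerial|RemoveDriveSerial) \{ Serial: "([^"]+)"')

def live_serials(text: str) -> set[str]:
    drives: set[str] = set()
    for op, serial in COMMAND.findall(text):
        if op == "AddDriveSerial":
            drives.add(serial)
        else:
            drives.discard(serial)
    return drives

# Note: this section's log is elided (" ... ") and ends mid-record, so a replay
# over just the visible text is illustrative rather than a complete inventory.
```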
2025-04-10T10:55:17.544033Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:17.544318Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:17.544973Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.545021Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.545233Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:17.569077Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:17.569240Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:17.569386Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:17.569516Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.569623Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.569686Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-10T10:55:17.582804Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:17.583012Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.594751Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.594902Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.594999Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.595078Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.595196Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.595250Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.595296Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.595342Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} 
Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.610695Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.610838Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.622776Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.622929Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:17.626348Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:17.626411Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:17.626633Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:17.626685Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:17.627357Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-10T10:55:17.628543Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-10T10:55:19.349702Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-10T10:55:19.350506Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-10T10:55:19.351131Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-10T10:55:19.351785Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-10T10:55:19.352716Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-10T10:55:19.353508Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-10T10:55:19.354461Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-10T10:55:19.355163Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-10T10:55:19.368046Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-10T10:55:19.368474Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } 
} } 2025-04-10T10:55:19.368931Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-10T10:55:19.369396Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-10T10:55:19.369878Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-10T10:55:19.370570Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:196:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:196:2076] Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:217:2066] recipient: [31:196:2076] 2025-04-10T10:55:21.190024Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:21.190974Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:21.191186Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:21.192607Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:21.192834Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:21.193499Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:21.193531Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:21.193725Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:21.203464Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:21.203616Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:21.203808Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:21.203924Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:21.204015Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:21.204090Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:239:2066] recipient: [31:20:2067] 2025-04-10T10:55:21.216636Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:21.217194Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 
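For reference, the AddDriveSerial/RemoveDriveSerial churn in the node 21 transcript above is carried by TEvControllerConfigRequest events, which the controller dumps in protobuf text format at config_cmd.cpp:398. Below is a pretty-printed sketch of one add/remove pair, using exactly the field names and values the log prints (the test issues one Command per request here; the DefineHostConfig request later in this log shows that several Command entries can also be batched into a single request):

    Request# {
      Command {
        AddDriveSerial {
          Serial: "SN_9"   # drive serial number being registered with the controller
          BoxId: 1         # box the drive is attached to
        }
      }
    }

    Request# {
      Command {
        RemoveDriveSerial {
          Serial: "SN_9"   # removal is keyed by serial only; no BoxId field appears in the log
        }
      }
    }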
2025-04-10T10:55:21.228588Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:21.228738Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:21.228829Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:21.228894Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:21.229051Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:21.229097Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:21.229139Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:21.229183Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:21.239964Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:21.240103Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:21.250905Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:21.251047Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:21.252479Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:21.252544Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:21.252745Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:21.252788Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:21.253394Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-04-10T10:55:21.254618Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-04-10T10:55:21.255252Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-04-10T10:55:21.255813Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-04-10T10:55:21.256364Z 
node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-04-10T10:55:21.256904Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-10T10:55:21.257547Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-10T10:55:21.259928Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-10T10:55:21.260586Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-10T10:55:21.261228Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-10T10:55:21.261960Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-10T10:55:21.262651Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-10T10:55:21.263279Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-10T10:55:21.263889Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-10T10:55:21.264544Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-10T10:55:21.295590Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-10T10:55:21.296558Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-10T10:55:21.297332Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-10T10:55:21.298086Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-10T10:55:21.316319Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:254:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] 
sender: [1:278:2068] recipient: [1:254:2078] Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:281:2068] recipient: [1:254:2078] 2025-04-10T10:55:19.578791Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:19.583269Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:19.583630Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:19.692392Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:19.692972Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:19.693674Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:19.693707Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:19.693962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:19.705778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:19.705892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:19.706044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:19.706232Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:19.706364Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:19.706453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:306:2068] recipient: [1:22:2069] 2025-04-10T10:55:19.719783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:19.719976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:19.731021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:19.731179Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:19.731273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:19.731371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:19.731536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:19.731598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:19.731652Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:19.731711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:19.742730Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:19.742913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:19.761535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:19.761713Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:19.763065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:19.763126Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:19.763364Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:19.763430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:19.799610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T10:55:19.800322Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-04-10T10:55:19.800394Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-04-10T10:55:19.800440Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-04-10T10:55:19.800474Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 
Path# /dev/disk 2025-04-10T10:55:19.800509Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-04-10T10:55:19.800532Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-04-10T10:55:19.800562Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-04-10T10:55:19.800586Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-04-10T10:55:19.800874Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-04-10T10:55:19.800902Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-04-10T10:55:19.800954Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-04-10T10:55:19.800982Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-10T10:55:19.833147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:254:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:254:2078] Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:281:2068] recipient: [13:254:2078] 2025-04-10T10:55:21.574967Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:21.576171Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:21.576410Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:21.577982Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:21.585111Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:21.585861Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:21.585900Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:21.586174Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:21.598307Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:21.598443Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:21.598574Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:21.598697Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:21.598821Z node 13 :BS_CONTROLLER 
DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:21.598924Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:306:2068] recipient: [13:22:2069] 2025-04-10T10:55:21.616161Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:21.616326Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:21.627133Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:21.627280Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:21.627379Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:21.627481Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:21.627625Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:21.627698Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:21.627744Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:21.627806Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:21.642730Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:21.642879Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:21.655363Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:21.655512Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:21.656851Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:21.656915Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:21.657119Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:21.657164Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} 
TTxLoadEverything InitQueue processed 2025-04-10T10:55:21.658028Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T10:55:21.659054Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-04-10T10:55:21.659113Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-04-10T10:55:21.659146Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-04-10T10:55:21.659187Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-04-10T10:55:21.659224Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-04-10T10:55:21.659263Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-04-10T10:55:21.659320Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-04-10T10:55:21.659351Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-04-10T10:55:21.659399Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-04-10T10:55:21.659427Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-04-10T10:55:21.659470Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-04-10T10:55:21.659497Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-10T10:55:21.685702Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> 
Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TLocksTest::SetLockNothing [GOOD] >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-10T10:53:01.657590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635782371290785:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.657690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:01.807223Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0019cb/r3tmp/tmpLyUDkN/pdisk_1.dat 2025-04-10T10:53:01.988378Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6472, node 1 2025-04-10T10:53:02.055142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.055689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.082043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.126983Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0019cb/r3tmp/yandex63A5eR.tmp 2025-04-10T10:53:02.127024Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0019cb/r3tmp/yandex63A5eR.tmp 2025-04-10T10:53:02.198292Z INFO: TTestServer started on Port 26840 GrpcPort 6472 2025-04-10T10:53:02.284975Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0019cb/r3tmp/yandex63A5eR.tmp 2025-04-10T10:53:02.285220Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26840 PQClient connected to localhost:6472 WaitRootIsUp 'Root'... 
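Worth noting from the BsControllerConfig::ReassignGroupDisk transcript above: the same UpdateDriveStatus command succeeds on the first bootstrap but fails on the second. The first instance defines Box 1 over hosts that map to NodeIds 1-12 (pdisks 1:1000 through 12:1000 are created), so HostKey { NodeId: 1 } resolves; the second instance's box maps to NodeIds 13-24 (pdisks 13:1000 through 24:1000), so NodeId 1 is unknown there and the controller answers kHostNotFound. The failing exchange, pretty-printed from the log with content unchanged:

    Request# {
      Command {
        UpdateDriveStatus {
          HostKey { NodeId: 1 }   # host not present in this instance's Box 1
          Path: "/dev/disk"
          Status: INACTIVE
        }
      }
    }

    Response# {
      Status {
        ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect"
        FailReason: kHostNotFound
        FailParam { NodeId: 1 }
      }
    }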
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.452809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.489835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:02.495736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:02.672231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:53:04.718560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795256193481:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.718645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.718861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795256193509:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.733374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.741763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795256193541:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.741940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.750793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635795256193511:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:04.967704Z node 1 :TX_PROXY ERROR: Actor# [1:7491635795256193567:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.010134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.094351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.129666Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635795256193577:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.130517Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDIyY2Y5ZTItNTZkZGQzMTQtYWNmYzhhYmQtZjM5MWQzMTA=, ActorId: [1:7491635795256193477:2336], ActorState: ExecuteState, TraceId: 01jrfn9ya931b1b2f4vma1x4s7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.133317Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.231868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635799551161178:2635] 2025-04-10T10:53:06.657797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635782371290785:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.657920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.758943Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:11.795970Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:11.797395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635825320965270:2815], Recipient [1:7491635782371291217:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.797438Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.797456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:11.797515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635825320965266:2812], Recipient [1:7491635782371291217:2200]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:11.797533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:11.896762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:11.897208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:11.897506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId ... PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.696945Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:20.794419Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636370082454833:2878], Partition 3, Sender [0:0:0], Recipient [6:7491636370082454926:2887], Cookie: 0 2025-04-10T10:55:20.794532Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636370082454926:2887]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.794578Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.794626Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.794714Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.794745Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.794790Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.794867Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636361492519789:2790], Partition 1, Sender [0:0:0], Recipient [6:7491636361492519876:2800], Cookie: 0 2025-04-10T10:55:20.794911Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636361492519876:2800]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.794927Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.794958Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.795003Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.795027Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.795051Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.795093Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636327132780093:2470], Partition 0, Sender [0:0:0], Recipient [6:7491636327132780153:2474], Cookie: 0 2025-04-10T10:55:20.795133Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636327132780153:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795148Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795171Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.795204Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.795222Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.795240Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:20.795280Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636361492519785:2789], Partition 2, Sender [0:0:0], Recipient [6:7491636361492519870:2798], Cookie: 0 2025-04-10T10:55:20.795314Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636361492519870:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795327Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795351Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.795381Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.795396Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.795416Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.795456Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636370082454830:2877], Partition 4, Sender [0:0:0], Recipient [6:7491636370082454922:2885], Cookie: 0 2025-04-10T10:55:20.795487Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636370082454922:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795501Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.795526Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.795556Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.795573Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.795591Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.898313Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636370082454833:2878], Partition 3, Sender [0:0:0], Recipient [6:7491636370082454926:2887], Cookie: 0 2025-04-10T10:55:20.898395Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636370082454926:2887]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898420Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898466Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.898538Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.898564Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.898596Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:20.898652Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636361492519789:2790], Partition 1, Sender [0:0:0], Recipient [6:7491636361492519876:2800], Cookie: 0 2025-04-10T10:55:20.898683Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636361492519876:2800]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898697Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898719Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.898751Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.898767Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.898786Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.898827Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636327132780093:2470], Partition 0, Sender [0:0:0], Recipient [6:7491636327132780153:2474], Cookie: 0 2025-04-10T10:55:20.898859Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636327132780153:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898873Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.898898Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.898927Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.898944Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.898960Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:20.899002Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636361492519785:2789], Partition 2, Sender [0:0:0], Recipient [6:7491636361492519870:2798], Cookie: 0 2025-04-10T10:55:20.899036Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636361492519870:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.899047Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.899067Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.899096Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.899108Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.899124Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:20.899163Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636370082454830:2877], Partition 4, Sender [0:0:0], Recipient [6:7491636370082454922:2885], Cookie: 0 2025-04-10T10:55:20.899201Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636370082454922:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.899214Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:20.899239Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:20.899265Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:20.899280Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:20.899296Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:24.094950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:24.095058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:24.095103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:24.095149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:24.095198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:24.095247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:24.095325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:24.095417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:24.095905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:24.185292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:24.185381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:24.204918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:24.205236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-10T10:55:24.205435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:24.213392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:24.213667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:24.214411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:24.214827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:24.216840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:24.218339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:24.218410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:24.218613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:24.218670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:24.218712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:24.218891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.226027Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:24.369738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:24.370036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.370309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:24.370588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:24.370652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.373905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:24.374127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:24.374387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.374449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:24.374495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:24.374547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:24.376987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.377177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:24.377219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:24.379387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.379445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.379489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:24.379542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:24.384801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:24.387395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:24.387629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:24.388798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:24.388966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:24.389168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:24.389532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:24.389590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:24.389819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:24.389929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:24.392848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-10T10:55:24.392903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:24.393123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:24.393177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:24.393471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.393519Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:24.393623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:24.393671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:24.393719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:24.393755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:24.393795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:24.393853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:24.393893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:24.393928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:24.394014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:24.394057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:24.394090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:24.396909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:24.397048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:24.397090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-04-10T10:55:24.986372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-04-10T10:55:24.986483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:55:24.986539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:55:24.995243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:24.995316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:55:24.995556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T10:55:24.995847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:24.995903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T10:55:24.995988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 4 2025-04-10T10:55:24.996268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:55:24.996324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-10T10:55:24.998082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:55:25.002498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:55:25.002581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:55:25.002650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-10T10:55:25.002705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:55:25.004725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1374 } } 2025-04-10T10:55:25.004776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-10T10:55:25.004911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: 
COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1374 } } 2025-04-10T10:55:25.005013Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1374 } } 2025-04-10T10:55:25.012627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:55:25.012755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:55:25.012787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:55:25.012840Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T10:55:25.012898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T10:55:25.012992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T10:55:25.014243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 547 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-10T10:55:25.014302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-10T10:55:25.014471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 547 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-10T10:55:25.014543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:25.014646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 547 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-10T10:55:25.014711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:25.014750Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:55:25.014785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T10:55:25.014866Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 103:0 129 -> 240 2025-04-10T10:55:25.023213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:55:25.023551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:55:25.023736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:55:25.024112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:55:25.024494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:55:25.024547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T10:55:25.024674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:55:25.024713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:55:25.024758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:55:25.024797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:55:25.024852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T10:55:25.024955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-10T10:55:25.025005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:55:25.025072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:55:25.025108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T10:55:25.025225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:55:25.028385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:55:25.028444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:577:2514] TestWaitNotification: OK eventTxId 103 W0000 00:00:1744282525.029145 1105601 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-04-10T10:55:25.032135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:25.032634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:55:25.032782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, 
schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-10T10:55:25.033223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-04-10T10:55:25.040726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:25.040943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104
>> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD]
>> DataShardWrite::UpsertPreparedNoTxCache+Volatile
>> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD]
>> SystemView::AuthOwners_Access [GOOD]
>> SystemView::AuthOwners_ResultOrder
|67.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log}
|67.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
>> TColumnEngineTestLogs::IndexWriteOverload [GOOD]
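The txId 104 failure above is the quota this test exercises: SchemeShard rejects a TTL SysSettings.RunInterval below an 1800-second floor, and the request proposed 1799999999. Comparing 1799999999 against the limit of 1800 suggests the field is counted in microseconds (1799999999 us is just under 1800 s); treat that unit as an inference from this log, not a documented contract. A minimal sketch of a proposal that clears this particular check, assuming those units and changing only RunInterval; ExpireAfterSeconds is dropped because the text_format warnings above flag it as deprecated, and the Tiers clause already encodes the same 3600 s delete rule:

    # Sketch only: the rejected CreateTable request with RunInterval raised to the floor.
    # 1800000000 assumes microsecond units (1800000000 us == 1800 s).
    WorkingDir: "/MyRoot/SubDomain"
    OperationType: ESchemeOpCreateTable
    CreateTable {
      Name: "Table4"
      Columns { Name: "key" Type: "Uint64" }
      Columns { Name: "modified_at" Type: "Timestamp" }
      KeyColumnNames: "key"
      TTLSettings {
        Enabled {
          ColumnName: "modified_at"
          SysSettings { RunInterval: 1800000000 }       # at or above the 1800 s limit
          Tiers { ApplyAfterSeconds: 3600 Delete { } }  # delete rows 3600 s after modified_at
        }
      }
    }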
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD]
Test command err: 2025-04-10T10:53:01.684206Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635784683713772:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.684289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001990/r3tmp/tmp9J1B52/pdisk_1.dat 2025-04-10T10:53:01.882988Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:53:02.055704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:02.078041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.078142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.080070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62393, node 1 2025-04-10T10:53:02.185639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001990/r3tmp/yandexZ5cZSw.tmp 2025-04-10T10:53:02.185666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001990/r3tmp/yandexZ5cZSw.tmp 2025-04-10T10:53:02.185826Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001990/r3tmp/yandexZ5cZSw.tmp 2025-04-10T10:53:02.185950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.243777Z INFO: TTestServer started on Port 18279 GrpcPort 62393 TClient is connected to server localhost:18279 PQClient connected to localhost:62393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.566097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.579360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.692259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.824671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.922414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:53:04.701834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635797568616461:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.701962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635797568616488:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.702074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.709173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.727080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635797568616492:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:04.963771Z node 1 :TX_PROXY ERROR: Actor# [1:7491635797568616556:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:04.993506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.079655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.131955Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635797568616565:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.133705Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI2MWEwMi01MGRhNjQzMi1lMjE1YWFhZS04OGUyNWNjYg==, ActorId: [1:7491635797568616458:2336], ActorState: ExecuteState, TraceId: 01jrfn9y9d6fj7172vd1gkehrf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.136203Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.197270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635801863584153:2630] 2025-04-10T10:53:06.684283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635784683713772:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.684360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.761399Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:11.787813Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:11.789035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635827633388239:2804], Recipient [1:7491635784683714186:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.789074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.789095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:11.789141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635827633388235:2801], Recipient [1:7491635784683714186:2189]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:11.789159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:11.896042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:11.896620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:11.896935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:11.896982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:11.897014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025- ... 
iles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-10T10:55:22.167586Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [6:7491636380149612720:2781], Partition 2, Sender [6:7491636380149612720:2781], Recipient [6:7491636380149612820:2796], Cookie: 0 2025-04-10T10:55:22.167624Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [6:7491636380149612720:2781], Recipient [6:7491636380149612820:2796]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-10T10:55:22.167637Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-04-10T10:55:22.167688Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7491636345789873152:2468], Recipient [6:7491636345789873090:2465]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:22.167704Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:22.167756Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7491636380149612820:2796], Recipient [6:7491636380149612720:2781]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:22.167769Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:22.167992Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 9 DataSize: 0 UsedReserveSize: 0 2025-04-10T10:55:22.168126Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. 
PendingUpdates size 3 2025-04-10T10:55:22.174630Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [6:7491636345789873089:2464], Recipient [6:7491636281365362476:2136]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 9 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-04-10T10:55:22.174683Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-10T10:55:22.174704Z node 6 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-04-10T10:55:22.174737Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099994s, queue# 1 2025-04-10T10:55:22.186814Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:7491636345789873089:2464], Recipient [6:7491636281365362476:2136]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-04-10T10:55:22.186864Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T10:55:22.230702Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636380149612721:2782], Partition 1, Sender [0:0:0], Recipient [6:7491636380149612822:2798], Cookie: 0 2025-04-10T10:55:22.230781Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636380149612822:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.230808Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.230865Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.230949Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.230979Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.231008Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:22.231664Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636380149612720:2781], Partition 2, Sender [0:0:0], Recipient [6:7491636380149612820:2796], Cookie: 0 2025-04-10T10:55:22.231709Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636380149612820:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.231730Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.231765Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.231811Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.231832Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.231866Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:22.258445Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636345789873090:2465], Partition 0, Sender [0:0:0], Recipient [6:7491636345789873152:2468], Cookie: 0 2025-04-10T10:55:22.258516Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636345789873152:2468]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.258538Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.258582Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.258652Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.258679Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.258706Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:22.278316Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7491636281365362476:2136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:55:22.278358Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:55:22.278413Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7491636281365362476:2136]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-10T10:55:22.278431Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-10T10:55:22.278459Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:55:22.278470Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T10:55:22.278525Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-10T10:55:22.278578Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7491636281365362476:2136], Recipient [6:7491636281365362476:2136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:55:22.278598Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:55:22.282010Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [6:7491636281365362476:2136]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-10T10:55:22.282044Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-10T10:55:22.282066Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T10:55:22.447895Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636380149612721:2782], Partition 1, Sender [0:0:0], Recipient [6:7491636380149612822:2798], Cookie: 0 2025-04-10T10:55:22.447969Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636380149612822:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.447993Z node 6 
:PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.448032Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.448095Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.448122Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.448146Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:22.448194Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636380149612720:2781], Partition 2, Sender [0:0:0], Recipient [6:7491636380149612820:2796], Cookie: 0 2025-04-10T10:55:22.448229Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636380149612820:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.448244Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.448268Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.448299Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.448316Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.448334Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:22.448377Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636345789873090:2465], Partition 0, Sender [0:0:0], Recipient [6:7491636345789873152:2468], Cookie: 0 2025-04-10T10:55:22.448414Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636345789873152:2468]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.448427Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:22.448451Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:22.448485Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:22.448502Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:22.448521Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:26.008135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:26.008241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:26.008281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:26.008332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:26.008383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:26.008426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:26.008490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:26.008747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:26.009127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:26.102502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:26.102580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:26.122430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:26.122750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:26.122933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:26.133091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:26.133337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:26.134091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:26.134520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:26.136625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:26.138077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:26.138141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:26.138363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T10:55:26.138408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:26.138450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:26.138625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.146444Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:26.392655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:26.392924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.393203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:26.393554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:26.393635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.403173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:26.403387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:26.403632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.403715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:26.403759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:26.403794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:26.411190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.411321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:26.411382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:26.415265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.415331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.415379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944
2025-04-10T10:55:26.415431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:55:26.427035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:55:26.429414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:55:26.429700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:55:26.430826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:55:26.430971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:55:26.431019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:55:26.431312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:55:26.431425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:55:26.431612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:55:26.431717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:55:26.434332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:55:26.434387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:55:26.434620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:55:26.434697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:55:26.434951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:55:26.435000Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:55:26.435103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:55:26.435159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:55:26.435205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:55:26.435236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:55:26.435284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:55:26.435340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:55:26.435374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:55:26.435406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:55:26.435475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:55:26.435520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:55:26.435563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:55:26.438329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:55:26.438435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:55:26.438474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-10T10:55:26.438529Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-10T10:55:26.438573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:55:26.438671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-10T10:55:26.442143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-10T10:55:26.442871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR
W0000 00:00:1744282526.444212 1105993 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds"
TestModificationResults wait txId: 101
2025-04-10T10:55:26.447300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:55:26.447731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944
2025-04-10T10:55:26.448240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944
2025-04-10T10:55:26.448781Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap
2025-04-10T10:55:26.467072Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265])
2025-04-10T10:55:26.467986Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-10T10:55:26.470915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:55:26.471112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/
2025-04-10T10:55:26.472681Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 101, wait until txId: 101
W0000 00:00:1744282526.473178 1105993 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds"
TestModificationResults wait txId: 102
2025-04-10T10:55:26.475876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:55:26.476220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944
2025-04-10T10:55:26.476412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944
2025-04-10T10:55:26.478894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:55:26.479044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/
TestModificationResult got TxId: 102, wait until txId: 102
>> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD]
Test command err:
2025-04-10T10:54:47.572272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636237772972267:2195];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:54:47.572648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmp66Vpzc/pdisk_1.dat
2025-04-10T10:54:48.603334Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:48.616231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:54:48.616305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:54:48.623274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:21066
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:54:49.152981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:49.174315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:49.186857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-10T10:54:49.195174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:49.419569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:49.529673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:54.143875Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636267339561052:2199];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:54:54.179179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmpe300P8/pdisk_1.dat
2025-04-10T10:54:54.471286Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:54.541086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:54:54.541177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:54:54.545927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14637
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:54:55.015506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:55.026624Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:55.055161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:55.196543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:55.310712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:59.345310Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636292204965235:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:54:59.345375Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmpGbLuSO/pdisk_1.dat
2025-04-10T10:54:59.587840Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:59.613775Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:54:59.613862Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:54:59.619811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:21762
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:54:59.991961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:54:59.999143Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:00.028420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:00.186660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:00.282000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:05.056653Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636312476615754:2208];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmpXWpLtj/pdisk_1.dat
2025-04-10T10:55:05.153087Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:55:05.216171Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:05.254337Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:05.254454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:05.257445Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:13954
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:55:05.617390Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:05.626728Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:05.638592Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-10T10:55:05.646550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:05.736638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:05.837858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:10.253516Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491636335741442193:2219];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmpq2EgqP/pdisk_1.dat
2025-04-10T10:55:10.361730Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:55:10.469982Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:10.473439Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:10.473578Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:10.475129Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:17185
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:55:10.820009Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:10.828146Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:10.843512Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-10T10:55:10.848591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:10.937310Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:11.030637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:15.878924Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491636357061194148:2198];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmpfQkEDF/pdisk_1.dat
2025-04-10T10:55:15.988449Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:55:16.123348Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:16.146693Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:16.146807Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:16.152063Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4693
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:55:16.534742Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:16.551571Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:16.575286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:16.723442Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:16.807015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:21.045361Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491636385517274567:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:21.045418Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203f/r3tmp/tmp4nRDlp/pdisk_1.dat
2025-04-10T10:55:21.293838Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:21.327369Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:21.327472Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:21.331542Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15575
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T10:55:21.600090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:21.614509Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:21.639689Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480
2025-04-10T10:55:21.653069Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:21.748047Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T10:55:21.833361Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD]
Test command err:
2025-04-10T10:54:51.142609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:303:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T10:54:51.143083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-10T10:54:51.143326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:54:51.143722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:726:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T10:54:51.144077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:54:51.144237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-10T10:54:52.079663Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:52.320847Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt
2025-04-10T10:54:52.386340Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000
2025-04-10T10:54:53.264512Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000
TServer::EnableGrpc on GrpcPort 21210, node 1
TClient is connected to server localhost:12851
2025-04-10T10:54:53.920731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:54:53.920812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:54:53.920858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:54:53.921274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T10:54:59.090644Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636291520681341:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:54:59.090719Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-10T10:54:59.431912Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:59.465473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:54:59.465600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:54:59.472445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3001, node 3
2025-04-10T10:54:59.679015Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:54:59.679044Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:54:59.679081Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:54:59.679281Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1859
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:55:00.305992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:00.393895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T10:55:00.399145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:04.093231Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636291520681341:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:04.093363Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:55:04.218974Z node 3 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T10:55:04.219057Z node 3 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T10:55:04.531364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636312995518524:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:04.531486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636312995518503:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:04.532256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:04.554594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-10T10:55:04.589019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636312995518531:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-10T10:55:04.668238Z node 3 :TX_PROXY ERROR: Actor# [3:7491636312995518582:2364] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:55:05.258703Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGU4YzNhZTYtNTRjM2E4MjgtODlmMDA5MzgtYjJjYmUwNQ==, ActorId: [3:7491636312995518500:2342], ActorState: ExecuteState, TraceId: 01jrfndkac3ebpd4j23aeazwz6, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction
2025-04-10T10:55:08.277424Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636327241400807:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:08.277502Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-10T10:55:08.481320Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:08.506047Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:08.506351Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:08.511198Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3055, node 4
2025-04-10T10:55:08.646884Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:55:08.646917Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:55:08.646926Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:55:08.647077Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29089
2025-04-10T10:55:09.437800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T10:55:09.451746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:09.460075Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
2025-04-10T10:55:12.682733Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999862s
2025-04-10T10:55:12.682908Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T10:55:12.682947Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T10:55:13.278630Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636327241400807:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:13.278736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:55:14.114369Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636353011205254:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:14.114484Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: ... false data# peer#
2025-04-10T10:55:23.090006Z node 5 :GRPC_SERVER DEBUG: [0x51b0001b9e80] received request Name# Coordination/CreateNode ok# false data# peer#
2025-04-10T10:55:23.090094Z node 5 :GRPC_SERVER DEBUG: [0x51b0001ba580] received request Name# Coordination/AlterNode ok# false data# peer#
2025-04-10T10:55:23.090304Z node 5 :GRPC_SERVER DEBUG: [0x51b0001bdd80] received request Name# Coordination/DropNode ok# false data# peer#
2025-04-10T10:55:23.090384Z node 5 :GRPC_SERVER DEBUG: [0x51b0001be480] received request Name# Coordination/DescribeNode ok# false data# peer#
2025-04-10T10:55:23.090563Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0080] received request Name# CreateDatabase ok# false data# peer#
2025-04-10T10:55:23.090641Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0e80] received request Name# GetDatabaseStatus ok# false data# peer#
2025-04-10T10:55:23.090850Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0780] received request Name# AlterDatabase ok# false data# peer#
2025-04-10T10:55:23.090916Z node 5 :GRPC_SERVER DEBUG: [0x51b00028c580] received request Name# ListDatabases ok# false data# peer#
2025-04-10T10:55:23.091105Z node 5 :GRPC_SERVER DEBUG: [0x51b00028be80] received request Name# RemoveDatabase ok# false data# peer#
2025-04-10T10:55:23.091173Z node 5 :GRPC_SERVER DEBUG: [0x51b00028cc80] received request Name# DescribeDatabaseOptions ok# false data# peer#
2025-04-10T10:55:23.091414Z node 5 :GRPC_SERVER DEBUG: [0x51b00028da80] received request Name# GetScaleRecommendation ok# false data# peer#
2025-04-10T10:55:23.091451Z node 5 :GRPC_SERVER DEBUG: [0x51b00028ef80] received request Name# ListEndpoints ok# false data# peer#
2025-04-10T10:55:23.091690Z node 5 :GRPC_SERVER DEBUG: [0x51b00028b780] received request Name# WhoAmI ok# false data# peer#
2025-04-10T10:55:23.091730Z node 5 :GRPC_SERVER DEBUG: [0x51b00028e880] received request Name# NodeRegistration ok# false data# peer#
2025-04-10T10:55:23.091959Z node 5 :GRPC_SERVER DEBUG: [0x51b00028f680] received request Name# Scan ok# false data# peer#
2025-04-10T10:55:23.092001Z node 5 :GRPC_SERVER DEBUG: [0x51b0002af580] received request Name# GetShardLocations ok# false data# peer#
2025-04-10T10:55:23.092217Z node 5 :GRPC_SERVER DEBUG: [0x51b0002aee80] received request Name# DescribeTable ok# false data# peer#
2025-04-10T10:55:23.092252Z node 5 :GRPC_SERVER DEBUG: [0x51b000166c80] received request Name# CreateSnapshot ok# false data# peer#
2025-04-10T10:55:23.092478Z node 5 :GRPC_SERVER DEBUG: [0x51b000167380] received request Name# RefreshSnapshot ok# false data# peer#
2025-04-10T10:55:23.092503Z node 5 :GRPC_SERVER DEBUG: [0x51b000167a80] received request Name# DiscardSnapshot ok# false data# peer#
2025-04-10T10:55:23.092751Z node 5 :GRPC_SERVER DEBUG: [0x51b000168180] received request Name# List ok# false data# peer#
2025-04-10T10:55:23.092763Z node 5 :GRPC_SERVER DEBUG: [0x51b000168880] received request Name# RateLimiter/CreateResource ok# false data# peer#
2025-04-10T10:55:23.093031Z node 5 :GRPC_SERVER DEBUG: [0x51b000168f80] received request Name# RateLimiter/AlterResource ok# false data# peer#
2025-04-10T10:55:23.093041Z node 5 :GRPC_SERVER DEBUG: [0x51b000169680] received request Name# RateLimiter/DropResource ok# false data# peer#
2025-04-10T10:55:23.093282Z node 5 :GRPC_SERVER DEBUG: [0x51b000169d80] received request Name# RateLimiter/ListResources ok# false data# peer#
2025-04-10T10:55:23.093313Z node 5 :GRPC_SERVER DEBUG: [0x51b00016a480] received request Name# RateLimiter/DescribeResource ok# false data# peer#
2025-04-10T10:55:23.093556Z node 5 :GRPC_SERVER DEBUG: [0x51b00016ab80] received request Name# RateLimiter/AcquireResource ok# false data# peer#
2025-04-10T10:55:23.093605Z node 5 :GRPC_SERVER DEBUG: [0x51b00016b980] received request Name# CreateStream ok# false data# peer#
2025-04-10T10:55:23.093812Z node 5 :GRPC_SERVER DEBUG: [0x51b00016c080] received request Name# ListStreams ok# false data# peer#
2025-04-10T10:55:23.093872Z node 5 :GRPC_SERVER DEBUG: [0x51b00016c780] received request Name# DeleteStream ok# false data# peer#
2025-04-10T10:55:23.094097Z node 5 :GRPC_SERVER DEBUG: [0x51b00016b280] received request Name# DescribeStream ok# false data# peer#
2025-04-10T10:55:23.094197Z node 5 :GRPC_SERVER DEBUG: [0x51b00016d580] received request Name# ListShards ok# false data# peer#
2025-04-10T10:55:23.094480Z node 5 :GRPC_SERVER DEBUG: [0x51b000088880] received request Name# SetWriteQuota ok# false data# peer#
2025-04-10T10:55:23.094776Z node 5 :GRPC_SERVER DEBUG: [0x51b000088180] received request Name# UpdateStream ok# false data# peer#
2025-04-10T10:55:23.095081Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bc680] received request Name# PutRecord ok# false data# peer#
2025-04-10T10:55:23.095368Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bbf80] received request Name# PutRecords ok# false data# peer#
2025-04-10T10:55:23.095604Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bb880] received request Name# GetRecords ok# false data# peer#
2025-04-10T10:55:23.095665Z node 5 :GRPC_SERVER DEBUG: [0x51b0003bb180] received request Name# GetShardIterator ok# false data# peer#
2025-04-10T10:55:23.095854Z node 5 :GRPC_SERVER DEBUG: [0x51b0003baa80] received request Name# SubscribeToShard ok# false data# peer#
2025-04-10T10:55:23.095910Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b9c80] received request Name# DescribeLimits ok# false data# peer#
2025-04-10T10:55:23.096113Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b9580] received request Name# DescribeStreamSummary ok# false data# peer#
2025-04-10T10:55:23.096160Z node 5 :GRPC_SERVER DEBUG: [0x51b000081180] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer#
2025-04-10T10:55:23.096369Z node 5 :GRPC_SERVER DEBUG: [0x51b000081880] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer#
2025-04-10T10:55:23.096419Z node 5 :GRPC_SERVER DEBUG: [0x51b000081f80] received request Name# UpdateShardCount ok# false data# peer#
2025-04-10T10:55:23.096620Z node 5 :GRPC_SERVER DEBUG: [0x51b000082680] received request Name# UpdateStreamMode ok# false data# peer#
2025-04-10T10:55:23.096702Z node 5 :GRPC_SERVER DEBUG: [0x51b000082d80] received request Name# RegisterStreamConsumer ok# false data# peer#
2025-04-10T10:55:23.096875Z node 5 :GRPC_SERVER DEBUG: [0x51b000083480] received request Name# DeregisterStreamConsumer ok# false data# peer#
2025-04-10T10:55:23.096949Z node 5 :GRPC_SERVER DEBUG: [0x51b000083b80] received request Name# DescribeStreamConsumer ok# false data# peer#
2025-04-10T10:55:23.097157Z node 5 :GRPC_SERVER DEBUG: [0x51b000084280] received request Name# ListStreamConsumers ok# false data# peer#
2025-04-10T10:55:23.097226Z node 5 :GRPC_SERVER DEBUG: [0x51b000084980] received request Name# AddTagsToStream ok# false data# peer#
2025-04-10T10:55:23.097418Z node 5 :GRPC_SERVER DEBUG: [0x51b000085080] received request Name# DisableEnhancedMonitoring ok# false data# peer#
2025-04-10T10:55:23.097516Z node 5 :GRPC_SERVER DEBUG: [0x51b000085780] received request Name# EnableEnhancedMonitoring ok# false data# peer#
2025-04-10T10:55:23.097696Z node 5 :GRPC_SERVER DEBUG: [0x51b000085e80] received request Name# ListTagsForStream ok# false data# peer#
2025-04-10T10:55:23.097780Z node 5 :GRPC_SERVER DEBUG: [0x51b0002cfb80] received request Name# MergeShards ok# false data# peer#
2025-04-10T10:55:23.097947Z node 5 :GRPC_SERVER DEBUG: [0x51b000086580] received request Name# RemoveTagsFromStream ok# false data# peer#
2025-04-10T10:55:23.098058Z node 5 :GRPC_SERVER DEBUG: [0x51b000086c80] received request Name# SplitShard ok# false data# peer#
2025-04-10T10:55:23.098254Z node 5 :GRPC_SERVER DEBUG: [0x51b000087380] received request Name# StartStreamEncryption ok# false data# peer#
2025-04-10T10:55:23.098388Z node 5 :GRPC_SERVER DEBUG: [0x51b000087a80] received request Name# StopStreamEncryption ok# false data# peer#
2025-04-10T10:55:23.098538Z node 5 :GRPC_SERVER DEBUG: [0x51b000088f80] received request Name# SelfCheck ok# false data# peer#
2025-04-10T10:55:23.098645Z node 5 :GRPC_SERVER DEBUG: [0x51b000089680] received request Name# NodeCheck ok# false data# peer#
2025-04-10T10:55:23.098792Z node 5 :GRPC_SERVER DEBUG: [0x51b00017a080] received request Name# CreateSession ok# false data# peer#
2025-04-10T10:55:23.098883Z node 5 :GRPC_SERVER DEBUG: [0x51b00017ae80] received request Name# DeleteSession ok# false data# peer#
2025-04-10T10:55:23.099180Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b8e80] received request Name# AttachSession ok# false data# peer#
2025-04-10T10:55:23.099448Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b8080] received request Name# BeginTransaction ok# false data# peer#
2025-04-10T10:55:23.099731Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b7980] received request Name# CommitTransaction ok# false data# peer#
2025-04-10T10:55:23.099825Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b7280] received request Name# RollbackTransaction ok# false data# peer#
2025-04-10T10:55:23.100012Z node 5 :GRPC_SERVER DEBUG: [0x51b00017a780] received request Name# ExecuteQuery ok# false data# peer#
2025-04-10T10:55:23.100024Z node 5 :GRPC_SERVER DEBUG: [0x51b00008a480] received request Name# ExecuteScript ok# false data# peer#
2025-04-10T10:55:23.100225Z node 5 :GRPC_SERVER DEBUG: [0x51b00008ab80] received request Name# FetchScriptResults ok# false data# peer#
2025-04-10T10:55:23.100281Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b6b80] received request Name# ExecuteTabletMiniKQL ok# false data# peer#
2025-04-10T10:55:23.100415Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b6480] received request Name# ChangeTabletSchema ok# false data# peer#
2025-04-10T10:55:23.100548Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b5d80] received request Name# RestartTablet ok# false data# peer#
2025-04-10T10:55:23.100604Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b5680] received request Name# CreateLogStore ok# false data# peer#
2025-04-10T10:55:23.100802Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b4f80] received request Name# DescribeLogStore ok# false data# peer#
2025-04-10T10:55:23.100821Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b4880] received request Name# DropLogStore ok# false data# peer#
2025-04-10T10:55:23.100999Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b4180] received request Name# AlterLogStore ok# false data# peer#
2025-04-10T10:55:23.101081Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b3a80] received request Name# CreateLogTable ok# false data# peer#
2025-04-10T10:55:23.101183Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b3380] received request Name# DescribeLogTable ok# false data# peer#
2025-04-10T10:55:23.101350Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b2c80] received request Name# DropLogTable ok# false data# peer#
2025-04-10T10:55:23.101373Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b2580] received request Name# AlterLogTable ok# false data# peer#
2025-04-10T10:55:23.101607Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b1e80] received request Name# Login ok# false data# peer#
2025-04-10T10:55:23.101646Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b1780] received request Name# DescribeReplication ok# false data# peer#
2025-04-10T10:55:23.101796Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b1080] received request Name# DescribeView ok# false data# peer#
>> TObjectStorageListingTest::ManyDeletes [GOOD]
|67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter
>> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING; ...
onent=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:26.411957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:26.412074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:26.412125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:26.412165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:26.412216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:26.412269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:26.412343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:26.412444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:26.412820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-10T10:55:26.510943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:26.511022Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:26.527593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:26.527897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:26.528098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:26.535778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:26.536019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:26.536787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:26.537201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:26.539198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:26.540547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:26.540610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:26.540856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:26.540911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:26.540959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:26.541162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.548024Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:26.695653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:26.695923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.696137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:26.696385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:26.696443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.701505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:26.701728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:26.701998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.702068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:26.702114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:26.702177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:26.706305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.706379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:26.706421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:26.714974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.715054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.715098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:26.715150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:26.719477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:26.721846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:26.722032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:26.723110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:26.723263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:26.723321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:26.723733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:26.723804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:26.724011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-10T10:55:26.724215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:26.727529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:26.727580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:26.727769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:26.727823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:26.728068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:26.728117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:26.728221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:26.728266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:26.728311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:26.728346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:26.728385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:26.728442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:26.728483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:26.728521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:26.728594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:26.728638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:26.728672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:26.731058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:26.731177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:26.731257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Type: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:55:27.427185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-10T10:55:27.427341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:55:27.427551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-10T10:55:27.427583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-10T10:55:27.427614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-10T10:55:27.427866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:27.427967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:27.428014Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-10T10:55:27.428051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-10T10:55:27.429910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.429957Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T10:55:27.430052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T10:55:27.430080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T10:55:27.430114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T10:55:27.430168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T10:55:27.430196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
281474976710760, ready parts: 1/1, is published: true 2025-04-10T10:55:27.430256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-04-10T10:55:27.430295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T10:55:27.430322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-10T10:55:27.430346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T10:55:27.430412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-10T10:55:27.432232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T10:55:27.432300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T10:55:27.432374Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-10T10:55:27.432495Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:55:27.434028Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T10:55:27.434098Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-10T10:55:27.434186Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T10:55:27.435622Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T10:55:27.435712Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T10:55:27.435751Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T10:55:27.435854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:55:27.435895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2434] TestWaitNotification: OK eventTxId 102 2025-04-10T10:55:27.436474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:27.436763Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 328us result status StatusSuccess 2025-04-10T10:55:27.437230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueMirrorer::ValidStartStream [GOOD] |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTestsWithReboots::CopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:27.094458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:27.094559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:27.094597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:27.094631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:27.094671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:27.094713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:27.094779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:27.094845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T10:55:27.095218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:27.189559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:27.189642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:27.207425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:27.207726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:27.207900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:27.219119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:27.219366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:27.220180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:27.220598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:27.222722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:27.224019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:27.224103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:27.224286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:27.224339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:27.224382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:27.224542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.240882Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:27.390758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:27.391010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.391268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:27.391541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:27.391605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.394264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T10:55:27.394454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:27.394649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.394707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:27.394746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:27.394782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:27.396669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.396714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:27.396747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:27.398380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.398413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.398443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:27.398477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:27.408399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:27.410271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:27.410522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:27.411364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:27.411492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:27.411542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:27.411774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:27.411821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:27.411988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:27.412070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:27.414221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:27.414269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:27.414435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:27.414499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:27.414744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.414789Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:27.414880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:27.414930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:27.414979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:27.415016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:27.415060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:27.415116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:27.415153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:27.415184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:27.415250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:27.415289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:27.415325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:27.417769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:27.417886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:27.417928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
type: 269090816 2025-04-10T10:55:27.781422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:55:27.782321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:27.782442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:27.782524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:55:27.782843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:55:27.783023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:55:27.786659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:27.786735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:55:27.787028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:27.787077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:55:27.787511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.787576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T10:55:27.788193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:27.788308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:55:27.788358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:55:27.788472Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T10:55:27.788522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:55:27.788605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 
2025-04-10T10:55:27.791721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:55:27.808839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1276 } } 2025-04-10T10:55:27.808928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:27.809103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1276 } } 2025-04-10T10:55:27.809238Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1276 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:55:27.810454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:27.810516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:27.810674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:27.810732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:27.810824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T10:55:27.810912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:27.810953Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.811008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:55:27.811057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:55:27.814176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at 
schemeshard: 72057594046678944 2025-04-10T10:55:27.815540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.815720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:27.815776Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:55:27.815890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:27.815928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:27.815971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:55:27.816002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:27.816094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:55:27.816249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:55:27.816310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:55:27.816354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:55:27.816397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:55:27.816556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:55:27.818674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:55:27.818737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2365] TestWaitNotification: OK eventTxId 102 2025-04-10T10:55:27.819339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:27.819598Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 296us result status StatusSuccess 2025-04-10T10:55:27.820128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCmsTenatsTest::TestClusterLimit >> Cdc::InitialScanDebezium [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> Cdc::InitialScanRacyCompleteAndRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-04-10T10:54:47.956946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636239931507451:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:47.957371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002031/r3tmp/tmp7CF6FY/pdisk_1.dat 2025-04-10T10:54:48.556856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:48.556936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:48.563159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:48.625774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28080, node 1 2025-04-10T10:54:48.838752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:48.838774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:48.838780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:48.838889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30078 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.491500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.530485Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:49.586054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:52.946751Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636239931507451:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:52.947175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:58.296495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636287737135420:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002031/r3tmp/tmp4RS3Wc/pdisk_1.dat 2025-04-10T10:54:58.357755Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:58.491389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:58.524069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:58.524169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:58.527322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24034, node 2 2025-04-10T10:54:58.674916Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:58.674948Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:58.674956Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:58.675085Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28774 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:59.076962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:59.082135Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:59.100553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... .2025-04-10T10:55:03.222251Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636287737135420:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:03.222418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; .2025-04-10T10:55:13.454812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:55:13.454863Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:15.489333Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-04-10T10:55:15.489528Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T10:55:15.490395Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2025-04-10T10:55:15.490424Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2025-04-10T10:55:15.490763Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-10T10:55:15.490762Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T10:55:15.491463Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2025-04-10T10:55:15.491928Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2025-04-10T10:55:15.492901Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-04-10T10:55:15.492984Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T10:55:15.493027Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-10T10:55:15.493038Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 
72075186224037890 2025-04-10T10:55:15.500269Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1744282515542 at tablet 72075186224037891 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282515542 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-04-10T10:55:15.500337Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T10:55:15.500574Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T10:55:15.500602Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:55:15.500638Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744282515542:281474976711500] in PlanQueue unit at 72075186224037891 2025-04-10T10:55:15.500697Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1744282515542:281474976711500 2025-04-10T10:55:15.501979Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:55:15.502739Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1744282515542 at tablet 72075186224037889 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282515542 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:55:15.502768Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:55:15.502912Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:55:15.502930Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:55:15.502952Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744282515542:281474976711500] in PlanQueue unit at 72075186224037889 2025-04-10T10:55:15.502987Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1744282515542:281474976711500 2025-04-10T10:55:15.504836Z node 2 :TX_DATASHARD DEBUG: tx 281474976711500 released its data 2025-04-10T10:55:15.504875Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:15.505323Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1744282515542 at tablet 72075186224037890 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282515542 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-04-10T10:55:15.505338Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:55:15.505437Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:55:15.505471Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:55:15.505489Z n ... 
ned 1 2025-04-10T10:55:26.838994Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744282526882:281474976711911] in PlanQueue unit at 72075186224037890 2025-04-10T10:55:26.839023Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 got data tx from cache 1744282526882:281474976711911 2025-04-10T10:55:26.839151Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711911 at step 1744282526882 at tablet 72075186224037889 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282526882 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:55:26.839179Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:55:26.839273Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:55:26.839287Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:55:26.839322Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744282526882:281474976711911] in PlanQueue unit at 72075186224037889 2025-04-10T10:55:26.839354Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1744282526882:281474976711911 2025-04-10T10:55:26.840143Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.840163Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.840599Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.840620Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.842701Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T10:55:26.843649Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037892 restored its data 2025-04-10T10:55:26.844657Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.844683Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.844814Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1744282526882} 2025-04-10T10:55:26.845055Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:55:26.845844Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-04-10T10:55:26.846524Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:55:26.847362Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037890 restored its data 2025-04-10T10:55:26.847363Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.847392Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.848106Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1744282526882} 2025-04-10T10:55:26.848186Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.848207Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.848218Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 
2025-04-10T10:55:26.848921Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037892 restored its data 2025-04-10T10:55:26.849884Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:55:26.850932Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:55:26.851692Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037890 restored its data 2025-04-10T10:55:26.852457Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:55:26.852542Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:55:26.852746Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1744282526882} 2025-04-10T10:55:26.852812Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1744282526882} 2025-04-10T10:55:26.852838Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T10:55:26.852891Z node 2 :TX_DATASHARD DEBUG: Complete [1744282526882 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7491636407996233972:3583], exec latency: 0 ms, propose latency: 15 ms 2025-04-10T10:55:26.852914Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T10:55:26.853098Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T10:55:26.853126Z node 2 :TX_DATASHARD DEBUG: Complete [1744282526882 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7491636407996233972:3583], exec latency: 12 ms, propose latency: 15 ms 2025-04-10T10:55:26.853139Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T10:55:26.853167Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-04-10T10:55:26.855619Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-10T10:55:26.855645Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:55:26.857153Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:55:26.857208Z node 2 :TX_DATASHARD DEBUG: Complete [1744282526882 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7491636407996233972:3583], exec latency: 13 ms, propose latency: 18 ms 2025-04-10T10:55:26.857238Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:55:26.885288Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:55:26.886451Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-04-10T10:55:26.909291Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:55:26.924385Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:55:26.924478Z node 2 :TX_DATASHARD DEBUG: Complete [1744282526882 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7491636407996233972:3583], exec latency: 69 ms, propose latency: 85 ms 
2025-04-10T10:55:26.924516Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:55:26.967424Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.968428Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.968903Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.969394Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.969628Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-04-10T10:55:26.970185Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.970298Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.970786Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.971264Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.971822Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:55:26.972327Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 
Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-04-10T10:55:26.972886Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-04-10T10:55:26.973427Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-04-10T10:55:26.973597Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> TCmsTest::WalleRebootDownNode >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TCmsTest::RequestReplaceDevices >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:29.389749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:29.389856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:29.389893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:29.389924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:29.389969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:29.390016Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:29.390077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:29.390183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:29.390558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:29.468724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:29.468804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:29.482746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:29.483028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:29.483233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:29.491589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:29.491809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:29.492535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.492963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:29.496590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.497913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:29.497972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.498112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:29.498166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:29.498199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:29.498336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.503932Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:29.645660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:29.645864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.646025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:29.646284Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:29.646361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.649145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.649295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:29.649500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.649565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:29.649748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:29.649786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:29.656905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.656984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:29.657036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:29.660192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.660252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.660296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:29.660343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.664205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:29.668827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:29.669018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:29.670011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.670163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:29.670212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:29.670536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:29.670589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:29.670768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:29.670845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:29.674460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:29.674522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:29.674694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.674764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:29.674989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.675031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:29.675139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:29.675177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.675217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:29.675252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.675292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:29.675351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.675401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:29.675437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:29.675504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:29.675571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:29.675604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:29.677871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:29.677984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:29.678025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... esult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1262 } } 2025-04-10T10:55:30.041692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.041746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-10T10:55:30.041866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.041916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:30.041999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.042073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.042137Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:55:30.042245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:55:30.042291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-10T10:55:30.042858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.042890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T10:55:30.042973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.043021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:30.043096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.043149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.043198Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.043231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:55:30.043295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T10:55:30.047491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.048091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.048285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.049776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.049863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:55:30.050002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.050072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:55:30.050382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.050637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T10:55:30.050692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-10T10:55:30.050818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T10:55:30.050857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T10:55:30.050919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T10:55:30.050976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T10:55:30.051038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-10T10:55:30.051357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.051409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T10:55:30.051519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T10:55:30.051561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:55:30.051592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T10:55:30.051629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:55:30.051657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is 
published: true 2025-04-10T10:55:30.051741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-04-10T10:55:30.051792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T10:55:30.051838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T10:55:30.051868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T10:55:30.052014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:55:30.052057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-10T10:55:30.052100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-10T10:55:30.052136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T10:55:30.052160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-10T10:55:30.052199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-10T10:55:30.052255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:55:30.054612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T10:55:30.054658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-04-10T10:55:30.055133Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:30.055506Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 318us result status StatusSuccess 2025-04-10T10:55:30.056136Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } 
} IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TCmsTest::ManageRequestsWrong ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:30.024108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:30.024217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:30.024262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:30.024298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:30.024339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:30.024383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:30.024436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:30.024517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:30.024854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:30.111698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:30.111761Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T10:55:30.126902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:30.127146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:30.127314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:30.134270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:30.134464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:30.135170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.135529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:30.137321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.138569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:30.138636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.138809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:30.138853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:30.138890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:30.139064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.144901Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:30.299685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:30.299899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.300115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:30.300320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:30.300363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.308084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.308291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:30.308550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.308622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:30.308664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:30.308707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:30.317763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.317864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:30.317907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:30.331516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.331611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.331666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.331733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.357611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:30.367238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:30.367544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:30.368692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.368866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:30.368921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.369240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:30.369297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.369533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:30.369654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:30.375637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:30.375703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:30.375944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.376024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:30.376321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.376375Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:30.376506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:30.376548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.376593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:30.376628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.376667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:30.376737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.376791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:30.376839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:30.376935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:30.376988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:30.377031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:30.379645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:30.379780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:30.379827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T10:55:30.379886Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T10:55:30.379941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:30.380128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T10:55:30.398577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-10T10:55:30.399234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744282530.400587 1107263 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-10T10:55:30.404944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:30.405384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.405544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-10T10:55:30.406038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1744282530 seconds (20188 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-04-10T10:55:30.406730Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T10:55:30.427690Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T10:55:30.428731Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:55:30.438259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1744282530 seconds (20188 days, 55 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:30.438503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1744282530 seconds (20188 days, 55 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T10:55:30.440327Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TCmsTest::RequestRestartServicesMultipleNodes >> TCmsTest::StateRequestNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:30.348589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:30.348675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:30.348710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:30.348741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:30.348777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:30.348810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:30.348860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:30.348918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:30.349169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:30.438468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:30.438550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:30.454676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:30.454983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:30.455152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:30.462287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:30.462502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:30.463226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.463613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:30.465368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.466679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:30.466750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.466932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:30.466983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:30.467023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:30.467204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.473957Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:30.622063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:30.622317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.622503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:30.622710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:30.622754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.624907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.625056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:30.625221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.625274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:30.625304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:30.625332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:30.631274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.631356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:30.631407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:30.636479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.636564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.636614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.636668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.640597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:30.651114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:30.651423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:30.652528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.652682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:30.652737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.653036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:30.653098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:30.653313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:30.653398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:30.656231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:30.656283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:30.656513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.656581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:30.656826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.656878Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:30.657006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:30.657045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.657092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:30.657135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.657205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:30.657266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:30.657311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:30.657366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:30.657458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:30.657510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:30.657546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:30.660077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:30.660207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:30.660255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
78944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-10T10:55:30.913920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.914066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:30.914131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-10T10:55:30.914337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-10T10:55:30.914496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:30.914573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-10T10:55:30.921823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:30.921893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:30.922106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:55:30.922400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:30.922466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T10:55:30.922515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T10:55:30.923195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.923275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T10:55:30.924426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:30.924555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:30.924606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T10:55:30.924663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T10:55:30.924715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:30.925788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:30.925867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:30.925896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T10:55:30.925926Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:55:30.925970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:55:30.926057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T10:55:30.931862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8552 } } 2025-04-10T10:55:30.931932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:30.932075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8552 } } 2025-04-10T10:55:30.932176Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8552 } } 2025-04-10T10:55:30.933806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.933864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:30.934036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.934103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:30.934220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { 
RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:30.934297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:30.934339Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.934393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:55:30.934441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T10:55:30.935438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.935511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:30.937318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.937500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.937791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.937836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T10:55:30.937985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T10:55:30.938041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:30.938098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T10:55:30.938134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:30.938199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T10:55:30.938277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 101 2025-04-10T10:55:30.938339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:30.938410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T10:55:30.938450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T10:55:30.938585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:55:30.940569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T10:55:30.940643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T10:55:30.944088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: 
"modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:30.944511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:55:30.944917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-04-10T10:55:30.947577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:30.947755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTest::RequestRestartServicesRejectSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-04-10T10:53:01.880288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635783522269067:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.880635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:02.034948Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001960/r3tmp/tmpVTVClE/pdisk_1.dat 2025-04-10T10:53:02.281438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12646, node 1 2025-04-10T10:53:02.295470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.295562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.297505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.347265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001960/r3tmp/yandexWCUPnq.tmp 2025-04-10T10:53:02.347300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001960/r3tmp/yandexWCUPnq.tmp 2025-04-10T10:53:02.347477Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001960/r3tmp/yandexWCUPnq.tmp 2025-04-10T10:53:02.347667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-04-10T10:53:02.402747Z INFO: TTestServer started on Port 7604 GrpcPort 12646 TClient is connected to server localhost:7604 PQClient connected to localhost:12646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.707576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.735329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:02.789525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:02.978511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:53:04.914221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635796407171531:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.914345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635796407171543:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.914365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.919149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.928775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635796407171547:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.187656Z node 1 :TX_PROXY ERROR: Actor# [1:7491635800702138907:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.221873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.271373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.316969Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635800702138915:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.318823Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWY3NGM2ZGItNWU3MzA4YjYtY2E1Yjg2MDctNzg2ZmZmZA==, ActorId: [1:7491635796407171528:2336], ActorState: ExecuteState, TraceId: 01jrfn9yfmax07egdgaex58z93, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.321065Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.374985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635800702139203:2628] 2025-04-10T10:53:06.880509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635783522269067:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.880570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.839979Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:11.867364Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:11.868871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635826471943277:2792], Recipient [1:7491635787817236547:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.868909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.868930Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:11.868972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635826471943273:2789], Recipient [1:7491635787817236547:2193]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:11.868989Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:11.935807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:11.936237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:11.936502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:11.936545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:11.936580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:11.936613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:53:11.936631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 
3 2025-04-10T10:53:11.936650Z node 1 :FL ... DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 1 EndOffset: 10 WriteTimestampMS: 1744282526040 CreateTimestampMS: 1744282525870 SizeLag: 1179 WriteTimestampEstimateMS: 1744282526184 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T10:55:26.212421Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 2025-04-10T10:55:26.212519Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 sending to client partition status 2025-04-10T10:55:26.213620Z :INFO: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2025-04-10T10:55:26.214346Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2025-04-10T10:55:26.214528Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-04-10T10:55:26.214588Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 clientCommitOffset (empty maybe) clientReadOffset 5 2025-04-10T10:55:26.214623Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-04-10T10:55:26.214690Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1744282526040, sizeLag# 1179 2025-04-10T10:55:26.214710Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1TEvPartitionReady. 
Aval parts: 1 2025-04-10T10:55:26.214771Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 performing read request: guid# d410a4ea-edfd2b20-7bab1bfe-103b7854, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1414, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-10T10:55:26.214880Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1414 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 1 committedOffset 1 Guid d410a4ea-edfd2b20-7bab1bfe-103b7854 2025-04-10T10:55:26.216030Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T10:55:26.216084Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-10T10:55:26.216252Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1414 endOffset 10 max time lag 0ms effective offset 5 2025-04-10T10:55:26.216298Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10 2025-04-10T10:55:26.216511Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2025-04-10T10:55:26.216537Z node 9 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:55:26.216947Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-04-10T10:55:26.217768Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1744282526088 CreateTimestampMS: 1744282526079 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1744282526161 CreateTimestampMS: 1744282526079 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1744282526161 CreateTimestampMS: 1744282526079 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1744282526161 CreateTimestampMS: 1744282526079 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1744282526162 CreateTimestampMS: 1744282526079 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 EndOffset: 10 StartOffset: 0 } Cookie: 5 } 2025-04-10T10:55:26.218010Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-04-10T10:55:26.218058Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid d410a4ea-edfd2b20-7bab1bfe-103b7854 has messages 1 2025-04-10T10:55:26.218557Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 read done: guid# d410a4ea-edfd2b20-7bab1bfe-103b7854, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 673 2025-04-10T10:55:26.218591Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 response to read: guid# d410a4ea-edfd2b20-7bab1bfe-103b7854 2025-04-10T10:55:26.218848Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 Process answer. Aval parts: 0 2025-04-10T10:55:26.219893Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] Got ReadResponse, serverBytesSize = 673, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428127 2025-04-10T10:55:26.220080Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428127 2025-04-10T10:55:26.220807Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2025-04-10T10:55:26.220865Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] Returning serverBytesSize = 673 to budget 2025-04-10T10:55:26.220915Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] In ContinueReadingDataImpl, ReadSizeBudget = 673, ReadSizeServerDelta = 52428127 2025-04-10T10:55:26.221216Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-10T10:55:26.221646Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-04-10T10:55:26.221727Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-04-10T10:55:26.221774Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (7-7) 2025-04-10T10:55:26.221815Z :DEBUG: [] Take Data. Partition 0. Read: {1, 2} (8-8) 2025-04-10T10:55:26.221867Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (9-9) 2025-04-10T10:55:26.221951Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] The application data is transferred to the client. 
Number of messages 5, size 115 bytes 2025-04-10T10:55:26.222034Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] Returning serverBytesSize = 0 to budget 2025-04-10T10:55:26.221815Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 grpc read done: success# 1, data# { read_request { bytes_size: 673 } } 2025-04-10T10:55:26.221964Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 got read request: guid# 2588fdb1-f0cc697f-d6166ff5-84677250 2025-04-10T10:55:26.222247Z :INFO: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] Closing read session. Close timeout: 0.000000s 2025-04-10T10:55:26.222316Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:1 2025-04-10T10:55:26.222386Z :INFO: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 41 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:55:26.222517Z :NOTICE: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T10:55:26.222574Z :DEBUG: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] [] Abort session to cluster 2025-04-10T10:55:26.223211Z :NOTICE: [] [] [b8b84766-d500287b-873fc32f-c4e5331e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:55:26.229028Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|96e38265-ece476f-70ed9724-8e03d09e_0] Write session: destroy 2025-04-10T10:55:26.231140Z node 8 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 grpc read done: success# 0, data# { } 2025-04-10T10:55:26.231186Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 grpc read failed 2025-04-10T10:55:26.231234Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 grpc closed 2025-04-10T10:55:26.231279Z node 8 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_8_2_6246105783948493923_v1 is DEAD 2025-04-10T10:55:26.234500Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [8:7491636405908299750:2539] disconnected; active server actors: 1 2025-04-10T10:55:26.234533Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [8:7491636405908299750:2539] client user disconnected session shared/user_8_2_6246105783948493923_v1 2025-04-10T10:55:26.235634Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_8_2_6246105783948493923_v1 2025-04-10T10:55:26.235689Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [8:7491636405908299753:2542] destroyed 2025-04-10T10:55:26.235755Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_8_2_6246105783948493923_v1 >> TMaintenanceApiTest::SingleCompositeActionGroup >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceDevicePDisk >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownNode >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> Viewer::QueryExecuteScript [FAIL] >> Viewer::Plan2SvgOK >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TNodeBrokerTest::TestRandomActions [GOOD] |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |67.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> 
TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTest::ManageRequestsDry [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_ChildFirst [GOOD] >> TCmsTest::Notifications >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit >> TMaintenanceApiTest::ActionReason [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-04-10T10:53:21.431273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:53:21.431339Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:21.556036Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:21.620306Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:21.802417Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:53:23.587389Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:23.675584Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:24.656921Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.103815Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.104510Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.119850Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.120395Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.175947Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.625411Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.641799Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.740701Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.741923Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.762662Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.843074Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.845239Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:25.884294Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:26.737770Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-10T10:53:26.774904Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 
2025-04-10T10:53:26.863733Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:26.887551Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:26.955451Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:26.981526Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-10T10:53:26.982992Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-10T10:53:26.984237Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:26.985132Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-10T10:53:27.005989Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-10T10:53:27.092665Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:27.093392Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:53:27.108431Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:27.859662Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:53:27.896251Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:53:27.911840Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:53:28.045855Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:53:28.110369Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:28.221557Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:28.337240Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:53:28.337747Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:29.703124Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:29.703797Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:30.198365Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:53:30.223647Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:30.628702Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:31.153077Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:31.191392Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.027981Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.028533Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.508987Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.549383Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: 
Unknown node 2025-04-10T10:53:32.608267Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.667437Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.668956Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.713786Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:32.797432Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:32.797987Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:53:32.828280Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-10T10:53:33.869425Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-10T10:53:33.886351Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.029994Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.031260Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.031822Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.032649Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:53:34.048866Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.072964Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.073572Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.074119Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.088835Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-10T10:53:34.866402Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-04-10T10:53:34.988948Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:53:35.027546Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Node has expired 2025-04-10T10:53:35.561529Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:35.671932Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:35.672581Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:53:35.690520Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-04-10T10:53:36.183464Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:36.639356Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:36.640071Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:36.640697Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:36.668903Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 
2025-04-10T10:53:36.669680Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:37.208879Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:53:37.210163Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:53:37.644466Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:53:37.665212Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.085161Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.114421Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.116306Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.116887Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.132295Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.179087Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.765766Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.838014Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.872192Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:38.873003Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:53:39.474684Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-10T10:53:39.546249Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:53:40.168400Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-04-10T10:53:40.183598Z node 1 :NODE_BROKER ERROR: Cannot extend lease for ... 
node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.005245Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.296163Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.328233Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.332280Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.366602Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.396947Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.447183Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.481400Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.490250Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.492624Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:17.662871Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-10T10:55:17.669100Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-10T10:55:18.716303Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:19.264464Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Node has expired 2025-04-10T10:55:19.337759Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Node has expired 2025-04-10T10:55:19.840085Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-10T10:55:19.912224Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:20.409722Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:20.412339Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:20.621975Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-10T10:55:21.471646Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:21.475077Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:21.477949Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:21.567756Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-04-10T10:55:21.573040Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-10T10:55:22.257577Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.286248Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.327880Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.330444Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.332804Z node 1 
:NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.407747Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:22.502509Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:22.507395Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:22.569862Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:22.572106Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:55:22.696916Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:23.656415Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:23.658188Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:23.758117Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2025-04-10T10:55:25.183436Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:25.579961Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:55:26.494817Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:27.109748Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:27.321686Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:55:27.323822Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:55:27.325839Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:55:27.344151Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.121738Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.148660Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.151321Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.188800Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.222699Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.348581Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-10T10:55:28.428219Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:28.452227Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:28.461682Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-04-10T10:55:28.464579Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:28.537828Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:28.627767Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node 
has expired 2025-04-10T10:55:28.693872Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.432778Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:29.728039Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-04-10T10:55:29.768247Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-10T10:55:29.771265Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.802208Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.824249Z node 1 :NODE_BROKER ERROR: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-04-10T10:55:29.826799Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-04-10T10:55:29.894212Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.918652Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.921262Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired 2025-04-10T10:55:29.942264Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-04-10T10:55:30.682402Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-10T10:55:30.831519Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Node has expired 2025-04-10T10:55:31.300531Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:31.394863Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:31.408080Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:31.412038Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:31.414264Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-10T10:55:31.505916Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-10T10:55:31.509000Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:31.511071Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-04-10T10:55:31.513949Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:31.515993Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-10T10:55:31.518446Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-10T10:55:32.379223Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-10T10:55:32.503236Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:32.982939Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-10T10:55:33.054921Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-10T10:55:33.056664Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node 
#1028: WRONG_REQUEST: Unknown node 2025-04-10T10:55:33.216238Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-10T10:55:33.320222Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-04-10T10:55:33.331484Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-10T10:55:33.335643Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired 2025-04-10T10:55:33.370309Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-04-10T10:55:34.103996Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:34.106738Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:34.580060Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:34.584137Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:34.608937Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-10T10:55:34.611758Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTenatsTest::RequestRestartServices [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-04-10T10:53:02.109345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635789170677844:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:02.110070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001969/r3tmp/tmphJIQlk/pdisk_1.dat 2025-04-10T10:53:02.283416Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:53:02.452972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19163, node 1 2025-04-10T10:53:02.496023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.496112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.506729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:02.563845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001969/r3tmp/yandexiU4zMr.tmp 
2025-04-10T10:53:02.563892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001969/r3tmp/yandexiU4zMr.tmp 2025-04-10T10:53:02.586969Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001969/r3tmp/yandexiU4zMr.tmp 2025-04-10T10:53:02.587169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.619317Z INFO: TTestServer started on Port 13523 GrpcPort 19163 TClient is connected to server localhost:13523 PQClient connected to localhost:19163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.917922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.972697Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:03.042930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:05.104309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635802055580526:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.104416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.106639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635802055580538:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.114576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:05.131199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635802055580540:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.371670Z node 1 :TX_PROXY ERROR: Actor# [1:7491635802055580606:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.419357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.468058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.552734Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635802055580614:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.554804Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGEzNTIwZGYtNTRhNGI0MmEtOTY0NmI3NzItYTgyNjljNzI=, ActorId: [1:7491635802055580523:2336], ActorState: ExecuteState, TraceId: 01jrfn9yny0y77mhczm309k6aq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.557078Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.563873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635802055580902:2629] 2025-04-10T10:53:07.099132Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635789170677844:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:07.099237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:12.072349Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:12.100564Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:12.101852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635832120352272:2793], Recipient [1:7491635789170678231:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.101889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.101915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:12.101955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635832120352268:2790], Recipient [1:7491635789170678231:2188]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:12.101971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:12.169558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:12.169984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:12.170263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:12.170315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:12.170349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:12.170403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:53:12.170460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13 ... 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.838540Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.838570Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:33.838666Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636425663203048:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636425663203124:2861], Cookie: 0 2025-04-10T10:55:33.838708Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636425663203124:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.838723Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.838748Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:33.838780Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.838795Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.838811Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:33.846122Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636374123593904:2469], Partition 0, Sender [0:0:0], Recipient [6:7491636374123593962:2472], Cookie: 0 2025-04-10T10:55:33.846274Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636374123593962:2472]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.846313Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.846387Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:33.846483Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.846514Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.846552Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:33.849612Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [6:7491636322583985183:2140]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:55:33.849640Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:55:33.849659Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:55:33.849713Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T10:55:33.849728Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T10:55:33.849795Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 12 shard idx 72057594046644480:4 data size 656 row count 2 2025-04-10T10:55:33.849854Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], pathId map=Versions, is column=0, is olap=0, RowCount 2, DataSize 656 2025-04-10T10:55:33.849868Z node 6 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037891, followerId 0 2025-04-10T10:55:33.849920Z node 6 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:4 with partCount# 0, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-10T10:55:33.850037Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T10:55:33.850141Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [6:7491636322583985183:2140]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T10:55:33.850174Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T10:55:33.850186Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T10:55:33.940697Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636425663203045:2855], Partition 2, Sender [0:0:0], Recipient [6:7491636425663203125:2862], Cookie: 0 2025-04-10T10:55:33.940803Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636425663203125:2862]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.940834Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.940884Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:33.940966Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.940994Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.941028Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:33.941099Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636425663203048:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636425663203124:2861], Cookie: 0 2025-04-10T10:55:33.941138Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636425663203124:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.941153Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.941178Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:33.941210Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.941226Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.941243Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:33.946394Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636374123593904:2469], Partition 0, Sender [0:0:0], Recipient [6:7491636374123593962:2472], Cookie: 0 2025-04-10T10:55:33.946488Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636374123593962:2472]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.946534Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:33.946582Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:33.946663Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:33.946688Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:33.946733Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:34.042286Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636425663203045:2855], Partition 2, Sender [0:0:0], Recipient [6:7491636425663203125:2862], Cookie: 0 2025-04-10T10:55:34.042374Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636425663203125:2862]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.042403Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.042451Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:34.042526Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:34.042554Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:34.042588Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:34.042643Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636425663203048:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636425663203124:2861], Cookie: 0 2025-04-10T10:55:34.042679Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636425663203124:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.042696Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.042727Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:34.042761Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:34.042776Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:34.042793Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:34.046888Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636374123593904:2469], Partition 0, Sender [0:0:0], Recipient [6:7491636374123593962:2472], Cookie: 0 2025-04-10T10:55:34.046954Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636374123593962:2472]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.046976Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:34.047013Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:34.047073Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:34.047100Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:34.047128Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState >> Cdc::InitialScanAndLimits |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> BsControllerConfig::DeleteStoragePool [GOOD] >> SystemView::AuthOwners_ResultOrder [GOOD] >> SystemView::AuthOwners_TableRange >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TCmsTest::TestOutdatedState >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TCmsTest::StateRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-04-10T10:55:07.572364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:07.572616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:07.572819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00177b/r3tmp/tmp050in5/pdisk_1.dat 2025-04-10T10:55:08.184120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:55:08.231100Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:08.272752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:08.272958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:08.286867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:55:08.392384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:55:08.464756Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:08.466426Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:08.467075Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:55:08.467376Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:08.535424Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:08.536375Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:08.536539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:55:08.538693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:55:08.538819Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:55:08.538884Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:55:08.539412Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:55:08.539579Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:55:08.539683Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:55:08.554560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:55:08.613911Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:55:08.614175Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:55:08.614383Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:55:08.614431Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:55:08.614471Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:55:08.614513Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:55:08.614800Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:55:08.614868Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:55:08.615275Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:55:08.615391Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:55:08.615475Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:55:08.615522Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:55:08.615589Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:55:08.615637Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:55:08.615675Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:55:08.615712Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:55:08.615759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:55:08.615893Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:08.615935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:08.615985Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:55:08.616449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:55:08.616515Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:55:08.616645Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:55:08.616926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:55:08.616990Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:55:08.617088Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:55:08.617152Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:55:08.617219Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:55:08.617273Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T10:55:08.617325Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:55:08.617693Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:55:08.617737Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:55:08.617775Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:55:08.617818Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T10:55:08.617870Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:55:08.617909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:55:08.617949Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:55:08.618005Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:55:08.618039Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:55:08.623300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:55:08.623376Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:55:08.634850Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:55:08.634937Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:55:08.634981Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T10:55:08.635034Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T10:55:08.635113Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:55:08.813979Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:08.814056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:08.814101Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T10:55:08.814696Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T10:55:08.814744Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:55:08.814897Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:55:08.814953Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T10:55:08.815005Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T10:55:08.815053Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T10:55:08.819872Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:55:08.819963Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:55:08.821272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:55:08.821331Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:55:08.821399Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:55:0 ... 4752Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-10T10:55:37.104793Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-10T10:55:37.104865Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-04-10T10:55:37.104929Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.104997Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-10T10:55:37.105047Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T10:55:37.105088Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T10:55:37.105158Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-04-10T10:55:37.105215Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-04-10T10:55:37.105267Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-04-10T10:55:37.105319Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.105350Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T10:55:37.105378Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-10T10:55:37.105406Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-04-10T10:55:37.105497Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.105548Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-10T10:55:37.105580Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-10T10:55:37.105610Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-04-10T10:55:37.105642Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.105670Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-10T10:55:37.105698Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-10T10:55:37.105725Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-10T10:55:37.105759Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.105786Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-10T10:55:37.105811Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-10T10:55:37.105837Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-10T10:55:37.105863Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.105887Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-10T10:55:37.105916Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T10:55:37.105943Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-04-10T10:55:37.105987Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-04-10T10:55:37.106785Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-04-10T10:55:37.106872Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T10:55:37.106953Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:55:37.106998Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T10:55:37.107053Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-04-10T10:55:37.107101Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-10T10:55:37.107363Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-04-10T10:55:37.107438Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-04-10T10:55:37.107500Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T10:55:37.107544Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:55:37.107589Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-10T10:55:37.107619Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T10:55:37.107655Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-04-10T10:55:37.107751Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T10:55:37.107804Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T10:55:37.107859Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:55:37.107914Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:55:37.119298Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T10:55:37.119475Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:55:37.119544Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-10T10:55:37.119638Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:593:2518] 2025-04-10T10:55:37.119730Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:55:37.123048Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:767:2637], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:37.123160Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:37.123246Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:766:2636], serverId# [6:767:2637], sessionId# [0:0:0] 2025-04-10T10:55:37.123370Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:765:2635], Recipient [6:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T10:55:37.124704Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:770:2640], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:37.124782Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:55:37.124843Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:769:2639], serverId# [6:770:2640], sessionId# [0:0:0] 2025-04-10T10:55:37.125106Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:768:2638], Recipient [6:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T10:55:37.125258Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T10:55:37.125326Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:55:37.125380Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-10T10:55:37.125475Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-10T10:55:37.125591Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T10:55:37.125645Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-10T10:55:37.125712Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T10:55:37.125755Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-10T10:55:37.125817Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-10T10:55:37.125870Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T10:55:37.125902Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T10:55:37.125930Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T10:55:37.125959Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T10:55:37.126083Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T10:55:37.126466Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:768:2638], 1000} after executionsCount# 1 2025-04-10T10:55:37.126577Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T10:55:37.126679Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} finished in read 2025-04-10T10:55:37.126759Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T10:55:37.126792Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T10:55:37.126821Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T10:55:37.126852Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:55:37.126903Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T10:55:37.126928Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T10:55:37.126960Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-10T10:55:37.127014Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T10:55:37.127137Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:201:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:201:2076] 2025-04-10T10:55:15.641330Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:15.646316Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:15.646681Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:15.648458Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:15.648923Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:15.649558Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:15.649595Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:15.649835Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:15.659386Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:15.659522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:15.659689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:15.659807Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:15.659927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:15.660009Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-10T10:55:15.671983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:15.672140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:15.684527Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:15.684643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:15.684715Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:15.684787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:15.684915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:15.684969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:15.685004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:15.685059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 
2025-04-10T10:55:15.696003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:15.696166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:15.707231Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:15.707455Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:15.708690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:15.708757Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:15.708999Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:15.709062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:15.721413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:199:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:199:2076] 2025-04-10T10:55:17.605951Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:17.625671Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:17.625939Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:17.627452Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:17.627688Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:17.628304Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.628338Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.629440Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:17.641635Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:17.641805Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:17.641936Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:17.642038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.642130Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.642227Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-10T10:55:17.654862Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:17.655022Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.665836Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.665976Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.666098Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.666213Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.666344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.666402Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.666443Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.666505Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.677492Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.677654Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.690964Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.691136Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:17.692412Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:17.692476Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:17.692701Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:17.692762Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:17.693336Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] 
recipient: [21:2918:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2918:2116] Leader for TabletID 72057594037932033 is [21:3016:2118] sender: [21:3017:2106] recipient: [21:2918:2116] 2025-04-10T10:55:20.309239Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:20.309915Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:20.310092Z n ... fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-04-10T10:55:29.078515Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-04-10T10:55:29.078547Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-04-10T10:55:29.078576Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-04-10T10:55:29.078605Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-04-10T10:55:29.078635Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-04-10T10:55:29.078668Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-04-10T10:55:29.078701Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-04-10T10:55:29.078734Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-04-10T10:55:29.078769Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-04-10T10:55:29.078799Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-04-10T10:55:29.078832Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-04-10T10:55:29.078861Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-04-10T10:55:29.078893Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-04-10T10:55:29.078921Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-04-10T10:55:29.078961Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-04-10T10:55:29.078993Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-04-10T10:55:29.079021Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-04-10T10:55:29.079050Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-04-10T10:55:29.079083Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-04-10T10:55:29.079114Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1000 
Path# /dev/disk1 2025-04-10T10:55:29.079147Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-04-10T10:55:29.079178Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-04-10T10:55:29.079208Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-04-10T10:55:29.079240Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-04-10T10:55:29.079273Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-04-10T10:55:29.079305Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-04-10T10:55:29.079336Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-04-10T10:55:29.079367Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-04-10T10:55:29.079397Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-04-10T10:55:29.079429Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-04-10T10:55:29.079462Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-04-10T10:55:29.079501Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-04-10T10:55:29.079534Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-04-10T10:55:29.079570Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-04-10T10:55:29.079601Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-04-10T10:55:29.079632Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-04-10T10:55:29.079667Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-04-10T10:55:29.185568Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-04-10T10:55:29.185658Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-04-10T10:55:29.185693Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-04-10T10:55:29.185726Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-04-10T10:55:29.185758Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-04-10T10:55:29.185793Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-04-10T10:55:29.185846Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 
2025-04-10T10:55:29.185882Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-04-10T10:55:29.185915Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-04-10T10:55:29.185948Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-04-10T10:55:29.185978Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-04-10T10:55:29.186019Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-04-10T10:55:29.186054Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-04-10T10:55:29.186086Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-04-10T10:55:29.186117Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-04-10T10:55:29.186164Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-04-10T10:55:29.186197Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-04-10T10:55:29.186231Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-04-10T10:55:29.186262Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-04-10T10:55:29.186295Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-04-10T10:55:29.186330Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-04-10T10:55:29.186363Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-04-10T10:55:29.186395Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-04-10T10:55:29.186428Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-04-10T10:55:29.186459Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-04-10T10:55:29.186490Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-04-10T10:55:29.186521Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-04-10T10:55:29.186555Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-04-10T10:55:29.186591Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-04-10T10:55:29.186621Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-04-10T10:55:29.186652Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 
2025-04-10T10:55:29.186684Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-04-10T10:55:29.186715Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-04-10T10:55:29.192479Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.192034s 2025-04-10T10:55:29.192688Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.192267s 2025-04-10T10:55:29.207995Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T10:55:29.315778Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.107884s 2025-04-10T10:55:29.315916Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.108051s 2025-04-10T10:55:29.343337Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-04-10T10:55:29.428328Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TLocksTest::CK_Range_BrokenLockInf [GOOD] >> TCmsTest::WalleTasks >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2025-04-10T10:54:45.497248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:45.497612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:45.497699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 4289, node 1 TClient is connected to server localhost:15426 2025-04-10T10:54:58.408042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:58.408428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:58.408538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19722, node 2 TClient is connected to server localhost:2070 2025-04-10T10:55:10.371175Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:10.371679Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:10.371922Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18659, node 3 TClient is connected to server localhost:4602 2025-04-10T10:55:22.634225Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:22.634769Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:22.634879Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8932, node 4 TClient is connected to server localhost:29656 2025-04-10T10:55:34.773463Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:34.773967Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:34.774190Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12808, node 5 TClient is connected to server localhost:21335 >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] >> SystemView::ShowCreateTablePartitionByHash >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TCmsTest::StateRequest [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TLocksTest::Range_IncorrectNullDot2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-04-10T10:54:45.606630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636230580905179:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:45.607585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmpWo2e7w/pdisk_1.dat 2025-04-10T10:54:46.341659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:46.348486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:46.348566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:46.352470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23737 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:46.996088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.046506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.068547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.237138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:47.301096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.775217Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636252732103783:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmpNUz0Oj/pdisk_1.dat 2025-04-10T10:54:50.813877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:50.951802Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:50.975245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:50.975333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:50.984578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29937 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:51.507488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.518657Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.547017Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.555768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:54:51.715419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:51.825392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:57.672967Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636279933371925:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:57.673019Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmphggNn5/pdisk_1.dat 2025-04-10T10:54:57.886844Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:57.931694Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:57.931777Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:57.936029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61682 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:58.316239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.328800Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.342731Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:58.347162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.436007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.613731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:03.784366Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636306689031190:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:03.784430Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmpshQIk0/pdisk_1.dat 2025-04-10T10:55:04.026247Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:04.053310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:04.053412Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:04.055156Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5829 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... Connecting -> Connected TClient is connected to server localhost:9756 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:19.880086Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:19.910800Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:19.963214Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:20.053974Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:20.141200Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:24.058329Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636395887962707:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:24.058430Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmpSOSMIU/pdisk_1.dat 2025-04-10T10:55:24.201241Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:24.232698Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:24.232807Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:24.234244Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3477 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:24.499373Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:24.527837Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:55:24.535177Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:24.659960Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:24.745830Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:28.852386Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491636413507143736:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:28.852558Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmp1dz94I/pdisk_1.dat 2025-04-10T10:55:29.103205Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:29.130653Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:29.130759Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:29.132351Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19114 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T10:55:29.396593Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:55:29.403993Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:29.420639Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:29.537738Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:29.628453Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:34.033410Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491636439097121318:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:34.037199Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002043/r3tmp/tmppT4GC8/pdisk_1.dat 2025-04-10T10:55:34.229231Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:34.261202Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:34.261339Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:34.264955Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6579 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:34.613181Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:34.642004Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:34.738652Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:34.818790Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TCmsTest::VDisksEviction [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> Viewer::JsonAutocompleteSchemePOST [GOOD] |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-04-10T10:55:39.594064Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T10:55:39.594174Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-10T10:55:39.594331Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-10T10:55:39.596632Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } 
StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-04-10T10:55:39.597726Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 19 InterconnectPort: 
12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 
} 2025-04-10T10:55:39.598083Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-04-10T10:55:39.598168Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T10:55:39.598371Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-10T10:55:39.598488Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-10T10:55:39.598560Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-10T10:55:39.598745Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T10:55:39.599009Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:39.599076Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-10T10:55:39.599376Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-10T10:55:39.599446Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-10T10:55:39.599503Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-10T10:55:39.599560Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-10T10:55:39.599595Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-1 ... 
mp: 120540048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540048 } } 2025-04-10T10:55:39.993699Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540048 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540048 } Timestamp: 120540048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540048 } 2025-04-10T10:55:39.994037Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-10T10:55:39.994124Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-10T10:55:39.994194Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-10T10:55:39.994372Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T10:55:39.994597Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:39.994660Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-10T10:55:39.994930Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-04-10T10:55:39.994981Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T10:55:39.995075Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-10T10:55:39.995141Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-10T10:55:39.995175Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-10T10:55:39.995201Z node 18 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-10T10:55:39.995241Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-10T10:55:39.995273Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-04-10T10:55:39.995311Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-04-10T10:55:39.995352Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-10T10:55:39.995860Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120441560 ChangeTime: 120441560 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996404Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120441560 ChangeTime: 120441560 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996557Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120441560 ChangeTime: 120441560 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996640Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120441560 ChangeTime: 120441560 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996702Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120441560 ChangeTime: 120441560 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996759Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120441560 ChangeTime: 120441560 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996814Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120441560 ChangeTime: 120441560 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996883Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120441560 ChangeTime: 120441560 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-10T10:55:39.996936Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T10:55:40.010169Z node 18 :CMS DEBUG: TTxStorePermissions 
complete 2025-04-10T10:55:40.010427Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-04-10T10:55:40.011104Z node 18 :CMS INFO: User user removes request user-r-3 2025-04-10T10:55:40.011172Z node 18 :CMS DEBUG: Resulting status: OK 2025-04-10T10:55:40.011237Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2025-04-10T10:55:40.011280Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-04-10T10:55:40.011414Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-04-10T10:55:40.027111Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2025-04-10T10:55:40.027358Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> TCmsTest::SamePriorityRequest [GOOD] |67.3%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |67.4%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:23.378106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:23.378239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:23.378290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:23.378329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:23.378375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:23.378413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:23.378469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:23.378539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:23.378912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:23.466087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:23.466188Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:23.483475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:23.483790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:23.483964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:23.492606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:23.492908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:23.493780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:23.494323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-04-10T10:55:23.496427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:23.497636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:23.497701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:23.497863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:23.497913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:23.497959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:23.498084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.504980Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:23.650879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:23.651139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.651460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:23.651771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:23.651837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.654853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:23.655042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:23.655264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.655336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:23.655381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:23.655424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:23.657601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.657658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:23.657694Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-10T10:55:23.660614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.660664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:23.660701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:23.660745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:23.663599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:23.665994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:23.666200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:23.667062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:23.667206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:23.667249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:23.667499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:23.667544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:23.667690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:23.667805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:23.669723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:23.669760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:23.669912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:23.669958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:23.670134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T10:55:23.670194Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:23.670286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:23.670316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:23.670351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:23.670384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:23.670417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:23.670479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:23.670519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:23.670560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:23.670618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:23.670656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:23.670772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:23.672924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:23.673021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:23.673055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:42.302387Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:55:42.302607Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:42.302646Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T10:55:42.302688Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T10:55:42.303057Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:42.303107Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T10:55:42.304791Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:42.304882Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:42.304930Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T10:55:42.304967Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T10:55:42.305008Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:42.305648Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:42.305728Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T10:55:42.305757Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T10:55:42.305793Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:55:42.305827Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:55:42.305891Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T10:55:42.307367Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 
1315 } } 2025-04-10T10:55:42.307412Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:42.307543Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1315 } } 2025-04-10T10:55:42.307643Z node 18 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1315 } } 2025-04-10T10:55:42.309207Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:42.309249Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T10:55:42.309356Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:42.309397Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:55:42.309526Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T10:55:42.309592Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:42.309634Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:42.309681Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:55:42.309750Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T10:55:42.310708Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:42.312919Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T10:55:42.313275Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:42.314582Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:42.314910Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T10:55:42.314955Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T10:55:42.315066Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T10:55:42.315101Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:42.315148Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T10:55:42.315182Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:42.315227Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T10:55:42.315377Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:336:2315] message: TxId: 101 2025-04-10T10:55:42.315440Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T10:55:42.315487Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T10:55:42.315522Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T10:55:42.315652Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:55:42.317571Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T10:55:42.317618Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:337:2316] TestWaitNotification: OK eventTxId 101 2025-04-10T10:55:42.318181Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:55:42.318426Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 315us result status StatusSuccess 2025-04-10T10:55:42.318994Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-04-10T10:54:48.238657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636243907516530:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:48.238711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpdz70FW/pdisk_1.dat 2025-04-10T10:54:49.020783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:49.060515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:49.060615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:49.067419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19882 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.933398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:54:50.011707Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:50.027621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.231162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.321914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpzatBPR/pdisk_1.dat 2025-04-10T10:54:54.440108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:54.583865Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:54.619640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:54.619713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:54.627323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21161 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:54.887465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.895759Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.910448Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:54.914627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:55.009717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:55.113100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:59.730130Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636289728581182:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpx6G1ei/pdisk_1.dat 2025-04-10T10:54:59.844355Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:59.947969Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:59.971536Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:59.971628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:59.977445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2186 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:00.412489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.423103Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.439083Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:00.449506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.601409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:00.702570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpXbr2IC/pdisk_1.dat 2025-04-10T10:55:04.922291Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:04.939769Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:04.939897Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:04.943333Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:04.970755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1578 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:05.581224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:20.503617Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8700 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:20.810555Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:20.818333Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:20.846834Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:55:20.854789Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:21.026360Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:21.107267Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:25.160735Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636401820061213:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:25.160865Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpk3ef00/pdisk_1.dat 2025-04-10T10:55:25.357376Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:25.426746Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:25.426849Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:25.428393Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16067 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T10:55:25.675890Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:25.703578Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:25.784139Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:55:25.866339Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:30.573269Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491636421458407554:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:30.573416Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpSBr7uR/pdisk_1.dat 2025-04-10T10:55:30.892224Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:30.900636Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:30.900752Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:30.902844Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62133 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:31.312007Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:31.346332Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.516787Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.594167Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:35.881178Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491636444109671463:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:35.881252Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203d/r3tmp/tmpr3kOLg/pdisk_1.dat 2025-04-10T10:55:36.057350Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:36.082950Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:36.083081Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:36.084541Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15027 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:36.389783Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:55:36.411822Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:55:36.549814Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:36.614792Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-04-10T10:54:50.445914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:50.458114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:50.458235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17340, node 1 TClient is connected to server localhost:20072 2025-04-10T10:55:02.946937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:02.947400Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:02.947525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12839, node 2 TClient is connected to server localhost:11678 2025-04-10T10:55:14.989524Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:14.990001Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:14.990510Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 5681, node 3 TClient is connected to server localhost:12148 2025-04-10T10:55:27.120556Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:27.120944Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:27.121020Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13294, node 4 TClient is connected to server localhost:23844 2025-04-10T10:55:38.930283Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:38.930810Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:38.931018Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 31033, node 5 TClient is connected to server localhost:2427 |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-04-10T10:54:46.746599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636232356175505:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:46.746663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmpaNxTWv/pdisk_1.dat 2025-04-10T10:54:47.787203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:47.808394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:47.808486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:47.809977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:47.811169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11412 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:48.214587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:48.238868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:48.255688Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:48.341579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:48.625229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:48.747101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:52.566048Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636260188497196:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:52.566083Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmp8DWMCr/pdisk_1.dat 2025-04-10T10:54:52.956786Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:53.052517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:53.052636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:53.068374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15037 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:53.354963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.362534Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:53.379670Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:53.390688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.538332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:53.640637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmp95G2S2/pdisk_1.dat 2025-04-10T10:54:58.312383Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:58.360461Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:58.371334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:58.371417Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:58.379930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24075 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:58.864788Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.890746Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.902861Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:54:58.914098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:59.061339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:59.146861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:55:03.815576Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636306451571630:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmp2ZDsfE/pdisk_1.dat 2025-04-10T10:55:03.977342Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:04.053701Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:04.066535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:04.066619Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:04.068403Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11923 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDoma ... " PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:20.815662Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:20.825809Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:20.854369Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:20.861985Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:20.946535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:21.030289Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:25.659898Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636400799346541:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:25.659975Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmpHz3aCm/pdisk_1.dat 2025-04-10T10:55:25.891276Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:25.956691Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:25.956808Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:25.962908Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17313 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:26.255587Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:26.267533Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:26.287010Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:26.294320Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:26.394297Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:26.562613Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.156299Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491636426078624723:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:31.156373Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmpw762KU/pdisk_1.dat 2025-04-10T10:55:31.481381Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:31.500941Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:31.501045Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:31.504672Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27997 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:31.802955Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.813726Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.824334Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:31.829747Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.930472Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:31.996994Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:36.851596Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491636447253770694:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:36.851701Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00203b/r3tmp/tmpHrIbLe/pdisk_1.dat 2025-04-10T10:55:37.012221Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:37.057052Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:37.057164Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:37.058813Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27012 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:37.399704Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:37.419213Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:37.489524Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:37.554787Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad >> TCmsTest::Mirror3dcPermissions [GOOD] >> Cdc::AwsRegion [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-04-10T10:53:04.446554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635794997688925:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.446614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:04.526038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491635796882096803:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.526075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ffc/r3tmp/tmpMsbbBr/pdisk_1.dat 2025-04-10T10:53:05.113756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:05.113987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:05.123460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:05.123609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:05.128971Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:53:05.129155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:05.138880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:05.179163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5320, node 1 2025-04-10T10:53:05.269692Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:53:05.270675Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:53:05.319910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:05.319935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:05.319948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:05.320076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20026 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:05.762631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:07.952638Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.954740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807882591678:2333], Start check tables existence, number paths: 2 2025-04-10T10:53:07.954833Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-10T10:53:07.954848Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.954873Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:07.955807Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807882591678:2333], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.955922Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807882591678:2333], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.955966Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807882591678:2333], Successfully finished 2025-04-10T10:53:07.956068Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.956140Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTA5ODhiMTQtZjA0NzdiNDMtOTAzYTk1YTMtYTMyMTU5NTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTA5ODhiMTQtZjA0NzdiNDMtOTAzYTk1YTMtYTMyMTU5NTU= 2025-04-10T10:53:07.956806Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTA5ODhiMTQtZjA0NzdiNDMtOTAzYTk1YTMtYTMyMTU5NTU=, ActorId: [1:7491635807882591694:2334], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.977082Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.981082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
2025-04-10T10:53:07.987160Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.988189Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:08.008354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:08.053404Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:08.053582Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:08.053599Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:08.053969Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-10T10:53:08.053995Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491635814061966283:2306], Start check tables existence, number paths: 2 2025-04-10T10:53:08.055278Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491635814061966283:2306], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:08.055336Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491635814061966283:2306], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:08.055370Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491635814061966283:2306], Successfully finished 2025-04-10T10:53:08.055417Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:08.075784Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:08.081562Z node 1 :TX_PROXY ERROR: Actor# [1:7491635812177559073:2584] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:08.081653Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807882591696:2522], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:08.084761Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY= 2025-04-10T10:53:08.085150Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY=, ActorId: [1:7491635812177559083:2335], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:08.085462Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY=, ActorId: [1:7491635812177559083:2335], ActorState: ReadyState, TraceId: 01jrfna1knf3y4qpthjhpqvphc, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635812177559082:2592] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:08.085457Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:08.085504Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root
2025-04-10T10:53:08.085610Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491635812177559083:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY= 2025-04-10T10:53:08.085622Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635812177559085:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:08.085729Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635812177559086:2337], Database: /Root, Start database fetching 2025-04-10T10:53:08.086053Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635812177559086:2337], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:53:08.086189Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:53:08.086314Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635812177559095:2338], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODQ0MTYzYjctNGJiMDg4Y2EtODI2ZGU4ZTQtMjFhNzQzZDY=, Start pool fetching 2025-04-10T10:53:08.086367Z nod ... node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:35.251940Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:35.251961Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:35.252172Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:35.653393Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T10:55:39.995439Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491636438637301898:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:39.995552Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:40.474857Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:55:40.488004Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q= 2025-04-10T10:55:40.493213Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491636464407106316:2331], Start check tables existence, number paths: 2 2025-04-10T10:55:40.493358Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:55:40.493542Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:55:40.493591Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:55:40.493633Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:55:40.501197Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491636464407106316:2331], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:55:40.501312Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491636464407106316:2331], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:55:40.501387Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491636464407106316:2331], Successfully finished 2025-04-10T10:55:40.501553Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:55:40.506300Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:55:40.514826Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:55:40.518393Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-10T10:55:40.519737Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:55:40.553835Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:55:40.624753Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:55:40.628994Z node 12 :TX_PROXY ERROR: Actor# [12:7491636464407106393:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:40.629200Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491636464407106342:2306], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:55:40.629635Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-10T10:55:40.629670Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-10T10:55:40.629772Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491636464407106400:2333], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:55:40.631716Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491636464407106400:2333], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:55:40.631802Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-10T10:55:40.631835Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:55:40.632137Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7491636464407106409:2334], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:55:40.633851Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7491636464407106409:2334], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification
2025-04-10T10:55:40.652138Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-10T10:55:40.652191Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:55:40.652353Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: ReadyState, TraceId: 01jrfnepka3z137nz4x1bgfybt, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-10T10:55:40.655291Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491636464407106421:2336], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-10T10:55:40.657141Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491636464407106421:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:40.657267Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:40.673437Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:55:40.677162Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7491636464407106409:2334], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-10T10:55:40.678368Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: ExecuteState, TraceId: 01jrfnepka3z137nz4x1bgfybt, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [12:7491636464407106422:2332] WorkloadServiceCleanup: 0 2025-04-10T10:55:40.680556Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: CleanupState, TraceId: 01jrfnepka3z137nz4x1bgfybt, EndCleanup, isFinal: 0 2025-04-10T10:55:40.680629Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: CleanupState, TraceId: 01jrfnepka3z137nz4x1bgfybt, Sent query response back to proxy, proxyRequestId: 3, proxyId: [12:7491636438637302107:2266] 2025-04-10T10:55:40.697183Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:55:40.697241Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:55:40.697283Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:55:40.697315Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:55:40.697408Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzZlYmFiZjYtZTY3OWRjMGEtZjUyYWJjMDctYjU5YWFmY2Q=, ActorId: [12:7491636464407106317:2332], ActorState: unknown state, Session actor destroyed |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-04-10T10:55:42.558261Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T10:55:42.558376Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-10T10:55:42.558547Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z
2025-04-10T10:55:42.560665Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120027000 } }
2025-04-10T10:55:42.561459Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120027000 }
2025-04-10T10:55:42.622141Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-04-10T10:55:42.622248Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T10:55:42.622516Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-10T10:55:42.622624Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-10T10:55:42.622682Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-04-10T10:55:42.622839Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T10:55:42.623073Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:42.623126Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-04-10T10:55:42.623462Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-10T10:55:42.623535Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-04-10T10:55:42.623575Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637]
2025-04-10T10:55:42.623614Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-10T10:55:42.623679Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-10T10:55:42.623718Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-10T10:55:42.623756Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-10T10:55:42.623800Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-10T10:55:42.631824Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... [28:8388350642965737326:1634689637] 2025-04-10T10:55:42.847425Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-10T10:55:42.847453Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-10T10:55:42.847481Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-10T10:55:42.847510Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-10T10:55:42.847912Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.848510Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.848738Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.848903Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.848977Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.849044Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.849107Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-04-10T10:55:42.849200Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027
2025-04-10T10:55:42.849252Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T10:55:42.849483Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-10T10:55:42.849545Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-10T10:55:42.849597Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-04-10T10:55:42.849812Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-10T10:55:42.850038Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-10T10:55:42.850221Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-04-10T10:55:42.850285Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-04-10T10:55:42.850327Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-04-10T10:55:42.864433Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-10T10:55:42.864533Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-10T10:55:42.880484Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T10:55:42.880592Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-10T10:55:42.880661Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-10T10:55:42.881434Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:42.881536Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-04-10T10:55:42.881604Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-04-10T10:55:42.881652Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-04-10T10:55:42.881680Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-04-10T10:55:42.881698Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-04-10T10:55:42.881720Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-10T10:55:42.881846Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-04-10T10:55:42.881936Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-10T10:55:42.882019Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T10:55:42.882177Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.127000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-10T10:55:42.882274Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true
2025-04-10T10:55:42.894251Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-10T10:55:42.894555Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780127000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-04-10T10:55:42.894615Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.127000Z 2025-04-10T10:55:42.940801Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-10T10:55:42.941148Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T10:55:42.941214Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-10T10:55:42.941273Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-10T10:55:42.941983Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:42.942124Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-04-10T10:55:42.942202Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-04-10T10:55:42.942259Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-10T10:55:42.942435Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-04-10T10:55:42.942512Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-04-10T10:55:42.942626Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T10:55:42.942836Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.228512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-04-10T10:55:42.942980Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T10:55:42.956245Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-10T10:55:42.956584Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780228512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-04-10T10:55:42.957280Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-04-10T10:55:42.957338Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-10T10:55:42.957417Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-10T10:55:42.957548Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-04-10T10:55:42.957678Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-04-10T10:55:42.957740Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-04-10T10:55:42.970565Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-10T10:55:42.970793Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-04-10T10:55:42.971459Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-04-10T10:55:42.971523Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-10T10:55:42.971633Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-10T10:55:42.971737Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-04-10T10:55:42.971838Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-04-10T10:55:42.971935Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-04-10T10:55:42.984591Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-10T10:55:42.984821Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-04-10T10:54:48.273115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636244735882819:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:48.273156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpVHLAs0/pdisk_1.dat 2025-04-10T10:54:49.244213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:49.256199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:49.256287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:49.264313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13383 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.781231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:49.822476Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:49.827238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.057698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:50.140077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpTRZVYR/pdisk_1.dat 2025-04-10T10:54:54.244504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:54.264369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:54.264450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:54.275964Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:54.279534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6140 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:54.627469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:54.675598Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:54.683655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.866780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:54.999877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:00.054771Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636295184901505:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpUfy6VO/pdisk_1.dat 2025-04-10T10:55:00.235302Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:00.415206Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:00.477706Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:00.477800Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:00.483197Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28924 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:00.834119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:00.874340Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:00.883056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:01.088436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:01.200934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:05.974742Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636315953769997:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:05.974900Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmp0aVKbi/pdisk_1.dat 2025-04-10T10:55:06.208490Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:06.215827Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:06.215937Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:06.220565Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9029 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:06.556720Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:22.231179Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32533 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:22.583157Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:22.602136Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:22.616590Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:22.624806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:22.725384Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:22.808466Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:26.738051Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636405595863094:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:26.738117Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpt2eXbP/pdisk_1.dat 2025-04-10T10:55:27.051784Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:27.060194Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:27.060331Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:27.062234Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19405 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:27.396370Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:55:27.423634Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:55:27.540544Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:27.615389Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:32.136849Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491636432646451085:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpQZF0iv/pdisk_1.dat 2025-04-10T10:55:32.224542Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:32.398441Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:32.488260Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:32.488432Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:32.491503Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62064 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:32.843249Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:32.867993Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:32.952411Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:33.126732Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:38.022965Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491636456319016019:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:38.024672Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002041/r3tmp/tmpOjH9pb/pdisk_1.dat 2025-04-10T10:55:38.222180Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:38.280664Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:38.280772Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:38.283385Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7090 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:38.583609Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:38.610794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:38.694718Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:55:38.771883Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TCmsTest::TestProcessingQueue [GOOD]
>> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD]
>> TCmsTest::SamePriorityRequest2
|67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|67.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD]
Test command err: 2025-04-10T10:52:59.031702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:52:59.031981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:52:59.032190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00148b/r3tmp/tmpsviIKo/pdisk_1.dat 2025-04-10T10:52:59.416476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:52:59.458227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:59.504735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:52:59.504880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:52:59.516421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:52:59.598787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:52:59.637597Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:52:59.637885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:59.678300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:59.678444Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:52:59.680177Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:52:59.680270Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:52:59.680339Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:52:59.680727Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:52:59.680870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:52:59.680997Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:52:59.691772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:52:59.718826Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:52:59.719002Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:52:59.719099Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:52:59.719142Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:59.719193Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:52:59.719232Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:59.719622Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:52:59.719746Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:52:59.719816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:59.719853Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:59.719888Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:52:59.719939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:59.720036Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:52:59.720425Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:59.720588Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:52:59.720660Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:52:59.721993Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:59.732654Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:59.732775Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:52:59.884024Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T10:52:59.888565Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:52:59.888636Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:59.889410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:59.889452Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:52:59.889532Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:52:59.889772Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:52:59.889880Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:52:59.891855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:59.891930Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:52:59.893581Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:52:59.893991Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T10:52:59.895164Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T10:52:59.895213Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:59.895659Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T10:52:59.895715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:59.896256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:59.896288Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:59.896334Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:52:59.896435Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:52:59.896488Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:52:59.896561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:59.897566Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:684:2580][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-10T10:52:59.900653Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:59.902411Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T10:52:59.902498Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:52:59.902982Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:53:03.429673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:53:03.429924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:53:03.430213Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00148b/r3tmp/tmpodbWNi/pdisk_1.dat 2025-04-10T10:53:03.720881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:53:03.743372Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:03.781800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:03.781931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:03.795304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:53:03.881789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:53:03.908772Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:677:2578] 2025-04-10T10:53:03.909042Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:53:03.960972Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:53:03.961162Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:53:03.962893Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:53:03.962977Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:53:03.963033Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:53:03.963377Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:53:03.963692Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-0 ... -10T10:55:43.063323Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:55:43.063554Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. 
Partition 0 offset 0 partNo 0 count 1 size 427 2025-04-10T10:55:43.063988Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:55:43.064033Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-10T10:55:43.064081Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2025-04-10T10:55:43.064232Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2025-04-10T10:55:43.146645Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2025-04-10T10:55:43.147192Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2547 2025-04-10T10:55:43.147312Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:55:43.147347Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:55:43.147379Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:55:43.147409Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:55:43.147441Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] m0000000000p72075186224037888 2025-04-10T10:55:43.147471Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000001_00000| 2025-04-10T10:55:43.147502Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:55:43.147532Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:55:43.147563Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:55:43.147682Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:55:43.147755Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 426 2025-04-10T10:55:43.149182Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 427 actorID [21:796:2660] 2025-04-10T10:55:43.149600Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 size 427 2025-04-10T10:55:43.149961Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 426 actorID [21:972:2768] 2025-04-10T10:55:43.150071Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 size 426 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-10T10:55:43.151704Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:55:43.151819Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-10T10:55:43.152801Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2025-04-10T10:55:43.152904Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2025-04-10T10:55:43.153060Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:55:43.153178Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2025-04-10T10:55:43.153315Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:55:43.164346Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:55:43.164548Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:55:43.164755Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T10:55:43.165225Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-10T10:55:43.165370Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T10:55:43.165569Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:55:43.165628Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T10:55:43.165685Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2025-04-10T10:55:43.165888Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:55:43.166216Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:55:43.166381Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T10:55:43.166519Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T10:55:43.166671Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-10T10:55:43.166773Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:55:43.167185Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1172:2714] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2547 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-10T10:55:43.167379Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1174:2812] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2547 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-10T10:55:43.167458Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2547 queuesize 0 startOffset 0 2025-04-10T10:55:43.167659Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:876:2714] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:55:43.167801Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1030:2812] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:55:43.167998Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 2, at tablet# 72075186224037888 2025-04-10T10:55:43.168092Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2025-04-10T10:55:43.168274Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-10T10:55:43.179478Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 2, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-10T10:55:43.448093Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:55:43.448158Z node 21 :PERSQUEUE DEBUG: 
[PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-10T10:55:43.448295Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T10:55:43.448398Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T10:55:43.448578Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-10T10:55:43.448672Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:55:43.449488Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-04-10T10:55:43.451842Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:55:43.451973Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-10T10:55:43.452962Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T10:55:43.453079Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T10:55:43.453216Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-10T10:55:43.453304Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:55:43.453988Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-04-10T10:55:39.494089Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-10T10:55:39.649938Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-10T10:55:39.665794Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-10T10:55:39.714664Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-10T10:55:44.006607Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-10T10:55:44.006669Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-10T10:55:44.006688Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-10T10:55:44.006700Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-10T10:55:44.006712Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-10T10:55:44.006722Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-10T10:55:44.006735Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-10T10:55:44.006748Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> KqpQuery::RowsLimit >> KqpQuery::UdfTerminate >> KqpQuery::SelectWhereInSubquery |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> KqpQuery::YqlSyntaxV0 >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> KqpLimits::BigParameter >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpQuery::DdlInDataQuery >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> KqpQuery::QueryClientTimeoutPrecompiled >> JsonProtoConversion::JsonToProtoArray [GOOD] >> KqpLimits::KqpMkqlMemoryLimitException >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpQuery::PreparedQueryInvalidate >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> TCmsTest::SamePriorityRequest2 [GOOD] >> TLocksTest::CK_Range_GoodLock [GOOD] >> JsonProtoConversion::JsonToProtoMap >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> JsonProtoConversion::JsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::Create |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> 
TCmsTest::SamePriorityRequest2 [GOOD]
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD]
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD]
>> JsonProtoConversion::NlohmannJsonToProtoArray
>> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD]
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD]
>> Cdc::SequentialSplitMerge [GOOD]
>> Cdc::ShouldBreakLocksOnConcurrentSchemeTx
|67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|67.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
>> KqpExplain::SortStage
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD]
>> Cdc::InitialScan_TopicAutoPartitioning [GOOD]
>> Cdc::InitialScanUpdatedRows
>> Viewer::ServerlessWithExclusiveNodes [GOOD]
>> Viewer::SharedDoesntShowExclusiveNodes
>> JsonProtoConversion::ProtoMapToJson [GOOD]
>> Viewer::TabletMerging [GOOD]
>> Viewer::StorageGroupOutputWithoutFilterNoDepends
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::SimpleType
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD]
Test command err: 2025-04-10T10:54:56.906547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636277219152062:2194];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:54:56.915321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmpIUpWao/pdisk_1.dat
2025-04-10T10:54:57.826412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:54:57.826515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:54:57.830852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:54:57.919456Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:54:57.927365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TClient is connected to server localhost:2983
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:58.234269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:58.284201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:54:58.299087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.520665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:58.626666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmpD2kR80/pdisk_1.dat 2025-04-10T10:55:02.800553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636305052690858:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:02.839632Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:02.967514Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:02.989158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:02.994381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:02.999515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63999 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:03.498839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:03.515574Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:03.538225Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:03.544930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:03.716675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:03.814986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmp4tVoKQ/pdisk_1.dat 2025-04-10T10:55:07.868732Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:08.012040Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:08.051237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:08.051347Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:08.052971Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14072 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:08.326269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:08.353500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:08.442518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:08.535294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:12.928423Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636345388604442:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:12.929144Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmphbcKsH/pdisk_1.dat 2025-04-10T10:55:13.154663Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:13.177764Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:13.177855Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:13.179306Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5230 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp ' ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T10:55:28.262381Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:55:28.274422Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:28.285963Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:55:28.292312Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:28.386604Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:28.471732Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:32.933080Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636432263866394:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:32.954046Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmp34GtCb/pdisk_1.dat 2025-04-10T10:55:33.105206Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:33.131847Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:33.131947Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:33.134676Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32456 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:33.388829Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:33.395749Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:33.426569Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:33.513909Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:33.590460Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:38.136019Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491636456256605513:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmpS1ZOfb/pdisk_1.dat 2025-04-10T10:55:38.231160Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:38.318411Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:38.344206Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:38.344318Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:38.346024Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20656 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:38.643883Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:38.652245Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:38.662012Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:38.667515Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:38.752853Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:38.812849Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:43.265248Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491636477961078958:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:43.265311Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002025/r3tmp/tmpBqhmvK/pdisk_1.dat 2025-04-10T10:55:43.469320Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:43.472524Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:43.472637Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:43.474382Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14538 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:55:43.807243Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:43.829073Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:55:43.835211Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:43.916533Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:44.022639Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
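All three runs log the same Hive handshake for the freshly started node: VolatileState goes Unknown -> Disconnected -> Connecting -> Connected within a few milliseconds. When a run instead hangs at WaitRootIsUp, the last recorded transition per node is one quick thing to check; a minimal sketch (hypothetical helper, reading the log from stdin) that reports it:

import re
import sys

# Matches lines like:
#   ... node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0))
#       VolatileState: Unknown -> Disconnected
TRANSITION = re.compile(
    r"node (\d+) :HIVE WARN: HIVE#\d+ Node\(\d+.*?VolatileState: \S+ -> (\S+)"
)

def last_states(log_text: str) -> dict:
    states = {}
    for node, new_state in TRANSITION.findall(log_text):
        states[node] = new_state  # later transitions overwrite earlier ones
    return states

if __name__ == "__main__":
    states = last_states(sys.stdin.read())
    for node, state in sorted(states.items(), key=lambda kv: int(kv[0])):
        suffix = "" if state == "Connected" else "  <-- stuck before Connected"
        print(f"node {node}: {state}{suffix}")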
>> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |67.5%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> KqpQuery::QueryTimeout |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |67.5%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |67.6%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |67.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> SystemView::AuthOwners_TableRange [GOOD] >> SystemView::AuthPermissions >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> BsControllerConfig::MoveGroups [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.6%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2916:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2916:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2916:2116] 2025-04-10T10:55:16.449360Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:16.455011Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:16.455361Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:16.455930Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:16.457796Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 
Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:16.458641Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:16.458679Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:16.458985Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:16.487041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:16.487184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:16.487330Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:16.487430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:16.487584Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:16.487670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-10T10:55:16.501075Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:16.501256Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:16.513916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:16.514052Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:16.514142Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:16.514247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:16.514370Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:16.514423Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:16.514465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:16.514526Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:16.525395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:16.525593Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:16.538212Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:16.538385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:16.539564Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:16.539611Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:16.539812Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:16.539867Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:16.566059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T10:55:16.569530Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-10T10:55:16.569617Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-10T10:55:16.569659Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-10T10:55:16.569709Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-10T10:55:16.569746Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-10T10:55:16.569782Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-10T10:55:16.569810Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-10T10:55:16.569852Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-10T10:55:16.569894Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-10T10:55:16.569923Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-10T10:55:16.569946Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-10T10:55:16.569975Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-10T10:55:16.569997Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-10T10:55:16.570022Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-10T10:55:16.570049Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-10T10:55:16.570093Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-10T10:55:16.570132Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-10T10:55:16.570187Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create 
new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-10T10:55:16.570210Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-10T10:55:16.570254Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-10T10:55:16.570279Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 78:1000 Path# /dev/disk1 2025-04-10T10:55:44.362534Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-04-10T10:55:44.362570Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-04-10T10:55:44.362608Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-04-10T10:55:44.362641Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-04-10T10:55:44.362675Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-04-10T10:55:44.362710Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-04-10T10:55:44.362744Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-04-10T10:55:44.362776Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-04-10T10:55:44.362804Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-04-10T10:55:44.362837Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-04-10T10:55:44.362868Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-04-10T10:55:44.362903Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-04-10T10:55:44.362938Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-04-10T10:55:44.362967Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-04-10T10:55:44.363002Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-04-10T10:55:44.363034Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-04-10T10:55:44.363064Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-04-10T10:55:44.363099Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-04-10T10:55:44.363133Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-04-10T10:55:44.363164Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-04-10T10:55:44.363199Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1000 Path# 
/dev/disk1 2025-04-10T10:55:44.363232Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-04-10T10:55:44.363264Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-04-10T10:55:44.363292Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1 2025-04-10T10:55:44.363326Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-04-10T10:55:44.363356Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-04-10T10:55:44.363387Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-04-10T10:55:44.363417Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-04-10T10:55:44.363448Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-04-10T10:55:44.363473Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-04-10T10:55:44.363498Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-04-10T10:55:44.363528Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-04-10T10:55:44.363557Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-04-10T10:55:44.363588Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-04-10T10:55:44.363619Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-04-10T10:55:44.363647Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-04-10T10:55:44.363684Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-04-10T10:55:44.363714Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-04-10T10:55:44.363743Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-04-10T10:55:44.363772Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-04-10T10:55:44.363801Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-04-10T10:55:44.363829Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-04-10T10:55:44.363857Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-04-10T10:55:44.363887Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-04-10T10:55:44.363919Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1000 Path# 
/dev/disk1 2025-04-10T10:55:44.363954Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-04-10T10:55:44.363983Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-04-10T10:55:44.364013Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1 2025-04-10T10:55:44.364042Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-04-10T10:55:44.364074Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-04-10T10:55:44.364105Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-04-10T10:55:44.364136Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-04-10T10:55:44.364164Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-04-10T10:55:44.364198Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-04-10T10:55:44.364230Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-04-10T10:55:44.364263Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-04-10T10:55:44.364291Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-04-10T10:55:44.364321Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-04-10T10:55:44.364354Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-04-10T10:55:44.364386Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-04-10T10:55:44.364416Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-04-10T10:55:44.364446Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-04-10T10:55:44.364476Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-04-10T10:55:44.364506Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-04-10T10:55:44.364539Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-04-10T10:55:44.364569Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-04-10T10:55:44.364599Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-04-10T10:55:44.364631Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-04-10T10:55:44.629480Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time 
Type# 268637706 Duration# 0.274105s 2025-04-10T10:55:44.629674Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.274347s 2025-04-10T10:55:44.665880Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-10T10:55:44.766261Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-04-10T10:55:44.782455Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-10T10:55:44.867727Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-04-10T10:55:44.884386Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-10T10:55:44.992861Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-04-10T10:55:45.011898Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> KqpParams::RowsList >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> KqpQuery::QueryCacheTtl |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> Viewer::Plan2SvgBad [FAIL] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |67.7%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive |67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpQuery::DdlInDataQuery [GOOD] |67.7%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpQuery::DeleteWhereInSubquery >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpScan::RemoteShardScan >> KqpScan::ScanDuringSplit10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [GOOD] Test command err: 2025-04-10T10:53:01.660239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635782823416598:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.660427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0019a2/r3tmp/tmpfE3ObK/pdisk_1.dat 2025-04-10T10:53:01.865504Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:53:02.061390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:02.069433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.069562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.077250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9646, node 1 2025-04-10T10:53:02.166761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0019a2/r3tmp/yandexUZxZno.tmp 2025-04-10T10:53:02.166792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0019a2/r3tmp/yandexUZxZno.tmp 2025-04-10T10:53:02.222053Z INFO: TTestServer started on Port 2141 GrpcPort 9646 2025-04-10T10:53:02.266077Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0019a2/r3tmp/yandexUZxZno.tmp 2025-04-10T10:53:02.266313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2141 PQClient connected to localhost:9646 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.575596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.603145Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:02.665407Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:02.691893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:53:02.936693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:53:04.540495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795708319290:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.540638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.540723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795708319318:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.545497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.557821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635795708319320:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:04.824915Z node 1 :TX_PROXY ERROR: Actor# [1:7491635795708319384:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:04.856683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:04.911478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:04.968508Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635795708319401:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:04.970378Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjYwN2M2NDktNTI2OTc2YzgtZjcwNmYyNTAtNGIyYjFiZGU=, ActorId: [1:7491635795708319288:2336], ActorState: ExecuteState, TraceId: 01jrfn9y4gayb7bth8rf3dh5wq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:04.973147Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.029209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635800003286987:2630] 2025-04-10T10:53:06.660312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635782823416598:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.660407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.502018Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:11.523278Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:11.524689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635825773091075:2809], Recipient [1:7491635782823417027:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.524734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:11.524758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:11.524802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635825773091071:2806], Recipient [1:7491635782823417027:2193]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:11.524819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:11.603094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:11.603630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:11.603970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:11.604021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:11.604061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10 ... sts. 
2025-04-10T10:55:49.914977Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:49.915026Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636461770486011:2855], Partition 2, Sender [0:0:0], Recipient [6:7491636461770486090:2864], Cookie: 0 2025-04-10T10:55:49.915080Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636461770486090:2864]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915095Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915122Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:49.915156Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:49.915175Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:49.915198Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:49.915351Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636461770486013:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636461770486087:2862], Cookie: 0 2025-04-10T10:55:49.915393Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636461770486087:2862]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915409Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915438Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:49.915474Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:49.915492Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:49.915511Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:49.915560Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636487540290446:3004], Partition 4, Sender [0:0:0], Recipient [6:7491636487540290540:3013], Cookie: 0 2025-04-10T10:55:49.915600Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636487540290540:3013]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915615Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:49.915642Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:49.915674Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:49.915695Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 
2025-04-10T10:55:49.915715Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:49.927728Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794759, Sender [6:7491636461770486034:2857], Recipient [6:7491636461770486011:2855]: NKikimr::NKeyValue::TChannelBalancer::TEvUpdateWeights 2025-04-10T10:55:49.927813Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 270794759, Sender [6:7491636461770486040:2858], Recipient [6:7491636461770486013:2856]: NKikimr::NKeyValue::TChannelBalancer::TEvUpdateWeights 2025-04-10T10:55:50.008478Z node 6 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [6:7491636461770486011:2855], Partition 2, Sender [0:0:0], Recipient [6:7491636461770486090:2864], Cookie: 0 2025-04-10T10:55:50.008594Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [6:7491636461770486090:2864]: NActors::TEvents::TEvWakeup 2025-04-10T10:55:50.008928Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [6:7491636461770486090:2864], Recipient [6:7491636461770486011:2855]: NKikimr::TEvPQ::TEvPartitionCounters 2025-04-10T10:55:50.008962Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2025-04-10T10:55:50.008991Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2025-04-10T10:55:50.022389Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636461770486013:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636461770486087:2862], Cookie: 0 2025-04-10T10:55:50.022510Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636461770486087:2862]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.022549Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.022613Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:50.022718Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:50.022757Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:50.022806Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:50.022880Z node 6 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [6:7491636461770486013:2856], Partition 1, Sender [0:0:0], Recipient [6:7491636461770486087:2862], Cookie: 0 2025-04-10T10:55:50.022944Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [6:7491636461770486087:2862]: NActors::TEvents::TEvWakeup 2025-04-10T10:55:50.023226Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636487540290446:3004], Partition 4, Sender [0:0:0], Recipient [6:7491636487540290540:3013], Cookie: 0 2025-04-10T10:55:50.023273Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636487540290540:3013]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.023291Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.023326Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:50.023370Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:50.023388Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:50.023414Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:50.023512Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [6:7491636461770486087:2862], Recipient [6:7491636461770486013:2856]: NKikimr::TEvPQ::TEvPartitionCounters 2025-04-10T10:55:50.023542Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2025-04-10T10:55:50.023572Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2025-04-10T10:55:50.023867Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7491636461770486087:2862], Recipient [6:7491636461770486013:2856]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:50.023891Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-10T10:55:50.023976Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636487540290447:3005], Partition 3, Sender [0:0:0], Recipient [6:7491636487540290547:3015], Cookie: 0 2025-04-10T10:55:50.024030Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636487540290547:3015]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024051Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024084Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:50.024133Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:50.024158Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:50.024183Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:55:50.024242Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636414525844121:2464], Partition 0, Sender [0:0:0], Recipient [6:7491636414525844180:2468], Cookie: 0 2025-04-10T10:55:50.024291Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636414525844180:2468]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024309Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024338Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:50.024376Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:50.024393Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:50.024414Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:55:50.024469Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636461770486011:2855], Partition 2, Sender [0:0:0], Recipient [6:7491636461770486090:2864], Cookie: 0 2025-04-10T10:55:50.024514Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636461770486090:2864]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024528Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:55:50.024559Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:55:50.024597Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:55:50.024616Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:55:50.024639Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0
>> KqpQuery::YqlSyntaxV0 [GOOD]
>> KqpQuery::YqlTableSample
>> KqpQuery::UdfTerminate [GOOD]
>> KqpQuery::UdfMemoryLimit
>> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD]
>> TCmsTest::WalleTasksDifferentPriorities [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive
>> KqpQuery::ExecuteDataQueryCollectMeta [GOOD]
>> KqpQuery::GenericQueryNoRowsLimit
>> KqpQuery::SelectWhereInSubquery [GOOD]
>> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink
>> KqpQuery::PreparedQueryInvalidate [GOOD]
>> KqpQuery::QueryCache
>> KqpAnalyze::AnalyzeTable+ColumnStore
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst
|67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
>> KqpScan::ScanRetryRead
|67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
|67.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
>> KqpLimits::BigParameter [GOOD]
>> KqpLimits::CancelAfterRoTx
>> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive
>> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD]
>> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink
>> Cdc::InitialScanComplete [GOOD]
>> Cdc::InitialScanEnqueuesZeroRecords
>> KqpExplain::SortStage [GOOD]
>> KqpExplain::SqlIn
>> ExternalBlobsMultipleChannels::WithCompaction
|67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD]
>> BsControllerConfig::ExtendBoxAndStoragePool [GOOD]
|67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive
|67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
|67.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
|67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
>> BasicUsage::TWriteSession_WriteEncoded [GOOD]
>> CompressExecutor::TestExecutorMemUsage
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true
>> KqpQuery::QueryClientTimeoutPrecompiled [GOOD]
>> KqpQuery::QueryExplain
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD]
Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2916:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2916:2116] Leader for TabletID 
72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2916:2116] 2025-04-10T10:55:17.258606Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:17.265629Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:17.266059Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:17.266988Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:17.269897Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:17.270623Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.270665Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:17.271019Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:17.281912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:17.282049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:17.282205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:17.282376Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.282478Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:17.282559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-10T10:55:17.294724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:17.294893Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.306859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:17.307027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.307108Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:17.307183Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.307308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:17.307433Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.307484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:17.307560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.318655Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:17.318875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.330865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:17.331038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:17.332444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:17.332506Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:17.332704Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:17.332769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:17.349841Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: 
ROT } } } } Command { QueryBaseConfig { } } } 2025-04-10T10:55:17.350878Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-10T10:55:17.350958Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-10T10:55:17.350993Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-10T10:55:17.351029Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-10T10:55:17.351057Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-10T10:55:17.351104Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-10T10:55:17.351140Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-10T10:55:17.351194Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-10T10:55:17.351220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-10T10:55:17.351248Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-10T10:55:17.351273Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-10T10:55:17.351313Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-10T10:55:17.351336Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-10T10:55:17.351378Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-10T10:55:17.351403Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-10T10:55:17.351446Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-10T10:55:17.351476Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-10T10:55:17.351538Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-10T10:55:17.351568Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-10T10:55:17.351623Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-10T10:55:17.351654Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-10T10:55:17.351688Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-10T10:55:17.351712Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-10T10:55:17.351778Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-10T10:55:17.351806Z node 1 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-10T10:55:17.351855Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-10T10:55:17.351880Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-10T10:55:17.351922Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-10T10:55:17.351948Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-10T10:55:17.351974Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-04-10T10:55:17.351997Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-10T10:55:17.352030Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-10T10:55:17.352055Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Cr ... ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-04-10T10:55:47.279587Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-04-10T10:55:47.279645Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-04-10T10:55:47.279679Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-04-10T10:55:47.279711Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-04-10T10:55:47.279746Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-04-10T10:55:47.279775Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-04-10T10:55:47.279805Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-04-10T10:55:47.279834Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-04-10T10:55:47.279863Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-04-10T10:55:47.279892Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-04-10T10:55:47.279927Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-04-10T10:55:47.279956Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-04-10T10:55:47.279985Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-04-10T10:55:47.280015Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-04-10T10:55:47.280049Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-04-10T10:55:47.280084Z 
node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-04-10T10:55:47.280117Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-04-10T10:55:47.280156Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-04-10T10:55:47.280198Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-04-10T10:55:47.280229Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-04-10T10:55:47.754265Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.480904s 2025-04-10T10:55:47.754499Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.481168s 2025-04-10T10:55:47.816762Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 
12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-04-10T10:55:47.819419Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-04-10T10:55:47.819509Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-04-10T10:55:47.819573Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-04-10T10:55:47.819615Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-04-10T10:55:47.819656Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-04-10T10:55:47.819688Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-04-10T10:55:47.819721Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-04-10T10:55:47.819755Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-04-10T10:55:47.819796Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-04-10T10:55:47.819832Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-04-10T10:55:47.819866Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-04-10T10:55:47.819922Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-04-10T10:55:47.819967Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk 
PDiskId# 215:1000 Path# /dev/disk1 2025-04-10T10:55:47.820001Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-04-10T10:55:47.820031Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-04-10T10:55:47.820060Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-04-10T10:55:47.820089Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-04-10T10:55:47.820126Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-04-10T10:55:47.820157Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-04-10T10:55:47.820196Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-04-10T10:55:47.820234Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2025-04-10T10:55:47.820270Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-04-10T10:55:47.820301Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-04-10T10:55:47.820336Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-04-10T10:55:47.820366Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-04-10T10:55:47.820398Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-04-10T10:55:47.820427Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-04-10T10:55:47.820457Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-04-10T10:55:47.820492Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-04-10T10:55:47.820525Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> 
TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb
>> Cdc::InitialScanUpdatedRows [GOOD]
>> Cdc::MustNotLoseSchemaSnapshot
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListYdb
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive
|67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
|67.7%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
>> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayload
|67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb
>> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD]
Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:51.994430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:52.001648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.001744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:52.001810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:52.001863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:52.001923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:52.001998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.002078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:52.049481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:52.398778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:52.398845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:52.442319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:52.442862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:52.454395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:52.531981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:52.540632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:52.547116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.571015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:52.595664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.604605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.604710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.606484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:52.606554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T10:55:52.606614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:52.606788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.617111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:52.766805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:52.794712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.809590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:52.818426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:52.818524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.827381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.833940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:52.834274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.841868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:52.841981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:52.842028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:52.846857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.846942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:52.847005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:52.851431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.851488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.858380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.858552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.862543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:52.865705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:52.877353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:52.878564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.879067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:52.879115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.890821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:52.891004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:52.919212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.919277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.920111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.920194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:52.920517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.920585Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:52.920699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.920756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.920854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:52.920938Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:52.921028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:52.921087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:52.921128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:52.921160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:52.933530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.933717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.933784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1.335135Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:01.335486Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:56:01.335536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:56:01.335704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:56:01.335907Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:01.335947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T10:56:01.335988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T10:56:01.336308Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:01.336380Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-10T10:56:01.336441Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-10T10:56:01.343579Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:01.343725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:01.343763Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:56:01.343808Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T10:56:01.343847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:56:01.345019Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:01.345127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:01.345163Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:56:01.345198Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T10:56:01.345236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:01.345312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T10:56:01.360147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T10:56:01.360311Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T10:56:01.360345Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T10:56:01.360864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:01.360937Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T10:56:01.361149Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:56:01.361214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:01.361277Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:56:01.361325Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:01.361384Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T10:56:01.361475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:01.361533Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:56:01.361578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T10:56:01.361785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:56:01.363156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:56:01.364102Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T10:56:01.366741Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:01.367190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:56:01.378916Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T10:56:01.380105Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:56:01.384696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T10:56:01.385029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-04-10T10:56:01.406411Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-10T10:56:01.407493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T10:56:01.407774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-10T10:56:01.408704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:56:01.408779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:56:01.408924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:56:01.409378Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:56:01.409805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:56:01.409865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:56:01.409951Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:56:01.419879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T10:56:01.419958Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T10:56:01.425305Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T10:56:01.425371Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T10:56:01.426384Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T10:56:01.426456Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T10:56:01.426658Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T10:56:01.426792Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T10:56:01.427124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:56:01.427179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:56:01.427721Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:56:01.427848Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:56:01.427901Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:538:2487] TestWaitNotification: OK eventTxId 103 2025-04-10T10:56:01.428496Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:56:01.428740Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 282us result status StatusPathDoesNotExist 2025-04-10T10:56:01.428955Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSomePushDown
>> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD]
>> TMiniKQLEngineFlatTest::TestTakePushdown
>> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD]
>> Viewer::JsonStorageListingV2 [GOOD]
>> Viewer::JsonStorageListingV2GroupIdFilter
>> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD]
TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> KqpParams::RowsList [GOOD] >> KqpQuery::CurrentUtcTimestamp |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> KqpQuery::QueryCache [GOOD] >> KqpQuery::Pure |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:51.994612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:52.002390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.002451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:52.002489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:52.002547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:52.002580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:52.002644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.002743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:52.049112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:52.398978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:52.399063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:52.443172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:52.443624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:52.458391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:52.527415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:52.539867Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2025-04-10T10:55:52.546530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.571969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:52.595657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:52.605872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.605918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:52.606527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.619660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:52.766749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:52.794370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.808683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:52.817955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:52.818129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.831908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.835372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:52.835599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.843783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:52.843867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:52.843905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-10T10:55:52.849322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.849396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:52.849437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:52.855626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.855699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.859261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.859360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.863269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:52.871576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:52.877360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:52.878575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.879102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:52.879166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.890862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:52.891005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:52.918511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.918598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.918827Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.918869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:52.920026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.920097Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:52.920225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.920266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.920389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:52.920494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:52.920580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:52.920652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:52.920693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:52.920752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:52.922955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.923105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.923152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:03.843303Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-10T10:56:03.845697Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.845807Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-04-10T10:56:03.845947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-04-10T10:56:03.846018Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-10T10:56:03.848464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.848549Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:56:03.848614Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-10T10:56:03.851062Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.851134Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.851253Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-04-10T10:56:03.851337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-10T10:56:03.851577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:56:03.853907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-10T10:56:03.854107Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-10T10:56:03.854645Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:03.854820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:03.854897Z node 7 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-10T10:56:03.855296Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-10T10:56:03.855383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-10T10:56:03.855582Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:03.855814Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:399:2369], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-04-10T10:56:03.858852Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-04-10T10:56:03.859060Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-04-10T10:56:03.859326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-04-10T10:56:03.859883Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:56:03.859951Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:56:03.860194Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:03.860270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T10:56:03.860740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.860822Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-10T10:56:03.860876Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-10T10:56:03.862418Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:03.862602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:56:03.862680Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:56:03.862743Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-10T10:56:03.862815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T10:56:03.862949Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 
0/1, is published: true 2025-04-10T10:56:03.866971Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T10:56:03.867099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:56:03.867250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:399:2369], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:56:03.867396Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-10T10:56:03.867445Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-10T10:56:03.867614Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-10T10:56:03.867658Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:493:2435], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-10T10:56:03.869216Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T10:56:03.869285Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T10:56:03.869501Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:56:03.869564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:03.869631Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:56:03.869689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:03.869752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T10:56:03.869825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:56:03.869888Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:56:03.869944Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T10:56:03.870079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:03.871062Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 
Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-10T10:56:03.873620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:56:03.873743Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-10T10:56:03.875753Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:56:03.875826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:56:03.876452Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:56:03.876597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:56:03.876660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:575:2515] TestWaitNotification: OK eventTxId 103 |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> TTxDataShardUploadRows::TestUploadShadowRows |67.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 14309744326021615345 Reassign# 3 -- VSlotId { NodeId: 4 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 3 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 4 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:56:0] Put# [1:1:2:0:0:26:0] Put# [1:1:3:0:0:76:0] Put# [1:1:4:0:0:43:0] Put# [1:1:5:0:0:3:0] Put# [1:1:6:0:0:68:0] 2025-04-10T10:53:03.273903Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T10:53:03.276057Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7993775276595749230] 2025-04-10T10:53:03.286459Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:76:1] 2025-04-10T10:53:03.286594Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:4:0:0:43:1] 2025-04-10T10:53:03.286631Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:5:0:0:3:2] 2025-04-10T10:53:03.286663Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:6:0:0:68:3] 2025-04-10T10:53:03.287083Z 9 00h00m20.011024s :BS_SYNCER ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 4 PartsResurrected# 4 Put# [1:1:7:0:0:35:0] Put# [1:1:8:0:0:9:0] Put# [1:1:9:0:0:68:0] Put# [1:1:10:0:0:68:0] Put# [1:1:11:0:0:62:0] Put# [1:1:12:0:0:42:0] Put# [1:1:13:0:0:77:0] Put# [1:1:14:0:0:41:0] Put# [1:1:15:0:0:64:0] Put# [1:1:16:0:0:31:0] Put# [1:1:17:0:0:3:0] Put# [1:1:18:0:0:46:0] Put# [1:1:19:0:0:66:0] Put# [1:1:20:0:0:90:0] Put# [1:1:21:0:0:4:0] Put# [1:1:22:0:0:10:0] Put# [1:1:23:0:0:65:0] Put# [1:1:24:0:0:3:0] Put# [1:1:25:0:0:6:0] Put# [1:1:26:0:0:66:0] Put# [1:1:27:0:0:88:0] Put# [1:1:28:0:0:90:0] Put# [1:1:29:0:0:84:0] Put# [1:1:30:0:0:17:0] Put# [1:1:31:0:0:66:0] Put# [1:1:32:0:0:46:0] Put# [1:1:33:0:0:51:0] Put# [1:1:34:0:0:90:0] Put# [1:1:35:0:0:6:0] Put# [1:1:36:0:0:100:0] Put# [1:1:37:0:0:62:0] Put# [1:1:38:0:0:79:0] Put# [1:1:39:0:0:29:0] Put# [1:1:40:0:0:45:0] Put# [1:1:41:0:0:87:0] Put# [1:1:42:0:0:81:0] Put# [1:1:43:0:0:74:0] Put# [1:1:44:0:0:68:0] Put# [1:1:45:0:0:56:0] Put# [1:1:46:0:0:73:0] Put# [1:1:47:0:0:42:0] Put# [1:1:48:0:0:7:0] Put# [1:1:49:0:0:52:0] Put# [1:1:50:0:0:93:0] Put# [1:1:51:0:0:86:0] Put# [1:1:52:0:0:86:0] Put# [1:1:53:0:0:52:0] Put# [1:1:54:0:0:64:0] Put# [1:1:55:0:0:6:0] Put# [1:1:56:0:0:86:0] Put# [1:1:57:0:0:60:0] Put# [1:1:58:0:0:43:0] Put# [1:1:59:0:0:48:0] Put# [1:1:60:0:0:97:0] Put# [1:1:61:0:0:89:0] Put# [1:1:62:0:0:79:0] Put# [1:1:63:0:0:17:0] Put# [1:1:64:0:0:87:0] Put# [1:1:65:0:0:92:0] Put# [1:1:66:0:0:89:0] Put# [1:1:67:0:0:32:0] Put# [1:1:68:0:0:49:0] Put# [1:1:69:0:0:39:0] Put# [1:1:70:0:0:12:0] Put# [1:1:71:0:0:15:0] Put# [1:1:72:0:0:15:0] Put# [1:1:73:0:0:27:0] Put# [1:1:74:0:0:90:0] Put# [1:1:75:0:0:83:0] Put# [1:1:76:0:0:35:0] Put# [1:1:77:0:0:50:0] Put# [1:1:78:0:0:8:0] Put# [1:1:79:0:0:29:0] Put# [1:1:80:0:0:78:0] Put# [1:1:81:0:0:23:0] Put# [1:1:82:0:0:7:0] Put# [1:1:83:0:0:64:0] Put# [1:1:84:0:0:7:0] Put# [1:1:85:0:0:17:0] Put# [1:1:86:0:0:72:0] Put# [1:1:87:0:0:24:0] Put# [1:1:88:0:0:92:0] Put# [1:1:89:0:0:15:0] Put# [1:1:90:0:0:30:0] Put# [1:1:91:0:0:91:0] Put# [1:1:92:0:0:80:0] Put# [1:1:93:0:0:58:0] Put# [1:1:94:0:0:19:0] Put# [1:1:95:0:0:10:0] Put# [1:1:96:0:0:32:0] Put# [1:1:97:0:0:20:0] Put# [1:1:98:0:0:15:0] Put# [1:1:99:0:0:9:0] Put# [1:1:100:0:0:31:0] Put# [1:1:101:0:0:91:0] Put# [1:1:102:0:0:84:0] Put# [1:1:103:0:0:85:0] Put# [1:1:104:0:0:27:0] Put# [1:1:105:0:0:45:0] Put# [1:1:106:0:0:74:0] Put# [1:1:107:0:0:3:0] Put# [1:1:108:0:0:74:0] Put# [1:1:109:0:0:13:0] Put# [1:1:110:0:0:34:0] Put# [1:1:111:0:0:70:0] Put# [1:1:112:0:0:35:0] Put# [1:1:113:0:0:66:0] Put# [1:1:114:0:0:67:0] Put# [1:1:115:0:0:70:0] Put# [1:1:116:0:0:34:0] Put# [1:1:117:0:0:93:0] Put# [1:1:118:0:0:79:0] Put# [1:1:119:0:0:33:0] Put# [1:1:120:0:0:84:0] Put# [1:1:121:0:0:90:0] Put# [1:1:122:0:0:44:0] Put# [1:1:123:0:0:74:0] Put# [1:1:124:0:0:87:0] Put# [1:1:125:0:0:10:0] Put# [1:1:126:0:0:56:0] Put# [1:1:127:0:0:9:0] Put# [1:1:128:0:0:72:0] Put# [1:1:129:0:0:14:0] Put# [1:1:130:0:0:62:0] Put# [1:1:131:0:0:48:0] Put# [1:1:132:0:0:48:0] Put# [1:1:133:0:0:84:0] Put# [1:1:134:0:0:6:0] Put# [1:1:135:0:0:45:0] Put# [1:1:136:0:0:56:0] Put# [1:1:137:0:0:51:0] Put# [1:1:138:0:0:11:0] Put# [1:1:139:0:0:38:0] Put# [1:1:140:0:0:87:0] Put# [1:1:141:0:0:23:0] Put# [1:1:142:0:0:38:0] Put# [1:1:143:0:0:12:0] Put# [1:1:144:0:0:3:0] Put# [1:1:145:0:0:2:0] Put# [1:1:146:0:0:100:0] Put# [1:1:147:0:0:2:0] Put# [1:1:148:0:0:85:0] Put# [1:1:149:0:0:39:0] Put# [1:1:150:0:0:80:0] Put# [1:1:151:0:0:21:0] Put# [1:1:152:0:0:47:0] Put# [1:1:153:0:0:69:0] Put# 
[1:1:154:0:0:20:0] Put# [1:1:155:0:0:86:0] Put# [1:1:156:0:0:8:0] Put# [1:1:157:0:0:100:0] Put# [1:1:158:0:0:97:0] Put# [1:1:159:0:0:86:0] Put# [1:1:160:0:0:71:0] Put# [1:1:161:0:0:32:0] Put# [1:1:162:0:0:64:0] Put# [1:1:163:0:0:96:0] Put# [1:1:164:0:0:46:0] Put# [1:1:165:0:0:46:0] Put# [1:1:166:0:0:88:0] Put# [1:1:167:0:0:12:0] Put# [1:1:168:0:0:72:0] Put# [1:1:169:0:0:22:0] Put# [1:1:170:0:0:87:0] Put# [1:1:171:0:0:52:0] Put# [1:1:172:0:0:94:0] Put# [1:1:173:0:0:5:0] Put# [1:1:174:0:0:79:0] Put# [1:1:175:0:0:40:0] Put# [1:1:176:0:0:92:0] Put# [1:1:177:0:0:50:0] Put# [1:1:178:0:0:62:0] Put# [1:1:179:0:0:92:0] Put# [1:1:180:0:0:33:0] Put# [1:1:181:0:0:38:0] Put# [1:1:182:0:0:40:0] Put# [1:1:183:0:0:24:0] Put# [1:1:184:0:0:49:0] Put# [1:1:185:0:0:77:0] Put# [1:1:186:0:0:81:0] Put# [1:1:187:0:0:58:0] Put# [1:1:188:0:0:88:0] Put# [1:1:189:0:0:91:0] Put# [1:1:190:0:0:60:0] Put# [1:1:191:0:0:67:0] Put# [1:1:192:0:0:17:0] Put# [1:1:193:0:0:96:0] Put# [1:1:194:0:0:27:0] Put# [1:1:195:0:0:35:0] Put# [1:1:196:0:0:5:0] Put# [1:1:197:0:0:18:0] Put# [1:1:198:0:0:93:0] Put# [1:1:199:0:0:40:0] Put# [1:1:200:0:0:79:0] Put# [1:1:201:0:0:34:0] Put# [1:1:202:0:0:42:0] Put# [1:1:203:0:0:27:0] Put# [1:1:204:0:0:16:0] Put# [1:1:205:0:0:17:0] Put# [1:1:206:0:0:37:0] Put# [1:1:207:0:0:1:0] Put# [1:1:208:0:0:91:0] Put# [1:1:209:0:0:59:0] Put# [1:1:210:0:0:32:0] Put# [1:1:211:0:0:10:0] Put# [1:1:212:0:0:34:0] Put# [1:1:213:0:0:94:0] Put# [1:1:214:0:0:42:0] Put# [1:1:215:0:0:11:0] Put# [1:1:216:0:0:32:0] Put# [1:1:217:0:0:60:0] Put# [1:1:218:0:0:53:0] Put# [1:1:219:0:0:57:0] Put# [1:1:220:0:0:64:0] Put# [1:1:221:0:0:21:0] Put# [1:1:222:0:0:83:0] Put# [1:1:223:0:0:70:0] Put# [1:1:224:0:0:22:0] Put# [1:1:225:0:0:94:0] Put# [1:1:226:0:0:64:0] Put# [1:1:227:0:0:72:0] Put# [1:1:228:0:0:96:0] Put# [1:1:229:0:0:54:0] Put# [1:1:230:0:0:22:0] Put# [1:1:231:0:0:14:0] Put# [1:1:232:0:0:10:0] Put# [1:1:233:0:0:53:0] Put# [1:1:234:0:0:99:0] Put# [1:1:235:0:0:97:0] Put# [1:1:236:0:0:21:0] Put# [1:1:237:0:0:10:0] Put# [1:1:238:0:0:89:0] Put# [1:1:239:0:0:97:0] Put# [1:1:240:0:0:86:0] Put# [1:1:241:0:0:48:0] Put# [1:1:242:0:0:94:0] Put# [1:1:243:0:0:77:0] Put# [1:1:244:0:0:74:0] Put# [1:1:245:0:0:38:0] Put# [1:1:246:0:0:86:0] Put# [1:1:247:0:0:3:0] Put# [1:1:248:0:0:14:0] Put# [1:1:249:0:0:43:0] Put# [1:1:250:0:0:99:0] Put# [1:1:251:0:0:77:0] Put# [1:1:252:0:0:24:0] Put# [1:1:253:0:0:79:0] Put# [1:1:254:0:0:3:0] Put# [1:1:255:0:0:51:0] Put# [1:1:256:0:0:13:0] Put# [1:1:257:0:0:49:0] Put# [1:1:258:0:0:50:0] Put# [1:1:259:0:0:43:0] Put# [1:1:260:0:0:1:0] Put# [1:1:261:0:0:28:0] Put# [1:1:262:0:0:37:0] Put# [1:1:263:0:0:82:0] Put# [1:1:264:0:0:5:0] Put# [1:1:265:0:0:83:0] Put# [1:1:266:0:0:75:0] Put# [1:1:267:0:0:3:0] Put# [1:1:268:0:0:9:0] Put# [1:1:269:0:0:51:0] Put# [1:1:270:0:0:39:0] Put# [1:1:271:0:0:93:0] Put# [1:1:272:0:0:99:0] Put# [1:1:273:0:0:47:0] Put# [1:1:274:0:0:71:0] Put# [1:1:275:0:0:100:0] Put# [1:1:276:0:0:2:0] Put# [1:1:277:0:0:15:0] Put# [1:1:278:0:0:37:0] Put# [1:1:279:0:0:94:0] Put# [1:1:280:0:0:19:0] Put# [1:1:281:0:0:81:0] Put# [1:1:282:0:0:7:0] Put# [1:1:283:0:0:37:0] Put# [1:1:284:0:0:86:0] Put# [1:1:285:0:0:10:0] Put# [1:1:286:0:0:38:0] Put# [1:1:287:0:0:60:0] Put# [1:1:288:0:0:66:0] Put# [1:1:289:0:0:91:0] Put# [1:1:290:0:0:62:0] Put# [1:1:291:0:0:86:0] Put# [1:1:292:0:0:60:0] Put# [1:1:293:0:0:27:0] Put# [1:1:294:0:0:58:0] Put# [1:1:295:0:0:74:0] Put# [1:1:296:0:0:59:0] Put# [1:1:297:0:0:92:0] Put# [1:1:298:0:0:42:0] Put# [1:1:299:0:0:33:0] Put# [1:1:300:0:0:86:0] Put# [1:1:301:0:0:20:0] Put# 
[1:1:302:0:0:58:0] Put# [1:1:303:0:0:47:0] Put# [1:1:304:0:0:73:0] Put# [1:1:305:0:0:99:0] Put# [1:1:306:0:0:31:0] Put# [1:1:307:0:0:12:0] Put# [1:1:308:0:0:36:0] Put# [1:1:309:0:0:44:0] Put# [1:1:310:0:0:35:0] Put# [1:1:311:0:0:11:0] Put# [1:1:312:0:0:18:0] Put# [1:1:313:0:0:87:0] Put# [1:1:314:0:0:3:0] Put# [1:1:315:0:0:38:0] Put# [1:1:316:0:0:33:0] Put# [1:1:317:0:0:7:0] Put# [1:1:318:0:0:51:0] Put# [1:1:319:0:0:75:0] Put# [1:1:320:0:0:44:0] Put# [1:1:321:0:0:37:0] Put# [1:1:322:0:0:6:0] Put# [1:1:323:0:0:22:0] Put# [1:1:324:0:0:68:0] Put# [1:1:325:0:0:80:0] Put# [1:1:326:0:0:11:0] Put# [1:1:327:0:0:95:0] Put# [1:1:328:0:0:28:0] Put# [1:1:329:0:0:56:0] Put# [1:1:330:0:0:7:0] Put# [1:1:331:0:0:57:0] Put# [1:1:332:0:0:52:0] Put# [1:1:333:0:0:51:0] Put# [1:1:334:0:0:27:0] Put# [1:1:335:0:0:61:0] Put# [1:1:336:0:0:77:0] Put# [1:1:337:0:0:68:0] Put# [1:1:338:0:0:53:0] Put# [1:1:339:0:0:33:0] Put# [1:1:340:0:0:20:0] Put# [1:1:341:0:0:64:0] Put# [1:1:342:0:0:6:0] Put# [1:1:343:0:0:39:0] Put# [1:1:344:0:0:46:0] Put# [1:1:345:0:0:5:0] Put# [1:1:346:0:0:74:0] Put# [1:1:347:0:0:26:0] Put# [1:1:348:0:0:69:0] Put# [1:1:349:0:0:86:0] Put# [1:1:350:0:0:3:0] Put# [1:1:351:0:0:15:0] Put# [1:1:352:0:0:13:0] Put# [1:1:353:0:0:38:0] Put# [1:1:354:0:0:55:0] Put# [1:1:355:0:0:21:0] Put# [1:1:356:0:0:47:0] Put# [1:1:357:0:0:36:0] Put# [1:1:358:0:0:35:0] Put# [1:1:359:0:0:78:0] Put# [1:1:360:0:0:60:0] Put# [1:1:361:0:0:3:0] Put# [1:1:362:0:0:6:0] Put# [1:1:363:0:0:44:0] Put# [1:1:364:0:0:90:0] Put# [1:1:365:0:0:44:0] Put# [1:1:366:0:0:1:0] Put# [1:1:367:0:0:2:0] Put# [1:1:368:0:0:92:0] Put# [1:1:369:0:0:90:0] Put# [1:1:370:0:0:49:0] Put# [1:1:371:0:0:87:0] Put# [1:1:372:0:0:95:0] Put# [1:1:373:0:0:85:0] Put# [1:1:374:0:0:82:0] Put# [1:1:375:0:0:56:0] Put# [1:1:376:0:0:76:0] Put# [1:1:377:0:0:1:0] Put# [1:1:378:0:0:18:0] Put# [1:1:379:0:0:14:0] Put# [1:1:380:0:0:15:0] Put# [1:1:381:0:0:46:0] Put# [1:1:382:0:0:16:0] Put# [1:1:383:0:0:63:0] Put# [1:1:384:0:0:27:0] Put# [1:1:385:0:0:34:0] Put# [1:1:386:0:0:19:0] Put# [1:1:387:0:0:74:0] Put# [1:1:388:0:0:34:0] Put# [1:1:389:0:0:72:0] Put# [1:1:390:0:0:12:0] Put# [1:1:391:0:0:60:0] Put# [1:1:392:0:0:83:0] Put# [1:1:393:0:0:9:0] Put# [1:1:394:0:0:60:0] Put# [1:1:395:0:0:3:0] Put# [1:1:396:0:0:60:0] Put# [1:1:397:0:0:32:0] Put# [1:1:398:0:0:5:0] Put# [1:1:399:0:0:22:0] Put# [1:1:400:0:0:69:0] Put# [1:1:401:0:0:55:0] Put# [1:1:402:0:0:35:0] Put# [1:1:403:0:0:16:0] Put# [1:1:404:0:0:99:0] Put# [1:1:405:0:0:49:0] Put# [1:1:406:0:0:13:0] Put# [1:1:407:0:0:42:0] Put# [1:1:408:0:0:65:0] Put# [1:1:409:0:0:4:0] Put# [1:1:410:0:0:96:0] Put# [1:1:411:0:0:8:0] Put# [1:1:412:0:0:32:0] Put# [1:1:413:0:0:63:0] Put# [1:1:414:0:0:55:0] Put# [1:1:415:0:0:59:0] Put# [1:1:416:0:0:43:0] Put# [1:1:417:0:0:98:0] Put# [1:1:418:0:0:75:0] Put# [1:1:419:0:0:24:0] Put# [1:1:420:0:0:47:0] Put# [1:1:421:0:0:100:0] Put# [1:1:422:0:0:20:0] Put# [1:1:423:0:0:27:0] Put# [1:1:424:0:0:16:0] Put# [1:1:425:0:0:56:0] Put# [1:1:426:0:0:28:0] Put# [1:1:427:0:0:93:0] Put# [1:1:428:0:0:31:0] Put# [1:1:429:0:0:18:0] Put# [1:1:430:0:0:76:0] Put# [1:1:431:0:0:22:0] Put# [1:1:432:0:0:45:0] Put# [1:1:433:0:0:51:0] Put# [1:1:434:0:0:76:0] Put# [1:1:435:0:0:18:0] Put# [1:1:436:0:0:58:0] Put# [1:1:437:0:0:46:0] Put# [1:1:438:0:0:38:0] Put# [1:1:439:0:0:60:0 ... 
h06m21.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.520372Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.520642Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.520823Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.520996Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.521171Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.521333Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.521521Z 1 00h06m22.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.620503Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.620763Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.620943Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.621111Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.621276Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.621432Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.621610Z 1 00h06m23.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.756110Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.756329Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.756467Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.756602Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 
2025-04-10T10:56:02.756735Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.759175Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.759353Z 1 00h06m24.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.831814Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.832113Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.832334Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.832558Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.832789Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.833013Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.833222Z 1 00h06m25.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.928202Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.928464Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.928642Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.928820Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.928986Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.929154Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:02.929310Z 1 00h06m26.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.026440Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.026677Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): 
data.empty() && !EndOfStream 2025-04-10T10:56:03.026820Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.026949Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.027091Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.027255Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.027404Z 1 00h06m27.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.088278Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.088513Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.088686Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.088858Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.089013Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.089154Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.089304Z 1 00h06m28.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157033Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157240Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157370Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157521Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157653Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157783Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.157919Z 1 00h06m29.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) 
TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.262485Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.262705Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.262843Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.262968Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.263096Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.263224Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream 2025-04-10T10:56:03.263354Z 1 00h06m30.353528s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TSyncerJob::HandleOK(TEvVSyncFullResult): data.empty() && !EndOfStream >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows |67.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:51.995375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:52.002368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.002437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:52.002488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:52.002534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:52.002569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:52.002637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.002744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:52.049498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:52.423410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:52.423546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:52.449092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:52.449511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:52.454765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:52.528114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:52.539902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:52.546569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.570815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:52.595442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605870Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:52.605927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.605973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:52.610025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.619658Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:52.767557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:52.794966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.810432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:52.818456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:52.818578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.828330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.834573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:52.834806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.844241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:52.844348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:52.844388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:52.851415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.851495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:52.851539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:52.855667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.855768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.862319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.862459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.866738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:52.886160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:52.886412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:52.887550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.887767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:52.887837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.888180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:52.888250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.890790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:52.890947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:52.918708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.918788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.918990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.919036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:52.919296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.919352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:52.919500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.919539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.919582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-04-10T10:55:52.919624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.919671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:52.919721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.919756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:52.920330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:52.920443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:52.920504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:52.920542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:52.923220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.923345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.923383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-10T10:56:05.276171Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.278165Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T10:56:05.278526Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T10:56:05.278613Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T10:56:05.279283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-10T10:56:05.279357Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T10:56:05.279443Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-10T10:56:05.356757Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-10T10:56:05.357000Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-10T10:56:05.357096Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-10T10:56:05.357185Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 
72057594046678944 2025-04-10T10:56:05.357256Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-10T10:56:05.365098Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.365357Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.365425Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.365510Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-10T10:56:05.365592Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:56:05.365779Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:56:05.369722Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:56:05.369959Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T10:56:05.370464Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:05.370652Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 34359740523 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:05.370726Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-10T10:56:05.371158Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T10:56:05.371254Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-10T10:56:05.371449Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:05.371618Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:359:2333], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:56:05.375058Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:56:05.375135Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:56:05.375459Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:05.375529Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:56:05.375645Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.375717Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-10T10:56:05.375775Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-10T10:56:05.377148Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:05.377289Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:05.377354Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:56:05.377417Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T10:56:05.377497Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T10:56:05.377620Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T10:56:05.381367Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:56:05.381468Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:56:05.381670Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:56:05.381735Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:56:05.381808Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:56:05.381868Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:56:05.381925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:56:05.382044Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:300:2291] message: TxId: 102 2025-04-10T10:56:05.382124Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:56:05.382210Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:56:05.382262Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:56:05.382535Z 
node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:05.383056Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:56:05.385346Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:56:05.385431Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:504:2442] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-10T10:56:05.397183Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:56:05.397432Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-04-10T10:56:05.397531Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-04-10T10:56:05.397717Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-10T10:56:05.397784Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-10T10:56:05.401369Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:05.401634Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103
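The txId 103 exchange above captures the invariant this test exercises: an ESchemeOpAlterExtSubDomain that enables ExternalStatisticsAggregator is accepted, while one that sets it back to false is rejected with StatusInvalidParameter at propose time, before any suboperation runs (note there is no TTxOperationProgress activity for txId 103 after the AUDIT record). As a minimal sketch only, assuming a one-way feature flag — the struct and function names below are hypothetical, not schemeshard's actual types:

    // Hedged illustration of a one-way flag check, mirroring the
    // "ExternalStatisticsAggregator could only be added, not removed"
    // rejection in the log above. Not the real schemeshard code.
    #include <optional>
    #include <string>

    struct TAlterRequest {
        // unset means "leave the aggregator setting unchanged"
        std::optional<bool> ExternalStatisticsAggregator;
    };

    struct TProposeResult {
        bool Accepted;
        std::string Reason;
    };

    TProposeResult ValidateAlter(bool aggregatorEnabled, const TAlterRequest& req) {
        if (req.ExternalStatisticsAggregator.has_value() &&
            aggregatorEnabled && !*req.ExternalStatisticsAggregator) {
            return {false,
                    "ExternalStatisticsAggregator could only be added, not removed"};
        }
        return {true, ""}; // adding the aggregator, or leaving it as-is, is fine
    }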
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow
>> KqpQuery::QueryExplain [GOOD]
>> KqpQuery::QueryFromSqs
>> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD]
>> KqpExplain::SqlIn [GOOD]
>> KqpExplain::SsaProgramInJsonPlan
>> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst
>> KqpQuery::QueryCacheTtl [GOOD]
>> KqpQuery::QueryCacheInvalidate
>> TPQTestSlow::TestWriteVeryBigMessage
>> KqpQuery::RandomNumber [GOOD]
>> KqpQuery::RandomUuid
>> KqpQuery::RowsLimitServiceOverride [GOOD]
>> KqpQuery::SelectCountAsteriskFromVar
>> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD]
>> KqpLimits::QueryExecTimeout
>> KqpLimits::DatashardProgramSize+useSink [GOOD]
>> KqpLimits::DatashardProgramSize-useSink
|67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD]
>> KqpWorkloadService::TestHandlerActorCleanup [GOOD]
|67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|67.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log}
|67.8%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:55:51.991472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:52.000290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.000373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:52.000422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:52.000474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:52.000511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:52.000596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:52.000721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:52.049109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:52.396335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:55:52.396492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:52.442347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:52.443012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:52.454748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:52.524236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:52.539865Z node
1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:52.547501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.570825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:52.596734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.605863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:52.605924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.605968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:52.610532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.619674Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:55:52.766751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:52.795436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.810574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:52.820229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:52.820354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.829201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.834968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:52.835209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.842724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:52.842859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:52.842910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 
2 -> 3 2025-04-10T10:55:52.849344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.849426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:52.849485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:52.855799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.855882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.859771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.859874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.863930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:52.865981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:52.877365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:52.878551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:52.878772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.879069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:52.879127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:52.891984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:52.892152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:52.918702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:52.918772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:52.918978Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:52.919037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:52.919305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:52.919374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:52.919484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.919522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:52.920189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:55:52.920334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:52.920378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:55:52.920416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:55:52.920518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:55:52.920564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:55:52.920601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:55:52.924375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.924538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:55:52.924593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:09.905960Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-04-10T10:56:09.906294Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:56:09.906365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:56:09.906431Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:56:09.906487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:56:09.906585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:56:09.906683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T10:56:09.906890Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:353:2331], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-04-10T10:56:09.906955Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:56:09.907013Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:56:09.907071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:56:09.971231Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T10:56:09.971358Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-10T10:56:09.971424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-04-10T10:56:09.978536Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:56:09.978641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:56:09.978956Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:09.979025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T10:56:09.979654Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-04-10T10:56:09.979809Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-04-10T10:56:09.980068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 
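The txId 104 publication above ("Publication still in progress, tx: 104, publications: 1, subscribers: 0") is completed by the TEvUpdateAck handling that follows below: the operation stays in-flight until an ack arrives for every (path, version) pair it published to the scheme board, then the schemeshard logs "Publication complete, notify & remove" and releases any waiters. A minimal sketch of that bookkeeping, assuming one outstanding set of pairs per transaction — TPublicationTracker is a hypothetical name, not a YDB type:

    // Hedged sketch of publication ack-tracking as suggested by the
    // "Publication in-flight" / "Publication complete, notify & remove"
    // log lines. Illustrative only.
    #include <cstdint>
    #include <map>
    #include <set>
    #include <utility>

    class TPublicationTracker {
    public:
        void Publish(uint64_t txId, uint64_t pathId, uint64_t version) {
            InFlight[txId].insert({pathId, version});
        }

        // Returns true when this ack completes the publication for txId,
        // i.e. when the last outstanding (pathId, version) pair is acked.
        bool AckPublish(uint64_t txId, uint64_t pathId, uint64_t version) {
            auto it = InFlight.find(txId);
            if (it == InFlight.end()) {
                return false; // unknown or already completed transaction
            }
            it->second.erase({pathId, version});
            if (it->second.empty()) {
                InFlight.erase(it); // "Publication complete, notify & remove"
                return true;
            }
            return false; // "Publication in-flight"
        }

    private:
        // txId -> set of (pathId, version) pairs still awaiting TEvUpdateAck
        std::map<uint64_t, std::set<std::pair<uint64_t, uint64_t>>> InFlight;
    };

In this trace there is exactly one publication for txId 104 ([OwnerId: 72057594046678944, LocalPathId: 2] at version 6), so the first matching ack both satisfies the publication and triggers the completion notification.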
2025-04-10T10:56:09.980760Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:56:09.980903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:56:09.980965Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:56:09.981023Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-10T10:56:09.981088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:56:09.981214Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T10:56:10.018175Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T10:56:10.018344Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:56:10.018496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:353:2331], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T10:56:10.018679Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T10:56:10.018728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T10:56:10.018928Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T10:56:10.018973Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-10T10:56:10.019562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:56:10.019662Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:56:10.019867Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:56:10.020191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:56:10.020254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:56:10.020853Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:56:10.020995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:56:10.021053Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:547:2497] TestWaitNotification: OK eventTxId 104 2025-04-10T10:56:10.021725Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:56:10.022021Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 335us result status StatusSuccess 2025-04-10T10:56:10.025925Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:10.091039Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-10T10:56:10.091398Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 403us result status StatusSuccess 2025-04-10T10:56:10.091951Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_OffsetCommit [GOOD] Test command err: 2025-04-10T10:53:01.995950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635781808117088:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.996302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001974/r3tmp/tmpGfMZNJ/pdisk_1.dat 2025-04-10T10:53:02.236107Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:53:02.402329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:02.447827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.447889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.448877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3767, node 1 2025-04-10T10:53:02.525987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001974/r3tmp/yandexJFB08b.tmp 2025-04-10T10:53:02.526007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001974/r3tmp/yandexJFB08b.tmp 2025-04-10T10:53:02.526204Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001974/r3tmp/yandexJFB08b.tmp 2025-04-10T10:53:02.526378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.579141Z INFO: TTestServer started on Port 12958 GrpcPort 3767 TClient is connected to server localhost:12958 PQClient connected to localhost:3767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.882848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.962929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:53:02.968769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:03.104376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:05.176727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635798987987048:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.176727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635798987987036:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.179825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.184456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:05.186318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635798987987080:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.186369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:05.202179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635798987987050:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.446764Z node 1 :TX_PROXY ERROR: Actor# [1:7491635798987987106:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.473306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.554827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.582970Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635798987987126:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.584776Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWMyZTY3N2EtMjU4OTc3NmItODYxM2NhMGItYjQyNGIzYzg=, ActorId: [1:7491635798987987032:2337], ActorState: ExecuteState, TraceId: 01jrfn9yr93gp8shjgk3gey0nt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.587759Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.682097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635798987987418:2632] 2025-04-10T10:53:06.995236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635781808117088:2122];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.995369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:12.094083Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:12.126409Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:12.127619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635829052758804:2810], Recipient [1:7491635786103084755:2192]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.127644Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.127673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:12.127717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635829052758800:2807], Recipient [1:7491635786103084755:2192]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:12.127735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:12.217396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:12.217906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:12.218596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId ... 
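The CreatePersQueueGroup request above carries the topic's whole autoscaling contract: a 1 MiB/s write quota per partition (WriteSpeedInBytesPerSecond: 1048576), PartitionStrategyType: CAN_SPLIT, split/merge thresholds of 90% and 30% of quota measured over a 300-second window, and a partition count bounded by 1 and 100. As a rough sketch of how such thresholds are typically evaluated — the real decision logic lives in YDB's PQ tablets, and the names below are illustrative, not actual YDB types:

    // Hedged sketch: evaluating the PartitionStrategy from the log
    // (MinPartitionCount: 1, MaxPartitionCount: 100, ScaleThresholdSeconds: 300,
    //  90% up / 30% down, PartitionStrategyType: CAN_SPLIT). Illustrative only.
    #include <cstdint>

    struct TPartitionStrategy {
        uint32_t MinPartitionCount = 1;
        uint32_t MaxPartitionCount = 100;
        uint32_t ScaleThresholdSeconds = 300;   // averaging window
        uint32_t ScaleUpThresholdPercent = 90;  // of the per-partition quota
        uint32_t ScaleDownThresholdPercent = 30;
        bool CanSplit = true;                   // CAN_SPLIT: split, never merge
    };

    enum class EScaleDecision { None, Split };

    EScaleDecision Evaluate(const TPartitionStrategy& s,
                            uint64_t avgWriteBytesPerSec, // over the window
                            uint64_t quotaBytesPerSec,    // 1048576 in this test
                            uint32_t partitionCount) {
        const uint64_t splitBar = quotaBytesPerSec * s.ScaleUpThresholdPercent / 100;
        if (s.CanSplit && avgWriteBytesPerSec >= splitBar &&
            partitionCount < s.MaxPartitionCount) {
            return EScaleDecision::Split;
        }
        // With CAN_SPLIT the down-threshold never triggers a merge, so the
        // partition count only grows toward MaxPartitionCount.
        return EScaleDecision::None;
    }

This is consistent with the tail of the trace below, where the topic that started with a single partition is serving partitions 0 through 4 after PartitionSplit_OffsetCommit has run.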
271188544, Sender [6:7491636445804592818:3067], Recipient [6:7491636445804592810:3063]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-10T10:56:05.259024Z node 6 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-10T10:56:05.292865Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636510229103490:3383], Partition 4, Sender [0:0:0], Recipient [6:7491636510229103613:3404], Cookie: 0 2025-04-10T10:56:05.292907Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636510229103491:3384], Partition 3, Sender [0:0:0], Recipient [6:7491636510229103615:3406], Cookie: 0 2025-04-10T10:56:05.293014Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636510229103615:3406]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293014Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636510229103613:3404]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293051Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293051Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293117Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.293117Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.293210Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.293210Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.293244Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.293245Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.293296Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.293296Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:56:05.293397Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636342725375249:2472], Partition 0, Sender [0:0:0], Recipient [6:7491636342725375309:2476], Cookie: 0 2025-04-10T10:56:05.293399Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636445804592733:3056], Partition 2, Sender [0:0:0], Recipient [6:7491636445804592810:3063], Cookie: 0 2025-04-10T10:56:05.293458Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636445804592810:3063]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293461Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636342725375309:2476]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293476Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293482Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293508Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.293517Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.293548Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.293557Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.293570Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.293576Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.293596Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.293598Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.293655Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636445804592734:3057], Partition 1, Sender [0:0:0], Recipient [6:7491636445804592812:3065], Cookie: 0 2025-04-10T10:56:05.293702Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636445804592812:3065]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293719Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.293749Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.293791Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.293813Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.293838Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:56:05.397992Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636510229103491:3384], Partition 3, Sender [0:0:0], Recipient [6:7491636510229103615:3406], Cookie: 0 2025-04-10T10:56:05.398115Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636510229103615:3406]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398168Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398234Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.398284Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636342725375249:2472], Partition 0, Sender [0:0:0], Recipient [6:7491636342725375309:2476], Cookie: 0 2025-04-10T10:56:05.398329Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.398345Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636342725375309:2476]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398363Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.398368Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398416Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.398424Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.398477Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.398501Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.398515Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636510229103490:3383], Partition 4, Sender [0:0:0], Recipient [6:7491636510229103613:3404], Cookie: 0 2025-04-10T10:56:05.398534Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.398575Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636510229103613:3404]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398597Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398630Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.398673Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.398706Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.398733Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T10:56:05.398798Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636445804592734:3057], Partition 1, Sender [0:0:0], Recipient [6:7491636445804592812:3065], Cookie: 0 2025-04-10T10:56:05.398880Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636445804592812:3065]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398898Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.398934Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.398977Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.398997Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.399019Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:56:05.402338Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636445804592733:3056], Partition 2, Sender [0:0:0], Recipient [6:7491636445804592810:3063], Cookie: 0 2025-04-10T10:56:05.413314Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636445804592810:3063]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.413384Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:56:05.413481Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:56:05.413584Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:56:05.413626Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:56:05.413682Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> SlowTopicAutopartitioning::CDC_Write >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |67.8%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> KqpQuery::UpdateWhereInSubquery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23999, MsgBus: 15525 2025-04-10T10:55:46.919689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636490347311283:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.927631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bf/r3tmp/tmpQxjVEk/pdisk_1.dat 2025-04-10T10:55:47.758225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.770624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.774200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.794021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23999, node 1 2025-04-10T10:55:48.001868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:48.001890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
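The repeated PERSQUEUE StateIdle TRACE records above all follow one housekeeping cycle per TEvUpdateAvailableSize tick: collect expired items to delete ("Have 0 items to delete old stuff"), process pending reserve requests, then answer queued writes ("Responses.size()=0"). A self-contained sketch of that cycle's shape, with hypothetical types — not ydb/core/persqueue code:

#include <deque>
#include <iostream>
#include <string>

// Hypothetical idle-partition state; mirrors only the step order in the log.
struct TIdlePartition {
    std::deque<std::string> ExpiredItems;
    std::deque<std::string> ReserveRequests;
    std::deque<std::string> PendingWrites;

    void OnUpdateAvailableSize() {
        // "Have N items to delete old stuff"
        std::cout << "Have " << ExpiredItems.size() << " items to delete\n";
        ExpiredItems.clear();
        // "TPartition::ProcessReserveRequests."
        while (!ReserveRequests.empty()) ReserveRequests.pop_front();
        // "TPartition::AnswerCurrentWrites. Responses.size()=N"
        std::cout << "Responses.size()=" << PendingWrites.size() << '\n';
        PendingWrites.clear();
    }
};

int main() {
    TIdlePartition p;
    p.OnUpdateAvailableSize();  // an idle partition prints 0 / 0, as in the log
}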
2025-04-10T10:55:48.001897Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:48.002018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15525 TClient is connected to server localhost:15525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.852525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.017781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.590550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.811348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.984908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.458193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636511822149477:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.462333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.922309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636490347311283:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.934208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.767326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.808408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.845472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.873617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.923152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.969879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.098368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520412084592:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.098504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.112456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520412084597:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.120184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.143565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636520412084599:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.270136Z node 1 :TX_PROXY ERROR: Actor# [1:7491636520412084654:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:56.013657Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636529002019686:2514], TxId: 281474976710671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjJjZWQ4OWItMjEyY2U4ZGUtM2YyM2EwYWUtOGFkY2ZkODA=. TraceId : 01jrfnf4tv0pr0623r0y0cmh4a. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-04-10T10:55:56.014662Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636529002019687:2515], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjJjZWQ4OWItMjEyY2U4ZGUtM2YyM2EwYWUtOGFkY2ZkODA=. CustomerSuppliedId : . TraceId : 01jrfnf4tv0pr0623r0y0cmh4a. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491636529002019682:2505], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T10:55:56.015163Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJjZWQ4OWItMjEyY2U4ZGUtM2YyM2EwYWUtOGFkY2ZkODA=, ActorId: [1:7491636529002019562:2505], ActorState: ExecuteState, TraceId: 01jrfnf4tv0pr0623r0y0cmh4a, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 25398, MsgBus: 27779 2025-04-10T10:55:57.252594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636538963734711:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:57.252659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bf/r3tmp/tmpCF2Sox/pdisk_1.dat 2025-04-10T10:55:57.642049Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:57.682266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:57.682361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:57.691150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25398, node 2 2025-04-10T10:55:57.819034Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:57.819051Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:57.819056Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:57.819163Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27779 TClient is connected to server localhost:27779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depri ... 
Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:02.656300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.722095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.808329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.872270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.938872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.043950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.162218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636564733540776:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.162378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.163384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636564733540781:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.169038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:03.186644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636564733540783:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:03.269687Z node 2 :TX_PROXY ERROR: Actor# [2:7491636564733540843:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19747, MsgBus: 22655 2025-04-10T10:56:06.156416Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636579264281768:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:06.156496Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bf/r3tmp/tmpQb2tFs/pdisk_1.dat 2025-04-10T10:56:06.292831Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.325789Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.325887Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19747, node 3 2025-04-10T10:56:06.329738Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:06.440434Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:06.440475Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:06.440483Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:06.440620Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22655 TClient is connected to server localhost:22655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T10:56:07.095927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:07.111539Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:07.131269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:07.391717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:07.697667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:07.814359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:11.066245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636600739120028:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:11.066544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:11.162282Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636579264281768:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:11.162361Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:11.176348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.259707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.350862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.433980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.514816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.654318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:11.808818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636600739120553:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:11.808934Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:11.809418Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636600739120558:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:11.814730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:11.834167Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636600739120560:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:11.893494Z node 3 :TX_PROXY ERROR: Actor# [3:7491636600739120613:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:13.273853Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636609329055532:2507], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-04-10T10:56:13.276151Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWEzYjIxOTAtYThiZGY5ZTYtN2ZkNTllOTItNDBkZDNhYTY=, ActorId: [3:7491636609329055522:2501], ActorState: ExecuteState, TraceId: 01jrfnfpb9czgcv8a2mtqe9ec2, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> KqpQuery::DictJoin [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> KqpQuery::Pure [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:56:16.373459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:56:16.373574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:56:16.373735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:56:16.373784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:56:16.373830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:56:16.373858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:56:16.373914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:56:16.373987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:56:16.374410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:16.495065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:56:16.495151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:16.512103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:16.512421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
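The KqpQuery::TryToUpdateNonExistentColumn failure above is a compile-time rejection: the session replies BAD_REQUEST with issue code 2017 because the UPDATE referenced a column absent from /Root/KeyValue, so no transaction ever runs. A minimal sketch of that kind of schema check follows, with hypothetical names — this is not the KQP compiler:

#include <iostream>
#include <set>
#include <string>

// Hypothetical validation behind issue code 2017
// ("Column '...' does not exist in table").
bool ValidateUpdateColumns(const std::set<std::string>& schemaColumns,
                           const std::set<std::string>& updatedColumns,
                           std::string& error) {
    for (const std::string& col : updatedColumns) {
        if (schemaColumns.count(col) == 0) {
            error = "Column '" + col + "' does not exist in table";
            return false;  // surfaced as BAD_REQUEST before execution starts
        }
    }
    return true;
}

int main() {
    const std::set<std::string> keyValueSchema{"Key", "Value"};
    std::string error;
    if (!ValidateUpdateColumns(keyValueSchema, {"NonExistentColumn"}, error))
        std::cout << error << '\n';  // mirrors the compile error in the log
}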
2025-04-10T10:56:16.512609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:56:16.521938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:56:16.522265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:56:16.523012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:16.523412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:56:16.525549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:16.527014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:56:16.527076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:16.527271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:56:16.527324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:56:16.527364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:56:16.527519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.543264Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:56:16.763579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:56:16.763983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.764275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:56:16.764728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:56:16.764813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.767597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:16.767735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:56:16.767947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.768016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:56:16.768058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:56:16.768099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:56:16.770343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.770424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:56:16.770483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:56:16.773278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.773331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.773420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:56:16.773548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:56:16.778815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:56:16.781772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:56:16.782061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:56:16.783474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:56:16.783674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:16.783757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:56:16.784144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:56:16.784211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:56:16.784439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:56:16.784560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:56:16.792138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T10:56:16.792209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:56:16.792421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:16.792475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:56:16.792767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:56:16.792823Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:56:16.792959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:16.792999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:16.793053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:16.793107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:16.793153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:56:16.793201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:16.793255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:56:16.793291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:56:16.793387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:56:16.793434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:56:16.793500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:56:16.796764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:56:16.796982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:56:16.797068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4-10T10:56:16.908215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T10:56:16.908252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T10:56:16.908276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T10:56:16.908959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:56:16.909008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:56:16.909064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:56:16.909104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T10:56:16.910122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:16.910231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:16.910279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:56:16.910313Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T10:56:16.910344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:56:16.911577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:16.911665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:56:16.911697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:56:16.911725Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T10:56:16.911750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:56:16.911816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T10:56:16.914032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:56:16.915346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait 
txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-10T10:56:16.915744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T10:56:16.915794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T10:56:16.915909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T10:56:16.915957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-10T10:56:16.916018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:56:16.916061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:56:16.916698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T10:56:16.916861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T10:56:16.916933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T10:56:16.916980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2327] 2025-04-10T10:56:16.917300Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:56:16.917350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:56:16.917380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:336:2327] 2025-04-10T10:56:16.917511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:56:16.917537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:336:2327] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-10T10:56:16.918262Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:56:16.918504Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 271us result status StatusSuccess 2025-04-10T10:56:16.918997Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:16.919650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:56:16.919848Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 212us result status StatusSuccess 2025-04-10T10:56:16.920199Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:56:16.920898Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:56:16.921173Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 290us result status StatusSuccess 2025-04-10T10:56:16.921632Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 61383, MsgBus: 11654 2025-04-10T10:55:46.965910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636490960987688:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.966770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017b3/r3tmp/tmp5H7zog/pdisk_1.dat 2025-04-10T10:55:47.623295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.655201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.655337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.698865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61383, node 1 2025-04-10T10:55:47.956767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.956801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.956807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.956916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11654 TClient is connected to server localhost:11654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.976587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.003568Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:49.014807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.589757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.830062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.925044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.540281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636512435825790:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.546291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.961605Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636490960987688:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.961695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.767388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.851614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.911476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.944582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.984884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.026011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.094327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636521025760903:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.094455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.097226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636521025760908:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.121468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.144308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636521025760910:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.266879Z node 1 :TX_PROXY ERROR: Actor# [1:7491636521025760968:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.999643Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636529615696008:2516], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED} 2025-04-10T10:55:56.001057Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTY3NDRmMGItY2QzOWI5NTctMmRmZGJiY2YtMjJkNzVhZGM=, ActorId: [1:7491636529615695886:2506], ActorState: ExecuteState, TraceId: 01jrfnf5hab0n0mtsxdrambft5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FU ... e, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.154451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.222754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.299462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.359249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.438306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636556086568028:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.438397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.438898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636556086568033:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.443659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:01.471752Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T10:56:01.474327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636556086568035:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:01.541193Z node 2 :TX_PROXY ERROR: Actor# [2:7491636556086568091:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:01.973360Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636534611729274:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:01.973434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:03.397724Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636564676502995:2505], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-04-10T10:56:03.398611Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTMxN2E4ZDgtYzA5NzkyMTAtY2IyOThiMS03ZTk0MWY1ZA==, ActorId: [2:7491636564676502987:2500], ActorState: ExecuteState, TraceId: 01jrfnfcrhdkqj952my20rq651, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 7252, MsgBus: 31414 2025-04-10T10:56:05.181655Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636572257122748:2082];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017b3/r3tmp/tmp7tLMcP/pdisk_1.dat 2025-04-10T10:56:05.305323Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:05.350816Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:05.378035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:05.378137Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:05.382061Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7252, node 3 2025-04-10T10:56:05.578861Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:05.578895Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:05.578904Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:05.579043Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31414 TClient is connected to server localhost:31414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:06.386187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:06.398468Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:06.416726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.525375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.720181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.868041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.178635Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636572257122748:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:10.178770Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:10.419584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636593731960970:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:10.419698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:10.526571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.583932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.643488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.680995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.733024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.797270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.867034Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636593731961483:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:10.867138Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:10.867366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636593731961488:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:10.871010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:10.887060Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636593731961490:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:10.973694Z node 3 :TX_PROXY ERROR: Actor# [3:7491636593731961546:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> KqpQuery::QueryFromSqs [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2025-04-10T10:52:56.417384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635760800921078:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:52:56.417460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001469/r3tmp/tmpTfBeEK/pdisk_1.dat 2025-04-10T10:52:56.791104Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20631, node 1 2025-04-10T10:52:56.844567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:52:56.844688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:52:56.846907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:52:56.857738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:52:56.857761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:52:56.857773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:52:56.857909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:52:56.905904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.927133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.955323Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491635760800921679:2308] 2025-04-10T10:52:56.955618Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:56.974394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:56.974456Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:52:56.976523Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:52:56.976603Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:52:56.976647Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:52:56.977124Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:52:56.977186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:52:56.977308Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491635760800921693:2308] in generation 1 2025-04-10T10:52:56.978339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:52:57.025880Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:52:57.026029Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:52:57.026080Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491635765095888993:2309] 2025-04-10T10:52:57.026104Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:57.026120Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:52:57.026131Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:57.026321Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:52:57.026437Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:52:57.026525Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635760800921670:2294], serverId# [1:7491635760800921696:2307], sessionId# [0:0:0] 2025-04-10T10:52:57.026558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:57.026570Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:57.026593Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:52:57.026620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:57.026742Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:57.026971Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:52:57.027058Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T10:52:57.028259Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] 
state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:57.028329Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:57.028374Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:52:57.030216Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635765095889007:2315], serverId# [1:7491635765095889008:2316], sessionId# [0:0:0] 2025-04-10T10:52:57.035559Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744282377075 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282377075 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:52:57.035595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:57.035714Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:57.035790Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:57.035816Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:52:57.035833Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744282377075:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T10:52:57.036114Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744282377075:281474976710657 keys extracted: 0 2025-04-10T10:52:57.036245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:52:57.036488Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:57.036547Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:52:57.038961Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:52:57.039497Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:57.041101Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744282377075} 2025-04-10T10:52:57.041163Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:57.041202Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744282377074 2025-04-10T10:52:57.041242Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:57.041282Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744282377082 2025-04-10T10:52:57.041345Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:57.041372Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:57.041391Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:52:57.041456Z node 1 :TX_DATASHARD DEBUG: Complete 
[1744282377075 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491635760800921499:2199], exec latency: 2 ms, propose latency: 5 ms 2025-04-10T10:52:57.041499Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T10:52:57.041530Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:57.045306Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7491635765095888993:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-10T10:52:57.047169Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T10:52:57.047246Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:52:57.055151Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:57.055289Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:57.055360Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:52:57.055395Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-10T10:52:57.055403Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T10:52:57.056482Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:57.098895Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T10:52:57.099223Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-10T10:52:57.100092Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T10:52:57.100334Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-10T10:52:57.100353Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T10:52:57.100367Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-10T10:52:57.100386Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-10T10:52:57.100411Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:52:57.100445Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-10T10:52:57.100738Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7491635765095889093:2311], now have 1 active actors on pipe 2025-04-10T10:52:57.109267Z node 1 :PERSQUEUE DEBUG: [P ... 
ASHARD INFO: OnTabletDead: 72075186224037892 2025-04-10T10:56:14.223508Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037891 not found 2025-04-10T10:56:14.226893Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037892 not found 2025-04-10T10:56:14.254798Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [24:1265:2645], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2025-04-10T10:56:14.282181Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:14.282407Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:14.282967Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:56:14.283110Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:56:14.283263Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:56:14.283401Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037893 2025-04-10T10:56:14.283503Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:56:14.283609Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:56:14.283760Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:56:14.284019Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:56:14.289043Z node 24 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-10T10:56:14.299637Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:14.299801Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:14.301082Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-04-10T10:56:14.301256Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-10T10:56:14.301496Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-10T10:56:14.301627Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:56:14.310545Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:56:14.322130Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:56:14.322667Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:56:14.323052Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:14.323203Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:14.323503Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:56:14.323926Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1316:3041] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|d7db50d7-de848c55-9c4b9f30-7bfc02fa_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-04-10T10:56:14.324164Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1313:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:56:14.324523Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1316:3041] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:14.324974Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:14.325020Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:14.325204Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-10T10:56:14.325371Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:14.325409Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:14.325531Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-04-10T10:56:14.325822Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-04-10T10:56:14.326025Z node 24 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-10T10:56:14.326311Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-10T10:56:14.520680Z node 24 :PERSQUEUE DEBUG: 
[PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-04-10T10:56:14.527128Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-04-10T10:56:14.527758Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:56:14.527937Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:56:14.528108Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:56:14.528247Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:56:14.528389Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-04-10T10:56:14.528481Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-10T10:56:14.528525Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:56:14.528614Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:56:14.528750Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:56:14.529016Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:56:14.529338Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-04-10T10:56:14.535733Z node 24 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1285:3021] 2025-04-10T10:56:14.536200Z node 24 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-04-10T10:56:14.550733Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:56:14.551031Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T10:56:14.551281Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-04-10T10:56:14.551960Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:56:14.552481Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1316:3041] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-10T10:56:14.552692Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1313:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:56:14.552990Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-04-10T10:56:14.553106Z node 24 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-04-10T10:56:14.567174Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-10T10:56:15.023721Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:15.023803Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:15.023995Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-04-10T10:56:15.024077Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-04-10T10:56:15.024170Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-04-10T10:56:15.024222Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:56:15.024415Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpQuery::RandomUuid [GOOD] |67.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckSessionResetAfterCommit [GOOD] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic |67.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |67.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 3069, MsgBus: 23353 2025-04-10T10:55:46.945168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636493030314107:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.945264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017cb/r3tmp/tmpqa5N5U/pdisk_1.dat 2025-04-10T10:55:47.640802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.640913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.644332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:55:47.683341Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3069, node 1 2025-04-10T10:55:47.898212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.898248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.898255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.898407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23353 TClient is connected to server localhost:23353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.855898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.014813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.601379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.799343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.907883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.459721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636514505152373:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.459860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.946298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636493030314107:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.946384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.767928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.809524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.856722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.917418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.996900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.080110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.136205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523095087490:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.136334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.136720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523095087496:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.140191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.149469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636523095087498:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.266416Z node 1 :TX_PROXY ERROR: Actor# [1:7491636523095087553:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.249881Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636531685022481:2511], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-10T10:55:55.250367Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE2NTBhNTYtMjA3YWE2YzEtM2I1NjUzMDgtMjYwNjBiZmY=, ActorId: [1:7491636531685022473:2506], ActorState: ExecuteState, TraceId: 01jrfnf4tq541brecvna54tdj1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-10T10:55:55.296913Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636531685022513:2514], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-10T10:55:55.298340Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE2NTBhNTYtMjA3YWE2YzEtM2I1NjUzMDgtMjYwNjBiZmY=, ActorId: [1:7491636531685022473:2506], ActorState: ExecuteState, TraceId: 01jrfnf4wk94apv6rd8d2nvp8q, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-10T10:55:55.346579Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636531685022522:2518], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-04-10T10:55:55.347791Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE2NTBhNTYtMjA3YWE2YzEtM2I1NjUzMDgtMjYwNjBiZmY=, ActorId: [1:7491636531685022473:2506], ActorState: ExecuteState, TraceId: 01jrfnf4xe89g9cv161xztp1z0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 13988, MsgBus: 10273 2025-04-10T10:55:56.283794Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636533805594243:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:56.283893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017cb/r3tmp/tmpKoWAFJ/pdisk_1.dat 2025-04-10T10:55:56.474242Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:56.505643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:56.505738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:56.511574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13988, node 2 2025-04-10T10:55:56.714987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:56.715011Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:56.715019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:56.715143Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10273 TClient is connected to server localhost:10273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: ... 86365Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:01.795413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636555280432514:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.795536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.842224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.917128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.974224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.056549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.128810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.214807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:02.333770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636559575400329:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:02.333859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:02.334247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636559575400334:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:02.338931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:02.376406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636559575400336:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:02.445238Z node 2 :TX_PROXY ERROR: Actor# [2:7491636559575400391:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28952, MsgBus: 19543 2025-04-10T10:56:05.998791Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636572475944552:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:06.000234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017cb/r3tmp/tmpcrzsW4/pdisk_1.dat 2025-04-10T10:56:06.399172Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.429233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.438338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:06.447277Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28952, node 3 2025-04-10T10:56:06.646880Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:06.646907Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:06.646917Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:06.647072Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19543 TClient is connected to server localhost:19543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:08.170216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
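[Editor's note] The KQP_COMPILE_ACTOR GENERIC_ERROR entries above (code 2008: Operation 'CreateTable'/'DropTable'/'AlterTable' can't be performed in data query) are expected test behavior, not a regression: the test deliberately submits DDL through the data-query path, which YDB rejects at compile time because DDL is only accepted on the scheme-query path. A minimal sketch of the distinction using the public YDB C++ SDK follows; the endpoint, database, and table name are placeholders for illustration, not values from this run.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        // Placeholder endpoint/database; the unit tests wire these up internally.
        auto driver = TDriver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2135")
            .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // DDL compiles only on the scheme-query path.
        auto ddl = session.ExecuteSchemeQuery(
            "CREATE TABLE `/Root/Example` (Key Uint64, Value Utf8, PRIMARY KEY (Key));"
        ).GetValueSync();

        // DML goes through the data-query path; sending the CREATE TABLE text
        // here instead would fail to compile with code 2008, as in the log.
        auto dml = session.ExecuteDataQuery(
            "UPSERT INTO `/Root/Example` (Key, Value) VALUES (1u, \"one\");",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();

        driver.Stop(true);
        return ddl.IsSuccess() && dml.IsSuccess() ? 0 : 1;
    }
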
2025-04-10T10:56:08.183162Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:08.197908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.366888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.641106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.764219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.974104Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636572475944552:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:10.974204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:12.146318Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636602540717317:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.146435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.378066Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.485552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.610166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.739180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.838989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.930119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.049186Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636606835685141:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.049363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.050546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636606835685146:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.058296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:13.083440Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636606835685148:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:13.144106Z node 3 :TX_PROXY ERROR: Actor# [3:7491636606835685205:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SystemView::AuthPermissions [GOOD] >> SystemView::AuthPermissions_Access >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpQuery::QueryCacheInvalidate [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> KqpQuery::QueryCachePermissionsLoss ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2776, MsgBus: 13148 2025-04-10T10:55:46.929202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636492560648413:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.929423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c9/r3tmp/tmp8qpNbJ/pdisk_1.dat 2025-04-10T10:55:47.584411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.584527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.587805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:55:47.614074Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2776, node 1 2025-04-10T10:55:47.894244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.894289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.894301Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.894487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13148 TClient is connected to server localhost:13148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.851698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:48.949661Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.013162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:55:49.579914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.928666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.129136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.653562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636514035486526:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.653752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.910678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636492560648413:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.910759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.768220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.814313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.885809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.932078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.010933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.084617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.168002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522625421645:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.168121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.168588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522625421650:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.174103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.189595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636522625421652:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.277498Z node 1 :TX_PROXY ERROR: Actor# [1:7491636522625421707:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13315, MsgBus: 3942 2025-04-10T10:55:57.362872Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636540629871335:2167];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:57.364512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c9/r3tmp/tmppl82ZU/pdisk_1.dat 2025-04-10T10:55:57.535063Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:57.536205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:57.536279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:57.538327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13315, node 2 2025-04-10T10:55:57.618630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:57.618651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:57.618660Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:57.618777Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3942 TClient is connected to server localhost:3942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:58.219776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.118426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636557809741042:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.118681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.144377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.356428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.642075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636557809742427:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.642192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.642391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636557809742432:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.646576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T10:56:01.662321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636557809742434:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T10:56:01.724986Z node 2 :TX_PROXY ERROR: Actor# [2:7491636557809742488:3224] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:02.380981Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636540629871335:2167];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:02.381064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30794, MsgBus: 4016 2025-04-10T10:56:06.411239Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636578384501642:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:06.411296Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c9/r3tmp/tmptiGYLc/pdisk_1.dat 2025-04-10T10:56:06.652337Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.685993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.686105Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:06.689028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30794, node 3 2025-04-10T10:56:06.794075Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:06.794104Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:06.794115Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:06.794312Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4016 TClient is connected to server localhost:4016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
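[Editor's note] The recurring resource-pool pattern in these logs reads as a benign bootstrap race rather than a failure: the workload service probes for the `default` pool before anything has created it (NOT_FOUND), schedules a retry ("doublechecking"), and the eventual create may find that a concurrent attempt already won, which the proposer explicitly tolerates ("path exist, request accepts it"). A rough sketch of that retry shape, with FetchDefaultPool as a hypothetical stand-in (not a real YDB API):

    #include <chrono>
    #include <thread>

    enum class EStatus { Success, NotFound, Error };

    // Hypothetical stand-in for the pool lookup; here the pool "appears"
    // on the third probe, as it eventually does during bootstrap.
    EStatus FetchDefaultPool() {
        static int calls = 0;
        return ++calls < 3 ? EStatus::NotFound : EStatus::Success;
    }

    // NOT_FOUND is transient while the pool is still being created, so it is
    // retried with backoff; only a real error aborts the wait.
    bool WaitForDefaultPool(int attempts, std::chrono::milliseconds delay) {
        for (int i = 0; i < attempts; ++i) {
            switch (FetchDefaultPool()) {
                case EStatus::Success:  return true;
                case EStatus::NotFound: std::this_thread::sleep_for(delay * (i + 1)); break;
                case EStatus::Error:    return false;
            }
        }
        return false;
    }
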
2025-04-10T10:56:08.032642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.042965Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:11.411687Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636578384501642:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:11.411782Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:12.923617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636604154306083:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.923721Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.998981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.508318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:14.212667Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636612744242096:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.212760Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.213047Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636612744242101:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.220462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T10:56:14.245474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636612744242103:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:56:14.325824Z node 3 :TX_PROXY ERROR: Actor# [3:7491636612744242175:3314] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 13401, MsgBus: 11635 2025-04-10T10:55:47.174648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636497098427401:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:47.174746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c7/r3tmp/tmphDROJk/pdisk_1.dat 2025-04-10T10:55:47.904584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.937520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.937714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.939357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13401, node 1 2025-04-10T10:55:48.009758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:48.009792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:48.009800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:48.011955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11635 TClient is connected to server localhost:11635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.859080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
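[Editor's note] Similarly, the METADATA_PROVIDER lines (TTableExistsActor event=timeout, "cannot detect path existence" for //Root/.metadata/initialization/migrations) recur while nodes are still starting; a deadline expiring on the existence probe apparently means "not ready yet" rather than a hard error. A hypothetical sketch of a deadline-bounded probe (Probe is an illustrative stub, not YDB code):

    #include <chrono>
    #include <future>

    // Illustrative stub for the existence check against
    // //Root/.metadata/initialization/migrations.
    bool Probe() { return true; }

    // A timeout on the probe is treated as "not ready yet", mirroring the
    // event=timeout entries, rather than as a hard failure.
    bool ProbeWithDeadline(std::chrono::seconds deadline) {
        auto fut = std::async(std::launch::async, Probe);
        if (fut.wait_for(deadline) != std::future_status::ready) {
            return false;  // transient during bootstrap; caller may retry
        }
        return fut.get();
    }
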
2025-04-10T10:55:49.014805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:55:49.626935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.904807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.992979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.475570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636514278298324:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.475677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.178289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636497098427401:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.178375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.768179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.808513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.859125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.910115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.989652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.048679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.100300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522868233436:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.100397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.100534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522868233442:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.120180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.145232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636522868233444:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.269678Z node 1 :TX_PROXY ERROR: Actor# [1:7491636522868233502:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:56.010895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15081, MsgBus: 13309 2025-04-10T10:55:57.382463Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636538884621417:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:57.382628Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c7/r3tmp/tmpKoavlQ/pdisk_1.dat 2025-04-10T10:55:57.561553Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:57.591152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:57.591244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:57.593277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15081, node 2 2025-04-10T10:55:57.630868Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:57.630896Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:57.630930Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:57.631068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13309 TClient is connected to server localhost:13309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:58.223934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
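[Editor's note] The NET_CLASSIFIER warnings trace a configuration fallback chain: the distributable config is empty or outdated, the local file is empty as well, and the classifier ends up running without a configuration ("got bad distributable configuration"). A hypothetical sketch of that fallback order (both loaders are illustrative stubs, not YDB internals):

    #include <optional>
    #include <string>

    // Illustrative stubs: both sources come back empty in this run.
    std::optional<std::string> LoadDistributableConfig() { return std::nullopt; }
    std::optional<std::string> LoadConfigFromFile()      { return std::nullopt; }

    // Fallback order implied by the warnings: distributable config first,
    // then the local file, then run without a classifier configuration.
    std::string ResolveNetClassifierConfig() {
        if (auto cfg = LoadDistributableConfig()) return *cfg;  // "empty, broken or outdated"
        if (auto cfg = LoadConfigFromFile())      return *cfg;  // "(empty maybe)"
        return {};  // "got bad distributable configuration"
    }
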
2025-04-10T10:55:58.237030Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:58.320650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:58.485189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:58.729456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, op ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.143901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.192460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.247402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.299026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.353958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.402746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.482239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.546588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636556064492863:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.546699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.550462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636556064492868:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:01.554733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:01.574339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636556064492870:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:01.655269Z node 2 :TX_PROXY ERROR: Actor# [2:7491636556064492926:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:02.386272Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636538884621417:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:02.386351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2499, MsgBus: 11773 2025-04-10T10:56:04.638680Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636570560787970:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:04.639622Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c7/r3tmp/tmpYMzjMD/pdisk_1.dat 2025-04-10T10:56:04.820307Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:04.835110Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:04.835210Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:04.845264Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2499, node 3 2025-04-10T10:56:05.034988Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:05.035021Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:05.035032Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:05.035222Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11773 TClient is connected to server localhost:11773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:56:06.220316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.229100Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:06.241121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.392531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:06.958652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:07.355500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:09.649434Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636570560787970:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:09.649546Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:12.481901Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636604920528088:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.482065Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.512859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.587670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.678647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.767111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.849158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.989333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.146573Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636609215495920:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.146684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.146979Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636609215495925:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.152890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:13.193557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636609215495927:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:13.297645Z node 3 :TX_PROXY ERROR: Actor# [3:7491636609215495985:3477] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-04-10T10:56:03.568109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.568422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.568683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013aa/r3tmp/tmpVKqb8t/pdisk_1.dat 2025-04-10T10:56:04.074723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:04.139999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:04.197289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:04.197421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:04.211597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:04.310427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:04.705586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:04.705752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:04.705845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:04.712359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:04.901898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:04.974692Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:05.597961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnfe2y706kr8xtzgajnzjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYwMDkzYTktN2FhYjJmMDMtYzhkNjE2OWYtN2ZhNGJhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:05.721971Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnff0z6s15e2vzjv95wt0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNmMzUwNjQtODMwNmYxM2YtZWQyODZlMDItNmUzODJjMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:05.809257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnff3f24xr2qgzxpayvfs3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA1ZTJhYmEtMjk1MmU5NTYtOGI3M2Q2MTQtNWYxY2JjMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:05.887970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnff6758dnbq19xwr644mt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE4OTAwMDQtNTBkOGJiODEtZDQ0OWJkNzctYWZkMTU5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.041398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfnff8vdtcg0pkvthxrcyfs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI3OThmNGMtOTZkNTJiZGEtNDVmMTdiOGUtZThmODg1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.153870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfnffea1gpqv0a1nczd5dra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjUxYmRhY2MtOTFlMGVlMTMtYWE3Y2U1YjMtZTNjZWE3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.249706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfnffh0cv8vmrfck4ptv63n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2MjhiZmQtNjZmZmZmYjEtZDIzMDQ5NDUtYzMyNGVjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.326912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfnffkxbm9x81e5gvhkwgxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU5ZTRhOTItZDI3MGY4ZDItMTU0ZGMxNTctMjVhZWY4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.403903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfnffpa8qa799je0m6r04s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJiNjZkYjktNzNjYTdiZWQtYzY2YzIxYzgtZDYzM2E5OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.492257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfnffrnc23x4j7sr16bgg0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlMTI5OTAtNWJiOWMxNmEtZjQ0Yjg1OWUtMWNiNTIxNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.644552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfnffvq29t6bnwv2y79ahs8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRhMDc2MTMtZDVlZGQ3MmItNTBiYWMyMzEtN2JlM2NjZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.784710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfnfg077bqebw5y5qhhy92c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYzYWJmMzEtZTQ3MDI3MWEtNDBkYjZiYjYtZDMyNmZmNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:06.944818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfnfg4sfak1gdstc386cx97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEzM2Y3Y2MtYjk2NjIxNDQtNDBhY2ZiMGYtNGQzNWIxMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.095334Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfnfg9q5sszt7e6akjhecpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRlY2IwNGQtMWQ4NTNlMmQtNTIxMTc3NjMtODQ3ZDM1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.261597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfnfgebef7bf3w84g5mb2eb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTlhZmRlZWYtOWUyNmE3Ny1iMDJmNTNmOS1kZTUzMGU1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.418093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfnfgkqf92nxc9wxnrcmege, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQwZmQ5MzUtNWVjNTg4MDYtYzdjZjA5NjgtYWI3YThhOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.559151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfnfgrk0mqb05k6syhnn3nt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM2ZWM0Y2EtOWYyMzUyNjItMzUwZTc2NmEtZDIzNGNmMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.709566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfnfgwtenshjn0j1m468522, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ5OTdlNGQtZjFlODRhNmEtY2ExZmYyLTdlODhlZjQ1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:07.872991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfnfh1qacyrghrqvdkr1x09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTFjODljOWItNmEwM2VhYTYtYTBkNjkyMy1kYzBmMDE2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.007819Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfnfh6z98ns096ase9c0b2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZiNDVmY2ItODcxMzY3OWMtZTJkN2NiNjItZGQxMDc5Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.157848Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfnfhbb6q268y64x8axpk4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFkZjVmNWUtZGFiZmUzMGUtMTI5YjMxZTctN2ViMWQwYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.296456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfnfhfm3ecxaxy1v6jeyqj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZhNzUzOS01ODA4MTlhLTIxM2EzY2YzLTIyMWFmZTBl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.387797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfnfhkt8r1x6f035nnbr7vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I1OTdkZDctZGE2M2EzN2YtZjAxNzM0MGQtNThiN2JiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.527392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfnfhpsd9m5z1ge0mth4k5c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q2YjRiOTYtZTY3YjQ0ZmItMzlmZDAwZTctYjJjM2I3NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.606805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfnfhv86rszefpf9f17kvs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQxZDNmNzYtYzU0ODk2ZGEtZTJmZTdlZGQtMmQ4NTg2M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.752545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jrfnfhxk4se4v9q32srfjx0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjZmNGJlYjctZjk0NDFmZTItNTBkMWFhOTktYzY1NzY1ZGY=, Cu ... 4YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:13.647706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jrfnfphbd1r6s52p06yed57b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM2NjRiNDctNDhkZGNjZjktZWEzNjY4MTktMjYwYjkxMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:13.822358Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jrfnfpvc96zgbrwc4xr509xy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhMDE2OTctNTk4NDg5NjEtMmRhMmJjMzgtYzYwMTRmOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:13.961726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jrfnfq15dqp65x0ekxpw3m4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFlZGVhYTUtNTE0ZGIzN2YtNjQ1NzcyODAtMzkyYTY4MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.119157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jrfnfq4zdk7d735372hcr3cb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmUyZGQ3MTktMTI0ZmNhNWMtNGJlY2IxYmEtY2E2YzgwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.224671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jrfnfq9zc6aqrqe3bq9yn7wz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYzZDVmMTItZDRlOGU1OTEtZWUwZGNhMTEtYTQ2ZDBjZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.364051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jrfnfqda5mw4v3en401jbmfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzUzMjM1OWMtOTkwM2RmZDctNDU2NDAxNmYtMzJmNzczYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.513121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jrfnfqhhbacprwdhf7dg5qn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjExZjUxMDEtYmY4YzMzZmItNTgzMGU4OWItNWMwNDE3M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.667665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jrfnfqpbe8k8hwwsg2pyyqfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmMWY0N2MtY2IyMjg0ODgtZmM0ZDJiMTctYWNhMzczMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.819596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jrfnfqv75csrx3hen6dzvx1e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYyOTViNzMtYzk1MzcwODAtNThmYjVjNjgtOThkYTQzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:14.924745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jrfnfqzv2rdtspt77xpsbmjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ1YmUwYjgtY2ZhMGE3MjQtMTA0MmFmMDgtYmUwNGExOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.004094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jrfnfr2y4mq7nyab55qsz67w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc2ZTc2YS1lMTU5MTQ0ZS1kZTA5Y2E2Yi1jYmI4OGQ0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.083414Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jrfnfr5k31n7ewfwsy0er4eh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTEyMzg4ZjAtZTE3ZWM1ZTUtNmI5NzQyMGEtNWQxNjhlOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.168634Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jrfnfr80evk9kp4rbwbtx434, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc0NTI2MDAtMmVmNTM3NTUtODI0M2M0Y2QtYWVjZGU0YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.256457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jrfnfrandjhce8q61mx4pz1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVkNzJkYWMtZWJkZjBhOWYtMTQ3NGIzNzAtY2ViMDk5ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.366935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jrfnfre10zm2w0r8jnb1eap3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYwYzRhZjItOGVjZTNhNzgtOTA2ZDY2OTctN2JiNGFlYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.527709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jrfnfrjyf54fxwnegnwk0b1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI4NWI1ZDMtOGM0NGU3N2MtNjVhMWRhMTEtZjg0NTQxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.611852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jrfnfrnw8h91fg2vpehbd2f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFhNzhjYzYtNDhlM2JjYmUtMzFkNmFmOWUtZGIzNzJhYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.690328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jrfnfrrhdcvsw11sc4ng7n1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI5MTcxMWEtYTQ4ZDJjMzktOTdlMDIxOGYtYjAzZTM1ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.803851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jrfnfrtxavg5y8a42kxm5zcy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcxZWUzM2UtNzE1MTZiY2YtMTc1Njk0OC1mYzEwMTZjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:15.941506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfnfryw1cfbe8b5zryw572f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkwOTIwMGUtMjAwM2QzYTctZGMwNGMxMTgtMjhiMjAwMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.111320Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jrfnfs3191hnah7h96wsd77j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRmNzA1ZGItMmZhMzQxNGUtZjdhMTU2MmQtYjc4ZTg0MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.207290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jrfnfs846t92b3mwqhsa1x6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJjY2IwYzUtMTllMTFjODctMTZjY2E2YjMtOGEzZTMwNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.316279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jrfnfsb74r9ra49pfwzgez0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZlNTFiNzMtZWI0MTllOTItZDM5YzdmOTgtZDhmNDQzNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.393989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jrfnfsef90c5sc0ephe5e6tn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVhNmU0MjgtYzU1NzFjNmEtODIyMTk0N2YtYzViZTc4NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.535322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jrfnfsh00am1kfs8pv7bz4yp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZmZTcyZDQtMmY1ZmYyNi0zZjQzZGY4Zi01ZjQwYjRhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.641239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jrfnfsnb0x4ktz0fhyh8sfeg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVjZjJkMDktNmNiZmYzNTEtMjgxNGU1My01OTQ0MjhmZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.778498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jrfnfsrq8yaqmbpbc0tq9pct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU3YjVlNzAtODUwODBhNjAtNjllNmMyMGEtODU1ZWI2Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.907600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jrfnfsx368vk737cjsrfaw38, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmY2JmZDUtY2RlYzYyMWEtYTliMWQ4M2EtZjkxZTlmODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.994109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jrfnft10d2f75tc8ngnjs698, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM5ZmRjZDUtNmQyYjA5NGUtOTlhZGVlZTItZTEyMjdlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.161103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jrfnft3z6k7eb00607q11k16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDgxNzk4NmUtZjkwMzA4ZTItZTdhMDU3MGUtNDZjZTRhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.297835Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jrfnft901bjagfcn5c3crrww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU2OWE4ZGYtNTBkZjU2ZDQtZmEyODk0ZTItOTFhZmE2MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.377792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jrfnftdb439m91442073d5m2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFjZWEwNTItODI1ZWU2YjYtMzQxZjg5ZWYtYTRmZGM4NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.464288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jrfnftfq0jmh22hyvjtdtw3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY0ZDA4MDktYzc1ZDFlMmUtZGMxMDMxZWUtOWNkNjliNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.613038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jrfnftjb45f6t04185prs0hc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE1MjJkMDAtNTY3ODEwMzgtZDJlNGI5ZjgtM2UxNjA1MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.641432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-10T10:56:18.294058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jrfnfv3ccgteq4q2q8qkj0hv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQyYzFlYTQtMTk3Y2U4ZjItOWVhNmRlOTYtM2Y1MWQ5NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |67.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |67.9%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 24065, MsgBus: 21025 2025-04-10T10:55:47.003410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636492434188378:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:47.003684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017a4/r3tmp/tmpMSwmYQ/pdisk_1.dat 2025-04-10T10:55:47.636649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.638124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.642278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.684744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24065, node 1 2025-04-10T10:55:47.910789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.910814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.910821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.910947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21025 TClient is connected to server localhost:21025 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.868343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.018748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.596858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.845916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.974266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.467630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636513909026489:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.467808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.001188Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636492434188378:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.001271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.768354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.810965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.893895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.980028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.025362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.113719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.212973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522498961620:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.213065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.213302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636522498961625:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.217104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.231413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636522498961627:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.313396Z node 1 :TX_PROXY ERROR: Actor# [1:7491636522498961681:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.240640Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d7480] received request Name# PrepareDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:39318 2025-04-10T10:55:55.240714Z node 1 :GRPC_SERVER DEBUG: [0x51b000241880] created request Name# PrepareDataQuery 2025-04-10T10:55:55.241012Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d7480] received request without user token Name# PrepareDataQuery data# session_id: "ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:39318 database# /Root 2025-04-10T10:55:55.241457Z node 1 :GRPC_SERVER DEBUG: Got grpc request# PrepareDataQueryRequest, traceId# 01jrfnf4v99t7w4n49wfvgear3, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39318, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T10:55:55.794298Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d7480] issuing response Name# PrepareDataQuery data# operation { ready: true status: SUCCESS result { type_url: "type.googleapis.com/Ydb.Table.PrepareQueryResult" value: "\n>ydb://preparedqueryid/4?id=4c0d706d-4e6f675b-6cc5a333-b7a3c777" } } peer# ipv6:%5B::1%5D:39318 2025-04-10T10:55:55.797030Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d7480] finished request Name# PrepareDataQuery ok# true peer# ipv6:%5B::1%5D:39318 2025-04-10T10:55:55.814419Z node 1 :GRPC_SERVER DEBUG: [0x51b00036f680] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=4c0d706d-4e6f675b-6cc5a333-b7a3c777" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:39322 2025-04-10T10:55:55.814487Z node 1 :GRPC_SERVER DEBUG: [0x51b000370480] created request Name# ExecuteDataQuery 2025-04-10T10:55:55.814624Z node 1 :GRPC_SERVER DEBUG: [0x51b00036f680] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=4c0d706d-4e6f675b-6cc5a333-b7a3c777" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:39322 database# /Root 2025-04-10T10:55:55.815074Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jrfnf5d6dhmzwgyx2hwb3qh2, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39322, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.996099s 2025-04-10T10:55:58.816440Z node 1 :GRPC_SERVER DEBUG: [0x51b00036f680] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc 
request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:39322 2025-04-10T10:55:58.816868Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491636531088896694:2505] TxId: 281474976710671. Ctx: { TraceId: 01jrfnf5d6dhmzwgyx2hwb3qh2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:55:58.817608Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636531088896702:2517], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jrfnf5d6dhmzwgyx2hwb3qh2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZmVmNWU1N2EtZDRkYTE5NDMtYjBlYzI4ODAtNmFiMmRkNWQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /R ... d: [2:7491636570906153851:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:04.933604Z node 2 :TX_PROXY ERROR: Actor# [2:7491636570906153906:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '710) '('"_id" '"5577253d-655c40ad-4eb4525f-50a0f937") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '723) '('"_id" '"e26003c3-c3bbadae-5f05b3c4-e53edac9")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 12741, MsgBus: 23185 2025-04-10T10:56:07.772426Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636582545299230:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:07.772500Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017a4/r3tmp/tmpTcKC8Y/pdisk_1.dat 2025-04-10T10:56:08.132673Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:08.144122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:08.144206Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:08.148484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12741, node 3 2025-04-10T10:56:08.314785Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:08.314807Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:08.314816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:08.314934Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23185 TClient is connected to server localhost:23185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:09.187842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:09.212798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:09.400922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:09.778134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:09.943456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:12.774531Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636582545299230:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:12.774632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:13.619628Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636608315104762:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.619735Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.686044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.759114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.852463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.942521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:14.030943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:14.145826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:14.402571Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636612610072594:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.402814Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.410632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636612610072601:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:14.416403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:14.478465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636612610072603:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:14.579610Z node 3 :TX_PROXY ERROR: Actor# [3:7491636612610072662:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:16.813153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::DatashardReplySize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] Test command err: Trying to start YDB, gRPC: 5177, MsgBus: 4165 2025-04-10T10:55:47.169621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636497562130051:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:47.179609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c0/r3tmp/tmpbFYNdx/pdisk_1.dat 2025-04-10T10:55:47.956168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.956257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.957848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:55:48.057704Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5177, node 1 2025-04-10T10:55:48.318974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:48.319004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:48.319014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:48.319151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4165 TClient is connected to server localhost:4165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:49.357345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.431890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.809117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.234276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.337084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.158199Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636497562130051:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.158359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.702112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636519036968181:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.702229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.076265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.118935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.208021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.249544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.320520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.396309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.454769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523331935997:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.454930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.456332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523331936002:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.460781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.502413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636523331936004:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.567454Z node 1 :TX_PROXY ERROR: Actor# [1:7491636523331936058:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.237844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11697, MsgBus: 65397 2025-04-10T10:55:57.357388Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636538354968338:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c0/r3tmp/tmpZmCSrZ/pdisk_1.dat 2025-04-10T10:55:57.456108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:55:57.776050Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:57.814025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:57.814126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:57.831505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11697, node 2 2025-04-10T10:55:58.050836Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:58.050864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:58.050872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:58.051002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65397 TClient is connected to server localhost:65397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:59.306904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:59.326585Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:59.336423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.482133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.782062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636564124773754:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.090468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.140392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.191697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.265387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.326870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.375221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.479567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.617895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636564124774281:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.618034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.618499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636564124774287:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:03.623873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:03.651868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636564124774289:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:03.757272Z node 2 :TX_PROXY ERROR: Actor# [2:7491636564124774345:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9989, MsgBus: 3219 2025-04-10T10:56:06.594909Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636579476800923:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:06.595998Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017c0/r3tmp/tmpB12ZOu/pdisk_1.dat 2025-04-10T10:56:06.833048Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.836227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:06.837117Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.865039Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9989, node 3 2025-04-10T10:56:07.046926Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:07.046954Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:07.046963Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:07.047138Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3219 TClient is connected to server localhost:3219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:08.116825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:08.130923Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:08.143191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.253023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.555220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:08.658703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:11.597673Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636579476800923:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:11.597762Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:12.399557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636605246606396:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.399641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:12.528796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.665015Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.715043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.774048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.862210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.957101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:13.052343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636609541574212:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.052457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.058302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636609541574217:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:13.061847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:13.073890Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636609541574219:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:13.170035Z node 3 :TX_PROXY ERROR: Actor# [3:7491636609541574273:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:14.985686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-04-10T10:56:07.703333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:07.703611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:07.703874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc7/r3tmp/tmplfKGkV/pdisk_1.dat 2025-04-10T10:56:08.368938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:08.458716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:08.524105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:08.524252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:08.539886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:08.646530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:08.727021Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:56:08.728215Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:56:08.728757Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:56:08.729023Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.858848Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:56:08.859698Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.859841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.861809Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:08.861899Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:08.861965Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:08.873097Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.874676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.874793Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:56:08.890820Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:08.959906Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:08.960122Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:08.960268Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:56:08.960313Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:08.960356Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:08.960393Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:08.960621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:08.960676Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:08.961024Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:08.961146Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:08.961209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:08.961254Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:08.961307Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:56:08.961345Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:56:08.961385Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:56:08.961462Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:08.961518Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:08.961641Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:08.961686Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:08.961741Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:56:08.967032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T10:56:08.967117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:56:08.967267Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:08.967488Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:56:08.967548Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:08.967651Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:08.967774Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:56:08.967823Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:56:08.967863Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T10:56:08.967901Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:56:08.968224Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:56:08.968268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:56:08.968308Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:56:08.968346Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T10:56:08.968406Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:56:08.968437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:56:08.968475Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:56:08.968511Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:56:08.968535Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:56:08.975021Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:56:08.975102Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:08.985880Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:56:08.985956Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:56:08.985993Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T10:56:08.986051Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T10:56:08.986118Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:09.156481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:09.156544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:09.156585Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T10:56:09.157056Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T10:56:09.157101Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:56:09.157248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:56:09.157306Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T10:56:09.157346Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T10:56:09.157386Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T10:56:09.161766Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:56:09.161840Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:09.175865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:09.175953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:09.176029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:0 ... t 72075186224037890 to execution unit CompleteOperation 2025-04-10T10:56:11.702004Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-10T10:56:11.702203Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2025-04-10T10:56:11.702237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2025-04-10T10:56:11.702269Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T10:56:11.702313Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2025-04-10T10:56:11.702357Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2025-04-10T10:56:11.702382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T10:56:11.702406Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2025-04-10T10:56:11.702437Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:11.702466Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-10T10:56:11.702497Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T10:56:11.702528Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T10:56:11.716362Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:56:11.716458Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:56:11.716509Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-04-10T10:56:11.716579Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 72075186224037890 at tablet 72075186224037890 send result to client [1:1128:2907], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T10:56:11.716636Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:56:17.117983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:17.122339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:17.122697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc7/r3tmp/tmpvONnaZ/pdisk_1.dat 2025-04-10T10:56:17.540535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:17.605731Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:17.651862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:17.652018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:17.673674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:17.765751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:17.794229Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-10T10:56:17.794553Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:17.852820Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:17.853036Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:17.856095Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:17.856235Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:17.856322Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:17.856818Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:17.856991Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:17.857109Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-10T10:56:17.868070Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:17.868186Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:17.868345Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:17.868483Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-10T10:56:17.868568Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:17.868626Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:17.868707Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:17.869171Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:17.869338Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:17.869416Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:17.869500Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:17.869579Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:17.869651Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:17.869850Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T10:56:17.870563Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:17.870938Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:17.871056Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:17.873498Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:17.886767Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:56:17.886883Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:18.060264Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T10:56:18.060885Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:56:18.060944Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:18.061654Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:18.061725Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:56:18.061772Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:56:18.062013Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:56:18.074236Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:56:18.075298Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:18.075382Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:56:18.075838Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:56:18.076267Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T10:56:18.077997Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T10:56:18.078054Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:18.087462Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T10:56:18.087573Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:18.088502Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:18.088552Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:18.088614Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:56:18.088689Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:56:18.088752Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:56:18.088852Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:18.089842Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:18.100723Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T10:56:18.100815Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:56:18.101391Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:56:18.114230Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-10T10:56:18.114424Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RandomUuid [GOOD] Test command err: Trying to start YDB, gRPC: 15417, MsgBus: 15607 2025-04-10T10:55:52.205146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636519759486278:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.211154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001799/r3tmp/tmpopStmP/pdisk_1.dat 2025-04-10T10:55:52.681424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:52.681572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-10T10:55:52.683684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15417, node 1 2025-04-10T10:55:52.716275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:52.725846Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:55:52.731095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:55:52.805068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:52.805115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:52.805141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:52.805314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15607 TClient is connected to server localhost:15607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:53.560863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:53.607234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:53.634109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:53.813853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:54.050738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:54.158639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.407460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636536939357222:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:56.407568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:56.765641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:56.810903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:56.890786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:56.936079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:56.980227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:57.046125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:57.158464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636541234325040:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:57.158597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:57.166369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636541234325045:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:57.176512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:57.201640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636541234325047:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:57.220573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636519759486278:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:57.220866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:57.311389Z node 1 :TX_PROXY ERROR: Actor# [1:7491636541234325104:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:59.104748Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmNlOWE1YmQtZmQ5NDkxNDItMzdjMTdjYzctOGRmMTBhYzg=, ActorId: [1:7491636549824260005:2501], ActorState: ExecuteState, TraceId: 01jrfnf8hn08jama4gfpf72baw, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 74ms during compilation Trying to start YDB, gRPC: 16024, MsgBus: 23811 2025-04-10T10:56:00.459448Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636551022859095:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:00.460091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001799/r3tmp/tmprAmDJJ/pdisk_1.dat 2025-04-10T10:56:00.635817Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:00.650531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:00.650635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:00.654995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16024, node 2 2025-04-10T10:56:00.774839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:00.774864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:00.774874Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:00.775002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23811 TClient is connected to server localhost:23811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:01.325060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.340558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.409494Z node 2 :FLAT_TX_SCHEMESHARD WARN ... rvice] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.139133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.192198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.234092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.324608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.375866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.451654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.466266Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636551022859095:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:05.466353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:05.614731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636572497697785:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.614855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.615498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636572497697790:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.621001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:05.652456Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T10:56:05.653260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636572497697792:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:05.742760Z node 2 :TX_PROXY ERROR: Actor# [2:7491636572497697848:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64751, MsgBus: 19608 2025-04-10T10:56:09.417249Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636590533635452:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:09.417411Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001799/r3tmp/tmpxApWqt/pdisk_1.dat 2025-04-10T10:56:09.846565Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:09.895688Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:09.895779Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:09.903477Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64751, node 3 2025-04-10T10:56:10.114764Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:10.114788Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:10.114807Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:10.114921Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19608 TClient is connected to server localhost:19608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:10.851200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:10.860751Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:10.868300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.962953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:11.262946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:11.385159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:14.418341Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636590533635452:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:14.437340Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:15.834597Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636616303441019:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:15.834715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:15.893513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:15.979742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.075031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.155956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.209813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.291313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.396299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636620598408853:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.396389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.396605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636620598408858:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.400971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:16.411797Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636620598408860:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:16.489629Z node 3 :TX_PROXY ERROR: Actor# [3:7491636620598408917:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-10T10:54:00.991080Z :TestReorderedExecutor INFO: Random seed for debugging is 1744282440991030 2025-04-10T10:54:01.304091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636039477188036:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.304197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.362226Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636039382815943:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.549905Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001516/r3tmp/tmpGWKqBM/pdisk_1.dat 2025-04-10T10:54:01.580314Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:54:01.631080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.839290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:01.844646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.844748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.845689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.846137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.850498Z node 1 
:HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:01.850606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.853743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63662, node 1 2025-04-10T10:54:01.964889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001516/r3tmp/yandexOgemNm.tmp 2025-04-10T10:54:01.964935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001516/r3tmp/yandexOgemNm.tmp 2025-04-10T10:54:01.965497Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001516/r3tmp/yandexOgemNm.tmp 2025-04-10T10:54:01.965686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:02.007814Z INFO: TTestServer started on Port 15699 GrpcPort 63662 TClient is connected to server localhost:15699 PQClient connected to localhost:63662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:02.274478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:54:04.456077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636052362090951:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.456076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636052362090926:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.456185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.460647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:04.492398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636052362090953:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:04.768208Z node 1 :TX_PROXY ERROR: Actor# [1:7491636052362091040:2689] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.800044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.813909Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636052362091050:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.814462Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYyYTI2MzAtNzA4NDg3MzgtZWExM2YwOTAtYzcxNjAyMjk=, ActorId: [1:7491636052362090921:2336], ActorState: ExecuteState, TraceId: 01jrfnbrmg3z0q5jsqgam55d2f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.816773Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.832473Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636052267718000:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.832706Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDA3NDlkNzctNjI4ZTMwNGUtNDlkODc5My0yNzUxN2NmYQ==, ActorId: [2:7491636052267717982:2308], ActorState: ExecuteState, TraceId: 01jrfnbrw81gdfw8adwapjjm2c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.833160Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.963411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:54:05.096975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:63662", true, true, 1000); 2025-04-10T10:54:05.350511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnbsdedfkgkmqzjs3p9g72, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI0MmI4MDItN2JkMzQyNS1iMDAxNzk1LWZmOGYyN2Ji, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636056657058756:2986] 2025-04-10T10:54:06.306266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636039477188036:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.306354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:06.357213Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636039382815943:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.357290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T10:54:10.417015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... 
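The "failed to list clusters" errors above have the same bootstrap-race shape: the cluster tracker queries /Root/PQ/Config/V2/Cluster before the test has created and populated it, so the first attempts fail with SCHEME_ERROR (code 2003), and the harness simply re-checks until it logs "=== CheckClustersList. Ok". Below is a minimal poll-until-ready sketch of that behavior; TryListClusters and its statuses are illustrative assumptions, not the actual cluster-tracker API, which subscribes to updates rather than polling a plain function.

```cpp
// Minimal poll-until-ready sketch, assuming a hypothetical TryListClusters()
// that fails with SchemeError until /Root/PQ/Config/V2/Cluster exists.
#include <chrono>
#include <iostream>
#include <optional>
#include <string>
#include <thread>
#include <vector>

enum class EQueryStatus { Success, SchemeError, Unavailable };

struct TListResult {
    EQueryStatus Status;
    std::vector<std::string> Clusters;
};

TListResult TryListClusters(int attempt) {
    // Before the UPSERT into /Root/PQ/Config/V2/Cluster lands, the table does
    // not exist and query compilation reports SCHEME_ERROR, as in the log above.
    if (attempt < 2) {
        return {EQueryStatus::SchemeError, {}};
    }
    return {EQueryStatus::Success, {"dc1"}};
}

std::optional<std::vector<std::string>> WaitClustersList(int maxAttempts) {
    using namespace std::chrono_literals;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        TListResult result = TryListClusters(attempt);
        if (result.Status == EQueryStatus::Success) {
            return result.Clusters; // corresponds to "=== CheckClustersList. Ok"
        }
        std::this_thread::sleep_for(100ms); // retry while bootstrap completes
    }
    return std::nullopt;
}

int main() {
    if (auto clusters = WaitClustersList(50)) {
        std::cout << "clusters: " << clusters->size() << '\n'; // prints: 1
    }
}
```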
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:63662 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T10:54:10.557183Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63662 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 8 ... -52e3f137-fa9d3357-e5bf442e_0] Write session: aborting 2025-04-10T10:56:12.218326Z :INFO: [] MessageGroupId [src] SessionId [src|b907049f-52e3f137-fa9d3357-e5bf442e_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:56:12.218394Z :DEBUG: [] MessageGroupId [src] SessionId [src|b907049f-52e3f137-fa9d3357-e5bf442e_0] Write session: destroy 2025-04-10T10:56:12.223514Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7491636605705830773:2517] destroyed 2025-04-10T10:56:12.223588Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:56:12.219432Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b907049f-52e3f137-fa9d3357-e5bf442e_0 grpc read done: success: 0 data: 2025-04-10T10:56:12.219459Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b907049f-52e3f137-fa9d3357-e5bf442e_0 grpc read failed 2025-04-10T10:56:12.219525Z node 13 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|b907049f-52e3f137-fa9d3357-e5bf442e_0 2025-04-10T10:56:12.219546Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b907049f-52e3f137-fa9d3357-e5bf442e_0 is DEAD 2025-04-10T10:56:12.220011Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:56:12.289531Z :INFO: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Starting read session 2025-04-10T10:56:12.289616Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Starting cluster discovery 2025-04-10T10:56:12.290011Z :INFO: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24892: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24892
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24892. " 2025-04-10T10:56:12.290105Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Restart cluster discovery in 0.007198s 2025-04-10T10:56:12.303825Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Starting cluster discovery 2025-04-10T10:56:12.304279Z :INFO: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24892: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24892
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24892. " 2025-04-10T10:56:12.304338Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Restart cluster discovery in 0.010425s 2025-04-10T10:56:12.315428Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Starting cluster discovery 2025-04-10T10:56:12.315843Z :INFO: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24892: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24892
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24892. " 2025-04-10T10:56:12.315898Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Restart cluster discovery in 0.020649s 2025-04-10T10:56:12.343654Z :DEBUG: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Starting cluster discovery 2025-04-10T10:56:12.343948Z :NOTICE: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24892: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24892
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24892. " } 2025-04-10T10:56:12.348450Z :NOTICE: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24892: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24892
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24892. " } 2025-04-10T10:56:12.348611Z :INFO: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Closing read session. Close timeout: 0.000000s 2025-04-10T10:56:12.348712Z :NOTICE: [/Root] [/Root] [f9371dda-49700280-a51f948a-c92d5015] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:56:13.064213Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:56:13.096990Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:13.146561Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:13.215415Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:13.402322Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:13.542254Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:13.807322Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:14.342051Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:14.483132Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710688. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:14.483285Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636614295765520:2538] TxId: 281474976710688. Ctx: { TraceId: 01jrfnfpee0egwdmtrfgzq1x09, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTgzZDUxZjEtNjE4NWMyNjEtMzRkNDdiOWMtYTYyZjIyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:14.483605Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MTgzZDUxZjEtNjE4NWMyNjEtMzRkNDdiOWMtYTYyZjIyNjc=, ActorId: [13:7491636610000798168:2538], ActorState: ExecuteState, TraceId: 01jrfnfpee0egwdmtrfgzq1x09, Create QueryResponse for error on request, msg: 2025-04-10T10:56:14.485685Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnfqey3svcn6fz6qm91reg" } } YdbStatus: UNAVAILABLE ConsumedRu: 668 } 2025-04-10T10:56:15.147753Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:15.962410Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:16.838671Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710690. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:16.838852Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636622885700221:2552] TxId: 281474976710690. Ctx: { TraceId: 01jrfnfrma7escbynngm970f9m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjFkYzg4YmEtYWNjZmE4MzYtZDVlZmE3ZjEtNWYzYWE0NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:16.839179Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MjFkYzg4YmEtYWNjZmE4MzYtZDVlZmE3ZjEtNWYzYWE0NTA=, ActorId: [13:7491636618590732883:2552], ActorState: ExecuteState, TraceId: 01jrfnfrma7escbynngm970f9m, Create QueryResponse for error on request, msg: 2025-04-10T10:56:16.843444Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnfsr80nnzf6qs68nfjxp2" } } YdbStatus: UNAVAILABLE ConsumedRu: 752 } 2025-04-10T10:56:16.916481Z node 13 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [13:7491636610000798139:2536]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:56:16.917263Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7491636610000798139:2536], TxId: 281474976710687, task: 1. Ctx: { SessionId : ydb://session/3?node_id=13&id=M2UzNGE5OTEtYjU0NjcyNjYtZWE0Y2E0NDYtNDhkN2QzMDE=. TraceId : 01jrfnfp848bew46j71184zqys. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Too many retries for shard 72075186224037890 } 2025-04-10T10:56:16.917343Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7491636610000798139:2536], TxId: 281474976710687, task: 1. Ctx: { SessionId : ydb://session/3?node_id=13&id=M2UzNGE5OTEtYjU0NjcyNjYtZWE0Y2E0NDYtNDhkN2QzMDE=. TraceId : 01jrfnfp848bew46j71184zqys. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: UNAVAILABLE DEFAULT_ERROR: {
: Error: Too many retries for shard 72075186224037890 }. 2025-04-10T10:56:16.918142Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=M2UzNGE5OTEtYjU0NjcyNjYtZWE0Y2E0NDYtNDhkN2QzMDE=, ActorId: [13:7491636610000798132:2536], ActorState: ExecuteState, TraceId: 01jrfnfp848bew46j71184zqys, Create QueryResponse for error on request, msg: 2025-04-10T10:56:16.922553Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Too many retries for shard 72075186224037890" severity: 1 } TxMeta { id: "01jrfnfp85b8k8z0h3f0scj5ay" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2025-04-10T10:56:08.944663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636588026953743:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:08.944777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cce/r3tmp/tmpqGoNHW/pdisk_1.dat 2025-04-10T10:56:09.923093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:09.966621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:09.966763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:09.970855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:09.990141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62071 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T10:56:10.591496Z node 1 :TX_PROXY DEBUG: actor# [1:7491636588026953822:2135] Handle TEvNavigate describe path dc-1 2025-04-10T10:56:10.591584Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636596616888885:2455] HANDLE EvNavigateScheme dc-1 2025-04-10T10:56:10.591733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491636592321921144:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:56:10.591844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491636592321921144:2149], cookie# 1 2025-04-10T10:56:10.593794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921506:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921503:2386], cookie# 1 2025-04-10T10:56:10.593856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921507:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921504:2386], cookie# 1 2025-04-10T10:56:10.593874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921508:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921505:2386], cookie# 1 2025-04-10T10:56:10.593915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491636588026953469:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921506:2386], cookie# 1 2025-04-10T10:56:10.593948Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491636588026953472:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921507:2386], cookie# 1 2025-04-10T10:56:10.593966Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491636588026953475:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491636592321921508:2386], cookie# 1 2025-04-10T10:56:10.593992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921506:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491636588026953469:2051], cookie# 1 2025-04-10T10:56:10.594007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921507:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491636588026953472:2054], cookie# 1 2025-04-10T10:56:10.594019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491636592321921508:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491636588026953475:2057], cookie# 1 2025-04-10T10:56:10.594054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491636592321921503:2386], cookie# 1 2025-04-10T10:56:10.594078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T10:56:10.594093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7491636592321921504:2386], cookie# 1 2025-04-10T10:56:10.594133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T10:56:10.594184Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491636592321921505:2386], cookie# 1 2025-04-10T10:56:10.594200Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636592321921499:2386][/dc-1] Unexpected sync response: sender# [1:7491636592321921505:2386], cookie# 1 2025-04-10T10:56:10.594259Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491636592321921144:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T10:56:10.658408Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491636592321921144:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491636592321921499:2386] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T10:56:10.658677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491636592321921144:2149], cacheItem# { Subscriber: { Subscriber: [1:7491636592321921499:2386] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T10:56:10.661017Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491636596616888888:2458], recipient# [1:7491636596616888885:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:10.661094Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636596616888885:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:10.748067Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636596616888885:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T10:56:10.761285Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636596616888885:2455] Handle TEvDescribeSchemeResult Forward to# [1:7491636596616888884:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:56:10.802919Z node 1 :TX_PROXY DEBUG: actor# [1:7491636588026953822:2135] Handle TEvProposeTransaction 2025-04-10T10:56:10.802956Z node 1 :TX_PROXY DEBUG: actor# [1:7491636588026953822:2135] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T10:56:10.803082Z node 1 :TX_PROXY DEBUG: actor# [1:7491636588026953822:2135] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491636596616888893:2462] 2025-04-10T10:56:10.973062Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491636592321921144:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:56:10.973144Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491636592321921144:2149], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-10T10:56:10.973313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491636596616888901:2466][/dc-1/.metadata/initialization/migrations] Handle NKikimr::T ... 
_BOARD_REPLICA DEBUG: [1:7491636588026953469:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7491636603511446176:2213] 2025-04-10T10:56:12.792963Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491636588026953472:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7491636603511446177:2213] 2025-04-10T10:56:12.792984Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491636588026953475:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7491636603511446178:2213] 2025-04-10T10:56:12.820867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:56:12.792539Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491636603511446172:2213][/dc-1/USER_0] Path was already updated: owner# [3:7491636599216478695:2107], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:56:12.820917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:56:12.820939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:56:12.820961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T10:56:12.792646Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491636603511446172:2213][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7491636603511446175:2213] 2025-04-10T10:56:12.792700Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491636603511446172:2213][/dc-1/USER_0] Path was already updated: owner# [3:7491636599216478695:2107], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:56:12.792798Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491636599216478695:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2025-04-10T10:56:12.805879Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491636599216478695:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491636603511446172:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744282571395 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [3:7491636603511446172:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744282571395 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-10T10:56:12.843348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-04-10T10:56:12.843418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-04-10T10:56:12.843586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T10:56:12.843890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-10T10:56:12.844187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T10:56:12.844376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-10T10:56:12.844492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:12.844613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T10:56:12.844738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T10:56:12.844865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T10:56:12.845008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T10:56:12.845028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T10:56:12.845148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T10:56:12.845289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T10:56:12.845310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T10:56:12.845350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:56:12.847984Z node 3 :TX_PROXY DEBUG: actor# [3:7491636603511446196:2217] Become StateWork (SchemeCache [3:7491636603511446433:2228]) 2025-04-10T10:56:12.867012Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-04-10T10:56:12.867133Z node 1 :HIVE WARN: 
HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2025-04-10T10:56:12.867171Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-04-10T10:56:12.871999Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-10T10:56:12.902611Z node 3 :TX_PROXY DEBUG: actor# [3:7491636603511446196:2217] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:12.959743Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-10T10:56:12.990803Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491636592321921144:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:56:12.990927Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491636592321921144:2149], cacheItem# { Subscriber: { Subscriber: [1:7491636596616888901:2466] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:56:12.991029Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491636605206824074:2884], recipient# [1:7491636605206824073:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2025-04-10T10:56:12.994869Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-10T10:56:13.018217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T10:56:13.030301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T10:56:13.030478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T10:56:13.030495Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T10:56:13.030523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T10:56:13.030533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T10:56:13.030558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T10:56:13.030582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T10:56:13.030626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T10:56:13.039018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T10:56:13.030297Z node 3 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976725656 RangeEnd# 281474976730656 txAllocator# 72057594046447617 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |68.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-04-10T10:56:06.162680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:06.163050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:06.163322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001391/r3tmp/tmpejIvlK/pdisk_1.dat 2025-04-10T10:56:06.842796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.929718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.976376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.976557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:07.003646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:07.110571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:07.651572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:07.651737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:07.651832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:07.675488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:07.904654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:08.054669Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:08.434569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnfgz090e05kxdgkjk3snk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNiZTZlYzktNjJmMjg2OGMtMWVhYTgyYTItYTkzY2IxNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.555741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnfhr25sgz5f7q4k92zqe7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI2NjAxOS0xMzIyZDUyMy1mY2ZmZmUwOS1lZmZjNTkxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.629584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnfhvk55w04vy3bkprpdv8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRjMTA3ODktYWY5NjZmYTItODJkN2I2ZWUtMTFjYWQzMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.709994Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnfhxx1ea5zq9b7gf7x52s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk4NDcyNzYtOWU4YzdkOGItNmNjYWRiMmEtODE4MzZhNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.792829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfnfj0dc4hwgctj66s4wah8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA1NTc4OWQtODlkYzdjYjYtZjI4OTY2YTEtZThjYTYzNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.872686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfnfj31bsrv87zj351y6j79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEzZGRlYzYtYjc0ZWYyMWYtMWYyOWY4MWItMWQ5NGM4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:08.953004Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfnfj5g45sj3cn0pt157f5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBiZDUzMjQtZDQzMmZhMmYtYmM5MjZiOTktMzk3Zjk4N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.034371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfnfj80d3g8sjr3bseezv3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFlNjg1Y2YtZjA0NDU2MC0zOWZhMTBhLThlNzAxZmI2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.121223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfnfjajdzxrs8z0mykk0sqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWIyZWNmOTEtZDBkMWRkNmYtNjA3NGI2YjQtNGVmYjJlYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.252342Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfnfjd85bxwdcz20wkk1bzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJkYTc1ODYtYjgyYjUyOTAtNTQyNTkxNGItNDcyOTY1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.328327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfnfjhb9qhrwvagy2k3gmq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJmNzNmNWItNzY5MGJhYTItZjk1ZTBjOGMtMTRkYmYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.402022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfnfjkq41ckk8ppfa5hjksw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNlMTM4MzAtMjZkZmMwMGEtMTU1N2FhODQtMjNjMGUwYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.500464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfnfjp2bj76d7k6wp8xce85, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE0NmFjMGEtODU5NjBlY2UtNTY0MTVkMDEtMmFiY2IzOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.588803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfnfjs8anz4f72ys6m76tmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE1NTVmZDYtMTc4MGE0OWItYjFkZTNjZWItMjUzYTkzOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.711540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfnfjvw1cbac7qvkpk33vvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTEyNTM5MzctNjg2NjhjZTctODlhNTJiZDAtZjIzNTg1OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.854701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfnfjzsdw0ptj1d156n0ac2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MyOTM2NjEtZTc1ZmM2ZTEtY2ZjZDkxMjktYmIwZTZmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:09.933404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfnfk47c7eyqw44tmhq6nkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJiYTViY2QtNzg2MjZiMTAtMTFlYTkxNGItMWZhZjI5YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.015600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfnfk6m5csrydeypwmktkgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc2NDBmZWMtMTE0ZDA3N2YtMmQwMmNkMjEtY2NhYTI1YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.098568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfnfk96frffdzejt4j36pgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjUyNGExMDMtY2RiYzYxZjQtZjU3NmQyNzEtNzdjNzU4MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.174834Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfnfkbsbpjt9j698dt7p81e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU1NGUwNjQtM2FiMzhhMTktOTdhMGRmMmUtMzU2NTIxMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.282055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfnfke5bybhs1b495hatqsv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YzYTRmNDUtYTVkMzdlMzAtOWU3YTNiMTctZWNlNGI4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.431906Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfnfkhk4td6smfacc7b5q40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQzNWUwY2UtYzkzMmUxNWUtY2Q2MGE4ZTUtMmM1YjIyOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.537640Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfnfkpj234pk37wz4h0zb0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRhZDhiYzktMjJiMGE2NDktNzdlYjU3ODQtNjM3OWNiZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.620599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfnfksgcn5v0zdadqny4aze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY0NjQ0NTYtZDdjMmVmMzAtMjcxNjUxMjktMWY2OGJmMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.702550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfnfkw30c4hfydyhzt68hyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4YTQyZTYtYjkxMjM1OTgtYmExNjM0MS1lODhiMGJiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:10.779102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jrfnfkypa4azyvkzwd09cahk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYyZmFlZGYtZjU4MjhiNGQtNDM0ODg4YTItMWEzOTA3MWQ= ... :16.547004Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jrfnfsh414q2d5tfa7c9zsc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJjNGNhYTgtMWNkYjIyNTUtNWJjZjAxMmMtMTkyZTYwNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.634761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jrfnfsnf6m575y16tgnhtdae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVmZGUxMjUtZGUwNjg2YjQtYTZhOTM5ZTItYWEyYTc2YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.713713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jrfnfsr2cj1ktzpgnhq1528c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWFlN2EzNGItYzY0NzA3ODYtMjA3OThiNzQtNGFjMDJmZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.794336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jrfnfstj5fz5s3j2f8s1k9fa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEyZjAyOGItYTdkMDhiOWEtMzdiZmIyNmQtMjBjNWRkMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T10:56:16.880199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jrfnfsx74mgtzjz57pahkc48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFiN2FhNDYtOTdmNjI3OTMtZGE5ZWIzNWEtNDQyM2I2Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:16.983545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jrfnfszrfgwsn1kpphd5jsmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMyOGRlZjctYjQ4ODgxZWYtOGM0NzdlNDQtMmU3MzNjMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.129945Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jrfnft2zf6hc1mfysf2g13hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM0MDVjYWMtYzkzNWMwMTktYWRlYWIxN2MtZWM1NTYzNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.227807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jrfnft7w50wzj67hmb02f7p2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyYjhlNDItY2U5MjA0YjMtMzNmZTY1NC1lMzA3OTkwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.301032Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jrfnftak5c5yj8tmpzq270we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRiNTE0ODktZmRmYWEzZjItMmQ1MzAzZi02ZDY5YTE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.393371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jrfnftcw7mg6sdxaz51qbjyb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI5YTljZjctN2M5NTQ4MmItYmNmZDE0ZDItZjRjMjUxZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.549193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jrfnftfrdh49y2pv0cta8v61, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxNDdjNjYtOGMwZjE2NTMtOGFmODUzNy01MjcxZjBkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.724254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jrfnftmx9b5v07pqjjnnh2t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU5NmM4M2QtNjA0NjQ2MWEtYTViM2FmM2QtOTFjYzMyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.856398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jrfnfttg4jkewszp3074c3z9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThlMmM1NDEtZjdhZGEzYWEtMjVhYTk2ODctMTIyZDViODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:17.972940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jrfnftym6zc09xkexjxbbdg5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIzODI5YzgtNDkwZGI1Y2MtOWM4OWQzZWItMzU3NTRhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.232802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jrfnfv25456rrptkvbe1dehk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVlMTI4NTYtNjkwMWE3ZmItZjhlYTk3MmMtM2ZhMWNmMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.341643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jrfnfva00bmdxq6a46zqyk25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRmYWM2MTQtYjE3ODcxM2UtOThjMTBiOGYtNGEwOTJjMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.505763Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jrfnfvdsd6dg0waf2qde61aq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg5MTg0N2QtNjgyOGRiZmYtMWQ1YWMxYzctODE5MDYyNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.642015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jrfnfvjz9ma5gf3qdxt5at99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNTBlYmEtNTYyNmZmMWYtYTNlMjZlZTItODBlM2IwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.730797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jrfnfvptbbywt7rgdy1b0z9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgxZDkwYzAtODJkNmRjZS01YTA2ZThlOS01NDFiZDE4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.815211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jrfnfvsk41c1kb4s7e72cs26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QwYWY1YzUtN2YwMDQ2YTYtNDZiOTZjMzktNTFjMWQyZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:18.894020Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfnfvw66kjveetp0x77tp97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM2ODBjMDQtMzkzMDY3YmMtODEzNmIwZDUtYzJiY2M5NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.006333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jrfnfvyn541p8mgw7cr3h2dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ3ODA5ZWEtYTExY2UyYzItZjY4ODMwNzUtZGJmYjBmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.087780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jrfnfw258b2apja6qbyr7fq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRjYTMyM2ItMmZkYjExNjMtYTNmZTRjZDYtYWZhNjQ4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.168339Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jrfnfw4qetvbtch4f0v12dw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUzMTM0ZmEtYmVmYzY1NzMtZjIzYzVmNTEtMTU2YTMzYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.248467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jrfnfw778x2zhk4ezkj3zapz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlOGVhOGMtNGYwY2Y1YzMtM2M1ZTg5NGYtNDBkNDlmYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.329284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jrfnfw9r3vzgej76pn0xyb5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU0MDA5MjgtOTcyNTc2YzItZGUyM2MyZjQtMzc1YjhiOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.409200Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jrfnfwc8bmek8jhnse788k6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI4OGVjZTEtNDJjMWI1My1lMTU4NzdkLWNlMmY1MjFj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.488928Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jrfnfwer0emzpzc3hzfyn3g2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJjZTdmZTgtZGY0ZDA3OGYtMzQ5NDQzYTYtMjI1ZTI5OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.569253Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jrfnfwh84br0bj52fh7je5m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE2ZWQ0N2EtYzM2ZDU2MTQtN2YzZGMzMTAtM2I5MDA0OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.686248Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jrfnfwksbmwc9nxa90crknq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjNzAzMzAtY2M4YzIxZGMtZTY5YTY1ZTktZDIwYmRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.765480Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jrfnfwqd7708hqw9w9wf0eft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y2YjE3NDMtNGJiYWQyNTAtYjY5MTQzYmUtMmU1MDI5ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.844385Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jrfnfwsx6nmp1fj341ct4408, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU1NzcyODgtY2JlMWZlNWYtOTExMTdiYjEtYzIxOTMwMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.920008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jrfnfwwbdjrbe4cfj9ay8xed, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMxOTJkYTAtNDRiMWIxOTQtOWM3ZjQyZDYtY2U5NjE2NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:19.997770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jrfnfwyq5k4e2znca13qtent, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhOWI2NGQtOGEyMjkzYWYtZTAzMjEzMGQtNjM4NjU0Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:20.080044Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jrfnfx15010mp0agrp9v8dq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDlhMWNlNWMtMjU2OTNjNTQtNzZjZjQ3NDUtYjkxN2JiMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:56:20.299050Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jrfnfx4t49erwxvgb2ve0ca1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc3ZWYxYmItZDAzOWVjODMtM2QzYjZlMzQtMWRiYjViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST }
>> KqpExplain::SsaProgramInJsonPlan [GOOD]
>> KqpLimits::AffectedShardsLimit
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD]
|68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD]
Test command err:
Trying to start YDB, gRPC: 16198, MsgBus: 18847
2025-04-10T10:55:46.920922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636491392204531:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:46.921210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bb/r3tmp/tmpB8LVDQ/pdisk_1.dat
2025-04-10T10:55:47.671003Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:55:47.699962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:55:47.700102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:55:47.704128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 16198, node 1
2025-04-10T10:55:47.940962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:55:47.940991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:55:47.940998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:55:47.941129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18847
TClient is connected to server
localhost:18847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:49.257390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.292778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:49.315435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.623637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.958763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.108684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.915982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636491392204531:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.916095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.135502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636517162010083:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.135647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.767794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.812624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.887977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.966103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.043572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.101702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.151150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636521456977905:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.151278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.151593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636521456977910:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.156072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.172858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636521456977912:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.270819Z node 1 :TX_PROXY ERROR: Actor# [1:7491636521456977967:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32607, MsgBus: 20742 2025-04-10T10:56:00.344303Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636552783565495:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:00.344422Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bb/r3tmp/tmpivsUh1/pdisk_1.dat 2025-04-10T10:56:00.525159Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:00.563881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:00.563968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:00.565897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32607, node 2 2025-04-10T10:56:00.678834Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:00.678861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:00.678872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:00.679018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20742 TClient is connected to server localhost:20742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:01.217849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.234207Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:01.256311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.378454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:01.613111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.763190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:05.346437Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636552783565495:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:05.346523Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:05.684754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636574258403727:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.684971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.748854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.836877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:05.933384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.007252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.081306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.167892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.234004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636578553371538:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:06.234084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:06.234355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636578553371543:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:06.239177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:06.256167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636578553371545:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:06.358409Z node 2 :TX_PROXY ERROR: Actor# [2:7491636578553371605:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61661, MsgBus: 11973 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017bb/r3tmp/tmpWUvlVV/pdisk_1.dat 2025-04-10T10:56:09.701128Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:09.841985Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:09.864138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:09.864235Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:09.874879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61661, node 3 2025-04-10T10:56:10.046713Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:10.046739Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:10.046747Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:10.046874Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11973 TClient is connected to server localhost:11973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:10.831907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.849495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.995549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:11.270625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:11.393681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:16.104193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636622101862446:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.104313Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.238275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.311985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.399080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.549136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.617290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.702074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.803546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636622101862988:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.803675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.803972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636622101862993:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:16.810520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:16.831816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636622101862995:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:16.893197Z node 3 :TX_PROXY ERROR: Actor# [3:7491636622101863051:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> ExternalIndex::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2025-04-10T10:56:09.233697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:09.233945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:09.242322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cb5/r3tmp/tmpXTkXyi/pdisk_1.dat 2025-04-10T10:56:09.973858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.048704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:10.092336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:10.092481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:10.107574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:10.226423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.341849Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:56:10.346446Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:10.484313Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:10.484497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:10.486495Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:10.486584Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:10.486643Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:10.487119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:10.487314Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:10.487412Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:56:10.499001Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:10.534435Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:10.534771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:10.534922Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:56:10.534986Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:10.535027Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:10.535085Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:10.535677Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:10.535807Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:10.535873Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:10.535915Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:10.535968Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:10.536044Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:10.536158Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:56:10.536777Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:10.537073Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:10.537226Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:10.539500Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:10.554995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:56:10.555133Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:10.733235Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T10:56:10.740559Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:56:10.740675Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:10.741939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:10.742002Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:56:10.742075Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:56:10.742416Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:56:10.742647Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:56:10.743600Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:10.743683Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:56:10.744226Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:56:10.744686Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T10:56:10.749291Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-10T10:56:10.749366Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T10:56:10.750078Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-10T10:56:10.750190Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T10:56:10.751334Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T10:56:10.751393Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-10T10:56:10.751474Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-10T10:56:10.751548Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-04-10T10:56:10.751612Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-10T10:56:10.751740Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T10:56:10.757387Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T10:56:10.760995Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-10T10:56:10.761121Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-10T10:56:10.761848Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-10T10:56:10.772652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:10.772785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:10.772881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:10.779155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T10:56:10.786283Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T10:56:10.967736Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T10:56:10.977722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-10T10:56:11.185764Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:11.821569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnfm0hc73f2b3rn3ba9n73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ2NzNkYjMtMmEzYTAyYWQtMjNjODAwMGQtOWY5ZjYyOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:56:11.843913Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0]
2025-04-10T10:56:11.844455Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888
2025-04-10T10:56:11.844693Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3
... aChangedResult
2025-04-10T10:56:21.366530Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready
2025-04-10T10:56:21.366594Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-10T10:56:21.368708Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 953 RawX2: 8589937356 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\271\003\000\000\000\000\000\000\021\314\n\000\000\002\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8
2025-04-10T10:56:21.368780Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:56:21.368898Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-10T10:56:21.369132Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0]
2025-04-10T10:56:21.369226Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx
2025-04-10T10:56:21.369287Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.369330Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx
2025-04-10T10:56:21.369376Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-04-10T10:56:21.369414Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies
2025-04-10T10:56:21.369483Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615
2025-04-10T10:56:21.369549Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888
2025-04-10T10:56:21.369593Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.369622Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-04-10T10:56:21.369647Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot
2025-04-10T10:56:21.369674Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot
2025-04-10T10:56:21.369704Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.369730Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot
2025-04-10T10:56:21.369753Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance
2025-04-10T10:56:21.369776Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance
2025-04-10T10:56:21.369834Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:953:2764] for [0:281474976715665] at 72075186224037888
2025-04-10T10:56:21.369869Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue
2025-04-10T10:56:21.370078Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665
2025-04-10T10:56:21.370118Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending
2025-04-10T10:56:21.370269Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:953:2764], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true
2025-04-10T10:56:21.370324Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse
2025-04-10T10:56:21.370434Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:21.370467Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:21.370534Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T10:56:21.370581Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0
2025-04-10T10:56:21.370629Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance
2025-04-10T10:56:21.370674Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance
2025-04-10T10:56:21.370735Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888
2025-04-10T10:56:21.370784Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.370833Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit WaitForStreamClearance
2025-04-10T10:56:21.370879Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan
2025-04-10T10:56:21.370933Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan
2025-04-10T10:56:21.371242Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue
2025-04-10T10:56:21.371274Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0
2025-04-10T10:56:21.371316Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-10T10:56:21.371360Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T10:56:21.371400Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T10:56:21.372015Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:959:2769], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor
2025-04-10T10:56:21.372056Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor
2025-04-10T10:56:21.372290Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-10T10:56:21.372346Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T10:56:21.372650Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1
2025-04-10T10:56:21.373116Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:946:2757], Recipient [2:666:2570]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:946:2757] ServerId: [2:948:2759] }
2025-04-10T10:56:21.373159Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-10T10:56:21.373532Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0
2025-04-10T10:56:21.373714Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0
2025-04-10T10:56:21.373764Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0
2025-04-10T10:56:21.376266Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888
2025-04-10T10:56:21.376333Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888
2025-04-10T10:56:21.376532Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:21.376572Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:21.376641Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T10:56:21.376684Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0
2025-04-10T10:56:21.376726Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan
2025-04-10T10:56:21.376763Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan
2025-04-10T10:56:21.376812Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0
2025-04-10T10:56:21.376875Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.376915Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan
2025-04-10T10:56:21.376951Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose
2025-04-10T10:56:21.376985Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose
2025-04-10T10:56:21.377040Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE
2025-04-10T10:56:21.377120Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete
2025-04-10T10:56:21.377158Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose
2025-04-10T10:56:21.377199Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations
2025-04-10T10:56:21.377240Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations
2025-04-10T10:56:21.377292Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-10T10:56:21.377317Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations
2025-04-10T10:56:21.377346Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished
2025-04-10T10:56:21.377384Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:56:21.377415Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-10T10:56:21.377465Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T10:56:21.377493Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T10:56:21.377570Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T10:56:21.377616Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose
2025-04-10T10:56:21.377670Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
|68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
|68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST }
>> IncrementalRestoreScan::Empty
|68.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker
|68.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker
|68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
>> IncrementalRestoreScan::ChangeSenderEmpty
>> IncrementalRestoreScan::ChangeSenderSimple
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC }
2025-04-10T10:56:23.411894Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T10:56:23.416241Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-10T10:56:23.416748Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info
2025-04-10T10:56:23.416827Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:23.416895Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config
2025-04-10T10:56:23.416960Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:23.417028Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.417108Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info
2025-04-10T10:56:23.418252Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:261:2253], now have 1 active actors on pipe
2025-04-10T10:56:23.418400Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig
2025-04-10T10:56:23.439259Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.443828Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.444093Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.445116Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.445296Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:23.445887Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:23.446421Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:269:2259]
2025-04-10T10:56:23.449344Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed.
2025-04-10T10:56:23.449455Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:269:2259]
2025-04-10T10:56:23.449529Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T10:56:23.449592Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0
2025-04-10T10:56:23.450728Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:272:2261], now have 1 active actors on pipe
2025-04-10T10:56:23.537145Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T10:56:23.541639Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-10T10:56:23.542109Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info
2025-04-10T10:56:23.542195Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:23.542248Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config
2025-04-10T10:56:23.542295Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:23.542352Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.542440Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info
2025-04-10T10:56:23.543326Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:407:2362], now have 1 active actors on pipe
2025-04-10T10:56:23.543408Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig
2025-04-10T10:56:23.543642Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.547324Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.547495Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.548390Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-04-10T10:56:23.548544Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:23.549004Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:23.549257Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:415:2368]
2025-04-10T10:56:23.551805Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed.
2025-04-10T10:56:23.551889Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:415:2368]
2025-04-10T10:56:23.551978Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T10:56:23.552044Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0
2025-04-10T10:56:23.553004Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:418:2370], now have 1 active actors on pipe
2025-04-10T10:56:23.585257Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T10:56:23.601536Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-10T10:56:23.602091Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info
2025-04-10T10:56:23.602181Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:23.602235Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config
2025-04-10T10:56:23.602296Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:23.602364Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.602478Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info
2025-04-10T10:56:23.603460Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:467:2407], now have 1 active actors on pipe
2025-04-10T10:56:23.603622Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig
2025-04-10T10:56:23.603886Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:23.616081Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:23.616316Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:23.617361Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:23.617595Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:23.618457Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:23.618862Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:475:2413]
2025-04-10T10:56:23.621085Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed.
2025-04-10T10:56:23.621171Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:475:2413]
2025-04-10T10:56:23.621243Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 b ... 38] doesn't have tx info
2025-04-10T10:56:24.642483Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:24.642538Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config
2025-04-10T10:56:24.642596Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:24.642664Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:24.642747Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info
2025-04-10T10:56:24.643656Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:467:2407], now have 1 active actors on pipe
2025-04-10T10:56:24.643745Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig
2025-04-10T10:56:24.643995Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:24.646963Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:24.647161Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:24.648053Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 }
2025-04-10T10:56:24.648233Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:24.648705Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:24.648975Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:475:2413]
2025-04-10T10:56:24.651435Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed.
2025-04-10T10:56:24.651527Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:475:2413]
2025-04-10T10:56:24.651597Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T10:56:24.651670Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0
2025-04-10T10:56:24.652691Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:478:2415], now have 1 active actors on pipe
2025-04-10T10:56:24.676246Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T10:56:24.681091Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-10T10:56:24.681588Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info
2025-04-10T10:56:24.681656Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:24.681706Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config
2025-04-10T10:56:24.681778Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:24.681847Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:24.681931Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info
2025-04-10T10:56:24.682950Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:527:2452], now have 1 active actors on pipe
2025-04-10T10:56:24.683112Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig
2025-04-10T10:56:24.683371Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-04-10T10:56:24.686948Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-04-10T10:56:24.687151Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:24.688388Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-04-10T10:56:24.688562Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:24.689146Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:24.689484Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:535:2458]
2025-04-10T10:56:24.692377Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed.
2025-04-10T10:56:24.692478Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:535:2458]
2025-04-10T10:56:24.692564Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T10:56:24.692633Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0
2025-04-10T10:56:24.693660Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:538:2460], now have 1 active actors on pipe
2025-04-10T10:56:24.695696Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:546:2463], now have 1 active actors on pipe
2025-04-10T10:56:24.695920Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:547:2464], now have 1 active actors on pipe
2025-04-10T10:56:24.695964Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:548:2464], now have 1 active actors on pipe
2025-04-10T10:56:24.707271Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:553:2468], now have 1 active actors on pipe
2025-04-10T10:56:24.736575Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T10:56:24.739127Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-04-10T10:56:24.739436Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info
2025-04-10T10:56:24.739485Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-04-10T10:56:24.739679Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0
2025-04-10T10:56:24.740505Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:56:24.740557Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info
2025-04-10T10:56:24.740650Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep
2025-04-10T10:56:24.740954Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep
2025-04-10T10:56:24.741169Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:610:2513]
2025-04-10T10:56:24.743163Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep
2025-04-10T10:56:24.744526Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep
2025-04-10T10:56:24.744824Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep
2025-04-10T10:56:24.745237Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep
2025-04-10T10:56:24.745487Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep
2025-04-10T10:56:24.745536Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep
2025-04-10T10:56:24.745591Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-04-10T10:56:24.745632Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed.
2025-04-10T10:56:24.745686Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:610:2513]
2025-04-10T10:56:24.745744Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T10:56:24.745794Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0
2025-04-10T10:56:24.746794Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:547:2464] destroyed
2025-04-10T10:56:24.746883Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:546:2463] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } }
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest
>> TTxAllocatorClientTest::AllocateOverTheEdge
>> Cdc::InitialScanRacyProgressAndDrop [GOOD]
>> Cdc::EnqueueRequestProcessSend
>> TTxAllocatorClientTest::InitiatingRequest
|68.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
|68.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
|68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
Test command err:
2025-04-10T10:56:12.074596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T10:56:12.074788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:56:12.075017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c91/r3tmp/tmpgmHzIX/pdisk_1.dat
2025-04-10T10:56:12.748736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:56:12.800645Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:12.848016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:12.848151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:12.863357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:56:12.968377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T10:56:13.036640Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-04-10T10:56:13.037802Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-04-10T10:56:13.038369Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-10T10:56:13.038601Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:56:13.095141Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T10:56:13.096003Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:56:13.096144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T10:56:13.098095Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-10T10:56:13.098236Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-10T10:56:13.098324Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-10T10:56:13.098704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T10:56:13.098832Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T10:56:13.098935Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-10T10:56:13.110316Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T10:56:13.179985Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-10T10:56:13.180332Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T10:56:13.180522Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-10T10:56:13.180586Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-10T10:56:13.180629Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-10T10:56:13.180674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T10:56:13.180954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:13.181010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:13.181426Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-10T10:56:13.181573Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-10T10:56:13.181649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T10:56:13.181694Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:56:13.181755Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-10T10:56:13.181819Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T10:56:13.181879Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T10:56:13.181935Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-10T10:56:13.182003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T10:56:13.182193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:56:13.182242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:56:13.182306Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-10T10:56:13.182789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-04-10T10:56:13.182857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:56:13.182984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-10T10:56:13.183199Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-10T10:56:13.183243Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-10T10:56:13.183344Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-10T10:56:13.183422Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-10T10:56:13.183474Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-10T10:56:13.183514Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-10T10:56:13.183564Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T10:56:13.183849Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-10T10:56:13.183893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-10T10:56:13.183939Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-10T10:56:13.183978Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T10:56:13.184032Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-10T10:56:13.184068Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-10T10:56:13.184108Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-10T10:56:13.184142Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T10:56:13.184174Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-10T10:56:13.185766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-10T10:56:13.185812Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T10:56:13.201946Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-10T10:56:13.202040Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T10:56:13.202091Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T10:56:13.202171Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-10T10:56:13.202255Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-10T10:56:13.376027Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:56:13.376083Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:56:13.376120Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-10T10:56:13.376515Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-10T10:56:13.376550Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T10:56:13.376700Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T10:56:13.376749Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-04-10T10:56:13.376793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-04-10T10:56:13.376827Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-04-10T10:56:13.382064Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-10T10:56:13.382183Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T10:56:13.383236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:13.383282Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:13.383338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T10:56:1 ... 24.601055Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715668, MessageQuota: 0
2025-04-10T10:56:24.611945Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889
2025-04-10T10:56:24.612040Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889
2025-04-10T10:56:24.612326Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:972:2777], Recipient [2:972:2777]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.612368Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.612436Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-04-10T10:56:24.612475Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1
2025-04-10T10:56:24.612521Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan
2025-04-10T10:56:24.612553Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan
2025-04-10T10:56:24.612591Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0
2025-04-10T10:56:24.612654Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed
2025-04-10T10:56:24.612687Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan
2025-04-10T10:56:24.612720Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation
2025-04-10T10:56:24.612749Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation
2025-04-10T10:56:24.612976Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete
2025-04-10T10:56:24.613016Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation
2025-04-10T10:56:24.613068Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations
2025-04-10T10:56:24.613123Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations
2025-04-10T10:56:24.613162Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed
2025-04-10T10:56:24.613187Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations
2025-04-10T10:56:24.613214Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished
2025-04-10T10:56:24.613254Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:56:24.613280Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889
2025-04-10T10:56:24.613312Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations
2025-04-10T10:56:24.613351Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889
2025-04-10T10:56:24.626978Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-10T10:56:24.627073Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-04-10T10:56:24.627124Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation
2025-04-10T10:56:24.627229Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1162:2940], exec latency: 0 ms, propose latency: 0 ms
2025-04-10T10:56:24.627291Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-04-10T10:56:24.627666Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1162:2940], Recipient [2:974:2779]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true
2025-04-10T10:56:24.627710Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse
2025-04-10T10:56:24.627792Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500}
2025-04-10T10:56:24.627840Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-10T10:56:24.627871Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-10T10:56:24.628022Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:974:2779], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.628057Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.628107Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890
2025-04-10T10:56:24.628141Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1
2025-04-10T10:56:24.628178Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance
2025-04-10T10:56:24.628209Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance
2025-04-10T10:56:24.628243Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890
2025-04-10T10:56:24.628289Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed
2025-04-10T10:56:24.628327Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance
2025-04-10T10:56:24.628354Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan
2025-04-10T10:56:24.628382Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan
2025-04-10T10:56:24.628601Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue
2025-04-10T10:56:24.628627Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-10T10:56:24.628655Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890
2025-04-10T10:56:24.628685Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded
2025-04-10T10:56:24.628714Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890
2025-04-10T10:56:24.629582Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1184:2959], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor
2025-04-10T10:56:24.629634Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor
2025-04-10T10:56:24.629858Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1
2025-04-10T10:56:24.632409Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0
2025-04-10T10:56:24.750254Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0
2025-04-10T10:56:24.750337Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0
2025-04-10T10:56:24.752993Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890
2025-04-10T10:56:24.753050Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890
2025-04-10T10:56:24.753605Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:974:2779], Recipient [2:974:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.753649Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:56:24.753713Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890
2025-04-10T10:56:24.753745Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1
2025-04-10T10:56:24.753782Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan
2025-04-10T10:56:24.753811Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan
2025-04-10T10:56:24.753846Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0
2025-04-10T10:56:24.753886Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed
2025-04-10T10:56:24.753915Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit ReadTableScan
2025-04-10T10:56:24.753945Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation
2025-04-10T10:56:24.753972Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation
2025-04-10T10:56:24.762251Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete
2025-04-10T10:56:24.762339Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation
2025-04-10T10:56:24.762381Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations
2025-04-10T10:56:24.762414Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations
2025-04-10T10:56:24.762466Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed
2025-04-10T10:56:24.762491Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations
2025-04-10T10:56:24.762521Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished
2025-04-10T10:56:24.762552Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:56:24.762579Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890
2025-04-10T10:56:24.762612Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations
2025-04-10T10:56:24.762643Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890
2025-04-10T10:56:24.775102Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-10T10:56:24.775168Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-10T10:56:24.775206Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation
2025-04-10T10:56:24.775270Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1162:2940], exec latency: 0 ms, propose latency: 1 ms
2025-04-10T10:56:24.775319Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
>> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD]
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
>> TTxAllocatorClientTest::InitiatingRequest [GOOD]
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
|68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD]
Test command err:
2025-04-10T10:56:27.061505Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32
2025-04-10T10:56:27.062491Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19
2025-04-10T10:56:27.063363Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry.
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:56:27.070851Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.071599Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:56:27.084708Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.084899Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.085006Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.085070Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:56:27.085232Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.085324Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:56:27.085468Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:56:27.086185Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-10T10:56:27.086803Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.086870Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.086955Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-10T10:56:27.086992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 2025-04-10T10:56:27.087194Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.087382Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.087563Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.087737Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.087875Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-10T10:56:27.088289Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.088353Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.088464Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-04-10T10:56:27.088504Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 5000 to# 10000 2025-04-10T10:56:27.088696Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.088869Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.089178Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.089594Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-10T10:56:27.089755Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-10T10:56:27.090205Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.090291Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.090449Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-04-10T10:56:27.090499Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 10000 to# 15000 2025-04-10T10:56:27.090725Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> TTxAllocatorClientTest::ZeroRange >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-04-10T10:56:27.091303Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:56:27.092316Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:56:27.093188Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:56:27.095244Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.095827Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:56:27.107537Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.107704Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.107843Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.107899Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:56:27.108042Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.108150Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:56:27.108286Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:56:27.109074Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-10T10:56:27.109694Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.109775Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:27.109855Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-10T10:56:27.109898Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 >> TColumnShardTestReadWrite::RebootWriteRead |68.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TableCreator::CreateTables >> TTxAllocatorClientTest::Boot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TTxAllocatorClientTest::Boot [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> IncrementalRestoreScan::Empty [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |68.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-04-10T10:56:28.762122Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:56:28.763107Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:56:28.764234Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:56:28.766504Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.767206Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:56:28.782848Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.783028Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.783135Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.783252Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:56:28.783443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.783581Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:56:28.783724Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic >> ViewerTopicDataTests::TopicDataTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-04-10T10:56:28.348858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:28.349098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:28.349291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a2a/r3tmp/tmpW3O75A/pdisk_1.dat 2025-04-10T10:56:28.764118Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Exhausted 2025-04-10T10:56:28.764255Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-04-10T10:56:28.764326Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Finish 0 >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-04-10T10:56:11.729010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:11.729327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:11.729641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca3/r3tmp/tmpNRewBB/pdisk_1.dat 2025-04-10T10:56:12.418106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.527076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:12.576598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:12.576764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:12.588308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:12.680032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:12.825049Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T10:56:12.825386Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:12.895571Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T10:56:12.895875Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:12.907175Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:12.907436Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:12.909678Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:12.909770Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:12.909875Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:12.910402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:12.910723Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:12.910837Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T10:56:12.911300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:12.911408Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:12.913090Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T10:56:12.913160Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T10:56:12.913210Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T10:56:12.913571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:12.913704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:12.913779Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T10:56:12.927155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:12.991459Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:12.991778Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:12.992008Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T10:56:12.992058Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:12.992106Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:12.992166Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:12.992300Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:12.992346Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T10:56:12.992410Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:12.992547Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T10:56:12.992580Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T10:56:12.992605Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T10:56:12.992645Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:56:12.993254Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:12.993378Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:12.993484Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T10:56:12.993558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T10:56:12.993672Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:12.993723Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:12.993782Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:12.993837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:12.993891Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:56:12.993922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:12.993984Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T10:56:12.994026Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:56:12.994131Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T10:56:12.994385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
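
The TX_DATASHARD TRACE lines in this log walk each operation through a fixed chain of execution units (for the read-table path above: WaitForStreamClearance -> ReadTableScan -> CompleteOperation -> CompletedOperations), and the status a unit reports decides what happens next: Executed advances the plan, Continue parks the operation until an external event (here, the scan finishing) re-enters the loop, and DelayComplete defers the client reply to the transaction's Complete phase. A minimal sketch of that dispatch loop follows; the unit names and statuses are taken from the trace, everything else (types, main, output) is hypothetical illustration, not YDB source.

// Minimal model of the execution-unit loop seen in the TX_DATASHARD TRACE
// lines above. Unit names and statuses mirror the log; the rest is a sketch.
#include <iostream>
#include <string>
#include <utility>
#include <vector>

enum class EExecutionStatus { Executed, Continue, DelayComplete };

int main() {
    using TUnit = std::pair<std::string, EExecutionStatus>;
    // Statuses each unit reports for op [3500:281474976715668] in the trace:
    // the scan parks the op once (Continue), CompleteOperation defers the
    // client reply (DelayComplete), everything else just advances.
    std::vector<TUnit> plan = {
        {"WaitForStreamClearance", EExecutionStatus::Executed},
        {"ReadTableScan",          EExecutionStatus::Continue},
        {"ReadTableScan",          EExecutionStatus::Executed}, // resumed after "FullScan complete"
        {"CompleteOperation",      EExecutionStatus::DelayComplete},
        {"CompletedOperations",    EExecutionStatus::Executed},
    };
    for (const auto& [unit, status] : plan) {
        std::cout << "Trying to execute op on unit " << unit << "\n";
        switch (status) {
        case EExecutionStatus::Continue:
            // Op stays attached to this unit; a later event re-enters
            // the loop on the same unit (see TEvRegisterScanActor above).
            std::cout << "  status Continue: park and wait\n";
            break;
        case EExecutionStatus::DelayComplete:
            // Side effects (sending the result) happen in the Complete
            // phase of the transaction, not here.
            std::cout << "  status DelayComplete: advance, reply later\n";
            break;
        case EExecutionStatus::Executed:
            std::cout << "  status Executed: advance execution plan\n";
            break;
        }
    }
}
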
2025-04-10T10:56:12.994751Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:56:12.994914Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T10:56:12.997937Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:56:13.014989Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T10:56:13.015152Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:13.067147Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T10:56:13.067399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:13.067652Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:13.067782Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:13.068428Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:13.083018Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:56:13.083168Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:13.221549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T10:56:13.221762Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T10:56:13.240640Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:56:13.240763Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:13.241296Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:56:13.241356Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:56:13.241656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:13.241719Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:56:13.241785Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:56:13.247762Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:56:13.248210Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T10:56:13.248515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:56:13.248566Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:56:13.248616Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T10:56:13.248947Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:56:13.249067Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:56:13.249291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:13.249367Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T10:56:13.252280Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T10:56:28.060707Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-04-10T10:56:28.060759Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-04-10T10:56:28.060818Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-10T10:56:28.061035Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:56:28.061076Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-04-10T10:56:28.061125Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T10:56:28.061171Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:56:28.061230Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T10:56:28.061256Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T10:56:28.061294Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-10T10:56:28.075165Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T10:56:28.075270Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-04-10T10:56:28.075346Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-04-10T10:56:28.329726Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T10:56:28.329811Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-04-10T10:56:28.330815Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jrfng4xhe6neswje8r961tze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODY2NDcyZDktMmUyNjljMjctNTZjY2VhMDItNjliMjcxMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:28.334217Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T10:56:28.334422Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T10:56:28.334516Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-04-10T10:56:28.334574Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-10T10:56:28.334639Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-04-10T10:56:28.334760Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T10:56:28.334798Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-04-10T10:56:28.334836Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T10:56:28.334878Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T10:56:28.334929Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-10T10:56:28.334981Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T10:56:28.335005Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T10:56:28.335023Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T10:56:28.335054Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-04-10T10:56:28.335174Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T10:56:28.335457Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1115:2908], 0} after executionsCount# 1 2025-04-10T10:56:28.335520Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T10:56:28.335656Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} finished in read 2025-04-10T10:56:28.335727Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T10:56:28.335748Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T10:56:28.335773Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-04-10T10:56:28.335798Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-10T10:56:28.335844Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T10:56:28.335862Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T10:56:28.335881Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-10T10:56:28.335931Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T10:56:28.336042Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T10:56:28.337763Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T10:56:28.337821Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-04-10T10:56:28.548253Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T10:56:28.548350Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-04-10T10:56:28.549499Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfng55pdd39myqd1b3bfbta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzNmNzljNzAtZjA0NzFjMGYtMWNmYjdjZWEtOWU4NTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:28.552862Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1146:2933], Recipient [3:910:2738]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-04-10T10:56:28.553087Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-10T10:56:28.553161Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-04-10T10:56:28.553219Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-10T10:56:28.553296Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-10T10:56:28.553412Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T10:56:28.553480Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-10T10:56:28.553531Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T10:56:28.553579Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T10:56:28.553647Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-10T10:56:28.553697Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
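
The read-iterator lines just above also show the quota bookkeeping: the TEvRead request carries MaxRows: 1001 and MaxBytes: 5242880, and each response deducts what it sent, which is exactly how "rowCount# 2, bytes# 64" leaves "quota rows left# 999, quota bytes left# 5242816". A tiny sketch of that arithmetic, assuming a hypothetical struct around the fields named in the log:

// Quota arithmetic behind the read-iterator TRACE lines above. Field names
// are copied from the log; the struct itself is a hypothetical sketch.
#include <cassert>
#include <cstdint>

struct TReadQuota {
    uint64_t RowsLeft;
    uint64_t BytesLeft;

    // Deduct one response from the remaining quota.
    void OnResponse(uint64_t rowCount, uint64_t bytes) {
        RowsLeft  -= rowCount;
        BytesLeft -= bytes;
    }
};

int main() {
    // Values from the trace: MaxRows: 1001, MaxBytes: 5242880,
    // then a response with rowCount# 2, bytes# 64.
    TReadQuota q{1001, 5242880};
    q.OnResponse(2, 64);
    assert(q.RowsLeft == 999);       // "quota rows left# 999"
    assert(q.BytesLeft == 5242816);  // "quota bytes left# 5242816"
}
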
2025-04-10T10:56:28.553727Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T10:56:28.553774Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-10T10:56:28.553804Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-10T10:56:28.553922Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-10T10:56:28.554358Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1146:2933], 0} after executionsCount# 1 2025-04-10T10:56:28.554431Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T10:56:28.554602Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} finished in read 2025-04-10T10:56:28.554676Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T10:56:28.554708Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T10:56:28.554740Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T10:56:28.554769Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-10T10:56:28.554827Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T10:56:28.554854Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T10:56:28.554883Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-10T10:56:28.554930Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T10:56:28.555057Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T10:56:28.557548Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1146:2933], Recipient [3:910:2738]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T10:56:28.557637Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> TColumnShardTestReadWrite::ReadGroupBy >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TColumnShardTestReadWrite::CompactionGC ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-04-10T10:56:28.344045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled 
retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:28.344400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:28.344741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0019c9/r3tmp/tmptoHscf/pdisk_1.dat 2025-04-10T10:56:29.333229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T10:56:29.336233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.336637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:56:29.336970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:56:29.337070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.338265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:29.338513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:56:29.338897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.338981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:56:29.339085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:56:29.339128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:56:29.339986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.340066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:56:29.340119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:56:29.340681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.340791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.340849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:29.340924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-10T10:56:29.347759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:56:29.348632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:56:29.348907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:56:29.350453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:29.350515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T10:56:29.350582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:29.460554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T10:56:29.460645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:29.509720Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:29.511030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:29.511185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:29.511365Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:29.524625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:29.612425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:29.612652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:56:29.612742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:29.613103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:56:29.613181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:29.613406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:56:29.613570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:56:29.622868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:56:29.622952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:56:29.623254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:56:29.623331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:572:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-10T10:56:29.623750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:29.623806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-10T10:56:29.623949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:29.623990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:29.624039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:29.624078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:29.624127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:56:29.624183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:29.624233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:56:29.624267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:56:29.624359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T10:56:29.624427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-10T10:56:29.624468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-10T10:56:29.631558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-10T10:56:29.631786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-10T10:56:29.631844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-10T10:56:29.631898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-10T10:56:29.631974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:56:29.632158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-10T10:56:29.632221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-04-10T10:56:29.633420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-10T10:56:29.637993Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-04-10T10:56:29.638164Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:29.638450Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:56:29.646045Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:29.646201Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:29.646923Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:29.647028Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-10T10:56:30.483774Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-10T10:56:30.483887Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-10T10:56:30.484359Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:30.484447Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-04-10T10:56:30.485611Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T10:56:30.486787Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 
72057594046644480, LocalPathId: 2]][1:832:2684] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:30.487085Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:30.487424Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T10:56:30.487598Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-10T10:54:43.716240Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-10T10:54:43.716328Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-10T10:54:43.716557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 
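The subscriber-combinations dump that begins here opens with the fixed scheme board setup sequence: each populator first introduces itself to every replica with TEvHandshake, then pins its generation with TEvCommitGeneration; only after that do subscriptions and notifications flow. A minimal toy model of the replica side of that exchange — the type and method names are illustrative only, not the actual NKikimr actor interfaces:

#include <cstdint>
#include <iostream>
#include <map>

// Toy model of a scheme board replica's handshake/commit handling.
// "Replica", "Handshake" and "CommitGeneration" are illustrative names.
struct Replica {
    std::map<uint64_t, uint64_t> Committed; // owner id -> committed generation
    std::map<uint64_t, uint64_t> Pending;   // owner id -> generation offered in handshake

    bool Handshake(uint64_t owner, uint64_t generation) {
        if (generation < Committed[owner])
            return false;                   // stale populator, reject
        Pending[owner] = generation;
        return true;                        // "Successful handshake"
    }

    void CommitGeneration(uint64_t owner, uint64_t generation) {
        if (Pending[owner] == generation)
            Committed[owner] = generation;  // "Commit generation"
    }
};

int main() {
    Replica replica;
    if (replica.Handshake(800, 1))
        replica.CommitGeneration(800, 1);
    std::cout << "owner 800 committed at generation "
              << replica.Committed[800] << '\n';
}

The generation check is what fences off a stale populator after a schemeshard restart: a newer incarnation handshakes with a higher generation, and anything the old one sends afterwards is rejected.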
2025-04-10T10:54:43.716591Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-10T10:54:43.716663Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-10T10:54:43.716699Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-10T10:54:43.716936Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-10T10:54:43.716981Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-10T10:54:43.717077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:54:43.717547Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-10T10:54:43.717592Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2025-04-10T10:54:43.717707Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.717875Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-10T10:54:43.717900Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2025-04-10T10:54:43.717949Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.718080Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-10T10:54:43.718104Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2025-04-10T10:54:43.718136Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:43.738371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-04-10T10:54:43.738508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-10T10:54:43.738563Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-04-10T10:54:43.738624Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-10T10:54:43.738683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-04-10T10:54:43.738718Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-10T10:54:43.738797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-04-10T10:54:43.738879Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-04-10T10:54:43.738927Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:54:43.738984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-04-10T10:54:43.739037Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-10T10:54:43.739290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-04-10T10:54:43.739335Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T10:54:43.739418Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T10:54:43.739580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-04-10T10:54:43.739641Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-10T10:54:43.739700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-04-10T10:54:43.739756Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-10T10:54:44.390714Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-04-10T10:54:44.390787Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-10T10:54:44.390917Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# 
[3:34:2065] 2025-04-10T10:54:44.390944Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-04-10T10:54:44.390988Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-10T10:54:44.391029Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-10T10:54:44.391228Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-10T10:54:44.391259Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-10T10:54:44.391337Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:54:44.391728Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-04-10T10:54:44.391768Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2025-04-10T10:54:44.391856Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:44.391979Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:42:2068] 2025-04-10T10:54:44.392000Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2025-04-10T10:54:44.392049Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:44.392176Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-04-10T10:54:44.392196Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2025-04-10T10:54:44.392229Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:54:44.392295Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-04-10T10:54:44.392338Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-04-10T10:54:44.392388Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-04-10T10:54:44.392422Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-04-10T10:54:44.392453Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2025-04-10T10:54:44.392490Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2068] 2025-04-10T10:54:44.392563Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:38:2068] 2025-04-10T10:54:44.392619Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:39:2068] 2025-04-10T10:54:44.392668Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:37:2068][/root/tenant] Set up state: owner# [3:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:54:44.392722Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/ ... omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-10T10:56:30.017922Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-10T10:56:30.018011Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-10T10:56:30.018841Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-10T10:56:30.018906Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-10T10:56:30.019004Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-10T10:56:30.019050Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-04-10T10:56:30.019353Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-10T10:56:30.019394Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-04-10T10:56:30.019521Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:56:30.020124Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-04-10T10:56:30.020179Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.020284Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.020508Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-04-10T10:56:30.020548Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.020608Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.020796Z node 397 
:SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-04-10T10:56:30.020830Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.020886Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.020997Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-04-10T10:56:30.021070Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-04-10T10:56:30.021135Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-04-10T10:56:30.021188Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-04-10T10:56:30.021236Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-04-10T10:56:30.021287Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-04-10T10:56:30.021390Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-04-10T10:56:30.021510Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-04-10T10:56:30.021573Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:56:30.021649Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:40:2068] 2025-04-10T10:56:30.021701Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-10T10:56:30.568550Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 
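As the repeated Upsert description / Subscribe entries in this dump show, a replica that receives TEvSubscribe for a path it has never seen creates an empty placeholder for it and immediately answers with TEvNotify { Version: 0 }, which the subscriber treats as "nothing known yet". A sketch of that behaviour under the same simplifying assumptions (hypothetical types, std-only C++, not the real actor protocol):

#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Toy model of replica-side subscription: unknown paths get an empty
// placeholder ("Upsert description") and the subscriber is answered
// immediately with the current version, 0 meaning "nothing known yet".
struct Replica {
    struct TPathState { uint64_t Version = 0; };
    std::map<std::string, TPathState> Paths;
    std::map<std::string, std::vector<std::function<void(uint64_t)>>> Subscribers;

    void Subscribe(const std::string& path, std::function<void(uint64_t)> notify) {
        TPathState& state = Paths[path];    // upserts an empty description
        Subscribers[path].push_back(notify);
        notify(state.Version);              // immediate TEvNotify { Version: N }
    }
};

int main() {
    Replica replica;
    replica.Subscribe("/root/tenant", [](uint64_t version) {
        std::cout << "TEvNotify Version: " << version << '\n'; // prints 0
    });
}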
2025-04-10T10:56:30.568629Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-10T10:56:30.568786Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-10T10:56:30.568830Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-10T10:56:30.568914Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-04-10T10:56:30.568954Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-04-10T10:56:30.569198Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-10T10:56:30.569239Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-04-10T10:56:30.569346Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T10:56:30.569918Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-04-10T10:56:30.569970Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.570059Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.570301Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-04-10T10:56:30.570334Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.570415Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.570606Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-04-10T10:56:30.570640Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T10:56:30.570690Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-10T10:56:30.570774Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-04-10T10:56:30.570841Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-04-10T10:56:30.570906Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-04-10T10:56:30.570956Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 
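The Set up state / Ignore empty state lines that follow are the main subscriber folding the three replica answers into one view: the first answers establish a state, and later answers that carry nothing newer are dropped, while a genuinely newer version produces the Update to strong state transition seen earlier in this dump. A toy version of that fold, again with illustrative names only (the real subscriber also compares path ids, domain ids and abandoned schemeshards):

#include <cstdint>
#include <iostream>
#include <optional>

// Toy model of the main subscriber merging replica notifications.
struct TState {
    bool Deleted;
    uint64_t Version;
};

struct MainSubscriber {
    std::optional<TState> Current;
    int Answers = 0;

    void OnNotify(uint64_t version) {
        ++Answers;
        TState incoming{version == 0, version};
        if (!Current) {
            if (Answers >= 2) {             // quorum of the three replicas
                Current = incoming;
                std::cout << "Set up state, version " << version << '\n';
            }
        } else if (incoming.Version <= Current->Version) {
            std::cout << "Ignore empty state\n";  // nothing newer to apply
        } else {
            Current = incoming;             // "Update to strong state"
        }
    }
};

int main() {
    MainSubscriber subscriber;
    subscriber.OnNotify(0);  // first replica: counted, still below quorum
    subscriber.OnNotify(0);  // second replica: quorum reached, state set up
    subscriber.OnNotify(0);  // third replica: adds nothing, ignored
}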
2025-04-10T10:56:30.571000Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-04-10T10:56:30.571053Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-04-10T10:56:30.571159Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068] 2025-04-10T10:56:30.571240Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068] 2025-04-10T10:56:30.571297Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T10:56:30.571370Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068] 2025-04-10T10:56:30.571420Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() >> KqpScan::ScanRetryReadRanges [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-04-10T10:56:29.370177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:29.370628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:29.370945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a4f/r3tmp/tmpwqRzMV/pdisk_1.dat 2025-04-10T10:56:30.255343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T10:56:30.255608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.255840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:56:30.256169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:56:30.256246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.257025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:30.257211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:56:30.257481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.257560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:56:30.257601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:56:30.257665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:56:30.258315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.258394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:56:30.258441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:56:30.258889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.258984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.259035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:30.259111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-10T10:56:30.271217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:56:30.271987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:56:30.272208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:56:30.273351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:30.273404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T10:56:30.273482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:30.306127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T10:56:30.306243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:30.347109Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:30.348170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:30.348298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:30.348441Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:30.360752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:30.441900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:56:30.442139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:56:30.442239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:30.442599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:56:30.442679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-10T10:56:30.442872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:56:30.442950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:56:30.444205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:56:30.444256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:56:30.444459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:56:30.444527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:572:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-10T10:56:30.444894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.444954Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-10T10:56:30.445065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:30.445107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:30.445152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:56:30.445192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:30.445233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:56:30.445286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:56:30.445324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:56:30.445357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:56:30.445460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T10:56:30.445529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-10T10:56:30.445565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-10T10:56:30.448385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-10T10:56:30.448522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-10T10:56:30.448564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-10T10:56:30.448606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-10T10:56:30.448670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:56:30.448799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-10T10:56:30.448867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-04-10T10:56:30.449561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-10T10:56:30.450018Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-04-10T10:56:30.450058Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:30.450276Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T10:56:30.458277Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:30.458401Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:30.458985Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:30.459063Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... hemeshard: 72057594046644480 2025-04-10T10:56:31.160059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-10T10:56:31.160199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-10T10:56:31.160242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-10T10:56:31.160307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-10T10:56:31.160341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-10T10:56:31.160382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-04-10T10:56:31.160453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:593:2518] message: TxId: 281474976715658 2025-04-10T10:56:31.160503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-10T10:56:31.160545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-10T10:56:31.160586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-04-10T10:56:31.160757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T10:56:31.161725Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-10T10:56:31.163068Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-10T10:56:31.165365Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:31.165547Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-04-10T10:56:31.166879Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T10:56:31.168186Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:828:2680], serverId# [1:829:2681], sessionId# [0:0:0] 2025-04-10T10:56:31.169115Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:31.169432Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:31.169761Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T10:56:31.169971Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-04-10T10:56:31.170110Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:31.170438Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-04-10T10:56:31.170548Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T10:56:31.170965Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:835:2686], serverId# [1:836:2687], sessionId# [0:0:0] 2025-04-10T10:56:31.218900Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T10:56:31.219057Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:56:31.219210Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:31.219284Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:56:31.219500Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> DataShardVolatile::DistributedWrite >> DataShardVolatile::DistributedWriteThenImmediateUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-10T10:54:01.024937Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744282441024897 2025-04-10T10:54:01.370191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636040089358447:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.370250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.425512Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636040396862997:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.425639Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.635083Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:54:01.633973Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153b/r3tmp/tmp7ISAF5/pdisk_1.dat 2025-04-10T10:54:01.864737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.864875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.868378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.868472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.870313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.871969Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:01.872862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.904779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7688, node 1 2025-04-10T10:54:01.952032Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:54:01.952050Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:54:01.991086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00153b/r3tmp/yandextuNN1C.tmp 2025-04-10T10:54:01.991117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00153b/r3tmp/yandextuNN1C.tmp 2025-04-10T10:54:01.991281Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00153b/r3tmp/yandextuNN1C.tmp 2025-04-10T10:54:01.991423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:02.049508Z INFO: TTestServer started on Port 31780 GrpcPort 7688 TClient is connected to server localhost:31780 PQClient connected to localhost:7688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:02.319128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:54:04.441107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636053281765203:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.441204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636053281765191:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.441449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.449240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T10:54:04.474513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636053281765211:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T10:54:04.539389Z node 2 :TX_PROXY ERROR: Actor# [2:7491636053281765239:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.909848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636052974261414:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.910368Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTUzNWQ0MTktNjNkODhjOGYtOTgxMWI1MTAtYWU0OGNlZWU=, ActorId: [1:7491636052974261364:2335], ActorState: ExecuteState, TraceId: 01jrfnbrqyb927xyrq9w9npt62, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.910511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.912599Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.916676Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636053281765246:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.918384Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTA1ZjUzMzUtNzYzMDk3YzQtMjVkMjNiNC1iYmM5MDBkYg==, ActorId: [2:7491636053281765178:2307], ActorState: ExecuteState, TraceId: 01jrfnbrmeet8d0f8ed2vxanex, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.918879Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:05.064305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:54:05.207029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7688", true, true, 1000); 2025-04-10T10:54:05.505016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfnbsga33j5r6wk4xby93a6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI0NTg0NWMtMjUxOGE2OTItZWMxOTk1ZjgtNjJjYWY2Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636057269229114:2969] 2025-04-10T10:54:06.374274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636040089358447:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.374362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:06.430272Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636040396862997:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:06.430338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T10:54:10.702313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:7688 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T10:54:10.831625Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:7688 MetaRequest { CmdCreat ... OOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-10T10:56:26.239890Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7491636665141292800:2606] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T10:56:26.245103Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7491636665141292800:2606] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-10T10:56:26.386267Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715678. Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:26.386448Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7491636661922232207:2462] TxId: 281474976715678. Ctx: { TraceId: 01jrfng2ex719ngkpcvh38x0gx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YjFjMzRmMjctM2ZhMmIwZGQtOTc2MDk4NTYtNzU1MjViN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:26.386779Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YjFjMzRmMjctM2ZhMmIwZGQtOTc2MDk4NTYtNzU1MjViN2M=, ActorId: [14:7491636657627264897:2462], ActorState: ExecuteState, TraceId: 01jrfng2ex719ngkpcvh38x0gx, Create QueryResponse for error on request, msg: 2025-04-10T10:56:26.389477Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jrfng325abmzdye16yd5tt1g" } } YdbStatus: UNAVAILABLE ConsumedRu: 404 } 2025-04-10T10:56:26.450786Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720699. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T10:56:26.450958Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636665141292811:2608] TxId: 281474976720699. Ctx: { TraceId: 01jrfng3459xzbtmdd3qwtgkb7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjJiZTk3ZDUtOTRlNmExM2ItMWE0NWVkYWEtOGZmMjhhMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T10:56:26.451235Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YjJiZTk3ZDUtOTRlNmExM2ItMWE0NWVkYWEtOGZmMjhhMGI=, ActorId: [13:7491636665141292801:2608], ActorState: ExecuteState, TraceId: 01jrfng3459xzbtmdd3qwtgkb7, Create QueryResponse for error on request, msg: 2025-04-10T10:56:26.452828Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7491636665141292800:2606] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjJiZTk3ZDUtOTRlNmExM2ItMWE0NWVkYWEtOGZmMjhhMGI=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfng3461zy8jc4q4qn211f7" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-10T10:56:26.452973Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjJiZTk3ZDUtOTRlNmExM2ItMWE0NWVkYWEtOGZmMjhhMGI=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfng3461zy8jc4q4qn211f7" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-10T10:56:26.453404Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-10T10:56:26.455568Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjJiZTk3ZDUtOTRlNmExM2ItMWE0NWVkYWEtOGZmMjhhMGI=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfng3461zy8jc4q4qn211f7" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-10T10:56:26.455613Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session will restart in 2.000000s 2025-04-10T10:56:26.455782Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session: Do CDS request 2025-04-10T10:56:26.455826Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Do schedule cds request after 2000 ms 2025-04-10T10:56:26.954846Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720701. Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:26.955018Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636665141292859:2599] TxId: 281474976720701. Ctx: { TraceId: 01jrfng2xjd5kdy2yy761msnqt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzhmYTczZGMtMmJjNjJkMWItODk4YjYzNWYtMmJkMTViNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:26.955309Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NzhmYTczZGMtMmJjNjJkMWItODk4YjYzNWYtMmJkMTViNDk=, ActorId: [13:7491636665141292781:2599], ActorState: ExecuteState, TraceId: 01jrfng2xjd5kdy2yy761msnqt, Create QueryResponse for error on request, msg: 2025-04-10T10:56:26.957102Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jrfng3ksc3e5hm1egk13e5w4" } } YdbStatus: UNAVAILABLE ConsumedRu: 457 } 2025-04-10T10:56:27.099805Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715680. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:27.099975Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7491636661922232292:2470] TxId: 281474976715680. Ctx: { TraceId: 01jrfng3rme274759rhafsh3a5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZDAyOTdkOGUtZmMwNzcwYTMtN2MxNjBmNzItZDUzZTZjNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:27.100285Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZDAyOTdkOGUtZmMwNzcwYTMtN2MxNjBmNzItZDUzZTZjNWM=, ActorId: [14:7491636661922232289:2470], ActorState: ExecuteState, TraceId: 01jrfng3rme274759rhafsh3a5, Create QueryResponse for error on request, msg: 2025-04-10T10:56:27.101555Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfng3rqf4dn1fcxt9peb7zv" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T10:56:27.185907Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720703. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:27.186063Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636665141292950:2613] TxId: 281474976720703. Ctx: { TraceId: 01jrfng3vgbvzdga3fqbkk31gq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZThlYzUwYzQtNzJiMGExOTQtNGEyYTNhOTctODI2ZmQ3YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:56:27.186347Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZThlYzUwYzQtNzJiMGExOTQtNGEyYTNhOTctODI2ZmQ3YzA=, ActorId: [13:7491636665141292947:2613], ActorState: ExecuteState, TraceId: 01jrfng3vgbvzdga3fqbkk31gq, Create QueryResponse for error on request, msg: 2025-04-10T10:56:27.188029Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfng3vh78j0qgnb5e9trgsd" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T10:56:27.242268Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session: close. Timeout = 0 ms 2025-04-10T10:56:27.242374Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session will now close 2025-04-10T10:56:27.242472Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session: aborting 2025-04-10T10:56:27.243509Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-10T10:56:27.243575Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e8fd957d-4c7471f8-9d827a84-28dedb8b_0] Write session: destroy 2025-04-10T10:56:28.847813Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720705. Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:28.847972Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7491636673731227630:2617] TxId: 281474976720705. Ctx: { TraceId: 01jrfng4st9q1abss45x96n4x3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTVlODgyZjItM2FmZjg0ZjctYzgxNWM3MzItNmFlODYxN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-10T10:56:28.848267Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZTVlODgyZjItM2FmZjg0ZjctYzgxNWM3MzItNmFlODYxN2Y=, ActorId: [13:7491636669436260305:2617], ActorState: ExecuteState, TraceId: 01jrfng4st9q1abss45x96n4x3, Create QueryResponse for error on request, msg: 2025-04-10T10:56:28.849663Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jrfng5fe2xmppffrk4hakw38" } } YdbStatus: UNAVAILABLE ConsumedRu: 455 } |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> SystemView::ShowCreateTablePartitionByHash [GOOD] >> SystemView::ShowCreateTablePartitionSettings >> TGroupMapperTest::Mirror3dc |68.3%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::AffectedShardsLimit [GOOD] >> TGroupMapperTest::MonteCarlo >> TFlatTest::AutoSplitMergeQueue [GOOD] |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-04-10T10:56:07.475706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:07.476519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:07.476610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:07.483577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:07.483977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:07.484403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000918/r3tmp/tmplD4Bxk/pdisk_1.dat 2025-04-10T10:56:08.215403Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:08.521639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:08.645302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:08.645489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:08.666751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:08.666874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:08.682088Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:56:08.682685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:08.683148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:09.029265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:09.931076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1401:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.931178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1412:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.931261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.938222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:10.516407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1415:2844], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:10.703028Z node 1 :TX_PROXY ERROR: Actor# [1:1543:2913] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:12.216304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnfk6821r8d43fx8nwvfyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ4YzZhMDktYjEwMDhhODUtYjU5MTRlNDMtYmUzMjdjMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-04-10T10:56:13.081522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnfnf52z8cgykdy81hh8kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlYWUxNTQtODc4MzlmNzktMWUwOWE2MmUtY2YyZTljNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1622:2965] -> [2:1578:2438] -- EvScanData from [2:1627:2445]: pass 2025-04-10T10:56:14.762698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnfnf52z8cgykdy81hh8kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlYWUxNTQtODc4MzlmNzktMWUwOWE2MmUtY2YyZTljNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-10T10:56:14.766276Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-10T10:56:25.816351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:25.817037Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:25.817492Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:25.819526Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:25.819921Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:25.820222Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000918/r3tmp/tmpCoHrOx/pdisk_1.dat 2025-04-10T10:56:26.255456Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:26.450510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:26.569177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:26.569375Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:26.575309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:26.575468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:26.602578Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T10:56:26.603285Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:26.603741Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:26.927431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.663036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1403:2838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.663193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1413:2843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.663944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.672646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:28.185478Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1417:2846], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:28.285109Z node 3 :TX_PROXY ERROR: Actor# [3:1545:2915] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:29.246784Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfng4gc08yhg4ty8qbyex77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWVhMTNhYmQtMmNmYTRkZC02YzI2MjMyMi0yMTdkMjlhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2025-04-10T10:56:30.511879Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfng6385h1afrk4m3jeyfj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmM1YzRlNGQtOTE4ZTUxYmEtZDNmMDVlMGYtYzFmZGQwYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1625:2968] -> [4:1580:2438] -- EvScanData from [4:1630:2445]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2025-04-10T10:56:30.547792Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> TGroupMapperTest::Mirror3dc [GOOD] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> TableCreator::CreateTables [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] Test command err: Trying to start YDB, gRPC: 9291, MsgBus: 12582 2025-04-10T10:55:55.161201Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636530355440995:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:55.161260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001789/r3tmp/tmpEwxcgy/pdisk_1.dat 2025-04-10T10:55:55.687277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:55.694622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:55.694789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:55.703621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9291, node 1 2025-04-10T10:55:55.852169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:55.852202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:55.852210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:55.852333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12582 TClient is connected to server localhost:12582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:56.614458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.667284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:56.676384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.847137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:57.090094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:57.254284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.259747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636547535311945:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:59.259853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:59.684877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:59.852178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:59.924960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:59.993028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.077048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.166657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636530355440995:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:00.166730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:00.209983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.339053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636551830279768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.339153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.339856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636551830279773:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.348427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:00.377188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636551830279775:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:00.478113Z node 1 :TX_PROXY ERROR: Actor# [1:7491636551830279834:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25293, MsgBus: 10321 2025-04-10T10:56:08.780309Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636587404553987:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001789/r3tmp/tmpYs8mbS/pdisk_1.dat 2025-04-10T10:56:08.900443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:09.108462Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:09.126789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:09.126879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:09.128917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25293, node 2 2025-04-10T10:56:09.306793Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:09.306815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:09.306829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:09.306964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10321 TClient is connected to server localhost:10321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:10.161035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:10.168501Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:10.180110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.313453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:10.743138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting.. ... Id: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:30.221237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T10:56:30.427616Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfng7241d6780abxwfw6xrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmYxZTZmMjUtZmQ1ZmMxOWYtYzVmY2RmMDEtODFjMzFiYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:30.626455Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636679645129516:3845], for# user0@builtin, access# UpdateRow 2025-04-10T10:56:30.626789Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715677. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:56:30.661117Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTRkNjcxYzItMjhiYjE3OWEtNTcyNTg5Yy1lYmQyOGY2OQ==, ActorId: [3:7491636679645129499:2562], ActorState: ExecuteState, TraceId: 01jrfng78d9h8psea0ndm41evt, Create QueryResponse for error on request, msg: 2025-04-10T10:56:30.835841Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636679645129544:3855], for# user0@builtin, access# EraseRow 2025-04-10T10:56:30.836055Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715678. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:56:30.836322Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjlmY2FjYjQtNWM3Mzg0OWMtYTEwNTVkNjItYjExNDI2MGY=, ActorId: [3:7491636679645129527:2573], ActorState: ExecuteState, TraceId: 01jrfng7g6a74t5csmaffbn17m, Create QueryResponse for error on request, msg: 2025-04-10T10:56:30.856867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T10:56:31.092173Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096873:3870], for# user0@builtin, access# SelectRow 2025-04-10T10:56:31.092410Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:56:31.092683Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzRkYTE0YzYtNGI5NjkxZTItNTk3MGVmM2ItMmI0N2MyMWI=, ActorId: [3:7491636679645129562:2585], ActorState: ExecuteState, TraceId: 01jrfng7p3ed9hb0fyejxtrnjz, Create QueryResponse for error on request, msg: 2025-04-10T10:56:31.093508Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfng7p3ed9hb0fyejxtrnjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRkYTE0YzYtNGI5NjkxZTItNTk3MGVmM2ItMmI0N2MyMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:31.287141Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096903:3880], for# user0@builtin, access# UpdateRow 2025-04-10T10:56:31.287423Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715682. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:56:31.287671Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2U2OTBjOC02NWRhMzMwZi00ZDhmZTJjOS1iN2Q5MjdlMw==, ActorId: [3:7491636683940096886:2595], ActorState: ExecuteState, TraceId: 01jrfng7xzcbs1ksv4pyqrjwy5, Create QueryResponse for error on request, msg: 2025-04-10T10:56:31.515278Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096931:3890], for# user0@builtin, access# EraseRow 2025-04-10T10:56:31.515431Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715683. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:56:31.515665Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTE5MTdlYWYtMjdmZGU1MDUtNDA5YjM2MzYtYzZkZjYzNjI=, ActorId: [3:7491636683940096913:2606], ActorState: ExecuteState, TraceId: 01jrfng83x409mq19p5wys9ww2, Create QueryResponse for error on request, msg: 2025-04-10T10:56:31.550610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-10T10:56:31.682767Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096957:3901], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.682806Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096957:3901], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.685160Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636683940096953:2620], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:31.686510Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzM1NTJlMzYtZjk0ODI1YmYtMWQ2Y2NkNGMtYmI4ZGEwOGM=, ActorId: [3:7491636683940096949:2618], ActorState: ExecuteState, TraceId: 01jrfng8d95tz216zxqbg2st0a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:31.802078Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096976:3906], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.802199Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096976:3906], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.809233Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636683940096973:2630], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:31.810544Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmIyMWY4OTQtZmY3ODYzNDMtMzRhODQ1Yi1iYTIyZWVkMQ==, ActorId: [3:7491636683940096967:2627], ActorState: ExecuteState, TraceId: 01jrfng8gmfv2ym7b5hdp67y0w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:31.930501Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096997:3912], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.930540Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636683940096997:3912], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:31.938995Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636683940096994:2641], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:31.941539Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDdkNjk4YzYtMTgyNjNlOTktZWIxYzE4ZDgtMmJlNTI2YWI=, ActorId: [3:7491636683940096990:2639], ActorState: ExecuteState, TraceId: 01jrfng8mj13hq76j6p6y6q29r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:31.974352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-10T10:56:32.076356Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064319:3923], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.076391Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064319:3923], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.078521Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636688235064316:2651], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:32.086500Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGY2OGM4OWMtZjBhNTJlOGItNzkxOWQzNWMtMjQ4MDY0MjI=, ActorId: [3:7491636688235064312:2649], ActorState: ExecuteState, TraceId: 01jrfng8sybj4t7t821tbjkz8a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:32.158659Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064346:3932], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.158697Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064346:3932], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.162444Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636688235064343:2665], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:32.164307Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzJlZDZkNTgtNzdkYTVlMzMtNDQwY2E5MGEtOTdjNWEyYzI=, ActorId: [3:7491636688235064339:2663], ActorState: ExecuteState, TraceId: 01jrfng8w74c2tybz9w5d4fex5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:32.260485Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064366:3937], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.260518Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491636688235064366:3937], for# user0@builtin, access# DescribeSchema 2025-04-10T10:56:32.263936Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636688235064363:2675], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:32.265927Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2U0MGE0MWEtOTg0NGQ1ZGEtYTc2YjgxNTQtN2YzOTc2MzQ=, ActorId: [3:7491636688235064356:2672], ActorState: ExecuteState, TraceId: 01jrfng8zb4x5tm1hf49g6d9px, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 8481, MsgBus: 11674 2025-04-10T10:55:50.239412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636508717934625:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:50.239563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00179c/r3tmp/tmpFDDU09/pdisk_1.dat 2025-04-10T10:55:50.938385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:50.960392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:50.960508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:50.961729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8481, node 1 2025-04-10T10:55:51.289464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:51.289486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:51.289493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:51.289604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11674 TClient is connected to server localhost:11674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
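The AccessDenied / SCHEME_ERROR run above is the ACL half of the test behaving as intended: user0@builtin is refused SelectRow, UpdateRow, EraseRow and DescribeSchema on /Root/Test, and without DescribeSchema every query compiles to "Cannot find table ... or you do not have access permissions" (code 2003). For orientation only, a minimal YQL sketch of grants that would clear those denials — the GRANT form follows the public YDB syntax and is an assumption, not something the test runs; the path and the user are taken from the log:

    -- Hypothetical repair for the denials logged above: grant the four
    -- refused access kinds (SelectRow, UpdateRow, EraseRow, DescribeSchema).
    GRANT SELECT ROW, UPDATE ROW, ERASE ROW, DESCRIBE SCHEMA
    ON `/Root/Test` TO `user0@builtin`;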
2025-04-10T10:55:52.167605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.218199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.494617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.673872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.748890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:54.998850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636525897805509:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:54.998981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:55.239491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636508717934625:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:55.239578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:55.313318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.349312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.387566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.474720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.503989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.585132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:55.637081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636530192773324:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:55.637161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:55.637301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636530192773329:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:55.642271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:55.672563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636530192773331:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:55.771693Z node 1 :TX_PROXY ERROR: Actor# [1:7491636530192773388:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26816, MsgBus: 12927 2025-04-10T10:55:58.573863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636543907855953:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:58.583452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00179c/r3tmp/tmpNOVEhp/pdisk_1.dat 2025-04-10T10:55:58.755521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:58.774746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:58.774829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:58.777663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26816, node 2 2025-04-10T10:55:58.910781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:58.910802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:58.910809Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:58.910914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12927 TClient is connected to server localhost:12927 
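The repeated "Resource pool default not found" warnings above are a startup race rather than a failure: the workload service looks up the default pool before TPoolCreatorActor has created it, retries on "Transaction ... completed, doublechecking", and the later TX_PROXY "path exist, request accepts it" message is the create colliding with its own double-check. A pool can also be declared explicitly; the sketch below assumes the CREATE RESOURCE POOL syntax and setting names from the YDB workload-manager documentation:

    -- Hypothetical explicit pool definition; the two settings shown are
    -- assumptions taken from YDB's workload-manager docs, not from this log.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );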
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: ... 2075186224037961;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 5076, MsgBus: 
31354 2025-04-10T10:56:23.127302Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636651497376032:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:23.127362Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00179c/r3tmp/tmpDQHzxY/pdisk_1.dat 2025-04-10T10:56:23.503398Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:23.529006Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:23.529107Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:23.531742Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5076, node 4 2025-04-10T10:56:23.694888Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:23.694916Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:23.694928Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:23.695087Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31354 TClient is connected to server localhost:31354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:24.520678Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:24.540256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:24.655903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
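The OlapTable plan printed above (a TableFullScan with Predicate "Value > 0", Pushdown: True, and an SsaProgram filter block) is what KQP emits when a WHERE clause is pushed down into the column shard. The original query text is not in the log, so the following is only its approximate shape, reconstructed from the plan's ReadColumns and Predicate fields:

    -- Approximate query behind the plan above: the filter is evaluated
    -- inside the column shard (SsaProgram), not in the compute stage.
    SELECT Key, Value
    FROM `/Root/OlapTable`
    WHERE Value > 0;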
2025-04-10T10:56:24.900939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:56:25.019520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:28.130188Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636651497376032:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:28.130314Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:28.294241Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636672972214281:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:28.294349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:28.363425Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.415962Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.508846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.583669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.663110Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.756713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.892413Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636672972214805:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:28.892559Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:28.893123Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636672972214810:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:28.902177Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:28.923984Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636672972214812:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:29.018419Z node 4 :TX_PROXY ERROR: Actor# [4:7491636677267182165:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:30.679782Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:31.153524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:32.785754Z node 4 :KQP_EXECUTER WARN: ActorId: [4:7491636690152086470:2630] TxId: 281474976710674. Ctx: { TraceId: 01jrfng92m5g09tnmegmggq0ar, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjEzMzk3MDQtZGIxNzM0ODYtNmE3MGVjMjgtZjBiM2JiNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2025-04-10T10:56:32.786097Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjEzMzk3MDQtZGIxNzM0ODYtNmE3MGVjMjgtZjBiM2JiNmE=, ActorId: [4:7491636685857118816:2630], ActorState: ExecuteState, TraceId: 01jrfng92m5g09tnmegmggq0ar, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: 2025-04-10T10:55:19.777752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1729:2429], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:19.778401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:19.779352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:19.780251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1735:2372], Scheduled retry for error: {
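Returning to the KqpLimits::AffectedShardsLimit failure above: the executer rejected the transaction because it would have touched 21 datashards against a limit of 20 ("Too many affected shards: datashardTasks=21, limit: 20"). One way to stay under such a cap is to keep each statement's key range narrow enough that a single transaction touches fewer shards; the sketch below uses an illustrative table and columns, since the test's own DDL is not shown in the log:

    -- Illustrative only (table and column names are not from the test):
    -- splitting a broad update by key range bounds the datashards touched
    -- by each transaction, keeping it under the affected-shards limit.
    UPDATE `/Root/WideTable` SET Data = Data + 1 WHERE Key < 1000;
    UPDATE `/Root/WideTable` SET Data = Data + 1 WHERE Key >= 1000 AND Key < 2000;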
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:19.781054Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:19.781517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1757:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:19.781702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1732:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:19.781856Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:19.782234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:19.782585Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:19.782632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:19.783017Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:19.784068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1738:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:19.784844Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:19.785021Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:20.375191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:20.581196Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:55:20.626597Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:55:21.358567Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27850, node 1 TClient is connected to server localhost:10895 2025-04-10T10:55:21.772003Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:21.772086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:21.772130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:21.772751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:56:02.598439Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:02.621974Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:02.645391Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:02.645542Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:02.648641Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13809, node 6 2025-04-10T10:56:02.959257Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:02.959289Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:02.959305Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:02.959464Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22807 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:03.703780Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:03.735015Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:03.759709Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:03.771014Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:03.786704Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:56:08.443295Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:56:08.443384Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:56:09.651541Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636589585444894:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.651766Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.652043Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636589585444907:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:09.657655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:56:09.693043Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491636589585444909:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:56:09.799590Z node 6 :TX_PROXY ERROR: Actor# [6:7491636589585444961:2372] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:10.449789Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:10.813648Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:56:10.813690Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:56:10.987421Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:56:10.987466Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:56:16.412730Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491636620968840140:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:16.412814Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001491/r3tmp/tmpY5Mf6f/pdisk_1.dat 2025-04-10T10:56:16.844484Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:16.867509Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:16.867627Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:16.874608Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17468, node 7 2025-04-10T10:56:17.186592Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:17.186621Z node 7 : ... 
lready_written: false write_statistics { persist_duration_ms: 5 queued_in_partition_duration_ms: 3 } 2025-04-10T10:56:23.466736Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 14 2025-04-10T10:56:23.476004Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session got write response: sequence_numbers: 15 offsets: 54 already_written: false write_statistics { persist_duration_ms: 9 } 2025-04-10T10:56:23.476059Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 15 2025-04-10T10:56:23.533096Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: try to update token 2025-04-10T10:56:23.533144Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Send 2 message(s) (3 left), first sequence number is 16 2025-04-10T10:56:23.539858Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: try to update token 2025-04-10T10:56:23.539983Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Send 1 message(s) (2 left), first sequence number is 18 2025-04-10T10:56:23.540803Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session got write response: sequence_numbers: 16 sequence_numbers: 17 offsets: 55 offsets: 56 already_written: false already_written: false write_statistics { persist_duration_ms: 4 } 2025-04-10T10:56:23.540843Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 16 2025-04-10T10:56:23.540876Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 17 2025-04-10T10:56:23.559881Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session got write response: sequence_numbers: 18 offsets: 57 already_written: false write_statistics { persist_duration_ms: 11 queued_in_partition_duration_ms: 4 } 2025-04-10T10:56:23.559937Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 18 2025-04-10T10:56:23.571118Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: try to update token 2025-04-10T10:56:23.571171Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Send 2 message(s) (0 left), first sequence number is 19 2025-04-10T10:56:24.021396Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session got write response: sequence_numbers: 19 sequence_numbers: 20 offsets: 58 offsets: 59 already_written: false already_written: false write_statistics { persist_duration_ms: 9 queued_in_partition_duration_ms: 438 } 2025-04-10T10:56:24.021465Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 19 2025-04-10T10:56:24.021503Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: acknoledged message 20 2025-04-10T10:56:24.098569Z :INFO: [] 
MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session will now close 2025-04-10T10:56:24.098693Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: aborting 2025-04-10T10:56:24.099431Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:56:24.099944Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|45b6a5b9-b62e65d9-6a9ad45a-4344830f_0] Write session: destroy 2025-04-10T10:56:25.365484Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491636659623546853:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:25.365721Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:25.366499Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491636659623546889:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:25.380013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:56:25.401352Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491636659623546891:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:56:25.469934Z node 7 :TX_PROXY ERROR: Actor# [7:7491636659623546944:2551] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-04-10T10:56:25.753064Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-04-10T10:56:25.753589Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-04-10T10:56:25.753674Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:17468 2025-04-10T10:56:25.791413Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-04-10T10:56:25.799740Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282585799 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:56:25.799881Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0" topic: "topic1" 2025-04-10T10:56:25.805473Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write 1 messages with Id from 1 to 1 2025-04-10T10:56:25.810458Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: close. Timeout = 18446744073709551 ms 2025-04-10T10:56:25.815759Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7491636659623546953:2408], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:25.816836Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NmE2Mzc3YzYtZTFmNjcwYmItODIxMjVmNWUtOWI0OTA1Yjc=, ActorId: [7:7491636659623546850:2396], ActorState: ExecuteState, TraceId: 01jrfng2892rbm2w19j0prr02n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:25.817915Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:56:25.990340Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: try to update token 2025-04-10T10:56:25.990410Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-10T10:56:26.002365Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 7 } 2025-04-10T10:56:26.002420Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: acknoledged message 1 2025-04-10T10:56:26.014744Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session will now close 2025-04-10T10:56:26.014867Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: aborting 2025-04-10T10:56:26.015656Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:56:26.016203Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|b37f8a3e-b8f3f074-3dd02d63-3db13c12_0] Write session: destroy Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-04-10T10:56:26.562918Z node 7 :PERSQUEUE ERROR: [PQ: 72075186224037889, Partition: 0, State: StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 Got response:400: Bad offset 2025-04-10T10:56:26.881730Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7491636663918514331:2430], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:26.884421Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ODU0MWFhNC0xOTA5NTljZS02ZDY0ZjdhZi04OTExMTM3MQ==, ActorId: [7:7491636663918514324:2426], ActorState: ExecuteState, TraceId: 01jrfng3p1bev624w6n22eph1p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:26.885142Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> KqpQueryService::StreamExecuteQuery >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpQueryService::Followers ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-04-10T10:56:28.993111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636672180641958:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:28.993665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00125f/r3tmp/tmpyl5kpV/pdisk_1.dat 2025-04-10T10:56:30.067420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:30.107826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:30.107919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:30.115418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:30.132488Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1701 TServer::EnableGrpc on GrpcPort 8841, node 1 2025-04-10T10:56:30.546195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:30.546215Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:30.546224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:30.546361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:56:30.887585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:30.922009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T10:56:30.938529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T10:56:33.966066Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636672180641958:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:33.966141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::SessionFromPoolSuccess >> KqpQueryService::IssuesInCaseOfSuccess |68.3%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-04-10T10:54:48.391167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636241223522391:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:48.391477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002035/r3tmp/tmpewBRJo/pdisk_1.dat 2025-04-10T10:54:49.073906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:49.073987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:49.074251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:49.090760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2437 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T10:54:49.781249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:54:49.820932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744282489957 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 2025-04-10T10:54:50.878687Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.13, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.048s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-10T10:54:50.883137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 B-0 2025-04-10T10:54:50.941033Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.15, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.059s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-10T10:54:50.954316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-04-10T10:54:50.987361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:54:50.987471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-04-10T10:54:50.987560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-04-10T10:54:50.991046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-04-10T10:54:51.669315Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.055s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-10T10:54:51.693550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-04-10T10:54:51.737262Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.052s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2025-04-10T10:54:51.751945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2025-04-10T10:54:51.798263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:54:51.798385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583020 row count 2 2025-04-10T10:54:51.798443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583020 2025-04-10T10:54:51.806250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T10:54:52.215298Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 3} end=0, 2 
blobs 1r (max 1), put Spent{time=0.039s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-10T10:54:52.230276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 B-1 2025-04-10T10:54:52.314630Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.097s,wait=0.014s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2025-04-10T10:54:52.330703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:54:52.330796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874522 row count 3 2025-04-10T10:54:52.330849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874522 2025-04-10T10:54:52.330961Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-10T10:54:52.333022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 599 seconds 2025-04-10T10:54:52.333213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T10:54:52.334313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 3 ms, with status# 1, next wakeup in# 599.996627s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-10T10:54:52.390982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874538 rowCount 3 cpuUsage 0 2025-04-10T10:54:52.491451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:54:52.491553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874538 row count 3 2025-04-10T10:54:52.491612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874538 2025-04-10T10:54:52.497549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2025-04-10T10:54:53.010907Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 4} end=0, 2 blobs 1r (max 1), put Spent{time=0.052s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, 
(6291598 0 0)b }, ecr=1.000 2025-04-10T10:54:53.015516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166040 rowCount 4 cpuUsage 0 2025-04-10T10:54:53.122345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T10:54:53.122468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 25166040 row count 4 2025-04-10T10:54:53.122547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 4, DataSize 25166040 2025-04-10T10:54:53.130398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T10:54:53.198544Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.31, eph 4} end=0, 5 blobs 4r (max 4), put Spent{time=0.182s,wait= ... VRow: 216 WriteKVRow: 247 EraseKVRow: 217 WriteKVRow: 248 EraseKVRow: 218 WriteKVRow: 249 EraseKVRow: 219 WriteKVRow: 250 EraseKVRow: 220 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) WriteKVRow: 251 EraseKVRow: 221 WriteKVRow: 252 EraseKVRow: 222 WriteKVRow: 253 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 WriteKVRow: 255 EraseKVRow: 225 WriteKVRow: 256 EraseKVRow: 226 WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 WriteKVRow: 259 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) 2025-04-10T10:56:31.220153Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037925 not found 2025-04-10T10:56:31.220205Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 33 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 33 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 31 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-04-10T10:56:31.438665Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037934 not found 2025-04-10T10:56:31.633754Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-04-10T10:56:31.633970Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037935 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 35 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 35 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 33 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) 2025-04-10T10:56:32.740876Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037939 not found 2025-04-10T10:56:32.740922Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037938 not found 2025-04-10T10:56:32.744886Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037936 not found 2025-04-10T10:56:32.748618Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037937 not found 2025-04-10T10:56:32.757383Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037940 not found 2025-04-10T10:56:32.757426Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037933 not found 2025-04-10T10:56:32.907586Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037943 not found 2025-04-10T10:56:32.907915Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037942 not found 2025-04-10T10:56:33.089822Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037944 not found 2025-04-10T10:56:33.089863Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037941 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 40 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 40 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 38 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: 
false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744282553776 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 40 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 40 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 38 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> KqpQueryService::ShowCreateTableNotSuccess >> KqpDocumentApi::RestrictWriteExplicitPrepare >> KqpQueryService::CreateTempTable >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-04-10T10:54:37.912728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636197485531292:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:37.939835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:38.939108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:38.978108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:38.978216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:38.984221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:38.985521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11183, node 1 2025-04-10T10:54:39.345353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:39.345373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:39.345380Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:39.345505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3446 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:40.224782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:40.291435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:54:40.295178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:40.303966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:54:42.893635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636197485531292:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:42.893772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:45.631099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636231845270231:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.631242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.634444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636231845270258:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.639520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:45.664714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636231845270260:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:45.746690Z node 1 :TX_PROXY ERROR: Actor# [1:7491636231845270312:2375] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:49.815902Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636246419783389:2169];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:49.941778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:50.056799Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:50.099803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:50.099913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:50.106032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29346, node 2 2025-04-10T10:54:50.291417Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:50.291451Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:50.291462Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:50.291642Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:50.784063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T10:54:50.802445Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:54:50.823552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:54:50.828920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.837406Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:54:54.797961Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636246419783389:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:54.798039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:56.107361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636276484555052:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:56.107502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:56.107643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636276484555070:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:56.112961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:56.159675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636276484555074:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:56.245767Z node 2 :TX_PROXY ERROR: Actor# [2:7491636276484555125:2365] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:59.588269Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636290912925437:2230];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:59.829889Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path exist ... distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:09.154487Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:09.154503Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:09.154708Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:09.812218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:09.846269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:55:09.851343Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T10:55:09.857298Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-10T10:55:13.610402Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636327260853253:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:13.610535Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:14.967752Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636353030657740:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:14.967898Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:14.971953Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636353030657752:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:14.986443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-10T10:55:15.023329Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636353030657754:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-10T10:55:15.126552Z node 4 :TX_PROXY ERROR: Actor# [4:7491636357325625101:2366] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:27.248279Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:466:2426], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:27.248748Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:27.249035Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:27.680584Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:27.874899Z node 5 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:55:27.912389Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:55:28.759689Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 23418, node 5 TClient is connected to server localhost:4675 2025-04-10T10:55:29.432897Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:29.432982Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:29.433040Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:29.433725Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:55:43.210805Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:539:2427], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:43.211230Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:43.211411Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:43.755953Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:43.935236Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:55:43.976363Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:55:44.731439Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 12340, node 7 TClient is connected to server localhost:22855 2025-04-10T10:55:45.435891Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:45.436037Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:45.436137Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:45.437664Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:56:02.467657Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:541:2427], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:02.468300Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:02.468646Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:56:03.342268Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:03.640788Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:56:03.751472Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:56:05.162865Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 31024, node 10 TClient is connected to server localhost:30727 2025-04-10T10:56:06.028877Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:06.029018Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:06.029128Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:06.030015Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:56:28.108033Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:618:2428], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:28.108752Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:28.109020Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:56:28.849406Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:29.111904Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:56:29.198861Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:56:30.790397Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1558, node 13 TClient is connected to server localhost:21657 2025-04-10T10:56:32.110928Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:32.111090Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:32.111204Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:32.111937Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpService::CloseSessionsWithLoad >> KqpQueryService::ExecuteCollectMeta ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 22415, MsgBus: 7357 2025-04-10T10:55:55.198136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636528777384763:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:55.206280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00178e/r3tmp/tmpU583Uk/pdisk_1.dat 2025-04-10T10:55:55.739488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:55.746826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:55.746947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:55.751496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22415, node 1 2025-04-10T10:55:55.846709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:55.846732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:55.846739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:55.846846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7357 TClient is connected to server localhost:7357 WaitRootIsUp
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:56.492220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.527253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:56.543507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.721686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.896587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.998247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.809025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636545957255491:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:59.810172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.104949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.151911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.199222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636528777384763:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:00.199314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:00.206068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.266661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.321424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.410312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.522324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636550252223311:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.522447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.522955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636550252223316:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:00.527580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:00.539973Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T10:56:00.540241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636550252223318:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:00.631862Z node 1 :TX_PROXY ERROR: Actor# [1:7491636550252223373:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8606, MsgBus: 14767 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00178e/r3tmp/tmpPXYNLa/pdisk_1.dat 2025-04-10T10:56:04.300927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:04.463548Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:04.470374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:04.470457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:04.471835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8606, node 2 2025-04-10T10:56:04.594803Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:04.594829Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:04.594837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:04.594995Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14767 TClient is connected to server localhost:14767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:05.215638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:05.233743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:05.342655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T10:56:05.566242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:05.675333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard ... l::TEvTabletStatus from node 3, TabletId: 72075186224038006 not found 2025-04-10T10:56:32.428854Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038028 not found 2025-04-10T10:56:32.428874Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037992 not found 2025-04-10T10:56:32.428894Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2025-04-10T10:56:32.428913Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038004 not found 2025-04-10T10:56:32.428930Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038019 not found 2025-04-10T10:56:32.428948Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 not found 2025-04-10T10:56:32.428969Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037980 not found 2025-04-10T10:56:32.428990Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2025-04-10T10:56:32.429009Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038017 not found 2025-04-10T10:56:32.429028Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037978 not found 2025-04-10T10:56:32.429056Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[3:7491636672042618008:3368];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.429365Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[3:7491636667747650526:3358];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.433083Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;self_id=[3:7491636667747650444:3327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.433254Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[3:7491636667747650452:3331];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.436321Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[3:7491636672042618009:3369];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.437164Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;self_id=[3:7491636667747650511:3350];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.440496Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[3:7491636672042617909:3367];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.440874Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[3:7491636672042617869:3364];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.444462Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[3:7491636667747650521:3355];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.444603Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[3:7491636667747650454:3332];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.448740Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[3:7491636672042618144:3383];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.450702Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[3:7491636672042618075:3378];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.453093Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[3:7491636667747650468:3337];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.453392Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[3:7491636667747650364:3325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.456974Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[3:7491636672042618054:3375];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.461243Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[3:7491636667747649849:3303];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.466118Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[3:7491636667747650366:3326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.466223Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[3:7491636667747650519:3354];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.470667Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[3:7491636667747650509:3349];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.475004Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[3:7491636672042618066:3377];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.476795Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[3:7491636672042618022:3370];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.481060Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[3:7491636667747650517:3353];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.485180Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[3:7491636667747650507:3348];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.489226Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[3:7491636667747650450:3330];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.493316Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[3:7491636672042618100:3381];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.497223Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[3:7491636672042618102:3382];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.501640Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[3:7491636672042618041:3373];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.505874Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[3:7491636667747650531:3360];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.510098Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[3:7491636667747650446:3328];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.514432Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[3:7491636667747650505:3347];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.518881Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[3:7491636672042617865:3362];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.523781Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[3:7491636667747650456:3333];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.526864Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[3:7491636667747650497:3343];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.531004Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[3:7491636667747650470:3338];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.532184Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[3:7491636667747649867:3305];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.534705Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[3:7491636672042618273:3385];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.538991Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[3:7491636672042618051:3374];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.544986Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[3:7491636672042618040:3372];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T10:56:32.709495Z node 3 :TX_PROXY ERROR: Actor# [3:7491636689222490180:7528] txid# 281474976715687, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:32.725238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:1, at schemeshard: 72057594046644480 2025-04-10T10:56:32.959243Z node 3 :TX_PROXY ERROR: Actor# [3:7491636689222490321:7615] txid# 281474976715691, 
issues: { message: "Check failed: path: \'/Root/RowSrc\', error: path exist, request doesn\'t accept it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:32.962630Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTZkZjhlYjktYmRlZGFiMDctYjUyNzViMGMtOTE4NjEzZDM=, ActorId: [3:7491636689222490155:4054], ActorState: ExecuteState, TraceId: 01jrfng9b32nzrcxrx0fsf9mh3, Create QueryResponse for error on request, msg: 2025-04-10T10:56:33.410886Z node 3 :TX_PROXY ERROR: Actor# [3:7491636693517457691:7649] txid# 281474976715693, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:33.432264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:1, at schemeshard: 72057594046644480 2025-04-10T10:56:34.652854Z node 3 :TX_PROXY ERROR: Actor# [3:7491636697812425366:7819] txid# 281474976715699, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:34.662712Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715700, at schemeshard: 72057594046644480 2025-04-10T10:56:34.667591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:1, at schemeshard: 72057594046644480 |68.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |68.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |68.3%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
|68.3%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
>> TColumnShardTestReadWrite::RebootWriteRead [GOOD]
>> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD]
>> DataShardVolatile::DistributedWriteThenSplit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD]
Test command err:
2025-04-10T10:56:28.543066Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T10:56:28.723051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T10:56:28.776357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T10:56:28.776766Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T10:56:28.787038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T10:56:28.787344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T10:56:28.787667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T10:56:28.787823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T10:56:28.787943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T10:56:28.788065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T10:56:28.788187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T10:56:28.788329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T10:56:28.788478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T10:56:28.788610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T10:56:28.788724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
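The registration records above show the columnshard bootstrap order: TTxInitSchema registers the normalizers one per `normalizer_register` record before the schema transaction completes. A minimal sketch for recovering that order from a captured log; the `ya_test.log` file name is illustrative, and the snippet scans the whole text rather than individual lines because these logs pack several records per line:

```python
import re
from pathlib import Path

# Illustrative path; point it at the captured "Test command err" text.
LOG = Path("ya_test.log")

# Matches: ...fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
REGISTER = re.compile(r"event=normalizer_register;description=CLASS_NAME=(\w+);")

order: list[str] = []
for match in REGISTER.finditer(LOG.read_text()):
    name = match.group(1)
    if name not in order:  # the same chain repeats for every shard bootstrap
        order.append(name)

for position, name in enumerate(order, 1):
    print(f"{position:2d}. {name}")
# Expected on this log: Granules, Chunks, TablesCleaner, CleanGranuleId, ...
```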
2025-04-10T10:56:28.788866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:28.870842Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:56:28.871260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:28.871327Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:28.871586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.871790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:28.871910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:28.871964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:28.872074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:56:28.872150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:28.872202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:28.872244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:28.872492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.872594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:56:28.872646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:56:28.872688Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:56:28.872815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:56:28.872909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:56:28.872987Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:56:28.873036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:56:28.873139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:56:28.873183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:56:28.873223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:56:28.873282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:56:28.873352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:56:28.873390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:56:28.873949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-10T10:56:28.874109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-04-10T10:56:28.874322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=74; 2025-04-10T10:56:28.874431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T10:56:28.874661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:56:28.874736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:56:28.874777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:56:28.875035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:56:28.875092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:56:28.875129Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T10:56:28.875333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:56:28.875380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:56:28.875410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T10:56:28.875645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:56:28.875700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:56:28.875737Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T10:56:28.875873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:56:28.875926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:56:28.876008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
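Each registered normalizer then executes as `normalizer_init` -> `normalizer_finished` -> `normalizer_switched`, with `seq_id` strictly increasing (1, 2, 4, 6, 8, 9, 10, 11, 13, 15, 16 in this run; the gaps are as logged). A minimal sketch (the `unfinished_normalizers` helper is illustrative, not part of YDB) that cross-checks init records against finished records:

```python
import re

INIT = re.compile(r"event=normalizer_init;last=\d+;seq_id=(\d+);type=(\w+);")
DONE = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+);id=(\d+);")

def unfinished_normalizers(log_text: str) -> list[str]:
    """Return normalizers that were initialized but never reported finished."""
    started = {(int(sid), name) for sid, name in INIT.findall(log_text)}
    finished = {(int(sid), name) for name, sid in DONE.findall(log_text)}
    return [name for sid, name in sorted(started - finished)]

# An empty list means every normalizer in the chain ran to completion.
```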
s=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.920403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T10:56:39.920585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T10:56:39.920716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T10:56:39.920913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T10:56:39.921129Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.921291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.921501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.921831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T10:56:39.922021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.929768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.929922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1057:2928] finished for tablet 9437184 2025-04-10T10:56:39.930701Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1056:2927];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.011},{"events":["l_bootstrap"],"t":0.013},{"events":["f_processing","f_task_result"],"t":0.022},{"events":["f_ack","l_task_result"],"t":0.051},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.061}],"full":{"a":1744282599868507,"name":"_full_task","f":1744282599868507,"d_finished":0,"c":0,"l":1744282599930032,"d":61525},"events":[{"name":"bootstrap","f":1744282599868837,"d_finished":12811,"c":1,"l":1744282599881648,"d":12811},{"a":1744282599921793,"name":"ack","f":1744282599919963,"d_finished":1580,"c":1,"l":1744282599921543,"d":9819},{"a":1744282599921777,"name":"processing","f":1744282599890979,"d_finished":16172,"c":10,"l":1744282599921548,"d":24427},{"name":"ProduceResults","f":1744282599879615,"d_finished":12067,"c":13,"l":1744282599929875,"d":12067},{"a":1744282599929894,"name":"Finish","f":1744282599929894,"d_finished":0,"c":0,"l":1744282599930032,"d":138},{"name":"task_result","f":1744282599891014,"d_finished":14388,"c":9,"l":1744282599919674,"d":14388}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.930847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T10:56:39.931456Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1056:2927];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.011},{"events":["l_bootstrap"],"t":0.013},{"events":["f_processing","f_task_result"],"t":0.022},{"events":["f_ack","l_task_result"],"t":0.051},{"events":["l_ProduceResults","f_Finish"],"t":0.061},{"events":["l_ack","l_processing","l_Finish"],"t":0.062}],"full":{"a":1744282599868507,"name":"_full_task","f":1744282599868507,"d_finished":0,"c":0,"l":1744282599930916,"d":62409},"events":[{"name":"bootstrap","f":1744282599868837,"d_finished":12811,"c":1,"l":1744282599881648,"d":12811},{"a":1744282599921793,"name":"ack","f":1744282599919963,"d_finished":1580,"c":1,"l":1744282599921543,"d":10703},{"a":1744282599921777,"name":"processing","f":1744282599890979,"d_finished":16172,"c":10,"l":1744282599921548,"d":25311},{"name":"ProduceResults","f":1744282599879615,"d_finished":12067,"c":13,"l":1744282599929875,"d":12067},{"a":1744282599929894,"name":"Finish","f":1744282599929894,"d_finished":0,"c":0,"l":1744282599930916,"d":1022},{"name":"task_result","f":1744282599891014,"d_finished":14388,"c":9,"l":1744282599919674,"d":14388}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T10:56:39.931593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T10:56:39.867669Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T10:56:39.931659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T10:56:39.932137Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert [GOOD] >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> 
KqpQueryService::Write
|68.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut
|68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut
|68.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut
>> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD]
>> KqpQueryService::ExecuteQueryInteractiveTx
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore-WithWritePortionsOnInsert [GOOD]
Test command err:
2025-04-10T10:56:28.400211Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T10:56:28.549461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T10:56:28.584985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T10:56:28.585354Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T10:56:28.594471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T10:56:28.594758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T10:56:28.595031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T10:56:28.595179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T10:56:28.595312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T10:56:28.595448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T10:56:28.595576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T10:56:28.595718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T10:56:28.595855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T10:56:28.595972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
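The scan actor's `stats={...}` blobs in the RebootWriteRead output above are well-formed JSON embedded in the record text: `f`/`l` are epoch-microsecond timestamps, and `d_finished` is accumulated time in microseconds across `c` calls per stage. A minimal parsing sketch that extracts each blob by brace matching (line splitting is unreliable here, since several records share one line) and reports per-stage durations:

```python
import json
import re

def iter_scan_stats(log_text: str):
    """Yield each inline stats={...} object from a scan actor log as a dict."""
    for m in re.finditer(r"stats=\{", log_text):
        start, depth = m.end() - 1, 0
        for i in range(start, len(log_text)):
            if log_text[i] == "{":
                depth += 1
            elif log_text[i] == "}":
                depth -= 1
                if depth == 0:
                    yield json.loads(log_text[start:i + 1])
                    break

def print_stage_durations(stats: dict) -> None:
    # d_finished is accumulated stage time in microseconds; c is the call count.
    for ev in stats.get("events", []):
        print(f"{ev['name']:>15}: {ev.get('d_finished', 0) / 1000:8.3f} ms "
              f"over {ev.get('c', 0)} calls")

# Usage: for stats in iter_scan_stats(log_text): print_stage_durations(stats)
```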
2025-04-10T10:56:28.596123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:28.596238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:28.631427Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:56:28.631911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:28.631980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:28.632211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.632400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:28.632502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:28.632554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:28.632651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:56:28.632717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:28.632763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:28.632803Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:28.633033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.633109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:56:28.633152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:56:28.633186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:56:28.633298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:56:28.633354Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:56:28.633420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:56:28.633501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:56:28.633595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:56:28.633646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:56:28.633682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:56:28.633751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:56:28.633800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:56:28.633839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:56:28.636319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=84; 2025-04-10T10:56:28.636486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=57; 2025-04-10T10:56:28.636604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-10T10:56:28.636710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-10T10:56:28.636949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:56:28.637028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:56:28.637089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:56:28.637291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:56:28.637341Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:56:28.637370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T10:56:28.637562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:56:28.637612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:56:28.637652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T10:56:28.637857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:56:28.637903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:56:28.637933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T10:56:28.638050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:56:28.638092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:56:28.638141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
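The WriteOverload output below repeats one cycle per writeId: `Write (record)` is followed by `BlobManager on execute ... Save Batch GenStep`, then `BlobManager at tablet ... Save Batch`, then a completed TTxWrite with `insert_overload_size=0`. A sketch that joins the three record kinds; it assumes, as holds in this capture, that the GenStep step component equals the writeId:

```python
import re
from collections import defaultdict

WRITE   = re.compile(r"Write \(record\) into pathId \d+ writeId (\d+) at tablet")
EXECUTE = re.compile(r"BlobManager on execute at tablet \d+ Save Batch GenStep: \d+:(\d+)")
PERSIST = re.compile(r"BlobManager at tablet \d+ Save Batch GenStep: \d+:(\d+)")

def incomplete_writes(log_text: str) -> dict[int, set[str]]:
    """Map writeId -> phases seen, keeping only writes that missed a phase."""
    seen: dict[int, set[str]] = defaultdict(set)
    for rx, phase in ((WRITE, "write"), (EXECUTE, "execute"), (PERSIST, "persist")):
        for num in rx.findall(log_text):
            seen[int(num)].add(phase)
    return {wid: ph for wid, ph in sorted(seen.items()) if len(ph) < 3}

# An empty dict means every writeId was written, executed, and persisted.
```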
10:56:40.735280Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 4 at tablet 9437184 2025-04-10T10:56:40.735631Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T10:56:40.748941Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T10:56:40.749123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.751033Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-10T10:56:40.751353Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T10:56:40.764999Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T10:56:40.765173Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.767057Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-10T10:56:40.767360Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T10:56:40.780858Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T10:56:40.781035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.788744Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-10T10:56:40.789093Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T10:56:40.811387Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T10:56:40.811609Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.813540Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-10T10:56:40.813891Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T10:56:40.835065Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T10:56:40.835262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.837259Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-10T10:56:40.837618Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T10:56:40.852353Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T10:56:40.852537Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.865452Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-10T10:56:40.865799Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T10:56:40.879481Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T10:56:40.879674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.882109Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-10T10:56:40.882461Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T10:56:40.898224Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T10:56:40.898412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.902215Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-10T10:56:40.902621Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T10:56:40.915838Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T10:56:40.916048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.926268Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-10T10:56:40.926625Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T10:56:40.940951Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T10:56:40.941166Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.943291Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-10T10:56:40.943611Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T10:56:40.971360Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T10:56:40.971633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:40.973553Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-10T10:56:40.973897Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 
Blob count: 1 2025-04-10T10:56:40.995174Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T10:56:40.995399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.032757Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-10T10:56:41.033232Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T10:56:41.065017Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T10:56:41.065225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.067148Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-10T10:56:41.067497Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T10:56:41.085682Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T10:56:41.085893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.088125Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-10T10:56:41.088521Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T10:56:41.103018Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T10:56:41.103228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.132106Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-10T10:56:41.132481Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T10:56:41.146968Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T10:56:41.147143Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.149184Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-10T10:56:41.149556Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T10:56:41.171172Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T10:56:41.171354Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.173282Z node 1 
:TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-10T10:56:41.173631Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T10:56:41.188359Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T10:56:41.188558Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:56:41.601540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; 2025-04-10T10:56:41.671568Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-10T10:56:41.672013Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T10:56:41.691913Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T10:56:41.692148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> Worker::Basic [GOOD] >> KqpLimits::DatashardReplySize [GOOD] >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta >> KqpQueryServiceScripts::ValidateScript >> SystemView::AuthPermissions_Access [GOOD] >> SystemView::AuthPermissions_ResultOrder >> BsControllerConfig::MergeBoxes [GOOD] >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD] >> KqpQueryService::CloseSessionsWithLoad >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-04-10T10:56:30.366601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636680198688068:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:30.366657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c1b/r3tmp/tmpg0BhUB/pdisk_1.dat 2025-04-10T10:56:31.156849Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:31.193661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:31.193744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:31.199782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11192 TServer::EnableGrpc on GrpcPort 16698, node 1 2025-04-10T10:56:31.799120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:31.799145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:31.799154Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-10T10:56:31.799397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:32.472737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:32.503387Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:32.728798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744282592864 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-10T10:56:32.986450Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handshake: worker# [1:7491636688788623474:2425] 2025-04-10T10:56:32.986581Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handshake: worker# [1:7491636688788623474:2425] 2025-04-10T10:56:33.050665Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:56:33.062771Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Create read session: session# [1:7491636688788623478:2294] 2025-04-10T10:56:33.062854Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-10T10:56:33.062871Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7491636688788623474:2425] Handshake with reader: sender# [1:7491636688788623475:2425] 2025-04-10T10:56:33.062909Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:33.063185Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T10:56:33.063212Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Send handshake: worker# [1:7491636688788623474:2425] 2025-04-10T10:56:33.063236Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-10T10:56:33.063248Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7491636688788623474:2425] Handshake with writer: sender# [1:7491636688788623476:2425] 2025-04-10T10:56:33.192754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-10T10:56:35.370439Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636680198688068:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:35.370528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:35.704462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636701673525530:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:35.704761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:35.706445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636701673525561:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:35.706512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636701673525562:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:35.714629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-10T10:56:35.737144Z node 1 :TX_PROXY ERROR: Actor# [1:7491636701673525567:2513] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:56:35.739323Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-10T10:56:35.739522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-10T10:56:35.740393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636701673525565:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:56:35.740461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636701673525566:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:56:35.820785Z node 1 :TX_PROXY ERROR: Actor# [1:7491636701673525615:2545] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:35.823747Z node 1 :TX_PROXY ERROR: Actor# [1:7491636701673525622:2550] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:37.568933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:38.200292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T10:56:39.100430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T10:56:39.916306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.553124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.890875Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-10T10:56:41.812000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-10T10:56:41.890970Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-10T10:56:41.812000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:41.891047Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-04-10T10:56:41.812000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:41.891207Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-04-10T10:56:41.891363Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T10:56:41.891414Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-10T10:56:41.891485Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-10T10:56:41.903360Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:56:41.903444Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-10T10:56:41.903493Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-04-10T10:56:41.903575Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:41.903624Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:42.110167Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-10T10:56:42.066000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-10T10:56:42.110252Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-10T10:56:42.066000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:42.110319Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-04-10T10:56:42.066000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:42.110446Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-04-10T10:56:42.110539Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-10T10:56:42.111872Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:56:42.111923Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-10T10:56:42.111962Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-10T10:56:42.112000Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:42.112037Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:42.340004Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-10T10:56:42.322000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-10T10:56:42.340102Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-10T10:56:42.322000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:42.340173Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-04-10T10:56:42.322000Z MessageGroupId: producer ProducerId: producer }] } 2025-04-10T10:56:42.340313Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-04-10T10:56:42.340436Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-10T10:56:42.354519Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7491636727443330172:2425] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:56:42.354600Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-10T10:56:42.354645Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7491636688788623476:2425] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-04-10T10:56:42.354697Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:42.354742Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:56:42.501877Z node 1 
:REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-04-10T10:56:42.501910Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7491636688788623475:2425] Leave 2025-04-10T10:56:42.501981Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7491636688788623474:2425] Reader has gone: sender# [1:7491636688788623475:2425] 2025-04-10T10:56:42.502062Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636731738297671:2425] Handshake: worker# [1:7491636688788623474:2425] 2025-04-10T10:56:42.503564Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636731738297671:2425] Create read session: session# [1:7491636731738297672:2294] 2025-04-10T10:56:42.503608Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7491636688788623474:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-10T10:56:42.503618Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7491636688788623474:2425] Handshake with reader: sender# [1:7491636731738297671:2425] 2025-04-10T10:56:42.503662Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491636731738297671:2425] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::CreateAndDropTopic >> KqpQueryServiceScripts::ParseScript |68.4%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 64327, MsgBus: 18835 2025-04-10T10:55:47.056734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636494474738573:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:47.066554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d9/r3tmp/tmpcZ7enw/pdisk_1.dat 2025-04-10T10:55:47.655338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.686255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.686366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.697713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64327, node 1 2025-04-10T10:55:47.921799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.921819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.921824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.921930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18835 TClient is connected to server localhost:18835 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.869010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.019293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.580275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.794871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:55:49.942783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:55:51.459028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636511654609510:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:51.459200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.050081Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636494474738573:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.050194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.767790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.843635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.883971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.915980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.941610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.975789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.090340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520244544624:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.090453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.104570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520244544629:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.123465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.147132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636520244544632:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.267641Z node 1 :TX_PROXY ERROR: Actor# [1:7491636520244544689:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.206001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.363292Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491636546014349765:2622] TxId: 281474976710672. Ctx: { TraceId: 01jrfnf8nh118zenkkzr9qkxtd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMTZhNGItYWY4NGJkZDUtZTkzZDEyZGMtZmI4MjlhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-04-10T10:55:59.363498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDJiMTZhNGItYWY4NGJkZDUtZTkzZDEyZGMtZmI4MjlhMTA=, ActorId: [1:7491636546014349746:2622], ActorState: ExecuteState, TraceId: 01jrfnf8nh118zenkkzr9qkxtd, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 28388, MsgBus: 22399 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d9/r3tmp/tmpxAB4P5/pdisk_1.dat 2025-04-10T10:56:00.926475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:00.940892Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:00.954595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:00.954685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:00.959159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28388, node 2 2025-04-10T10:56:01.090764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:01.090785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:01.090792Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:01.090910Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22399 TClient is connected to server localhost:22399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:01.622205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:01.635200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:15.374737Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636597254194085:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:15.374833Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:18.039693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636631613933399:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:18.039795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:18.040365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636631613933411:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:18.045513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:56:18.065725Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636631613933413:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:56:18.198094Z node 3 :TX_PROXY ERROR: Actor# [3:7491636631613933464:2619] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:19.604788Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491636631613933498:2358] TxId: 281474976710661. Ctx: { TraceId: 01jrfnfv3fafs2h5ga74km9nz9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWE4NjZhNzYtNmEyMzM5NTItYWYzZGQ2ZDEtYWY1NGJmMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442499 > 50331648 2025-04-10T10:56:19.605172Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWE4NjZhNzYtNmEyMzM5NTItYWYzZGQ2ZDEtYWY1NGJmMWU=, ActorId: [3:7491636623023998774:2358], ActorState: ExecuteState, TraceId: 01jrfnfv3fafs2h5ga74km9nz9, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442499 > 50331648), code: 200509 Trying to start YDB, gRPC: 25087, MsgBus: 7835 2025-04-10T10:56:21.061157Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636643450702767:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:21.061279Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d9/r3tmp/tmpAV2nLb/pdisk_1.dat 2025-04-10T10:56:21.390082Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:21.407841Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:21.407941Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:21.409500Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25087, node 4 2025-04-10T10:56:21.615042Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:21.615072Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:21.615082Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:21.615266Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7835 TClient is connected to server localhost:7835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:22.477328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:22.487414Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:22.505222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:22.735868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:23.165625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:23.354663Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:26.047938Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636643450702767:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:26.048024Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:27.415729Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636669220508315:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.415830Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.500911Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.552016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.624078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.688059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.767302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.855490Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:27.951249Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636669220508834:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.951371Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.951636Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636669220508839:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:27.958249Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:27.988262Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636669220508841:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:28.066550Z node 4 :TX_PROXY ERROR: Actor# [4:7491636673515476202:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:29.290090Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:36.356544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:56:36.356583Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:42.438381Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjBmZDJlOC01MDBiNDNiYy0zN2RkZGU2Yy05NWRkZDRmYQ==, ActorId: [4:7491636720760118491:2799], ActorState: ExecuteState, TraceId: 01jrfnggex3xmz8nfm75apqxdy, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003965 > 50331648), code: 2013 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10814:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10814:2166] Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11017:2156] recipient: [1:10814:2166] 2025-04-10T10:55:18.576850Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T10:55:18.581976Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T10:55:18.582369Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T10:55:18.584684Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:55:18.585202Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T10:55:18.585896Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:18.585926Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T10:55:18.586403Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T10:55:18.596545Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T10:55:18.596698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T10:55:18.596856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T10:55:18.596971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:18.597076Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T10:55:18.597147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11042:2156] recipient: [1:110:2157] 2025-04-10T10:55:18.613175Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T10:55:18.613338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:18.624421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T10:55:18.624599Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:18.624688Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T10:55:18.624780Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:18.624923Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T10:55:18.625003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:18.625044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T10:55:18.625119Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:18.637966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T10:55:18.638266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:18.650892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T10:55:18.651073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T10:55:18.652364Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T10:55:18.652427Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T10:55:18.652651Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T10:55:18.652713Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-10T10:55:18.674733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 
} Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } [remaining Host entries condensed: IcPorts 12011 through 12096, every entry with Fqdn: "::1" and HostConfigId: 1; the test harness truncates the log at this point (" ... ")] Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 [repeated BS_CONTROLLER NOTICE records condensed: node 251, {BSCFP02@config_fit_pdisks.cpp:339}, Create new pdisk for PDiskId# 276:1000 through 295:1002 on Path# /dev/disk1, /dev/disk2 and /dev/disk3, timestamps 2025-04-10T10:56:35.485532Z through 10:56:35.574209Z] 2025-04-10T10:56:35.574246Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId#
296:1000 Path# /dev/disk1 2025-04-10T10:56:35.574283Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-04-10T10:56:35.574321Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-04-10T10:56:35.574359Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-04-10T10:56:35.574400Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-04-10T10:56:35.574435Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-04-10T10:56:35.574472Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-04-10T10:56:35.574508Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-04-10T10:56:35.574540Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-04-10T10:56:35.574572Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-04-10T10:56:35.574610Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-04-10T10:56:35.574642Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-04-10T10:56:35.574675Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-04-10T10:56:35.574706Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-04-10T10:56:35.574738Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-04-10T10:56:35.927964Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.444869s 2025-04-10T10:56:35.928229Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.445189s 2025-04-10T10:56:35.972793Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-04-10T10:56:36.025639Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS+UseSink >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] >> KqpQueryService::ShowCreateTableOnView >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] 
>> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryService::DdlUser >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript |68.4%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryService::TempTablesDrop >> KqpLimits::QueryExecTimeout [GOOD] >> KqpLimits::QSReplySize-useSink >> KqpQueryService::ExecuteCollectMeta [GOOD] >> KqpQueryService::ExecuteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-04-10T10:53:04.282889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635795102034618:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:04.282960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f89/r3tmp/tmpGRjB1n/pdisk_1.dat 2025-04-10T10:53:04.695863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:04.715280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:04.715391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:04.718874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28477, node 1 2025-04-10T10:53:04.807391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:53:04.807412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:53:04.807425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:53:04.807538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:53:05.184975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:05.210659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:53:07.321293Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T10:53:07.323949Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTNjOWYyOTAtYTc0M2UwOS0zMzQ5NjRhZS01ODA1ZjhiYg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTNjOWYyOTAtYTc0M2UwOS0zMzQ5NjRhZS01ODA1ZjhiYg== 2025-04-10T10:53:07.324347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T10:53:07.324368Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T10:53:07.324394Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T10:53:07.330391Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807986937153:2329], Start check tables existence, number paths: 2 2025-04-10T10:53:07.330566Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTNjOWYyOTAtYTc0M2UwOS0zMzQ5NjRhZS01ODA1ZjhiYg==, ActorId: [1:7491635807986937154:2330], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.331750Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807986937153:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T10:53:07.331814Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807986937153:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T10:53:07.331848Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491635807986937153:2329], Successfully finished 2025-04-10T10:53:07.331905Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T10:53:07.335257Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.338506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:53:07.339589Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T10:53:07.339703Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T10:53:07.347021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:53:07.398817Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T10:53:07.402099Z node 1 :TX_PROXY ERROR: Actor# [1:7491635807986937222:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:07.402269Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635807986937171:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T10:53:07.404219Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ= 2025-04-10T10:53:07.404324Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ=, ActorId: [1:7491635807986937230:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:53:07.404499Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:53:07.404519Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T10:53:07.404591Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635807986937232:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.404674Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ=, ActorId: [1:7491635807986937230:2331], ActorState: ReadyState, TraceId: 01jrfna0yc8qnv60229ge6kd5c, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491635807986937229:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T10:53:07.404742Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491635807986937230:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ= 2025-04-10T10:53:07.404803Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635807986937234:2333], Database: /Root, Start database fetching 2025-04-10T10:53:07.405555Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491635807986937234:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T10:53:07.405688Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T10:53:07.405744Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635807986937242:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ=, Start pool fetching 2025-04-10T10:53:07.405789Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7491635807986937243:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:53:07.405939Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635807986937232:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.405991Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T10:53:07.406004Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T10:53:07.406022Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635807986937243:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T10:53:07.406087Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491635807986937242:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ=, Pool info successfully resolved 2025-04-10T10:53:07.406305Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ= 2025-04-10T10:53:07.406365Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635807986937246:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-10T10:53:07.406444Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491635807986937246:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7491635807986937230:2331], session id: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ= 2025-04-10T10:53:07.406503Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmEzYTkxMTgtODNmMzRkMzYtYTkxZWMzZGEtODUxMjFhNmQ= 2025-04-10T10:53:07.406546Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
e 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MTY0MzhiZDUtZjk0MTNjNGItOWUwYjM1ZjAtZjY3YWViMmQ=, ActorId: [6:7491636236120245222:2459], ActorState: ExecuteState, TraceId: 01jrfnd1eg17k0hy31cyr84jj6, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:54:46.359943Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MTY0MzhiZDUtZjk0MTNjNGItOWUwYjM1ZjAtZjY3YWViMmQ=, ActorId: [6:7491636236120245222:2459], ActorState: ExecuteState, TraceId: 01jrfnd1eg17k0hy31cyr84jj6, EndCleanup, isFinal: 1 2025-04-10T10:54:46.359999Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MTY0MzhiZDUtZjk0MTNjNGItOWUwYjM1ZjAtZjY3YWViMmQ=, ActorId: [6:7491636236120245222:2459], ActorState: ExecuteState, TraceId: 01jrfnd1eg17k0hy31cyr84jj6, Sent query response back to proxy, proxyRequestId: 16, proxyId: [6:7491636180285669307:2272] 2025-04-10T10:54:46.360045Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MTY0MzhiZDUtZjk0MTNjNGItOWUwYjM1ZjAtZjY3YWViMmQ=, ActorId: [6:7491636236120245222:2459], ActorState: unknown state, TraceId: 01jrfnd1eg17k0hy31cyr84jj6, Cleanup temp tables: 0 2025-04-10T10:54:46.360423Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MTY0MzhiZDUtZjk0MTNjNGItOWUwYjM1ZjAtZjY3YWViMmQ=, ActorId: [6:7491636236120245222:2459], ActorState: unknown state, TraceId: 01jrfnd1eg17k0hy31cyr84jj6, Session actor destroyed 2025-04-10T10:54:46.376481Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: ReadyState, TraceId: 01jrfnd1k76jz258kwkr4q5fcg, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-10T10:54:46.449271Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636206055473597:2338], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-04-10T10:54:46.450110Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:54:46.450205Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636236120245283:2470], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T10:54:46.454316Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636236120245283:2470], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-10T10:54:46.454427Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-10T10:54:46.458239Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636206055473797:2363], DatabaseId: /Root, PoolId: default, Got delete notification 2025-04-10T10:54:46.458339Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-10T10:54:46.458401Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636236120245295:2471], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-10T10:54:46.459195Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491636236120245295:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:46.459295Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Wait pool handlers 0.000017s: number handlers = 2 2025-04-10T10:54:46.465388Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: ExecuteState, TraceId: 01jrfnd1k76jz258kwkr4q5fcg, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7491636236120245263:2331] WorkloadServiceCleanup: 0 2025-04-10T10:54:46.468101Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: CleanupState, TraceId: 01jrfnd1k76jz258kwkr4q5fcg, EndCleanup, isFinal: 0 2025-04-10T10:54:46.468174Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: CleanupState, TraceId: 01jrfnd1k76jz258kwkr4q5fcg, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7491636180285669307:2272] Wait pool handlers 1.003174s: number handlers = 2 Wait pool handlers 2.003313s: number handlers = 2 2025-04-10T10:54:48.654460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:54:48.654497Z node 6 :IMPORT WARN: Table profiles were not loaded [heartbeat lines condensed: "Wait pool handlers <elapsed>: number handlers = 2" repeated roughly once per second from 3.004390s through 82.168350s; the only other event in this window is 2025-04-10T10:54:58.698331Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636206055473597:2338], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh] 2025-04-10T10:56:09.455004Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636206055473797:2363], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-04-10T10:56:09.455187Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7491636206055473597:2338], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-04-10T10:56:09.455331Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-04-10T10:56:09.455360Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T10:56:09.656555Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:56:09.656635Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:56:09.656672Z node 6 :KQP_SESSION
DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:56:09.656709Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:56:09.656808Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDdiODE1ZGQtN2NiNDA2YTMtZjgyYjcyNzYtNGU4MmIxMDQ=, ActorId: [6:7491636206055473497:2331], ActorState: unknown state, Session actor destroyed >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> KqpQueryService::Followers [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-04-10T10:52:56.396753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635761827794547:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:52:56.396840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001475/r3tmp/tmpF3VSF7/pdisk_1.dat 2025-04-10T10:52:56.746524Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11686, node 1 2025-04-10T10:52:56.766741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:52:56.766841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:52:56.769170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:52:56.793109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:52:56.793135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:52:56.793160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:52:56.793299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:52:56.836650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.855008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.884719Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491635761827795158:2308] 2025-04-10T10:52:56.885007Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:56.895126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:56.895165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:52:56.896513Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:52:56.896557Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037888 2025-04-10T10:52:56.896600Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:52:56.896845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:52:56.896883Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:52:56.896907Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491635761827795172:2308] in generation 1 2025-04-10T10:52:56.898042Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:52:56.937325Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:52:56.937509Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:52:56.937575Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491635761827795176:2309] 2025-04-10T10:52:56.937594Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:56.937612Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:52:56.937624Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.937783Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:52:56.937878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:52:56.937928Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635761827795155:2298], serverId# [1:7491635761827795175:2307], sessionId# [0:0:0] 2025-04-10T10:52:56.938016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.938038Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:56.938054Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:52:56.938076Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.938116Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:56.938347Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:52:56.938448Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T10:52:56.939528Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.939969Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:56.940027Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:52:56.941623Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635761827795190:2315], serverId# [1:7491635761827795191:2316], sessionId# [0:0:0] 2025-04-10T10:52:56.946945Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744282376984 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 
1744282376984 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:52:56.946988Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.947141Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.947213Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.947230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:52:56.947252Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744282376984:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T10:52:56.947562Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744282376984:281474976710657 keys extracted: 0 2025-04-10T10:52:56.947693Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:52:56.947802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.947848Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:52:56.950519Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:52:56.951084Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:56.952822Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744282376983 2025-04-10T10:52:56.952839Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.952868Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744282377000 2025-04-10T10:52:56.952923Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744282376984} 2025-04-10T10:52:56.952959Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.952991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.953011Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:56.953026Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:52:56.953074Z node 1 :TX_DATASHARD DEBUG: Complete [1744282376984 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491635761827794979:2196], exec latency: 3 ms, propose latency: 5 ms 2025-04-10T10:52:56.953104Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T10:52:56.953165Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.954916Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7491635761827795176:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-10T10:52:56.958777Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 
281474976710657 datashard 72075186224037888 state Ready 2025-04-10T10:52:56.958859Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:52:56.966965Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.967153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:56.967296Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:52:56.967335Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-10T10:52:56.967354Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T10:52:56.968675Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:57.010351Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T10:52:57.010624Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-10T10:52:57.011101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T10:52:57.011307Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-10T10:52:57.011339Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T10:52:57.011356Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-10T10:52:57.011378Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-10T10:52:57.011419Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:52:57.011459Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-10T10:52:57.011712Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7491635761827795276:2311], now have 1 active actors on pipe 2025-04-10T10:52:57.020999Z node 1 :PERSQUEUE DEBUG: [P ... 
ed on disk 2025-04-10T10:56:44.676765Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2025-04-10T10:56:44.677230Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2696] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2025-04-10T10:56:44.677499Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:853:2696] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:56:44.677761Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-10T10:56:44.677859Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2025-04-10T10:56:44.680033Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T10:56:44.799223Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-04-10T10:56:44.799385Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:44.799581Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-04-10T10:56:44.799889Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T10:56:44.804455Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-10T10:56:44.804623Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-10T10:56:44.804759Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:44.804891Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2025-04-10T10:56:44.805190Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-10T10:56:44.805478Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:853:2696] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-10T10:56:44.806236Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-10T10:56:44.806663Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:853:2696], at tablet# 72075186224037888 2025-04-10T10:56:44.806767Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-10T10:56:44.807067Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:853:2696] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:44.807545Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2696] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:44.808072Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:44.808187Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:44.808403Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-04-10T10:56:44.808649Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:44.808694Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:44.808801Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-04-10T10:56:44.809092Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-04-10T10:56:44.809263Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-04-10T10:56:44.809548Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-10T10:56:44.810743Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-04-10T10:56:44.812321Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 
2025-04-10T10:56:44.812682Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:56:44.812791Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:56:44.812895Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:56:44.813011Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:56:44.813108Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-04-10T10:56:44.813185Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000005_00000_0000000001_00000| 2025-04-10T10:56:44.813222Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:56:44.813286Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:56:44.813373Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:56:44.813603Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:56:44.813831Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-04-10T10:56:44.815369Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [27:804:2665] 2025-04-10T10:56:44.815683Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 size 93 2025-04-10T10:56:44.826366Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:56:44.826611Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T10:56:44.826822Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-10T10:56:44.830666Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-04-10T10:56:44.831185Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2696] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-04-10T10:56:44.831479Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:853:2696] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:56:44.831792Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-10T10:56:44.831896Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-04-10T10:56:44.832722Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-10T10:56:44.969416Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:44.969578Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:44.969878Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-04-10T10:56:44.970031Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 6 2025-04-10T10:56:44.973244Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 2025-04-10T10:56:44.973377Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:56:44.973825Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpScan::ScanPg [GOOD] >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> KqpQueryService::TableSink_OltpLiteralUpsert >> KqpDocumentApi::RestrictWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-04-10T10:56:05.807419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:05.808247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:05.808338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:05.812961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:05.813391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:05.813876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009dd/r3tmp/tmp77uqZN/pdisk_1.dat 2025-04-10T10:56:06.674680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.945097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:07.071992Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:07.078688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:07.078898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:07.080541Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:07.081301Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:07.087723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:07.087868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:07.089673Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-10T10:56:07.105856Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:56:07.106595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:07.107090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:07.448461Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-04-10T10:56:07.448558Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:07.448762Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1232:2745] 2025-04-10T10:56:07.835933Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:07.836075Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:07.836915Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1232:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:07.837077Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:56:07.837592Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:07.837876Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:56:07.838005Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:56:07.854565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:07.855184Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:56:07.871666Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:56:07.871758Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:56:07.985734Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1283:2390] 2025-04-10T10:56:07.986065Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.046005Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.046794Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.048758Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:08.048859Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:08.048913Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:08.049336Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.049803Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.050058Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1307:2390] in generation 1 2025-04-10T10:56:08.076774Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:08.150431Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:08.150701Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:08.150828Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-04-10T10:56:08.150869Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:08.150930Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:08.150974Z node 
2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:08.151517Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:08.151665Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:08.151777Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:08.151815Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:08.151871Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:08.151909Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:08.223252Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1267:2775], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-04-10T10:56:08.223835Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:08.224156Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:08.224330Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:08.231517Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:08.251329Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:56:08.251467Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:56:08.623611Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-10T10:56:08.623904Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-10T10:56:08.624054Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T10:56:08.694825Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-10T10:56:08.695002Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-10T10:56:08.695177Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T10:56:08.695477Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T10:56:08.719033Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1344:2796], serverId# [2:1347:2418], sessionId# [0:0:0] 2025-04-10T10:56:08.726870Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:56:08.726959Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:08.728108Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:08.728171Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2025-04-10T10:56:08.728242Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:56:08.730204Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:56:08.730429Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025 ... : default. }. CA StateFunc 271646922 2025-04-10T10:56:31.830517Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-10T10:56:31.830541Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-10T10:56:31.830569Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-10T10:56:31.830606Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-10T10:56:31.830629Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-10T10:56:31.830670Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2025-04-10T10:56:31.830701Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1647:2975], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=. TraceId : 01jrfng6wb69kwyj5hx1h9h7fd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T10:56:31.830807Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-04-10T10:56:31.830911Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
<main>
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:56:31.831059Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T10:56:31.831217Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1644:2936] TxId: 281474976715664. Ctx: { TraceId: 01jrfng6wb69kwyj5hx1h9h7fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1647:2975], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2083 Tasks { TaskId: 1 CpuTimeUs: 602 FinishTimeMs: 1744282591830 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 61 BuildCpuTimeUs: 541 HostName: "ghrun-x5vqckr7qy" NodeId: 3 CreateTimeMs: 1744282591827 } MaxMemoryUsage: 1048576 } 2025-04-10T10:56:31.831266Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jrfng6wb69kwyj5hx1h9h7fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1647:2975] 2025-04-10T10:56:31.831369Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1644:2936] TxId: 281474976715664. Ctx: { TraceId: 01jrfng6wb69kwyj5hx1h9h7fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T10:56:31.831413Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1644:2936] TxId: 281474976715664. Ctx: { TraceId: 01jrfng6wb69kwyj5hx1h9h7fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-10T10:56:31.831454Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1644:2936] TxId: 281474976715664. Ctx: { TraceId: 01jrfng6wb69kwyj5hx1h9h7fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQyMjIwODEtMWI2MjE2NTItZDU4MWMwNDctYjY2ODkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.002083s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T10:56:31.832367Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-10T10:56:31.832465Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Handle TEvProposeTransaction 2025-04-10T10:56:31.832498Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] TxId# 0 ProcessProposeTransaction 2025-04-10T10:56:31.832609Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1649:2976] SnapshotReq marker# P0 2025-04-10T10:56:31.833601Z node 3 :TX_PROXY DEBUG: Actor# [3:1651:2976] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-10T10:56:31.833819Z node 3 :TX_PROXY DEBUG: Actor# [3:1651:2976] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-10T10:56:31.833923Z node 3 :TX_PROXY DEBUG: Actor# [3:1649:2976] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-10T10:56:43.308376Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:280:2215], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:43.309120Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:43.309412Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:43.311707Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:700:2355], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:43.311809Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:43.312219Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009dd/r3tmp/tmp24VZuo/pdisk_1.dat 2025-04-10T10:56:43.773179Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:44.006933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.116124Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:44.116289Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:44.129483Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:44.129613Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:44.154211Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-10T10:56:44.155002Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:44.155766Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:44.516145Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.262513Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1399:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:45.262638Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1408:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:45.263227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:45.269963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:45.861562Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1413:2843], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:45.969902Z node 5 :TX_PROXY ERROR: Actor# [5:1542:2912] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:46.798474Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfngnpc48t10httgbcjacg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZjkzYzZkZjgtMmM3N2JjNS02ZDdlNTA0ZS0yNjM1OWI0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:47.668280Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfngq825nf5vv17fgcpvcra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQyOTJhNi1kZDY5NzYyMS01ZWU4OWI5MS03OTBiMThlNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:48.465159Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfngq825nf5vv17fgcpvcra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjQyOTJhNi1kZDY5NzYyMS01ZWU4OWI5MS03OTBiMThlNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:56:48.469109Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpQueryService::StreamExecuteCollectMeta [GOOD] >> KqpQueryService::ShowCreateViewOnTable >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> KqpQueryService::SeveralCTAS+UseSink [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::Explain >> KqpQueryService::ShowCreateTableOnView [GOOD] >> KqpQueryService::ShowCreateView >> KqpQueryService::SessionFromPoolError >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::Tcl >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-04-10T10:56:05.789955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:05.790769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:05.790855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:05.792990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
<main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:05.793320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:05.793762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a47/r3tmp/tmpP0kkEL/pdisk_1.dat 2025-04-10T10:56:06.353813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:06.633421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:06.831055Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:06.838894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.839103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:06.840667Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:06.841426Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:06.847571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:06.847692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:06.848709Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-10T10:56:06.867976Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:56:06.868571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:06.869218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:07.252914Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-04-10T10:56:07.252992Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:07.253179Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1231:2744] 2025-04-10T10:56:07.869267Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:07.869389Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:07.883166Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:07.883382Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:56:07.884061Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:07.884326Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:56:07.884435Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:56:07.891297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:07.891951Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:56:07.903669Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:56:07.903811Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:56:08.241397Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1311:2804] 2025-04-10T10:56:08.241797Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.334881Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1313:2805] 2025-04-10T10:56:08.335250Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.353404Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.353730Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.356505Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T10:56:08.356650Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T10:56:08.356827Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T10:56:08.357660Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.358453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.358570Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:1406:2804] in generation 1 2025-04-10T10:56:08.371303Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1316:2807] 2025-04-10T10:56:08.371588Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.385891Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.386687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.388703Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-10T10:56:08.388803Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037894 2025-04-10T10:56:08.388869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-10T10:56:08.389287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.391958Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.392020Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [1:1428:2805] in generation 1 2025-04-10T10:56:08.405011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.406628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.448809Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-04-10T10:56:08.448913Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037892 2025-04-10T10:56:08.448960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037892 2025-04-10T10:56:08.449400Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.449708Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.449802Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037892 persisting started state actor id [1:1446:2807] in generation 1 2025-04-10T10:56:08.470043Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1409:2399] 2025-04-10T10:56:08.470372Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.548908Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1415:2400] 2025-04-10T10:56:08.549149Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.560443Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1417:2401] 2025-04-10T10:56:08.560681Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.574915Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.575337Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.576753Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:08.576818Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:08.576867Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:08.577235Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.578068Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:08.578272Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1498:2399] in generation 1 2025-04-10T10:56:08.579365Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1427:2403] 2025-04-10T10:56:08.579579Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:08.595696Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.595818Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:08.596089Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.597511Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-10T10:56:08.597578Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-10T10:56:08.597619Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-10T10:56:08.597930Z node 2 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:08.597994Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:08.599641Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037893 2025-04-10T10:56:08.599707Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037893 2025-04-10T10:56:08.599770Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 7 ... Database : . PoolId : default. }. About to drain channelId: 1, hasPeer: 1, finished: 0 2025-04-10T10:56:53.426665Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. SendChannelData, channelId: 1, peer: [5:1973:2969], chunks: 1, bytes: 6, watermark: 0, checkpoint: 0, seqNo: 1, finished: 1 2025-04-10T10:56:53.426783Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T10:56:53.426991Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Send stats to executor actor [5:1973:2969] TaskId: 1 Stats: CpuTimeUs: 544 Tasks { TaskId: 1 CpuTimeUs: 227 FinishTimeMs: 1744282613426 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 67 BuildCpuTimeUs: 160 HostName: "ghrun-x5vqckr7qy" NodeId: 5 CreateTimeMs: 1744282613425 } MaxMemoryUsage: 1048576 2025-04-10T10:56:53.427307Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1642:2969], seqNo: 1, nRows: 1 2025-04-10T10:56:53.427412Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T10:56:53.427443Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-10T10:56:53.427470Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-10T10:56:53.427512Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. 
SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-10T10:56:53.427541Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-10T10:56:53.427567Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-10T10:56:53.427640Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-10T10:56:53.427831Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1976:3157], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 544 Tasks { TaskId: 1 CpuTimeUs: 227 FinishTimeMs: 1744282613426 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 67 BuildCpuTimeUs: 160 HostName: "ghrun-x5vqckr7qy" NodeId: 5 CreateTimeMs: 1744282613425 } MaxMemoryUsage: 1048576 } 2025-04-10T10:56:53.427931Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1976:3157], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-04-10T10:56:53.428272Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1977:3157] 2025-04-10T10:56:53.428369Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-04-10T10:56:53.428414Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-04-10T10:56:53.428440Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-04-10T10:56:53.428498Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2025-04-10T10:56:53.428529Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-04-10T10:56:53.428563Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-04-10T10:56:53.428596Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-04-10T10:56:53.428618Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-04-10T10:56:53.428642Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-04-10T10:56:53.428688Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2025-04-10T10:56:53.428721Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1976:3157], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfngt0zavzj0r1gk61m36dg. SessionId : ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T10:56:53.428805Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-04-10T10:56:53.428898Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
<main>
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:56:53.429037Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T10:56:53.429214Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1976:3157], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2115 Tasks { TaskId: 1 CpuTimeUs: 232 FinishTimeMs: 1744282613428 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 72 BuildCpuTimeUs: 160 HostName: "ghrun-x5vqckr7qy" NodeId: 5 CreateTimeMs: 1744282613425 } MaxMemoryUsage: 1048576 } 2025-04-10T10:56:53.429265Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1976:3157] 2025-04-10T10:56:53.429397Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T10:56:53.429470Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-10T10:56:53.429520Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1973:2969] TxId: 281474976715667. Ctx: { TraceId: 01jrfngt0zavzj0r1gk61m36dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmJmZjI1ZDAtZDc0OTI5MzYtMTMxZjUzODAtOGY0NzczMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002115s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 1163 >> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD] >> KqpQueryService::TableSink_OltpInsert >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::AllowRead |68.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |68.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |68.4%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.4%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |68.4%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpService::SessionBusy >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryService::ShowCreateViewOnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-04-10T10:52:56.395721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635760931162343:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:52:56.395804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001476/r3tmp/tmpf6kOJh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16566, node 1 2025-04-10T10:52:56.724422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:52:56.737423Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:52:56.737599Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:52:56.743742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:52:56.743972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:52:56.745790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:52:56.781111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:52:56.781130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:52:56.781136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:52:56.781215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:52:56.837947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.853785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:52:56.881398Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491635760931162956:2308] 2025-04-10T10:52:56.881687Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:52:56.893796Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:52:56.893900Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:52:56.895875Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:52:56.895962Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:52:56.896089Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2025-04-10T10:52:56.896466Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:52:56.896533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:52:56.896567Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491635760931162970:2308] in generation 1 2025-04-10T10:52:56.897863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:52:56.942422Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:52:56.942566Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:52:56.942627Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491635760931162974:2309] 2025-04-10T10:52:56.942643Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:56.942655Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:52:56.942666Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.942810Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:52:56.942922Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:52:56.942993Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635760931162953:2297], serverId# [1:7491635760931162973:2306], sessionId# [0:0:0] 2025-04-10T10:52:56.943025Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.943044Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:56.943080Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:52:56.943100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.943190Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:56.943414Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:52:56.943492Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T10:52:56.944612Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.947282Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:56.947357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:52:56.949304Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491635760931162988:2314], serverId# [1:7491635760931162989:2315], sessionId# [0:0:0] 2025-04-10T10:52:56.955195Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744282376998 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282376998 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:52:56.955238Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.955372Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.955429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.955444Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:52:56.955464Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744282376998:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T10:52:56.955766Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744282376998:281474976710657 keys extracted: 0 2025-04-10T10:52:56.955924Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:52:56.956111Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:52:56.956163Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:52:56.958571Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:52:56.959044Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:52:56.960162Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744282376998} 2025-04-10T10:52:56.960215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.960269Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744282376997 2025-04-10T10:52:56.960286Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.960314Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744282377000 2025-04-10T10:52:56.960360Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:52:56.960376Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:52:56.960410Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:52:56.960456Z node 1 :TX_DATASHARD DEBUG: Complete [1744282376998 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491635760931162771:2193], exec latency: 2 ms, propose latency: 4 ms 2025-04-10T10:52:56.960490Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T10:52:56.960554Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:52:56.972538Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7491635760931162974:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-10T10:52:56.975894Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T10:52:56.975953Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:52:56.983628Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:52:56.983759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:52:56.983833Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:52:56.983884Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-10T10:52:56.983891Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T10:52:56.984799Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:52:57.008538Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T10:52:57.008996Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-10T10:52:57.009545Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T10:52:57.009748Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-10T10:52:57.009769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T10:52:57.009809Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-10T10:52:57.009855Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-10T10:52:57.009888Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:52:57.009938Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-10T10:52:57.037342Z node 1 :PERSQUEU ... 
G: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:57.189660Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-04-10T10:56:57.191600Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:57.215586Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-04-10T10:56:57.215842Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:57.215951Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:57.216015Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:57.216251Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:56:57.216389Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-10T10:56:57.216616Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:57.217454Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-10T10:56:57.217756Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T10:56:57.218686Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-10T10:56:57.221905Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-10T10:56:57.222682Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:56:57.234333Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-10T10:56:57.234557Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-10T10:56:57.234719Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:57.234900Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-04-10T10:56:57.235034Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-04-10T10:56:57.235148Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:57.235437Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-10T10:56:57.235567Z node 27 
:CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-10T10:56:57.235903Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-10T10:56:57.236111Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:981:2779], at tablet# 72075186224037888 2025-04-10T10:56:57.236168Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-10T10:56:57.236342Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:57.236875Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1063:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-10T10:56:57.237392Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:57.237472Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:57.237699Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-10T10:56:57.237880Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:57.237927Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:57.237990Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-04-10T10:56:57.238245Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-04-10T10:56:57.238460Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-10T10:56:57.238767Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-10T10:56:57.239804Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-04-10T10:56:57.240476Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 
curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-04-10T10:56:57.240642Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:56:57.240690Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:56:57.240739Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:56:57.240780Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:56:57.240827Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-04-10T10:56:57.240866Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000003_00000_0000000001_00000| 2025-04-10T10:56:57.240902Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:56:57.240943Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:56:57.240988Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:56:57.241093Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:56:57.241185Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-04-10T10:56:57.243875Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [27:923:2735] 2025-04-10T10:56:57.244061Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 size 93 2025-04-10T10:56:57.254683Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:56:57.254858Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T10:56:57.254951Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-10T10:56:57.255232Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-10T10:56:57.255507Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1063:2779] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-04-10T10:56:57.255631Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-10T10:56:57.255816Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-10T10:56:57.255857Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-10T10:56:57.267638Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-10T10:56:57.438731Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-10T10:56:57.438808Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-10T10:56:57.438986Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-04-10T10:56:57.439097Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-10T10:56:57.439539Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2025-04-10T10:56:57.439659Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T10:56:57.440610Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpQueryService::Explain [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::AlterCdcTopic >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::ReturnAndCloseSameTime >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31582, MsgBus: 26761 2025-04-10T10:56:36.577248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636708248785853:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:36.577606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a6/r3tmp/tmpz0PYRU/pdisk_1.dat 2025-04-10T10:56:37.524042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:37.524173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:37.543789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:37.618111Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31582, node 1 2025-04-10T10:56:37.659251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:37.835018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:37.835043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:37.835054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:37.835168Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26761 TClient is connected to server localhost:26761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:56:38.796498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.830675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.041779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.300446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.501404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.573909Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636708248785853:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:41.573999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:42.112809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636734018591266:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.112916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.559673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.624027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.708882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.768687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.818500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.892643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.974539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636734018591787:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.974627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.975071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636734018591792:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.983448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:43.008952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636734018591794:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:43.071480Z node 1 :TX_PROXY ERROR: Actor# [1:7491636738313559145:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10575, MsgBus: 17624 2025-04-10T10:56:46.191023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636750497079839:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:46.191068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a6/r3tmp/tmpOd6ALt/pdisk_1.dat 2025-04-10T10:56:46.427063Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:46.441537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:46.441637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:46.449304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10575, node 2 2025-04-10T10:56:46.554987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:46.555017Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:46.555026Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:46.555154Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17624 TClient is connected to server localhost:17624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:47.228109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:47.282778Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:50.377871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636767676949681:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.378017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.378505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636767676949693:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.383933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:50.431325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636767676949695:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:56:50.522060Z node 2 :TX_PROXY ERROR: Actor# [2:7491636767676949746:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:50.618817Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:56:50.640805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-10T10:56:50.903784Z node 2 :TX_PROXY ERROR: Actor# [2:7491636767676950030:2507] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:50.916440Z node 2 :TX_PROXY ERROR: Actor# [2:7491636767676950037:2512] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZmMxYmI2NTUtMTQyMWY0MS0zMDQ0MjlmYy1iYTk5ZThhNA==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:50.919109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.161901Z node 2 :TX_PROXY ERROR: Actor# [2:7491636771971917521:2626] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:51.165779Z node 2 :TX_PROXY ERROR: Actor# [2:7491636771971917528:2631] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZmMxYmI2NTUtMTQyMWY0MS0zMDQ0MjlmYy1iYTk5ZThhNA==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:51.168992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.193908Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636750497079839:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:51.194005Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2316, MsgBus: 16949 2025-04-10T10:56:52.907337Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636775464937286:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:52.907382Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a6/r3tmp/tmpxmRBQC/pdisk_1.dat 2025-04-10T10:56:53.109068Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:53.109175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:53.126366Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:53.127777Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2316, node 3 2025-04-10T10:56:53.358859Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:53.358888Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:53.358895Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:53.359040Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16949 TClient is connected to server localhost:16949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:54.078135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:54.087676Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:57.418170Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636796939774424:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:57.418340Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:57.418884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636796939774436:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:57.423406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:57.441098Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636796939774438:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:56:57.509800Z node 3 :TX_PROXY ERROR: Actor# [3:7491636796939774489:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:57.623580Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:56:57.629074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-10T10:56:57.908788Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636775464937286:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:57.908849Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:57.955375Z node 3 :TX_PROXY ERROR: Actor# [3:7491636796939774772:2507] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:57.958137Z node 3 :TX_PROXY ERROR: Actor# [3:7491636796939774781:2513] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZGEzNzAwMmUtZGM0NjMwZjMtZDEzZmUxZTMtMTM5MThmYjc=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:57.961655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.303052Z node 3 :TX_PROXY ERROR: Actor# [3:7491636801234742271:2631] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:58.309009Z node 3 :TX_PROXY ERROR: Actor# [3:7491636801234742278:2636] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZGEzNzAwMmUtZGM0NjMwZjMtZDEzZmUxZTMtMTM5MThmYjc=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:58.315211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpQueryService::DdlTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction >> KqpQueryService::ShowCreateView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, 
gRPC: 14691, MsgBus: 17325 2025-04-10T10:56:36.238233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636707875749792:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:36.239283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015af/r3tmp/tmp9yYdvT/pdisk_1.dat 2025-04-10T10:56:36.946632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:36.947568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:36.947660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:36.965377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14691, node 1 2025-04-10T10:56:37.292319Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:37.292348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:37.292359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:37.292474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17325 TClient is connected to server localhost:17325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:38.275570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.293570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:38.306858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.638518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:56:38.888386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:56:39.012603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.224327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636707875749792:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:41.224391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:41.862262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636729350587986:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.862369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.340209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.388190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.435527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.497750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.560938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.641736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.765770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733645555801:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.765835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.766136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733645555806:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.774065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:42.792593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636733645555808:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:42.873954Z node 1 :TX_PROXY ERROR: Actor# [1:7491636733645555865:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18594, MsgBus: 26570 2025-04-10T10:56:46.151980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636751456828523:2171];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:46.239830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015af/r3tmp/tmpDuOXNw/pdisk_1.dat 2025-04-10T10:56:46.345025Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:46.387337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:46.387435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:46.388726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18594, node 2 2025-04-10T10:56:46.518773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:46.518801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:46.518809Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:46.518947Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26570 TClient is connected to server localhost:26570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:47.108008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:47.118950Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:47.136042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:47.248049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:47.478017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... ccess permissions } 2025-04-10T10:56:50.102523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.200373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.255030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.322344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.403627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.455372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.538008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.651601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636768636699879:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.651777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.652185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636768636699885:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.657282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:50.675553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636768636699887:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:50.764636Z node 2 :TX_PROXY ERROR: Actor# [2:7491636768636699942:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:51.150367Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636751456828523:2171];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:51.200335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:51.912143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 10 Trying to start YDB, gRPC: 18435, MsgBus: 4074 2025-04-10T10:56:53.778727Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636780762416181:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015af/r3tmp/tmpoVJoO9/pdisk_1.dat 2025-04-10T10:56:53.867072Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:54.019278Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:54.019385Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:54.027715Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:54.033898Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18435, node 3 2025-04-10T10:56:54.124457Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:54.124479Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:54.124484Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:54.124589Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4074 TClient is connected to server localhost:4074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:54.839906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:54.874658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:55.036061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:55.410736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:55.532724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:58.102470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636802237254285:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.102565Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.168206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.210526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.288803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.343563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.406599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.503973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.574278Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636780762416181:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:58.574573Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:58.619581Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636802237254807:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.619726Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.620064Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636802237254812:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.624589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:58.643519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636802237254814:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:58.721140Z node 3 :TX_PROXY ERROR: Actor# [3:7491636802237254870:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateViewOnTable [GOOD] Test command err: Trying to start YDB, gRPC: 16229, MsgBus: 30160 2025-04-10T10:56:35.987644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636704177435932:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:35.987700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a9/r3tmp/tmpFzQ1gs/pdisk_1.dat 2025-04-10T10:56:36.793462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:36.796969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:36.797071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:36.807123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16229, node 1 2025-04-10T10:56:37.094682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:37.094706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:37.094714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:37.094838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30160 TClient is connected to server localhost:30160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:37.919181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:37.956436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.179928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.362922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:38.456508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.612253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636725652274192:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.612449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.950337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.992456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636704177435932:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:40.992515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:41.048218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.107292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.189934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.233791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.272779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.337806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636729947242004:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.337881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.338234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636729947242009:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.342089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:41.353378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636729947242011:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:41.422554Z node 1 :TX_PROXY ERROR: Actor# [1:7491636729947242066:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24142, MsgBus: 23362 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a9/r3tmp/tmp7KPUbU/pdisk_1.dat 2025-04-10T10:56:44.335092Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:44.335942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:44.382804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:44.382909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:44.387395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24142, node 2 2025-04-10T10:56:44.564442Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:44.564471Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:44.564479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:44.564619Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23362 TClient is connected to server localhost:23362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:45.353233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:45.365330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:45.485846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:45.752496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:45.874093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:48.703615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636757850113251:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... tion part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:48.936270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:48.983572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.060761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.124633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.247798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636762145081061:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.247888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.248079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636762145081066:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.251823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:49.263923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636762145081068:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:49.335306Z node 2 :TX_PROXY ERROR: Actor# [2:7491636762145081122:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25383, MsgBus: 30914 2025-04-10T10:56:52.490378Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636774837249682:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:52.490489Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a9/r3tmp/tmp9OaMYv/pdisk_1.dat 2025-04-10T10:56:52.759014Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:52.767149Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:52.767244Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:52.775361Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25383, node 3 2025-04-10T10:56:52.902886Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:52.902917Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:52.902927Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:52.903095Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30914 TClient is connected to server localhost:30914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:53.754224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:53.763839Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:53.780023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:53.960165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:54.264420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.394178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:57.492938Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636774837249682:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:57.493027Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:57.570322Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636796312087949:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:57.570501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:57.639507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:57.720658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:57.803256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:57.855546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:57.921275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.006629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.108525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636800607055768:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.108643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.109123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636800607055773:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.113611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:58.133777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636800607055775:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:58.194783Z node 3 :TX_PROXY ERROR: Actor# [3:7491636800607055830:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:59.703348Z node 3 :SYSTEM_VIEWS ERROR: Scan error, actor: [3:7491636804902023492:2513], owner: [3:7491636804902023488:2511], scan id: 0, table id: [1:0:0:show_create], error: Path type mismatch, expected: View, found: Table 2025-04-10T10:56:59.704879Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491636804902023489:2512], TxId: 281474976710671, task: 2. Ctx: { TraceId : 01jrfnh3jv9j7vs4e0hbb2zj6v. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NjVhYTFiMy04ZjRjNmJmLWZiZDlhOWE3LWZlYmVmZjMw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7491636804902023485:2499], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-10T10:56:59.705383Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjVhYTFiMy04ZjRjNmJmLWZiZDlhOWE3LWZlYmVmZjMw, ActorId: [3:7491636804902023434:2499], ActorState: ExecuteState, TraceId: 01jrfnh3jv9j7vs4e0hbb2zj6v, Create QueryResponse for error on request, msg: |68.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView |68.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |68.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateView [GOOD] Test command err: Trying to start YDB, gRPC: 15823, MsgBus: 16619 2025-04-10T10:56:38.087252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636716276626412:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:38.100313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159e/r3tmp/tmpOB5SDp/pdisk_1.dat 2025-04-10T10:56:38.809718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:38.837570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:38.837747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:38.840987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15823, node 1 2025-04-10T10:56:39.002056Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:39.002084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:39.002091Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:39.002242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16619 TClient is connected to server localhost:16619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:40.135681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.150778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:40.165341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.540660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.804438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.903835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:43.040176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636737751464612:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.040276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.090521Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636716276626412:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:43.090591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:43.460561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.535012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.579094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.626740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.671303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.729951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.791202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636737751465131:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.791301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.791544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636737751465136:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.797347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:43.812242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636737751465138:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:43.895814Z node 1 :TX_PROXY ERROR: Actor# [1:7491636737751465192:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:45.325949Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636746341400096:2508], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/test_show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:45.326236Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2UxMjEwMTYtNmM3NjJhZDItMTBkYmU5ZTQtMWNiYmZjNDI=, ActorId: [1:7491636746341400087:2502], ActorState: ExecuteState, TraceId: 01jrfngnpq1yy1yc3gfmqwt1nk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:45.345253Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636746341400130:2511], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:45.346787Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2UxMjEwMTYtNmM3NjJhZDItMTBkYmU5ZTQtMWNiYmZjNDI=, ActorId: [1:7491636746341400087:2502], ActorState: ExecuteState, TraceId: 01jrfngnrm55s4wj2pm2vbgthz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15940, MsgBus: 13544 2025-04-10T10:56:46.391249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636751415475826:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:46.391292Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159e/r3tmp/tmptXuLbr/pdisk_1.dat 2025-04-10T10:56:46.665045Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15940, node 2 2025-04-10T10:56:46.749964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:46.750062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:46.752254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:46.806244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:46.806271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:46.806278Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:46.806388Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13544 TClient is connected to server localhost:13544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: fa ... 
, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.793234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.868653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.910474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:50.969832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636768595347291:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.969915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.970471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636768595347296:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:50.974774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:50.994170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636768595347298:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:51.087730Z node 2 :TX_PROXY ERROR: Actor# [2:7491636772890314649:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:51.391749Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636751415475826:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:51.391836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:52.363600Z node 2 :SYSTEM_VIEWS ERROR: Scan error, actor: [2:7491636777185282310:2511], owner: [2:7491636777185282307:2509], scan id: 0, table id: [1:0:0:show_create], error: Path type mismatch, expected: Table, found: View 2025-04-10T10:56:52.364977Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491636777185282308:2510], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmVmM2YzNzQtY2JiNmE5NmUtZWE2Y2U5OS1jY2Q5NTU3ZQ==. CustomerSuppliedId : . TraceId : 01jrfngwgbaqpfzhrvhq9gp1yr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491636777185282304:2497], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-10T10:56:52.365461Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmVmM2YzNzQtY2JiNmE5NmUtZWE2Y2U5OS1jY2Q5NTU3ZQ==, ActorId: [2:7491636777185282238:2497], ActorState: ExecuteState, TraceId: 01jrfngwgbaqpfzhrvhq9gp1yr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25479, MsgBus: 16199 2025-04-10T10:56:53.587284Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636781033634727:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:53.607589Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159e/r3tmp/tmpxyOog7/pdisk_1.dat 2025-04-10T10:56:53.832319Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:53.872159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:53.872242Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:53.876600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25479, node 3 2025-04-10T10:56:54.077606Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:54.077631Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:54.077639Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:54.077764Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16199 TClient is connected to server localhost:16199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:54.823560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:54.861066Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:55.028026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:55.322396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:55.444200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:58.594276Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636781033634727:2069];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:58.594370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:59.180480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636806803440274:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.180576Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.229124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.346219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.393394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.497961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.582999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.668772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.740789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636806803440795:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.740886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.741263Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636806803440800:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.746129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:59.760766Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636806803440802:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:59.839201Z node 3 :TX_PROXY ERROR: Actor# [3:7491636806803440856:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams
>> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest
>> TestYmqHttpProxy::TestCreateQueue
>> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl
>> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter
>> TestKinesisHttpProxy::CreateStreamInIncorrectDb
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD]
Test command err:
Trying to start YDB, gRPC: 22143, MsgBus: 21128
2025-04-10T10:56:34.792319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636700212301381:2198];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:34.792748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f3/r3tmp/tmprbPN9O/pdisk_1.dat
2025-04-10T10:56:35.745775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:35.745900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:35.756070Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:35.756592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22143, node 1
2025-04-10T10:56:36.046687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:36.046709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:36.046731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:36.046847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21128
TClient is connected to server localhost:21128
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:37.253268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:37.287214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:56:37.303561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:37.476277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:37.678599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:37.802690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:39.757124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636700212301381:2198];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:39.757193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:40.764868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636725982106793:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:40.764992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:41.231126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.296590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.376332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.422343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.495690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.540013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:41.645767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636730277074614:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:41.645866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:41.646130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636730277074619:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:41.650742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:41.670119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636730277074621:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:41.767193Z node 1 :TX_PROXY ERROR: Actor# [1:7491636730277074679:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:43.072940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:56:43.074618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T10:56:43.076045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 12831, MsgBus: 20752
2025-04-10T10:56:46.284183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636750182339062:2193];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:46.284237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f3/r3tmp/tmp4V6aNz/pdisk_1.dat
2025-04-10T10:56:46.528644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:46.528719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:46.531290Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:46.545948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12831, node 2
2025-04-10T10:56:46.618755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:46.618788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:46.618799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:46.618930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20752
TClient is connected to server localhost:20752
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:47.125702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:47.132569Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:56:47.141457Z n ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:50.405513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:50.497753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636767362210420:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.497862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.498252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636767362210425:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.503968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:50.516873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636767362210427:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:50.604156Z node 2 :TX_PROXY ERROR: Actor# [2:7491636767362210482:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:51.285921Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636750182339062:2193];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:51.286016Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:51.756880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:56:51.758642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T10:56:51.760724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 22416, MsgBus: 5937
2025-04-10T10:56:55.576406Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636788573318232:2199];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:55.638984Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f3/r3tmp/tmpV3kU6X/pdisk_1.dat
2025-04-10T10:56:55.799722Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:55.817908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:55.818017Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:55.822506Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22416, node 3
2025-04-10T10:56:55.990914Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:55.990954Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:55.990962Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:55.991102Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5937
TClient is connected to server localhost:5937
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:56.470209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:56.487204Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T10:56:56.503716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:56.633931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:56.934410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:57.047673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:59.849852Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636805753189043:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.849937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:59.931762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.002651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.045001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.087401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.131433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.164175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T10:57:00.228401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636810048156852:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:00.228497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:00.230197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636810048156857:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:00.234078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T10:57:00.249960Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636810048156859:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T10:57:00.314073Z node 3 :TX_PROXY ERROR: Actor# [3:7491636810048156912:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:00.506257Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636788573318232:2199];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:00.506348Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:57:01.531781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-10T10:57:01.534240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-10T10:57:01.535931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
>> KqpDocumentApi::AllowRead [GOOD]
>> KqpDocumentApi::RestrictAlter
>> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout
>> SystemView::AuthPermissions_ResultOrder [GOOD]
>> SystemView::AuthPermissions_Selects
>> KqpQueryService::CreateOrDropTopicOverTable [GOOD]
>> KqpService::SessionBusy [GOOD]
>> KqpService::SessionBusyRetryOperation
>> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD]
>> KqpQueryService::ExecuteQueryMultiResult [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD]
Test command err:
Trying to start YDB, gRPC: 9030, MsgBus: 8569
2025-04-10T10:56:38.441868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636716063911536:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:38.449757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159a/r3tmp/tmpCNhytZ/pdisk_1.dat
2025-04-10T10:56:39.084446Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:39.087294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:39.087451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:39.092922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9030, node 1
2025-04-10T10:56:39.252940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:39.252978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:39.252993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:39.253147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8569
TClient is connected to server localhost:8569
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:40.188941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:40.209708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:56:40.220688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T10:56:42.307885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733243781390:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:42.308035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:42.308600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733243781402:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:42.313077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T10:56:42.327470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
2025-04-10T10:56:42.327799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636733243781404:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-10T10:56:42.421195Z node 1 :TX_PROXY ERROR: Actor# [1:7491636733243781455:2345] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:42.951699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:1, at schemeshard: 72057594046644480
2025-04-10T10:56:43.340396Z node 1 :TX_PROXY ERROR: Actor# [1:7491636737538748990:2472] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZTE2YjcwMTYtYTMwNjQ1MzEtZmMzYTgyNjItZjYwZDQwYzU=\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:43.351161Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE2YjcwMTYtYTMwNjQ1MzEtZmMzYTgyNjItZjYwZDQwYzU=, ActorId: [1:7491636733243781360:2329], ActorState: ExecuteState, TraceId: 01jrfngksg3znhrp6365jn14sk, Create QueryResponse for error on request, msg:
2025-04-10T10:56:43.389479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T10:56:43.401666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T10:56:43.403482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-04-10T10:56:43.411961Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-04-10T10:56:43.414844Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636737538749036:2370], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T10:56:43.416614Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM3N2EyMWEtYjBiY2ZmODEtZWE4YTJhNzAtNWE3MDNhYzY=, ActorId: [1:7491636737538749033:2369], ActorState: ExecuteState, TraceId: 01jrfngkvmfapdfrwc73h6nq7q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-10T10:56:43.440673Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636716063911536:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:43.440774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 2201, MsgBus: 27547
2025-04-10T10:56:45.280686Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636746932539656:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:45.280744Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159a/r3tmp/tmpvQCCgC/pdisk_1.dat
2025-04-10T10:56:45.427343Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:45.455926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:45.456018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:45.457875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2201, node 2
2025-04-10T10:56:45.516553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:45.516579Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:45.516587Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:45.516726Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27547
TClient is connected to server localhost:27547
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:46.125212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:46.163504Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T10:56:49.257001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636764112409484:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:49.257141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:49.258520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636764112409496:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Re ... pose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:57:04.352234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636825096606760:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:57:04.427828Z node 4 :TX_PROXY ERROR: Actor# [4:7491636825096606816:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:05.789348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:57:05.952632Z node 4 :TX_PROXY ERROR: Actor# [4:7491636829391574535:3730] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 }
2025-04-10T10:57:05.957250Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup
2025-04-10T10:57:05.957480Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY4MmJiMTAtZmM3NTU3ZDgtYzRlOTM1MTEtOTA1YzcwMTc=, ActorId: [4:7491636829391574525:2517], ActorState: ExecuteState, TraceId: 01jrfnh9w74p6y7khqsc0zxqs6, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup
Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282624392, tx_id: 281474976710668 } }
Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282621151, tx_id: 281474976710661 } }
Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620738, tx_id: 281474976710659 } }
Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624252, tx_id: 281474976710666 } }
Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624336, tx_id: 281474976710667 } }
Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282623930, tx_id: 281474976710662 } }
Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624056, tx_id: 281474976710663 } }
Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624112, tx_id: 281474976710664 } }
Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620871, tx_id: 281474976710660 } }
Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624175, tx_id: 281474976710665 } }
Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282625904, tx_id: 281474976710671 } }
Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620528, tx_id: 281474976710658 } }
Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }
2025-04-10T10:57:06.012312Z node 4 :TX_PROXY ERROR: Actor# [4:7491636833686541853:3742] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 }
2025-04-10T10:57:06.012555Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup
2025-04-10T10:57:06.012693Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY4MmJiMTAtZmM3NTU3ZDgtYzRlOTM1MTEtOTA1YzcwMTc=, ActorId: [4:7491636829391574525:2517], ActorState: ExecuteState, TraceId: 01jrfnh9y71q04pr9kgmgcx9mp, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup
Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282624392, tx_id: 281474976710668 } }
Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282621151, tx_id: 281474976710661 } }
Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620738, tx_id: 281474976710659 } }
Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624252, tx_id: 281474976710666 } }
Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624336, tx_id: 281474976710667 } }
Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282623930, tx_id: 281474976710662 } }
Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624056, tx_id: 281474976710663 } }
Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624112, tx_id: 281474976710664 } }
Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620871, tx_id: 281474976710660 } }
Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624175, tx_id: 281474976710665 } }
Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282625904, tx_id: 281474976710671 } }
Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620528, tx_id: 281474976710658 } }
Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }
2025-04-10T10:57:06.048215Z node 4 :TX_PROXY ERROR: Actor# [4:7491636833686541875:3755] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:06.048454Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)
2025-04-10T10:57:06.048594Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY4MmJiMTAtZmM3NTU3ZDgtYzRlOTM1MTEtOTA1YzcwMTc=, ActorId: [4:7491636829391574525:2517], ActorState: ExecuteState, TraceId: 01jrfnh9ze920xsz4s3r771cs2, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)
2025-04-10T10:57:06.077222Z node 4 :TX_PROXY ERROR: Actor# [4:7491636833686541908:3780] txid# 281474976710678, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:06.077601Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)
2025-04-10T10:57:06.077806Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY4MmJiMTAtZmM3NTU3ZDgtYzRlOTM1MTEtOTA1YzcwMTc=, ActorId: [4:7491636829391574525:2517], ActorState: ExecuteState, TraceId: 01jrfnha094h00wbz0py960hh5, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)
Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1744282624392, tx_id: 281474976710668 } }
Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282621151, tx_id: 281474976710661 } }
Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620738, tx_id: 281474976710659 } }
Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624252, tx_id: 281474976710666 } }
Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624336, tx_id: 281474976710667 } }
Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282623930, tx_id: 281474976710662 } }
Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624056, tx_id: 281474976710663 } }
Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624112, tx_id: 281474976710664 } }
Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620871, tx_id: 281474976710660 } }
Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282624175, tx_id: 281474976710665 } }
Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282625904, tx_id: 281474976710671 } }
Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744282620528, tx_id: 281474976710658 } }
Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }
2025-04-10T10:57:06.133355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
>> KqpQueryService::TableSink_OltpInsert [GOOD]
>> KqpQueryService::TableSink_OltpInteractive
>> KqpQueryService::AlterCdcTopic [GOOD]
>> TestKinesisHttpProxy::MissingAction
>> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD]
>> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 2052, MsgBus: 6661
2025-04-10T10:56:36.752100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636708051348984:2198];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:36.752556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a3/r3tmp/tmpSnMMNF/pdisk_1.dat
2025-04-10T10:56:37.446115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:37.446289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:37.457067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:37.513918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2052, node 1 2025-04-10T10:56:37.818026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:37.818054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:37.818065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:37.818209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6661 TClient is connected to server localhost:6661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:39.242747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.258774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:39.297180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.614637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.954968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.094542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:41.735479Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636708051348984:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:41.747473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:42.288052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733821154386:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.288200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.796241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.903850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.964199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.020024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.073882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.143007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.254685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738116122207:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.254762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.255028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738116122212:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.258597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:43.269470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636738116122214:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:43.345379Z node 1 :TX_PROXY ERROR: Actor# [1:7491636738116122268:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:44.606013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.711536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.803459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24293, MsgBus: 18064 2025-04-10T10:56:48.215777Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636757188646880:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:48.272368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a3/r3tmp/tmpnpl63e/pdisk_1.dat 2025-04-10T10:56:48.411367Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:48.439186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:48.439284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:48.446714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24293, node 2 2025-04-10T10:56:48.566877Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:48.566912Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:48.566921Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:48.567065Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18064 TClient is connected to server localhost:18064 WaitRootIsUp 'Root'... 
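[editor note] The sequence just above repeats throughout this log: a first query finds no `default` resource pool (NOT_FOUND from KQP_WORKLOAD_SERVICE), schemeshard runs ESchemeOpCreateResourcePool, the TPoolCreatorActor schedules a "doublechecking" retry, and the retried create then reports "path exist, request accepts it". Read together, this looks like the lazy first-use initialization of /Root/.metadata/workload_manager/pools/default rather than a test failure. A minimal sketch of the client-side query that exercises this path, assuming the Python ydb SDK and an illustrative endpoint (neither appears in this log; the gRPC ports shown above are per-test):

    import ydb

    # Endpoint and database are assumptions for the sketch.
    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    # The first data query against a fresh database is what makes the workload
    # service look up (and then create) the default resource pool logged above.
    pool.retry_operation_sync(
        lambda session: session.transaction().execute("SELECT 1;", commit_tx=True)
    )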
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:49.231190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:49.250780Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:49.265667Z node 2 ... 12;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.377279Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.381816Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.384127Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.388763Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.388841Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.397170Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.397201Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.402629Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.404776Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.411482Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.418818Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.421382Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.428386Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.431112Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.437334Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.438179Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.444645Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.444685Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.451494Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.451508Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.458512Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.459714Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.465471Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.465518Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.471717Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.473103Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.478506Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.478699Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.484728Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.491624Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.494999Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.498332Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.502348Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.505259Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.508820Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.513984Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.520795Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.527119Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.535249Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.542289Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.542326Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.549011Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.553659Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T10:57:04.993230Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; 2025-04-10T10:57:04.993934Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; 2025-04-10T10:57:04.994654Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; 2025-04-10T10:57:04.998783Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7491636816670129057:2427];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037903; 2025-04-10T10:57:04.998882Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7491636816670129057:2427];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037903; 2025-04-10T10:57:04.999039Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7491636816670129057:2427];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037947; 2025-04-10T10:57:04.999119Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7491636816670129057:2427];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037947; 2025-04-10T10:57:04.999773Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiResult [GOOD] Test command err: Trying to start YDB, gRPC: 8135, MsgBus: 3368 2025-04-10T10:56:35.148818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636703929307699:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:35.189482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b2/r3tmp/tmpIgvCgp/pdisk_1.dat 2025-04-10T10:56:35.789459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:35.815402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:35.815600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:35.819687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8135, node 1 2025-04-10T10:56:35.918548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:35.918593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:35.918604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:35.918775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3368 TClient is connected to server localhost:3368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:37.043611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.059597Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:37.070298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.250653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.433431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.511075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:39.528618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636721109178519:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:39.528761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.068410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.082630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636703929307699:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:40.082685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:40.110934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.138784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.167879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.199763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.276518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:40.347136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636725404146334:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.347209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.347447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636725404146339:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.352164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:40.364162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636725404146341:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:40.430324Z node 1 :TX_PROXY ERROR: Actor# [1:7491636725404146393:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:41.912786Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710672; 2025-04-10T10:56:41.913296Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710672; 2025-04-10T10:56:41.913946Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636729699114098:2500], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491636729699113992:2500]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491636729699114098:2500].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T10:56:41.914493Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636729699114084:2500], SessionActorId: [1:7491636729699113992:2500], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491636729699113992:2500]. isRollback=0 2025-04-10T10:56:41.914754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM5MTMxNTAtYTJhNWEwYjctZGU2NWU4NDgtMjc0YjRkMjM=, ActorId: [1:7491636729699113992:2500], ActorState: ExecuteState, TraceId: 01jrfngjd1dkzbz7ap1yvys24r, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491636729699114085:2500] from: [1:7491636729699114084:2500] 2025-04-10T10:56:41.914848Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491636729699114085:2500] TxId: 281474976710672. Ctx: { TraceId: 01jrfngjd1dkzbz7ap1yvys24r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM5MTMxNTAtYTJhNWEwYjctZGU2NWU4NDgtMjc0YjRkMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T10:56:41.915072Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM5MTMxNTAtYTJhNWEwYjctZGU2NWU4NDgtMjc0YjRkMjM=, ActorId: [1:7491636729699113992:2500], ActorState: ExecuteState, TraceId: 01jrfngjd1dkzbz7ap1yvys24r, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3288, MsgBus: 17274 2025-04-10T10:56:43.320782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636735390149228:2199];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b2/r3tmp/tmpRnJkV7/pdisk_1.dat 2025-04-10T10:56:43.407380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:43.483586Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:43.507356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:43.507480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:43.509764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3288, node 2 2025-04-10T10:56:43.650129Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:43.650180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:43.650191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:43.650331Z node 2 :NET_CLASSIFIER ... 5.914494Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:56.026549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636792575516313:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.026662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.072823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.118563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.159120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.214022Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.273147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.358821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.446349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636792575516827:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.446523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.450300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636792575516832:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.459628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:56.488975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636792575516835:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:56.578701Z node 3 :TX_PROXY ERROR: Actor# [3:7491636792575516890:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17745, MsgBus: 1477 2025-04-10T10:57:00.199862Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636808964450549:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:00.199920Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b2/r3tmp/tmpbRgrL2/pdisk_1.dat 2025-04-10T10:57:00.618916Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:00.636745Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:00.636866Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:00.641854Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17745, node 4 2025-04-10T10:57:00.842897Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:00.842932Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:00.842945Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:00.843102Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1477 TClient is connected to server localhost:1477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:01.632198Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
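[editor note] Earlier in this test's output, node 1 logs STATUS_LOCKS_BROKEN followed by an ABORTED status with "Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001". That is YDB's optimistic locking at work: a concurrent write invalidated the locks taken by an interactive transaction, so the commit is rejected and the whole transaction has to be re-run. A sketch of the usual client-side handling, assuming the Python ydb SDK; the column names are guesses, since only the table name `/Root/TwoShard` appears in the log:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # illustrative
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def read_modify_write(session):
        tx = session.transaction(ydb.SerializableReadWrite())
        tx.execute("SELECT * FROM TwoShard WHERE Key = 1;")
        # If another transaction touches the same rows between the read and the
        # commit, the commit fails with ABORTED / code 2001, as in the log above.
        tx.execute("UPSERT INTO TwoShard (Key, Value1) VALUES (1, 42);", commit_tx=True)

    # ABORTED is a retryable status; retry_operation_sync re-runs the whole
    # callable with a fresh transaction.
    pool.retry_operation_sync(read_modify_write)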
2025-04-10T10:57:01.642406Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:01.657103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:01.771867Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.115798Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.286898Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.202311Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636808964450549:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.202417Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:05.242945Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636830439288754:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.243052Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.301139Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.357080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.432113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.484091Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.610209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.671521Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.792442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636830439289277:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.792536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.793055Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636830439289282:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.797790Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:05.816634Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636830439289284:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:05.879473Z node 4 :TX_PROXY ERROR: Actor# [4:7491636830439289339:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 21119, MsgBus: 22506 2025-04-10T10:56:38.278766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636717434347880:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:38.299485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159c/r3tmp/tmpqXKNkY/pdisk_1.dat 2025-04-10T10:56:39.183493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:39.190700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:39.190798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:39.195377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21119, node 1 2025-04-10T10:56:39.459843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:39.459880Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:39.459889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:39.460050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22506 TClient is connected to server localhost:22506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:40.411883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
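[editor note] The FLAT_TX_SCHEMESHARD warning "Operation part proposed ok, but propose itself is undo unsafe" accompanies essentially every schema operation in this run (ESchemeOpCreateTable, ESchemeOpAlterSubDomain, ESchemeOpCreateResourcePool, ...). Reading the message literally, the suboperation's propose succeeded, but schemeshard could not undo it if a later part of the same multi-part operation failed; by itself it does not fail a test here. A sketch of a scheme call that corresponds to one such suboperation, again assuming the Python ydb SDK and a hypothetical table name:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # illustrative
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    # Each CREATE TABLE issued by the tests maps to one ESchemeOpCreateTable
    # suboperation and one "undo unsafe" WARN on the schemeshard side.
    pool.retry_operation_sync(
        lambda session: session.execute_scheme(
            "CREATE TABLE demo (id Uint64, PRIMARY KEY (id));"
        )
    )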
2025-04-10T10:56:40.473334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.636535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.820602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.917867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:43.068104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738909186131:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.068245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.282275Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636717434347880:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:43.282377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:43.492432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.570287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.624283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.702943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.782406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.847510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.941453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738909186653:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.941630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.942103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738909186658:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.947263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:43.962790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636738909186661:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:44.059403Z node 1 :TX_PROXY ERROR: Actor# [1:7491636743204154013:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:45.101034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.251776Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636747499121670:2510], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-10T10:56:45.252020Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhkZTA2NGUtOTE0ZWE5NTEtYjYyMzlmODUtYzUyOTc1YTU=, ActorId: [1:7491636747499121602:2500], ActorState: ExecuteState, TraceId: 01jrfngnkv9tdj0jra9x70qh43, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 2786, MsgBus: 1614 2025-04-10T10:56:46.830565Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636751885445819:2172];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:46.831160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159c/r3tmp/tmpPhiTDV/pdisk_1.dat 2025-04-10T10:56:47.177805Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:47.234922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:47.235010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:47.243074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2786, node 2 2025-04-10T10:56:47.406907Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:47.406933Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:47.406940Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:47.407065Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1614 TClient is connected to server localhost:1614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:48.148141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDom ... ePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:59.351697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636806499916630:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:56:59.451711Z node 3 :TX_PROXY ERROR: Actor# [3:7491636806499916681:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:59.491329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-10T10:56:59.765708Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636806499916834:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T10:56:59.767338Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzdkNDgzYTYtOTZmMDJlZjMtMzhhY2Y2MjgtODUzZDI5Y2U=, ActorId: [3:7491636806499916832:2355], ActorState: ExecuteState, TraceId: 01jrfnh3v3aqbae2k48bv1k615, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T10:56:59.783811Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636785025079480:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:59.783921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:59.831203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9182, MsgBus: 65488 2025-04-10T10:57:01.178645Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636815454573567:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:01.178696Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159c/r3tmp/tmpsru4OS/pdisk_1.dat 2025-04-10T10:57:01.440581Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:01.491880Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:01.491979Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:01.495480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9182, node 4 2025-04-10T10:57:01.606775Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:01.606811Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:01.606820Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:01.606951Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65488 TClient is connected to server localhost:65488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:02.200179Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.225720Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.335551Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.731612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.838573Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.614347Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636832634444517:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.614472Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.666062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.748625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.806757Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.863676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.915976Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:06.006281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:06.182275Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636815454573567:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:06.184740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:06.212825Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636836929412343:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.212906Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.213239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636836929412348:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.219052Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:06.238374Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T10:57:06.239491Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636836929412350:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:57:06.298396Z node 4 :TX_PROXY ERROR: Actor# [4:7491636836929412406:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:07.728033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.895120Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037919:1][4:7491636841224380217:2527] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:18:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T10:57:08.027793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:08.115778Z node 4 :TX_PROXY ERROR: Actor# [4:7491636845519347673:3878] txid# 281474976715674, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-04-10T10:57:08.116360Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjVhY2YyZmItNjE1ZDgzODUtODQyYTA4NDQtYjY2MGVhNGI=, ActorId: [4:7491636841224380289:2538], ActorState: ExecuteState, TraceId: 01jrfnhbzr1y69tqd7ehgsmgbx, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
<main>: Error: Cannot change partition count. Use split/merge instead, code: 2017 >> TestKinesisHttpProxy::DifferentContentTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [FAIL] Test command err: 2025-04-10T10:54:38.073947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636200734423788:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:38.073977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:39.077045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:39.186137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:39.186294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:39.202429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:39.210083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7673, node 1 2025-04-10T10:54:39.519218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:39.519237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:39.519243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:39.519343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:40.408059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:40.503604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:54:40.515533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:40.528821Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:54:43.077759Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636200734423788:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:43.077869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:45.219033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636230799195513:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.219128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.219657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636230799195527:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:45.224177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:45.239197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636230799195529:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:45.334367Z node 1 :TX_PROXY ERROR: Actor# [1:7491636230799195580:2373] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:49.186693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636247743925239:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:49.270120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:49.556410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:49.556513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:49.562818Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:49.564890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14388, node 2 2025-04-10T10:54:49.870461Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:54:49.870480Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:54:49.870489Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:54:49.870612Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:50.567804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:54:50.579688Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:54:50.599793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:54:50.604138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:54:50.609487Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:54:54.038338Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636247743925239:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:54.038422Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:54:55.707058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636273513729586:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:55.707192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636273513729575:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:55.707655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:55.713037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:55.733839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636273513729589:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:55.797636Z node 2 :TX_PROXY ERROR: Actor# [2:7491636273513729640:2367] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:58.958133Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636285583300702:2232];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:54:59.015984Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existe ... DATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:23.326826Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:23.326891Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:23.922705Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636392933268451:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:23.922834Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636392933268433:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:23.923083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:23.930928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:55:23.945940Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491636392933268455:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:55:24.019845Z node 5 :TX_PROXY ERROR: Actor# [5:7491636397228235802:2364] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:24.191188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:24.873904Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:24.873943Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:25.539042Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:25.539099Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:25.654864Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:25.654945Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:25.768588Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:25.768665Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:25.891849Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:25.891916Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.028953Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.029033Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.124989Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.125046Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.244177Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.244263Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.470681Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.470736Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.576724Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.576774Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.694853Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.694899Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.823878Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.823927Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:26.951122Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:26.951177Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:27.034311Z node 5 :TICKET_PARSER INFO: Ticket 
parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:27.034362Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:27.156975Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:27.157020Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:27.236574Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:27.236624Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-10T10:55:27.272765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T10:55:27.276705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T10:55:27.278362Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T10:55:29.260257Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-10T10:55:29.260337Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {} TBackTrace::Capture()+28 (0x18FBEE4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1947AD10) NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&)+9171 (0x18B51163) std::__y1::__function::__func, void ()>::operator()()+280 (0x18B677B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x194B1D36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19481889) NTestSuiteViewer::TCurrentTest::Execute()+1204 (0x18B66664) NUnitTest::TTestFactory::Execute()+2438 (0x19483156) NUnitTest::RunMain(int, char**)+5213 (0x194AC2AD) ??+0 (0x7F58A1725D90) __libc_start_main+128 (0x7F58A1725E40) _start+41 (0x1644B029) 2025-04-10T10:55:35.617883Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491636444524484799:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:35.617993Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:35.853256Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:35.894420Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:35.894562Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:35.896884Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26514, node 6 2025-04-10T10:55:35.982220Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T10:55:35.982255Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:35.982267Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:35.982459Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32097 2025-04-10T10:55:40.618167Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491636444524484799:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:40.618292Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:41.279499Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-10T10:55:44.274483Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491636483113505904:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:44.274577Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:44.489099Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:44.520291Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:44.520389Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:44.521927Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22107, node 7 2025-04-10T10:55:44.573247Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:44.573281Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:44.573295Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:44.573473Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6193 2025-04-10T10:55:49.277030Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491636483113505904:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:49.277148Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::Tcl >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 30861, MsgBus: 13648 2025-04-10T10:56:39.839046Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636718517850728:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:39.839540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001597/r3tmp/tmpVF8P8p/pdisk_1.dat 2025-04-10T10:56:40.699083Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:40.730164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:40.730278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:40.735869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30861, node 1 2025-04-10T10:56:41.106768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:41.106792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:41.106806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:41.106929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13648 TClient is connected to server localhost:13648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:42.174278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:42.233752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:42.445193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.722282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:42.866499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:44.835318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636718517850728:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:44.835390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:45.268671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636744287656110:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:45.268810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:45.672054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.771566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.827507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.911253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.987074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.072878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.232533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636748582623935:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.232621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.232808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636748582623940:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.237257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:46.265581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636748582623942:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:46.327087Z node 1 :TX_PROXY ERROR: Actor# [1:7491636748582623998:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26468, MsgBus: 7770 2025-04-10T10:56:48.734342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636757898665975:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:48.735164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001597/r3tmp/tmpaMacc0/pdisk_1.dat 2025-04-10T10:56:49.022122Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:49.051288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:49.051369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26468, node 2 2025-04-10T10:56:49.081851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:49.282674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:49.282700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:49.282712Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:49.282830Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7770 TClient is connected to server localhost:7770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:49.891767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:49.898743Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:49.905377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.011036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.375172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.476832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:57.951587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:57.974466Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:57.992668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:58.111656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:58.435375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:58.621098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:01.296401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636813853331624:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:01.296495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:01.372024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.409478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.448850Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636792378493435:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:01.449051Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:01.452880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.524503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.568567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.648108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.721555Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636813853332145:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:01.721641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:01.721809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636813853332150:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:01.729014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:01.741654Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636813853332152:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:01.822927Z node 3 :TX_PROXY ERROR: Actor# [3:7491636813853332208:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:03.065231Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTE2OTBmMTMtNDNiOWRhZGUtZWI1NDY0ZmUtYzBhNmEzYzY=, ActorId: [3:7491636818148299802:2500], ActorState: ReadyState, TraceId: 01jrfnh72a1msc9qxethqjg9nn, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6906, MsgBus: 22895 2025-04-10T10:57:04.211777Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636828151478797:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:04.222681Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001597/r3tmp/tmpLbIaFN/pdisk_1.dat 2025-04-10T10:57:04.475477Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:04.495999Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:04.496110Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:04.497804Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6906, node 4 2025-04-10T10:57:04.694849Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:04.694878Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:04.694892Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:04.695021Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22895 TClient is connected to server localhost:22895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:05.537019Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:05.550541Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:08.943285Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636845331348532:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.943360Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.943679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636845331348566:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.949189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:57:08.972086Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636845331348568:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:57:09.125865Z node 4 :TX_PROXY ERROR: Actor# [4:7491636849626315915:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:09.214269Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636828151478797:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:09.214352Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:09.440956Z node 4 :TX_PROXY ERROR: Actor# [4:7491636849626315948:2351] txid# 281474976710660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2025-04-10T10:57:09.442520Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjc3NDc4OGItZDJhMDE4MWEtNDM1MGJlOWUtZWVhYzQ2ZjQ=, ActorId: [4:7491636845331348527:2328], ActorState: ExecuteState, TraceId: 01jrfnh9h72qfmnkerdq6r1tk6, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 12341, MsgBus: 16751 2025-04-10T10:56:40.399963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:40.400230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:40.400436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ac/r3tmp/tmpABgx7E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12341, node 1 2025-04-10T10:56:41.053731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:41.060088Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:41.060185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:41.060272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:41.060673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:56:41.109039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:41.109270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:41.124023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16751 TClient is connected to server localhost:16751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:41.594272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.696187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:42.099835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:42.571201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:42.987762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:44.062074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1814:3409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.062419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.096134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.345719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.677578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.007292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.325906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:45.727204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.138412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2396:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.138593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.139351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2401:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.145762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:46.373042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2403:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:46.424219Z node 1 :TX_PROXY ERROR: Actor# [1:2468:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15565, MsgBus: 5248 2025-04-10T10:56:49.163159Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636763210320273:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:49.163214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ac/r3tmp/tmp0Lp423/pdisk_1.dat 2025-04-10T10:56:49.394131Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:49.424512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:49.424652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:49.425939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15565, node 2 2025-04-10T10:56:49.562768Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:49.562794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:49.562801Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:49.562933Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5248 TClient is connected to server localhost:5248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:50.108510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.125378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:50.267929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.477557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.556241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:53.843120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636780390191222:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not ... tatus: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:53.906182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:53.955883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.028454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.087347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.132585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.164487Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636763210320273:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:54.164597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:54.169724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.256803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636784685159033:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.256882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.257084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636784685159038:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.261008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:54.282350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636784685159040:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:54.360668Z node 2 :TX_PROXY ERROR: Actor# [2:7491636784685159096:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:55.832837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 122 Trying to start YDB, gRPC: 62864, MsgBus: 5420 2025-04-10T10:57:01.599976Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636816203036690:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:01.600904Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ac/r3tmp/tmpqTr6qT/pdisk_1.dat 2025-04-10T10:57:01.813213Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:01.824054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:01.824160Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:01.827468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62864, node 3 2025-04-10T10:57:01.989680Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:01.989710Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:01.989718Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:01.989851Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5420 TClient is connected to server localhost:5420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:02.917758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:02.955587Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:02.976508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:03.083937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:03.347621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:03.449186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.606272Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636816203036690:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:06.606358Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:06.860020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636837677874927:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.860113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.891447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:06.954749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.010237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.081096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.159544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.239894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.366561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636841972842739:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:07.366670Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:07.366959Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636841972842744:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:07.371969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:07.410399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636841972842746:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:07.512644Z node 3 :TX_PROXY ERROR: Actor# [3:7491636841972842805:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::Ddl_Dml >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl >> TestKinesisHttpProxy::TestPing >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T10:56:09.440827Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:09.440938Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T10:56:09.462848Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:09.489292Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T10:56:09.491165Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T10:56:09.493779Z node 1 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T10:56:09.495702Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T10:56:09.497597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T10:56:09.521816Z node 1 :PERSQUEUE INFO: new Cookie default|bdd9be92-77f2ba6b-b7a19cc4-3e0fd7a6_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:56:09.535519Z node 1 :PERSQUEUE INFO: new Cookie default|eec8fa3d-e2774941-14934d37-b9a3a451_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:56:09.568896Z node 1 :PERSQUEUE INFO: new Cookie default|efccf240-7d48674f-e4c1801-167f1d78_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:245:2057] recipient: [1:99:2134] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:248:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:249:2057] recipient: [1:247:2248] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:251:2057] recipient: [1:247:2248] 2025-04-10T10:56:09.667636Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:09.667714Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:56:09.668342Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:299:2290] 2025-04-10T10:56:09.670788Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:300:2291] 2025-04-10T10:56:09.706593Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:56:09.706694Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:299:2290] 2025-04-10T10:56:09.776727Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T10:56:09.776832Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:300:2291] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:332:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T10:56:10.758307Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:10.758398Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T10:56:10.778049Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:10.778950Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T10:56:10.779574Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T10:56:10.782242Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T10:56:10.784021Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-04-10T10:56:10.786190Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-04-10T10:56:10.792732Z node 2 :PERSQUEUE INFO: new Cookie default|d685e9d4-cd144cd-bc6a3f84-8c09d43c_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:56:10.800540Z node 2 :PERSQUEUE INFO: new Cookie default|1ed669c2-18944d78-9bc5e1-effa92cc_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:56:10.834352Z node 2 :PERSQUEUE INFO: new Cookie default|2a7863c1-a367ef9a-13706eca-78cb3809_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvOffsets ! 
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:242:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:245:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:246:2057] recipient: [2:244:2245] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:248:2057] recipient: [2:244:2245] 2025-04-10T10:56:10.904878Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:10.904945Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:56:10.905862Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:296:2287] 2025-04-10T10:56:10.908446Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:297:2288] 2025-04-10T10:56:10.932304Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:56:10.932402Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:296:2287] 2025-04-10T10:56:10.938630Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:56:10.938706Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:297:2288] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:247:2246] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:349:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:352:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:355:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:356:2057] recipient: [2:354:2322] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:358:2057] recipient: [2:354:2322] 2025-04-10T10:56:12.472945Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:56:12.473024Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:56:12.473863Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:408:2366] 2025-04-10T10:56:12.477697Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:409:2367] 2025-04-10T10:56:12.519986Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:56:12.520101Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [2:408:2366] 2025-04-10T10:56:12.526640Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T10:56:12.526719Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [2:409:2367] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:439:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: ... ion 3 [53:298:2289] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:330:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:248:2247]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:332:2057] recipient: [53:99:2134] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:335:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:336:2057] recipient: [53:334:2310] Leader for TabletID 72057594037927937 is [53:337:2311] sender: [53:338:2057] recipient: [53:334:2310] 2025-04-10T10:57:09.426075Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:09.426321Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:57:09.427725Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:388:2354] 2025-04-10T10:57:09.436361Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:389:2355] 2025-04-10T10:57:09.476262Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:57:09.476386Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [53:388:2354] 2025-04-10T10:57:09.484135Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:57:09.484245Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [53:389:2355] !Reboot 72057594037927937 (actor [53:248:2247]) rebooted! !Reboot 72057594037927937 (actor [53:248:2247]) tablet resolver refreshed! 
new actor is[53:337:2311] Leader for TabletID 72057594037927937 is [53:337:2311] sender: [53:438:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-04-10T10:57:11.530921Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:11.531018Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-04-10T10:57:11.561937Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:11.563241Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-04-10T10:57:11.564156Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-04-10T10:57:11.567743Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-04-10T10:57:11.570389Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-04-10T10:57:11.573078Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-04-10T10:57:11.582356Z node 54 :PERSQUEUE INFO: new Cookie default|4ebcfa7a-a42dc5ff-67c049a4-d487c83_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:57:11.589789Z node 54 :PERSQUEUE INFO: new Cookie default|e9491fd5-62df02e5-77515595-2d43828_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:57:11.622927Z node 54 :PERSQUEUE INFO: new Cookie default|cb23e7a0-83264199-9d802c3d-b32e3af8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:245:2057] recipient: [54:99:2134] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:248:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:249:2057] recipient: [54:247:2248] Leader for TabletID 
72057594037927937 is [54:250:2249] sender: [54:251:2057] recipient: [54:247:2248] 2025-04-10T10:57:11.766112Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:11.766283Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:57:11.767659Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:299:2290] 2025-04-10T10:57:11.772029Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:300:2291] 2025-04-10T10:57:11.835447Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:57:11.835581Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:299:2290] 2025-04-10T10:57:11.844102Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:57:11.844225Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:300:2291] Leader for TabletID 72057594037927937 is [54:250:2249] sender: [54:332:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:108:2057] recipient: [55:101:2135] 2025-04-10T10:57:12.591251Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:12.591360Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927938 is [55:153:2174] sender: [55:154:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:179:2057] recipient: [55:14:2061] 2025-04-10T10:57:12.626574Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:12.627804Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-04-10T10:57:12.628809Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:185:2198] 2025-04-10T10:57:12.632423Z node 55 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:185:2198] 2025-04-10T10:57:12.635012Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:186:2199] 2025-04-10T10:57:12.637643Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:186:2199] 2025-04-10T10:57:12.647141Z node 55 :PERSQUEUE INFO: new Cookie default|6bd164ab-e7ec2034-c3d034af-c52d320e_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:57:12.658045Z node 55 :PERSQUEUE INFO: new Cookie default|e68e8549-8f8fd10-48052831-8a6d0c1e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T10:57:12.707688Z node 55 :PERSQUEUE INFO: new Cookie default|a9513792-808cea49-eb8bafb4-5eca4904_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:243:2057] recipient: [55:99:2134] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:246:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:247:2057] recipient: [55:245:2246] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:249:2057] recipient: [55:245:2246] 2025-04-10T10:57:12.806284Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:57:12.806375Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:57:12.807637Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:297:2288] 2025-04-10T10:57:12.812749Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:298:2289] 2025-04-10T10:57:12.845478Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T10:57:12.845609Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2288] 2025-04-10T10:57:12.862892Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T10:57:12.863020Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2289] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:330:2057] recipient: [55:14:2061] >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] >> KqpLimits::QSReplySize-useSink [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [FAIL] >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: Trying to start YDB, gRPC: 61919, MsgBus: 12856 2025-04-10T10:56:48.455277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636758537100510:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:48.455899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001586/r3tmp/tmpQKywHK/pdisk_1.dat 2025-04-10T10:56:49.196333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:49.196461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:49.201682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:49.212469Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61919, node 1 2025-04-10T10:56:49.482730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:49.482755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:49.482762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:49.482866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12856 TClient is connected to server localhost:12856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:56:50.333346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.351242Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:52.642723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636775716970222:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.642884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.648654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636775716970234:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.653010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:56:52.677574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636775716970236:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:56:52.764837Z node 1 :TX_PROXY ERROR: Actor# [1:7491636775716970287:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:53.266774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:56:53.295195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-10T10:56:53.414263Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636758537100510:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:53.414322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:53.714226Z node 1 :TX_PROXY ERROR: Actor# [1:7491636780011937827:2474] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:53.739211Z node 1 :TX_PROXY ERROR: Actor# [1:7491636780011937835:2480] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODhiNmVmLWNmNjkwYTQ0LTVhMDhhZjQtYTNlNDg1MTE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:53.782784Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:56:53.792623Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636780011937874:2368], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:53.793948Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODhiNmVmLWNmNjkwYTQ0LTVhMDhhZjQtYTNlNDg1MTE=, ActorId: [1:7491636775716970194:2329], ActorState: ExecuteState, TraceId: 01jrfngy0b8adwym9hdpmwh1yw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:53.864331Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636780011937899:2376], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:53.865608Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWYzY2E4ZS00YjI1ODlmNy1jN2EyMjllOC0zMmQ5MDg1Mw==, ActorId: [1:7491636780011937895:2373], ActorState: ExecuteState, TraceId: 01jrfngy2v639a2vd42cx70jff, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 27893, MsgBus: 27972 2025-04-10T10:56:54.754082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636783394297861:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:54.754164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001586/r3tmp/tmpu3JjPE/pdisk_1.dat 2025-04-10T10:56:55.093649Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:55.139440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:55.139516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:55.142866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27893, node 2 2025-04-10T10:56:55.342622Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:55.342646Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:55.342655Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:55.342771Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27972 TClient is connected to server localhost:27972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:55.976368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:55.987272Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:56.005744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.088906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.297488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.379413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... 7:10.528134Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976710669;tx_id=281474976710669;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710669; 2025-04-10T10:57:11.404213Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.406002Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=23;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406082Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406171Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406251Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406321Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=27;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406389Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 
2025-04-10T10:57:11.406461Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406527Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=30;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406593Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037909;receive=72075186224037910; 2025-04-10T10:57:11.406638Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.406937Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=33;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.406998Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407061Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=35;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407120Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=36;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407178Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=37;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407238Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=38;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407301Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=39;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407360Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407416Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=41;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407503Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.407531Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407605Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.407667Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;self_id=[3:7491636841976563947:2478];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037915;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037909; 2025-04-10T10:57:11.408287Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.408608Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.408639Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=30;method=execute;tx_info=;fline=secondary.h:109;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.408675Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=31;method=execute;tx_info=;fline=secondary.h:109;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.408708Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=32;method=execute;tx_info=;fline=secondary.h:109;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.408967Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 
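The repeated ack_tablet_duplication records above appear to show tablet 72075186224037915 tracking ReadSet acks: each event names the peers it is still waiting on (wait=...) and the tablet whose ack just arrived (receive=...), and an ack that arrives again for an already-settled peer is flagged as a duplication. A minimal sketch of that bookkeeping, assuming a simple pending-set model; TAckTracker and OnAck are hypothetical names for illustration, not YDB's actual classes.

#include <cstdint>
#include <iostream>
#include <set>
#include <utility>

// Hypothetical model of the ack bookkeeping suggested by the log records above.
class TAckTracker {
public:
    explicit TAckTracker(std::set<uint64_t> pending)
        : Pending_(std::move(pending)) {}

    // Removes tabletId from the pending set; an ack for a tablet that is
    // no longer pending is reported, mirroring "ack_tablet_duplication".
    bool OnAck(uint64_t tabletId) {
        if (Pending_.erase(tabletId) == 0) {
            std::cout << "event=ack_tablet_duplication;receive=" << tabletId << "\n";
            return false;
        }
        return true;
    }

    bool Done() const { return Pending_.empty(); }

private:
    std::set<uint64_t> Pending_;
};

int main() {
    TAckTracker tracker({72075186224037903ULL, 72075186224037909ULL, 72075186224037910ULL});
    tracker.OnAck(72075186224037910ULL);  // first ack: accepted, two peers left
    tracker.OnAck(72075186224037910ULL);  // repeated ack: flagged as duplication
    std::cout << "done=" << tracker.Done() << "\n";  // 0: still waiting on two peers
    return 0;
}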
2025-04-10T10:57:11.409409Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.409760Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.409766Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.409982Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.410817Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.411359Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.411665Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; 2025-04-10T10:57:11.411741Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=30;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.411768Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=31;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.411791Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=32;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-10T10:57:11.412751Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710672; |68.5%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> BasicUsage::GetAllStartPartitionSessions >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> BasicUsage::BasicWriteSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 8215, MsgBus: 12548 2025-04-10T10:56:42.506562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636730693415403:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:42.506632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001594/r3tmp/tmpjdOvyV/pdisk_1.dat 2025-04-10T10:56:43.135098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:43.136377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:43.136476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:43.143287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8215, node 1 2025-04-10T10:56:43.338700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:43.338724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:43.338731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:43.338839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12548 TClient is connected to server localhost:12548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:44.098485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:44.132533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:56:44.315378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.518585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:44.606612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.556419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636747873286368:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.556577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.183137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.232307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.338870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.411639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.486634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.510709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636730693415403:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:47.516098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:47.576568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.682746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636752168254190:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.682867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.683186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636752168254195:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.688481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:47.713984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636752168254197:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:47.799284Z node 1 :TX_PROXY ERROR: Actor# [1:7491636752168254253:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22700, MsgBus: 21082 2025-04-10T10:56:50.042825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636768337679510:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:50.042873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001594/r3tmp/tmpdwXoIw/pdisk_1.dat 2025-04-10T10:56:50.252292Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:50.277165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:50.277267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:50.282738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22700, node 2 2025-04-10T10:56:50.412763Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:50.412792Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:50.412799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:50.412939Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21082 TClient is connected to server localhost:21082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:51.192541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
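The sequence above, which recurs for each node in this run — TPoolCreatorActor scheduling a retry on "Transaction ... completed, doublechecking", followed by TX_PROXY reporting "path exist, request accepts it" — reads as an idempotent create: when the default resource pool turns out to already exist (for example because a concurrent creator won the race), the creator treats that as success. A sketch of the pattern under that assumption; ECreateStatus and EnsureExists are illustrative stand-ins, not YDB API.

#include <functional>
#include <iostream>

// Illustrative statuses; not YDB's real result codes.
enum class ECreateStatus { Ok, AlreadyExists, Transient };

// Retries a create until it either succeeds or observes that the object
// already exists, which it treats as success ("path exist, request accepts it").
bool EnsureExists(const std::function<ECreateStatus()>& tryCreate, int maxAttempts = 3) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        switch (tryCreate()) {
            case ECreateStatus::Ok:
            case ECreateStatus::AlreadyExists:
                return true;
            case ECreateStatus::Transient:
                std::cout << "attempt " << attempt << " inconclusive, doublechecking\n";
                break;  // fall out of the switch and retry
        }
    }
    return false;
}

int main() {
    int calls = 0;
    // Simulated race: the first attempt is inconclusive, the doublecheck
    // then finds the pool already created by a concurrent actor.
    bool ok = EnsureExists([&calls] {
        return ++calls == 1 ? ECreateStatus::Transient : ECreateStatus::AlreadyExists;
    });
    std::cout << "ensured=" << ok << " after " << calls << " attempt(s)\n";
    return 0;
}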
2025-04-10T10:56:51.203166Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:51.220948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:51.330127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:51.586602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:51.716206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... : Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.677805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636785517550981:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.677896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.678123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636785517550986:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.681769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:54.703511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636785517550988:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:56:54.788987Z node 2 :TX_PROXY ERROR: Actor# [2:7491636785517551044:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:55.050292Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636768337679510:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:55.050404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:55.930592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:55.934287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:55.936124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7768, MsgBus: 15539 2025-04-10T10:57:00.966807Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636809715575788:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:00.967841Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001594/r3tmp/tmplQzp9z/pdisk_1.dat 2025-04-10T10:57:01.139482Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:01.154174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:01.154257Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:01.156867Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7768, node 3 2025-04-10T10:57:01.222224Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:01.222262Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:01.222269Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:01.222384Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15539 TClient is connected to server localhost:15539 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:02.049934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.056096Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:02.077489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.174993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:02.426831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:57:02.582568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.691176Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636831190414000:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.691256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.740059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.781459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.819369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.875449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.934777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.976800Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636809715575788:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.976884Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:06.020165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:06.102212Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636835485381815:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.102301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.102696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636835485381820:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:06.107162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:06.119902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636835485381822:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:06.207958Z node 3 :TX_PROXY ERROR: Actor# [3:7491636835485381878:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:07.313639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.319137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.321763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:11.355696Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282631378, txId: 281474976710703] shutting down >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> SystemView::ShowCreateTablePartitionSettings [GOOD] >> SystemView::ShowCreateTableReadReplicas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18488, MsgBus: 26321 2025-04-10T10:55:46.922442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636490341814759:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.927497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d2/r3tmp/tmp09wfQH/pdisk_1.dat 2025-04-10T10:55:47.820775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.844044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.844208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.855089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18488, node 1 2025-04-10T10:55:48.090891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:48.090924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:48.090932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:48.091079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26321 TClient is connected to server localhost:26321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:49.331086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.371723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:55:49.393005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.600885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.818958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.933162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.922314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636490341814759:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:51.922431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.443119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636516111620190:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.443234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.825192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.898394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.937464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:52.982468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.026300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.099612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.154895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520406588008:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.154957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.155245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636520406588013:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.158355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:53.168639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636520406588015:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.266437Z node 1 :TX_PROXY ERROR: Actor# [1:7491636520406588067:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.192307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:56.902548Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636533291490875:2541], SessionActorId: [1:7491636533291490855:2541], statusCode=PRECONDITION_FAILED. Issue=
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 . sessionActorId=[1:7491636533291490855:2541]. isRollback=0 2025-04-10T10:55:56.904622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTkzMzE5ODctMjNlYjVjNjMtNmZhYzJjMmQtNzYwOTgwNjI=, ActorId: [1:7491636533291490855:2541], ActorState: ExecuteState, TraceId: 01jrfnf5z3c60cd5h8epqv5f59, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491636533291490876:2541] from: [1:7491636533291490875:2541] 2025-04-10T10:55:56.904698Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491636533291490876:2541] TxId: 281474976710672. Ctx: { TraceId: 01jrfnf5z3c60cd5h8epqv5f59, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTkzMzE5ODctMjNlYjVjNjMtNmZhYzJjMmQtNzYwOTgwNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 } 2025-04-10T10:55:56.904844Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636533291490884:2555], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MTkzMzE5ODctMjNlYjVjNjMtNmZhYzJjMmQtNzYwOTgwNjI=. TraceId : 01jrfnf5z3c60cd5h8epqv5f59. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491636533291490876:2541], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T10:55:56.905562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTkzMzE5ODctMjNlYjVjNjMtNmZhYzJjMmQtNzYwOTgwNjI=, ActorId: [1:7491636533291490855:2541], ActorState: ExecuteState, TraceId: 01jrfnf5z3c60cd5h8epqv5f59, Create QueryResponse for error on request, msg:
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 Trying to start YDB, gRPC: 13502, MsgBus: 24621 2025-04-10T10:55:58.183536Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636543589731291:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:58.183593Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d2/r3tmp/tmp1CkJhz/pdisk_1.dat 2025-04-10T10:55:58.367373Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:58.452016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:58.452112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:58.474621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13502, node 2 2025-04-10T10:55:58.632465Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:58.632488Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:58.632497Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:58.632654Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.875275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:16.932313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:17.015007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:17.129066Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636625035979968:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:17.129212Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:17.130693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636625035979973:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:17.140830Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:17.179391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636625035979975:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:17.247133Z node 3 :TX_PROXY ERROR: Actor# [3:7491636625035980030:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:24.958649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:56:24.958690Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=Y2MxYmQ0YzQtZWZmZTkzNDktNjc0NGVkNzEtY2I0ZjgyOTg= Trying to start YDB, gRPC: 21715, MsgBus: 26424 2025-04-10T10:56:48.418793Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636760109695320:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:48.419951Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017d2/r3tmp/tmpW2oKtQ/pdisk_1.dat 2025-04-10T10:56:48.885929Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:48.939308Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:48.939429Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:48.941320Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21715, node 4 2025-04-10T10:56:49.226975Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:49.227014Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:49.227029Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:49.227234Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26424 TClient is connected to server localhost:26424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:50.181181Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.206660Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:50.221370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
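The "Query did not complete within specified timeout 500ms" failure above is a client-side deadline expiring before the query returns. A generic sketch of imposing such a deadline on an asynchronous call, assuming a std::async-based model; RunQuery is a hypothetical slow operation, and this is not the YDB SDK's actual timeout machinery.

#include <chrono>
#include <future>
#include <iostream>
#include <thread>

// Hypothetical slow query: takes longer than the 500 ms deadline.
int RunQuery() {
    std::this_thread::sleep_for(std::chrono::milliseconds(800));
    return 42;
}

int main() {
    auto result = std::async(std::launch::async, RunQuery);
    if (result.wait_for(std::chrono::milliseconds(500)) == std::future_status::timeout) {
        // What the session reports in the log when the deadline expires first.
        std::cout << "Query did not complete within specified timeout 500ms\n";
    } else {
        std::cout << "result=" << result.get() << "\n";
    }
    return 0;  // note: the future's destructor still waits for RunQuery to finish
}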
2025-04-10T10:56:50.396843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.655607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:50.783833Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:53.426335Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636760109695320:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:53.426458Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:53.945617Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636781584533569:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:53.945710Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.070337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.119819Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.202385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.260025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.345225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.421361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:54.585771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636785879501393:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.585870Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.586388Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636785879501398:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:54.592468Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:54.616243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636785879501400:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:54.694368Z node 4 :TX_PROXY ERROR: Actor# [4:7491636785879501456:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:56.195911Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:03.834981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:03.835011Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:13.114418Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjBmMzkxNDQtYjRjOWY1ZWYtMTU2NmE2OTQtZmI2NzZlNjM=, ActorId: [4:7491636858893946906:2683], ActorState: ExecuteState, TraceId: 01jrfnhf11ca23p96bvyqnwmpd, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::RetryDiscoveryWithCancel >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> KqpQueryService::TableSink_OltpInteractive [GOOD] |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> BasicUsage::PropagateSessionClosed >> KqpService::SessionBusyRetryOperation [GOOD] |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> TestKinesisHttpProxy::MissingAction [GOOD] >> KqpService::RangeCache-UseCache >> BasicUsage::WriteSessionNoAvailableDatabase >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> BasicUsage::WriteSessionWriteInHandlers >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 27466, MsgBus: 62840 2025-04-10T10:56:51.492707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636770911031142:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:51.516792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001583/r3tmp/tmpccpZh4/pdisk_1.dat 2025-04-10T10:56:52.154449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:52.191031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:52.191153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:52.193285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27466, node 1 2025-04-10T10:56:52.380118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:52.380147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:52.380156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:52.380278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62840 TClient is connected to server localhost:62840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:53.209636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:55.849380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636788090900855:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:55.849513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.217496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:56:56.398886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636792385868255:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.398984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.399471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636792385868260:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:56.403941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:56:56.429483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T10:56:56.429804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636792385868262:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:56:56.473503Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636770911031142:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:56.473671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:56.512778Z node 1 :TX_PROXY ERROR: Actor# [1:7491636792385868314:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64297, MsgBus: 19650 2025-04-10T10:56:57.826341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636796716953937:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001583/r3tmp/tmpkBpZAD/pdisk_1.dat 2025-04-10T10:56:57.865807Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:58.012410Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:58.061961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:58.062035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:58.063307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64297, node 2 2025-04-10T10:56:58.243053Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:58.243080Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:58.243090Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:58.243196Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19650 TClient is connected to server localhost:19650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:56:59.340722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:59.351062Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:02.470326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636818191790889:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:02.470441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:02.496880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:57:02.613518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636818191790990:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:02.613650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:02.614101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636818191790995:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:02.619728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:57:02.636761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636818191790997:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:57:02.705193Z node 2 :TX_PROXY ERROR: Actor# [2:7491636818191791048:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:02.782103Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636796716953937:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:02.791916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:03.146504Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-10T10:57:03.159561Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T10:57:03.159799Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T10:57:03.160068Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491636822486758436:2368], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7491636822486758420:2368]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7491636822486758436:2368].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T10:57:03.160699Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491636822486758429:2368], SessionActorId: [2:7491636822486758420:2368], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7491636822486758420:2368]. isRollback=0 2025-04-10T10:57:03.160998Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzk1OTdhOWQtY2I1MWJkZDItNGRhYzRmMWUtYTViYTQ4ZWQ=, ActorId: [2:7491636822486758420:2368], ActorState: ExecuteState, TraceId: 01jrfnh7388ednq80x65r64vcs, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491636822486758430:2368] from: [2:7491636822486758429:2368] 2025-04-10T10:57:03.161087Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491636822486758430:2368] TxId: 281474976710663. Ctx: { TraceId: 01jrfnh7388ednq80x65r64vcs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzk1OTdhOWQtY2I1MWJkZDItNGRhYzRmMWUtYTViYTQ4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T10:57:03.161248Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzk1OTdhOWQtY2I1MWJkZDItNGRhYzRmMWUtYTViYTQ4ZWQ=, ActorId: [2:7491636822486758420:2368], ActorState: ExecuteState, TraceId: 01jrfnh7388ednq80x65r64vcs, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 3068, MsgBus: 16834 2025-04-10T10:57:09.445340Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636849104469285:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001583/r3tmp/tmprYZYXG/pdisk_1.dat 2025-04-10T10:57:09.563624Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:09.728956Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:09.792140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:09.792242Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:09.799505Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3068, node 3 2025-04-10T10:57:10.043037Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:10.043062Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:10.043070Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:10.043188Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16834 TClient is connected to server localhost:16834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:10.854264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:10.862209Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:14.350501Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636849104469285:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:14.360889Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:14.398270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636870579306265:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:14.398361Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:14.465666Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.788393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:57:15.276995Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636874874274959:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.277080Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.286025Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636874874274963:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.286139Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.286420Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636874874274968:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.295193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T10:57:15.322855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636874874274970:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:57:15.416995Z node 3 :TX_PROXY ERROR: Actor# [3:7491636874874275055:3297] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> TestKinesisHttpProxy::GoodRequestPutRecords >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> BasicUsage::FallbackToSingleDb >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> TestKinesisHttpProxy::TestRequestWithIAM >> BasicUsage::WriteSessionCloseWaitsForWrites |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |68.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |68.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |68.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] Test command err: Trying to start YDB, gRPC: 32192, MsgBus: 24264 2025-04-10T10:56:42.548292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636733352750320:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:42.548335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001592/r3tmp/tmp4UgsgV/pdisk_1.dat 2025-04-10T10:56:43.246491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:43.246581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:43.254433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:43.290658Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32192, node 1 2025-04-10T10:56:43.434881Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:43.434906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:43.434914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:43.435016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24264 TClient is connected to server localhost:24264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:44.330934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:44.359265Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:44.370324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:44.599190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:44.875289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:44.981376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.972033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636750532621269:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:46.972149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.278202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.324233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.381268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.415533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.470716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.509099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:47.553683Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636733352750320:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:47.553809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:47.587384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636754827589076:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.587464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.587743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636754827589081:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:47.592074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:47.614654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636754827589083:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:47.694606Z node 1 :TX_PROXY ERROR: Actor# [1:7491636754827589140:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:48.902788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:48.904737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:56:48.906073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.688237Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282611687, txId: 281474976710695] shutting down Trying to start YDB, gRPC: 65533, MsgBus: 20364 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001592/r3tmp/tmpp9fHRJ/pdisk_1.dat 2025-04-10T10:56:52.953570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:53.062422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:53.101412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:53.101520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:53.103595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65533, node 2 2025-04-10T10:56:53.278846Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:53.278871Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:53.278879Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:53.279017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20364 TClient is connected to server localhost:20364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:54.089877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:54.099008Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:54.116481Z node 2 :FLAT_TX_SCHEMESHARD ... opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:57.614801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636797303823129:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:57.669988Z node 2 :TX_PROXY ERROR: Actor# [2:7491636797303823183:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:58.881103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.901826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:58.909637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:01.041377Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 36c09247-301420b5-94fe74f3-b1493f61, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=YmM1YWZiNjctNzIyM2IwMjItYzM0NDk3NTUtMmIyMTk4ZjQ=, TxId: 2025-04-10T10:57:02.006179Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 36c09247-301420b5-94fe74f3-b1493f61, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZThmNzMyMzMtNjM2MGQzODQtNDFjYzVkOTUtNGU5NGRkY2U=, TxId: 2025-04-10T10:57:02.214115Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 36c09247-301420b5-94fe74f3-b1493f61, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-10T10:57:02.266296Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 36c09247-301420b5-94fe74f3-b1493f61, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=OWE5YmQzNDYtZGMzZjRhMzUtODM3OGFlN2UtN2E2YjRjZWQ=, TxId: 2025-04-10T10:57:02.266465Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 36c09247-301420b5-94fe74f3-b1493f61, check lease failed 2025-04-10T10:57:02.816926Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 36c09247-301420b5-94fe74f3-b1493f61, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=2&id=NTBkNmNlMGEtOWM3ZTIxMmItZTdlMzkwMWItZTg0NjI0OTg=, TxId: Trying to start YDB, gRPC: 4719, MsgBus: 14523 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001592/r3tmp/tmpnXxfCe/pdisk_1.dat 2025-04-10T10:57:05.862392Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:05.894636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:05.894910Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:05.899246Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4719, node 3 2025-04-10T10:57:05.919059Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:05.919108Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:05.958980Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:05.963228Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:05.963260Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:05.963268Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:05.963412Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14523 TClient is connected to server localhost:14523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:06.710604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.725697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.859698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:07.115805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.218680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.555405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636853049251165:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:10.555487Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:10.675930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:10.727817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:10.791922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:10.839838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:10.920604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:11.031506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:11.283390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636857344218993:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:11.283545Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:11.283814Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636857344218999:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:11.292955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:11.318204Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636857344219001:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:11.394323Z node 3 :TX_PROXY ERROR: Actor# [3:7491636857344219057:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:12.875664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:12.877511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:12.878946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:16.902454Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282636929, txId: 281474976710706] shutting down 2025-04-10T10:57:17.236741Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282637265, txId: 281474976710709] shutting down 2025-04-10T10:57:17.274716Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 19658483-b32ae8a8-1055fe4d-c9c6bb63, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=3&id=ZDIxNWRjNTgtY2UyYmFjYzctMTZjNmEyODEtNjEyZWRiMDI=, TxId: >> TestKinesisHttpProxy::TestRequestBadJson >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryServiceScripts::Tcl [GOOD] |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |68.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> KqpDocumentApi::RestrictDrop [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |68.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> Sharding::XXUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 21800, MsgBus: 19211 2025-04-10T10:56:35.183962Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636702323069537:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:35.184122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f0/r3tmp/tmpO8FF1T/pdisk_1.dat 2025-04-10T10:56:36.102706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:36.128706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:36.129019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:36.132816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21800, node 1 2025-04-10T10:56:36.354549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:36.354577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:36.354587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:36.355051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19211 TClient is connected to server localhost:19211 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:37.640423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.684374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:37.701106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.886188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.142957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.294734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.186351Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636702323069537:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:40.186429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:40.734826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636723797907570:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.738575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.528232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.593651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.644741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.690282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.767522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.810830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.888733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636728092875395:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.888831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.889266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636728092875400:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.893787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:41.906325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636728092875402:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:41.964812Z node 1 :TX_PROXY ERROR: Actor# [1:7491636728092875455:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:43.200907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.203200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.204402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.106999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282606129, txId: 281474976710696] shutting down 2025-04-10T10:56:46.148394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636749567713327:2761], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-04-10T10:56:46.148480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636749567713329:2763], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-04-10T10:56:46.148544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-04-10T10:56:46.148665Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491636749567713328:2762], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NTM2YTNmYWEtMTM3MTI3ODQtZjRjMmNjMWYtNGU3NzQxN2I=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-04-10T10:56:46.148758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491636749567713328:2762], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NTM2YTNmYWEtMTM3MTI3ODQtZjRjMmNjMWYtNGU3NzQxN2I=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } }
2025-04-10T10:56:46.148881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7491636749567713323:2760]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } }
2025-04-10T10:56:46.148983Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM2YTNmYWEtMTM3MTI3ODQtZjRjMmNjMWYtNGU3NzQxN2I=, ActorId: [1:7491636749567713323:2760], ActorState: ExecuteState, TraceId: 01jrfngph9ahpsh4s7sh7d4f66, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool
2025-04-10T10:56:46.149217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7491636749567713323:2760]: Pool another_pool_id not found
Trying to start YDB, gRPC: 22460, MsgBus: 6353
2025-04-10T10:56:47.393561Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636752619744278:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:47.393596Z node 2 :METADATA_PROVIDER ERRO ... nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:51.879181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:52.011437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636774094583047:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:52.011516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:52.015208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636774094583052:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:52.019764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:52.033185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636774094583054:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:52.123790Z node 2 :TX_PROXY ERROR: Actor# [2:7491636774094583109:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:52.394366Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636752619744278:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:52.394420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:53.184611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:56:53.186047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T10:56:53.188806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 7303, MsgBus: 15909
2025-04-10T10:56:56.915665Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636794385980285:2208];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f0/r3tmp/tmpeWHyA1/pdisk_1.dat
2025-04-10T10:56:57.006506Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:56:57.265470Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:57.295286Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:57.295373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:57.296530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7303, node 3
2025-04-10T10:56:57.506826Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:57.506851Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:57.506858Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:57.507002Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15909
TClient is connected to server localhost:15909
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:58.619555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:58.646826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:58.840764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:59.073931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:59.189215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:01.786199Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636794385980285:2208];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:01.786280Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:57:02.295963Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636820155785692:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:02.296070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:02.340566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.409549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.468204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.535789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.592898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.682019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:57:02.788716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636820155786215:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:02.788824Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:02.789289Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636820155786221:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:02.794518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:57:02.825250Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636820155786224:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:57:02.909416Z node 3 :TX_PROXY ERROR: Actor# [3:7491636820155786279:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:04.281622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:57:04.284470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T10:57:04.286635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T10:57:12.130413Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T10:57:12.130444Z node 3 :IMPORT WARN: Table profiles were not loaded
>> Sharding::XXUsage [GOOD]
>> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD]
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD]
Test command err:
Trying to start YDB, gRPC: 2693, MsgBus: 22323
2025-04-10T10:56:45.574226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636743726444205:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:45.574352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158f/r3tmp/tmpoNr95C/pdisk_1.dat
2025-04-10T10:56:46.088838Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:46.095973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:46.096085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:46.099286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2693, node 1
2025-04-10T10:56:46.176832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:46.176856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:46.176869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:46.177016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22323
TClient is connected to server localhost:22323
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:46.874546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:46.895243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:56:46.906213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:47.070960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:47.270093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:47.352733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:49.253974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636760906315162:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:49.254103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:49.668093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:56:49.711198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:56:49.761101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:56:49.830687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:56:49.924443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:49.972415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:50.096301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636765201282981:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.096363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.096692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636765201282987:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:50.101577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:50.131645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636765201282989:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:50.195876Z node 1 :TX_PROXY ERROR: Actor# [1:7491636765201283043:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:50.574364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636743726444205:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:50.574442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 7092, MsgBus: 64172
2025-04-10T10:56:52.370298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636773611555890:2206];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:52.371162Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158f/r3tmp/tmptC4NCh/pdisk_1.dat
2025-04-10T10:56:52.446912Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7092, node 2
2025-04-10T10:56:52.493404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:52.493507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:52.497849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:56:52.546723Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:52.546750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:52.546757Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:52.546886Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64172
TClient is connected to server localhost:64172
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:53.122996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.144388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.256196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.470628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.570754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateT ... d: 72057594046644480
2025-04-10T10:56:57.224693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636795086394522:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T10:56:57.329318Z node 2 :TX_PROXY ERROR: Actor# [2:7491636795086394581:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:57.342247Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636773611555890:2206];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:57.349529Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:59.037408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.039219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-10T10:56:59.043917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-10T10:57:07.426608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T10:57:07.426641Z node 2 :IMPORT WARN: Table profiles were not loaded
Trying to start YDB, gRPC: 32015, MsgBus: 25961
2025-04-10T10:57:12.116344Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636859755923324:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:12.116417Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158f/r3tmp/tmpR5kEOD/pdisk_1.dat
2025-04-10T10:57:12.332293Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:57:12.363709Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:57:12.363815Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:57:12.367869Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 32015, node 3
2025-04-10T10:57:12.476940Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:57:12.476967Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:57:12.476976Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:57:12.477135Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25961
TClient is connected to server localhost:25961
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:57:13.351970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:13.360293Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:57:13.371712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:13.491283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T10:57:13.699350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T10:57:13.845774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:16.739709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636876935794269:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:16.739835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:16.796632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:57:16.856628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:57:16.933749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:57:17.018846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:57:17.116542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:57:17.118678Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636859755923324:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:17.118725Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:57:17.207921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:57:17.324676Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636881230762089:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:17.324806Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:17.325142Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636881230762094:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:17.334686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:57:17.353931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636881230762096:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:57:17.417868Z node 3 :TX_PROXY ERROR: Actor# [3:7491636881230762152:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:19.295818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:57:19.298598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T10:57:19.300460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T10:57:19.744728Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636889820697389:2526], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008
2025-04-10T10:57:19.751321Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWVhMzUyMGEtYjAwZjMyNTYtNmFiOTM2MWQtMzM4YjI4Yjc=, ActorId: [3:7491636889820697386:2525], ActorState: ExecuteState, TraceId: 01jrfnhpxe3na26jxn8jzjr5nj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-04-10T10:57:22.673392Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491636902705600218:2841], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008
2025-04-10T10:57:22.673755Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWFmYzNlNmUtY2VkNWQyZWUtOTJiYzJjYzAtMmFmMGRkNDE=, ActorId: [3:7491636902705600215:2840], ActorState: ExecuteState, TraceId: 01jrfnht62d2d7j1wy3r61revc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
>> KqpQueryService::Ddl_Dml [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD]
Test command err:
5326286198865496372 7013585481659385130 5589892647730756021 18370825714823798433 1745994471295986566 196260001984683264 15998900756261962555 16507954031938426036 14854864077660788368 10453124784251494607 7451285447589077808 17923772027567840279 11498029964975602509 1780339973322787757 7364636180366482889 17136654433981773374 13059944408080979724 4670071575389349566 10692602155633250290 9651094188025557368 17335630849442805306 7039396658228447035 13214253307585320571 14478289028061900457 7214387433034005604 17541011190475558483 8012381043293835984 4716893414109649045 12308390238338261552 3217420161093509501 2339868140515455883 13376870606858228231 8679540459189926276 2139231456045147305 4929694391524532839 8931929496600122914 13570109535973237838 16822732863344502719 15940195316774561758 6582005241916870632 154606260934065933 1603798671807315852 17242916650304353067 715464054863697338 2504709911490459627 7705236461770313332 8343803664987397786 16035098764634771667 12705252586848844829 5957412547011099658 6383495792609164197 279885582656914611 15187603154964676130 5338644713063412962 1147194643949687533 12230928977645155127 7115539926524752454 3391151536364692843 1631913792684970369 11527640806359889346 17099506110185566985 13115482771563490762 13697960855266406302 6772320547439858315 8208573058537269347 846771493088893351 12557287688281894739 12541637491827379366 12242692759005016373 2944211663651296958 10366426411390252871 16476815738235604968 13984295721603858961 4130137330518565014 12716108078292991475 12781286654350868804 4515147577537491841 14731184713102523077 6611048662962366954 15915098292892339306 104594338477421694 14263580080133674155 3768996057925154463 3688856967647556378 3435498644312249818 15472914910355028446 13543467614074117372 11575789815998066832 17762908876584917819 10487675224230407596
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD]
Test command err:
Trying to start YDB, gRPC: 23397, MsgBus: 24936
2025-04-10T10:56:51.466575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636772244191426:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:51.466749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001581/r3tmp/tmpuYscp9/pdisk_1.dat
2025-04-10T10:56:52.065834Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:52.076328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:52.076420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:52.085253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23397, node 1
2025-04-10T10:56:52.250730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:52.250755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:52.250770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:52.250894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24936
TClient is connected to server localhost:24936
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:56:52.915534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:52.932952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T10:56:52.942732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.100477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.350558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:53.511150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:56:55.737630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636789424062372:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:55.737749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:56.151174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.199233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.241400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.315002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.355720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.410767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:56:56.467248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636772244191426:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:56.467803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:56:56.501718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636793719030181:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:56.501869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:56.506463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636793719030186:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:56:56.512427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:56:56.535847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636793719030188:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:56:56.620347Z node 1 :TX_PROXY ERROR: Actor# [1:7491636793719030245:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:56:57.881003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T10:56:58.030529Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636798013997927:2511], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008
2025-04-10T10:56:58.032245Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGRiZDlkOTMtZjczNzI4NDMtMTc3NWRiMGYtNTc4MWQzZWI=, ActorId: [1:7491636798013997839:2501], ActorState: ExecuteState, TraceId: 01jrfnh23cfg7tny7t3tpremh2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008
Trying to start YDB, gRPC: 19784, MsgBus: 31481
2025-04-10T10:56:59.063308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636805542623590:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:56:59.063363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001581/r3tmp/tmp5HazJQ/pdisk_1.dat
2025-04-10T10:56:59.371527Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:56:59.406872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:56:59.406962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:56:59.411954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19784, node 2
2025-04-10T10:56:59.618793Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:56:59.618819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:56:59.618826Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:56:59.618947Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31481
TClient is connected to server localhost:31481
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success. ...
eration type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:57:12.976974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:57:13.036069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:57:13.120256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:57:13.207736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:57:13.316365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636866936907047:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:13.316481Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:13.316949Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636866936907052:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:13.321161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:57:13.341401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636866936907054:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:57:13.413341Z node 3 :TX_PROXY ERROR: Actor# [3:7491636866936907109:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:14.785132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008
2025-04-10T10:57:14.968296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 26734, MsgBus: 27514
2025-04-10T10:57:16.154874Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636876663290348:2132];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:16.154970Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001581/r3tmp/tmpCVNjOF/pdisk_1.dat
2025-04-10T10:57:16.430020Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:57:16.440187Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:57:16.440283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:57:16.443024Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26734, node 4
2025-04-10T10:57:16.626834Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:57:16.626864Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:57:16.626872Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:57:16.627008Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27514
TClient is connected to server localhost:27514
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:57:17.431903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:17.440262Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T10:57:17.445317Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:57:17.567286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:17.851245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:17.976959Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:21.070672Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636898138128522:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:21.070796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:21.134002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.145581Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636876663290348:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:21.146258Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:21.209577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.269837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.336091Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.400896Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.492424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.873717Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636898138129052:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:21.873838Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:21.874638Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636898138129057:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:21.880875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:21.903291Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636898138129059:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:57:22.002026Z node 4 :TX_PROXY ERROR: Actor# [4:7491636898138129116:3470] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:23.472953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
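The recurring NOT_FOUND warnings for PoolId 'default' show the workload service creating the pool lazily: the first queries race to build /Root/.metadata/workload_manager/pools/default, one ESchemeOpCreateResourcePool wins, and the losing request gets the benign 'path exist, request accepts it' response seen above. For comparison, a hedged sketch of an explicitly declared pool (CREATE RESOURCE POOL syntax as in current YDB releases; the name and limits are illustrative, not taken from this run):

    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- at most 10 queries run concurrently
        QUEUE_SIZE = 100              -- further queries wait in a queue
    );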
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 |68.7%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 17215, MsgBus: 18757 2025-04-10T10:56:48.148586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636758764588258:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:48.152615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001587/r3tmp/tmpJV0GZ5/pdisk_1.dat 2025-04-10T10:56:48.679006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:48.688140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:48.688251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:48.691737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17215, node 1 2025-04-10T10:56:48.839322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:48.839344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:48.839351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:48.839467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18757 TClient is connected to server localhost:18757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:49.796800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:49.827262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:52.175579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636775944458033:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.175731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.524734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:56:52.800875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:52.801087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:52.801375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:52.801494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:52.801595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:52.801706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:52.801805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:52.801908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:52.802009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:52.802128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:56:52.803640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:52.803765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491636775944458179:2340];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
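The TX_COLUMNSHARD 'normalizer_register' warnings that follow the ESchemeOpCreateColumnTable operation are each column shard tablet booting and registering its schema normalizers (Granules, Chunks, TablesCleaner, and so on); despite the WARN level they record startup progress, not failures. A hedged sketch of the kind of column-store table whose creation spawns these tablets (name and columns are illustrative; the test's actual DDL is not in the log):

    CREATE TABLE `/Root/ExampleColumnTable` (
        Key Uint64 NOT NULL,
        Value Utf8,
        PRIMARY KEY (Key)
    ) WITH (STORE = COLUMN);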
2025-04-10T10:56:52.807369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:52.807449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:52.807629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:52.807732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:52.807834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:52.807929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:52.808022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:52.808119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:52.808247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:52.808353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:56:52.808449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:52.808557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491636775944458177:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:52.860886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:52.860957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:52.861177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:52.861270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:52.861371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:52.861495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491636775944458181:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:52.861606Z node 1 :TX_COLU ... rocess=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:57:22.085395Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:57:22.085544Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:57:22.085578Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:57:22.085621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:57:22.085657Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:57:22.085789Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:57:22.085822Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:57:22.086051Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:57:22.086083Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:57:22.088074Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:57:22.088117Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:57:22.092696Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:57:22.092753Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:57:22.092893Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:57:22.092927Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:57:22.093192Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:57:22.093224Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:57:22.093359Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:57:22.093411Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:57:22.093519Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:57:22.093552Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:57:22.093598Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:57:22.093630Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:57:22.104970Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:57:22.105058Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:57:22.105312Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:57:22.105376Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:57:22.105576Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:57:22.105610Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:57:22.105856Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:57:22.105892Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:57:22.106069Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:57:22.106099Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:57:22.146409Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.151863Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.153658Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.164890Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.166913Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.173093Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.184567Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.187255Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.195916Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.196966Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T10:57:22.205072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:57:22.358490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636903892874623:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.358621Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.359027Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636903892874628:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.363089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T10:57:22.378786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636903892874630:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:57:22.473074Z node 3 :TX_PROXY ERROR: Actor# [3:7491636903892874683:2668] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:22.862907Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; 2025-04-10T10:57:23.141027Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710667; >> TestYmqHttpProxy::TestSendMessage >> SystemView::ShowCreateTableReadReplicas [FAIL] >> SystemView::ShowCreateTableKeyBloomFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 6223, MsgBus: 23209 2025-04-10T10:56:47.363477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636753391945129:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:47.363537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001588/r3tmp/tmpUds6Zn/pdisk_1.dat 2025-04-10T10:56:47.842397Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:47.852612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:47.852768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:47.858027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6223, node 1 2025-04-10T10:56:48.070687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:48.070710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:48.070716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:48.070914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23209 TClient is connected to server localhost:23209 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:48.837094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:48.873104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:49.062041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:49.289407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:49.372729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:51.195396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636770571816100:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:51.195517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:51.595516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.657325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.756788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.799003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.871198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:51.935561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:52.026460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636774866783915:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.026580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.027128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636774866783920:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:52.031817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:52.058290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636774866783922:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:52.130967Z node 1 :TX_PROXY ERROR: Actor# [1:7491636774866783977:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:52.366280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636753391945129:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:52.366364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:53.537607Z node 1 :TX_PROXY ERROR: Actor# [1:7491636779161751618:3694] txid# 281474976710672, issues: { message: "User already exists" severity: 1 } 2025-04-10T10:56:53.549236Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTI3ZmUxMWMtMzBiYjUxZmYtNjk0MGY0OWYtZGYyMmE5N2Y=, ActorId: [1:7491636779161751608:2506], ActorState: ExecuteState, TraceId: 01jrfngxrfccf1scqmnhvq1bw3, Create QueryResponse for error on request, msg: 2025-04-10T10:56:53.688777Z node 1 :TX_PROXY ERROR: Actor# [1:7491636779161751662:3715] txid# 281474976710676, issues: { message: "User not found" severity: 1 } 2025-04-10T10:56:53.690594Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA0ZDAyN2YtYmRkNTFiNzMtMmY1ZTJiZmUtNTJiNzJmYQ==, ActorId: [1:7491636779161751656:2515], ActorState: ExecuteState, TraceId: 01jrfngxwk6j331yxejw5s97qn, Create QueryResponse for error on request, msg: 2025-04-10T10:56:53.739624Z node 1 :TX_PROXY ERROR: Actor# [1:7491636779161751697:3741] txid# 281474976710678, issues: { message: "User not found" severity: 1 } 2025-04-10T10:56:53.742523Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTJiMDI4MWMtOTEzNWE4YmItNTExN2EzYy00Y2YzOWE1Yg==, ActorId: [1:7491636779161751691:2518], ActorState: ExecuteState, TraceId: 01jrfngxyp9ynrjcxcwytwxhy1, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 27078, MsgBus: 19006 2025-04-10T10:56:54.621407Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636784142327494:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:54.621458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001588/r3tmp/tmpAHqR3T/pdisk_1.dat 2025-04-10T10:56:54.923352Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:54.961794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:54.961877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:54.963366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27078, node 2 2025-04-10T10:56:55.126738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:55.126761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
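The TX_PROXY errors just above ('User already exists', then twice 'User not found') are the expected negative paths of the Ddl_Dml test: creating the same user twice and dropping a user that is already gone. A minimal sketch of statements that produce exactly these messages (the user name is illustrative):

    CREATE USER user1 PASSWORD 'password1';
    CREATE USER user1 PASSWORD 'password1'; -- fails: "User already exists"
    DROP USER user1;
    DROP USER user1;                        -- fails: "User not found"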
2025-04-10T10:56:55.126770Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:55.126885Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19006 TClient is connected to server localhost:19006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Dep ... 6224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T10:57:20.663044Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491636893794648366:2515], Table: `/Root/TestDdlDml2` ([72057594046644480:16:1]), SessionActorId: [4:7491636893794648294:2515]Got LOCKS BROKEN for table `/Root/TestDdlDml2`. ShardID=72075186224037919, Sink=[4:7491636893794648366:2515].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T10:57:20.663690Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491636893794648359:2515], SessionActorId: [4:7491636893794648294:2515], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7491636893794648294:2515]. isRollback=0 2025-04-10T10:57:20.664022Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2I3ZWE4NjUtYzVkNjE5NTMtYzQ3NjJjYzAtZDVlY2I2Y2M=, ActorId: [4:7491636893794648294:2515], ActorState: ExecuteState, TraceId: 01jrfnhqy9d3wngedntx4s66ts, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7491636893794648416:2515] from: [4:7491636893794648359:2515] 2025-04-10T10:57:20.664139Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491636893794648416:2515] TxId: 281474976715675. Ctx: { TraceId: 01jrfnhqy9d3wngedntx4s66ts, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2I3ZWE4NjUtYzVkNjE5NTMtYzQ3NjJjYzAtZDVlY2I2Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T10:57:20.664349Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2I3ZWE4NjUtYzVkNjE5NTMtYzQ3NjJjYzAtZDVlY2I2Y2M=, ActorId: [4:7491636893794648294:2515], ActorState: ExecuteState, TraceId: 01jrfnhqy9d3wngedntx4s66ts, Create QueryResponse for error on request, msg: 2025-04-10T10:57:20.856015Z node 4 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T10:57:21.088820Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.373625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-10T10:57:21.949314Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491636898089616127:2597], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:21.955144Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGMwZGI1ZmEtNzBhMjFjZjUtNmQ5YjE5ZWUtZjFlMGI1MjI=, ActorId: [4:7491636898089615985:2573], ActorState: ExecuteState, TraceId: 01jrfnhrx271tc3jcgjk102f78, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:22.110879Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-10T10:57:22.268500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-10T10:57:22.780690Z node 4 :TX_PROXY ERROR: Actor# [4:7491636902384583739:4212] txid# 281474976715696, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:22.781059Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715696, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T10:57:22.781233Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YzE2MTU1NWQtZjZmNDAzNDYtMzg0NjQ5OGQtNzZhYWMwMzk=, ActorId: [4:7491636902384583726:2653], ActorState: ExecuteState, TraceId: 01jrfnht9wecxn6njz2n29hd7v, Create QueryResponse for error on request, msg: 2025-04-10T10:57:22.824283Z node 4 :TX_PROXY ERROR: Actor# [4:7491636902384583765:4225] txid# 281474976715698, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:22.826094Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715698, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T10:57:22.826312Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTI2ODdhMmMtNDExMGJkYzgtZTdiZjEyZmItNmJlYTlmNGM=, ActorId: [4:7491636902384583750:2660], ActorState: ExecuteState, TraceId: 01jrfnhtb72q7vcq039h7gencv, Create QueryResponse for error on request, msg: 2025-04-10T10:57:23.429086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.609525Z node 4 :TX_PROXY ERROR: Actor# [4:7491636906679551258:4335] txid# 281474976715703, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:23.609674Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715703, ProxyStatus: 
ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T10:57:23.609914Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGM3YTA5OGItYjBkN2JkNzUtOTJiZWRmNC0zNGM5ZjM1Nw==, ActorId: [4:7491636906679551136:2685], ActorState: ExecuteState, TraceId: 01jrfnhtng9j5qvahezvd43s5x, Create QueryResponse for error on request, msg: 2025-04-10T10:57:23.859193Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491636906679551323:2726], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:23.859934Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTk5NDUzOTAtOTM5MWY1MjQtY2ZhNzk4N2MtMjg5YTdhZDA=, ActorId: [4:7491636906679551320:2724], ActorState: ExecuteState, TraceId: 01jrfnhvbf7dddrfht8megkjqb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.129370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.989972Z node 4 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [4:7491636910974518917:2798], owner: [4:7491636885204712683:2397], statement id: 1 2025-04-10T10:57:24.990449Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2IxYTc4MTgtYzIzMTI5ZGQtMjM4ZmE5MC03NDQ5NjYwYQ==, ActorId: [4:7491636910974518915:2797], ActorState: ExecuteState, TraceId: 01jrfnhwf4a8v3e97kq4zqqrbm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T10:57:25.222826Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491636915269486256:2814], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T10:57:25.224760Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjhkMDk4NTctMTkyOTY5NC1hZmFjY2Y3MC1lMmVlZDk3Yw==, ActorId: [4:7491636915269486240:2808], ActorState: ExecuteState, TraceId: 01jrfnhwkxbcx0n4r08gs7ceg1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T10:57:25.358405Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-10T10:57:25.539572Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491636915269486380:2837], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T10:57:25.541320Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDFiYmE5YmItYzVlYTllOWMtY2ViMjgwOWYtMzllZTgyZmE=, ActorId: [4:7491636915269486286:2823], ActorState: ExecuteState, TraceId: 01jrfnhwrz9t9wxqbxxba4kc2a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TestKinesisHttpProxy::DoubleCreateStream >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] |68.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |68.7%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl |68.7%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute |68.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> SystemView::AuthPermissions_Selects [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> KqpService::CloseSessionsWithLoad [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpService::PatternCache >> KqpQueryServiceScripts::TestTruncatedBySize >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> 
TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 11614, MsgBus: 14557 2025-04-10T10:56:37.309141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636711968211097:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:37.309244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a0/r3tmp/tmp0IFhhr/pdisk_1.dat 2025-04-10T10:56:37.968235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:37.981358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:37.982073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:37.984832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11614, node 1 2025-04-10T10:56:38.142794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:38.142820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:38.142827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:38.142958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14557 TClient is connected to server localhost:14557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:38.803108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
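The two GENERIC_ERROR compilations above (code 2031) fail on input-column conversion: the query supplies Struct<'Key':Int32,'Value':String> where the table expects Struct<'Key':Uint64?,'Value':Uint64?>, and String has no implicit conversion to Optional<Uint64>. A hedged sketch under that schema (the table name is illustrative; the test's own table is not named in the error):

    -- Fails: "Failed to convert 'Value': String to Optional", code 2031
    UPSERT INTO ExampleTable (Key, Value) VALUES (1, "1");
    -- Compiles: the string is converted explicitly
    UPSERT INTO ExampleTable (Key, Value) VALUES (1u, CAST("1" AS Uint64));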
2025-04-10T10:56:38.825554Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:38.844800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.000943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.194534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:39.326076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.863462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636729148081922:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.863599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.311027Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636711968211097:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:42.311110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:42.481053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.564584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.620032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.675589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.719127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.782986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:42.852538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733443049741:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.852641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.853142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636733443049746:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:42.857242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:42.872638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636733443049748:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:42.928198Z node 1 :TX_PROXY ERROR: Actor# [1:7491636733443049801:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:44.297792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.306348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.309525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23040, MsgBus: 1299 2025-04-10T10:56:47.911808Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636752704977462:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:47.911859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a0/r3tmp/tmppxBWHz/pdisk_1.dat 2025-04-10T10:56:48.092448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:48.092538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:48.092822Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:48.105036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23040, node 2 2025-04-10T10:56:48.197984Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:48.198009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:48.198018Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:48.198459Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1299 TClient is connected to server localhost:1299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:48.772002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:48.777370Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:48.787077Z node ... 10T10:57:12.541212Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636860672647144:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.541312Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.541801Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636860672647149:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.547094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:12.562227Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491636860672647151:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:12.646836Z node 4 :TX_PROXY ERROR: Actor# [4:7491636860672647207:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:14.245614Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.255189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.257884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.729532Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzY5ODBlYzktMWE3MWYzZDYtZDUyZmRlZDMtMzUxYWUzMDQ=, ActorId: [4:7491636869262582443:2525], ActorState: ExecuteState, TraceId: 01jrfnhhzwceng7gwv2yycaj7c, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4870, MsgBus: 22191 2025-04-10T10:57:18.680493Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491636886777308975:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.680554Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a0/r3tmp/tmp4pf861/pdisk_1.dat 2025-04-10T10:57:18.842960Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:18.871392Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.871519Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.874101Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4870, node 5 2025-04-10T10:57:18.948612Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:18.948645Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:18.948655Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:18.948812Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22191 TClient is connected to server localhost:22191 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:19.598879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.609418Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:57:19.625838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.741861Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.011627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.132242Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:23.621005Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636908252147238:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.621123Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.681804Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491636886777308975:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:23.681902Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.705511Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.874298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.977692Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.082995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.224617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.353876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.495611Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636912547115062:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.495836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.499155Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636912547115067:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.506107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:24.537664Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T10:57:24.554576Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491636912547115069:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:57:24.646804Z node 5 :TX_PROXY ERROR: Actor# [5:7491636912547115130:3467] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:26.106398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:26.108557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:26.118804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:26.589640Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ODc0NWIzNS05YjcyMTI5MS0yOGU0NjBjNC0yZGZlMWE5MA==, ActorId: [5:7491636921137050354:2525], ActorState: ExecuteState, TraceId: 01jrfnhxjf7cyfftjsmnh46j0p, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-04-10T10:55:09.898667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636332284668698:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:09.898737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00209f/r3tmp/tmpPiBdlY/pdisk_1.dat 2025-04-10T10:55:10.723488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:10.770688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:10.770777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:10.784609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14531, node 1 2025-04-10T10:55:11.178173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:11.178196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:11.178228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:11.178351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:11.571822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:11.831484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:55:11.831807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:55:11.831883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T10:55:11.832029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T10:55:11.832162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T10:55:11.832277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:55:11.832293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:55:11.832365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:55:11.832395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T10:55:11.843433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T10:55:11.843605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 
2025-04-10T10:55:11.843833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480
2025-04-10T10:55:11.843848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1]
2025-04-10T10:55:11.843996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2]
2025-04-10T10:55:11.844078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480
2025-04-10T10:55:11.844093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491636336579636604:2389], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1
2025-04-10T10:55:11.844106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491636336579636604:2389], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2
2025-04-10T10:55:11.844139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T10:55:11.844158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:55:11.844188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480
2025-04-10T10:55:11.844215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1
2025-04-10T10:55:11.958915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:55:11.970740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:11.970876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:11.970890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658
2025-04-10T10:55:11.970918Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4
2025-04-10T10:55:11.970942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2
2025-04-10T10:55:11.971289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:11.971333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:11.971341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658
2025-04-10T10:55:11.971372Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2
2025-04-10T10:55:11.971391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3
2025-04-10T10:55:11.971438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true
2025-04-10T10:55:11.971571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-10T10:55:11.971598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true
2025-04-10T10:55:11.971620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-10T10:55:11.999414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816
2025-04-10T10:55:11.999520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545
2025-04-10T10:55:11.999626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:11.999658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658
2025-04-10T10:55:12.008878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282512049, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-10T10:55:12.009065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282512049 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480
2025-04-10T10:55:12.009099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480
2025-04-10T10:55:12.009296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240
2025-04-10T10:55:12.009325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480
2025-04-10T10:55:12.009495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1
2025-04-10T10:55:12.009537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2
2025-0 ...
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] }
2025-04-10T10:57:25.653478Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288652:2444], row count: 0, finished: 0
2025-04-10T10:57:25.654058Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T10:57:25.658649Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] }
2025-04-10T10:57:25.658723Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288652:2444], row count: 0, finished: 0
2025-04-10T10:57:25.662301Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T10:57:25.662748Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-04-10T10:57:25.662848Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288652:2444], row count: 2, finished: 0
2025-04-10T10:57:25.664877Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7491636919025288652:2444], owner: [31:7491636919025288649:2442], scan id: 0, table id: [72057594046644480:1:0:auth_permissions]
2025-04-10T10:57:25.666649Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7491636846010842413:2193], database# , query hash# 3187945588805523718, cpu time# 287376
2025-04-10T10:57:25.667642Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282645639, txId: 281474976715687] shutting down
2025-04-10T10:57:25.874494Z node 33 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T10:57:25.874525Z node 33 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:57:25.920209Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jrfnhx5f2qzvb8kcbw00ndyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=ZWVkNzg4ZjMtY2VlZGY2OGItN2I4MDBiYmUtN2M5ZTE4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:57:25.922091Z node 33 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-10T10:57:25.922001Z
2025-04-10T10:57:25.925007Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7491636919025288691:2453], owner: [31:7491636919025288687:2451], scan id: 0, table id: [72057594046644480:1:0:auth_permissions]
2025-04-10T10:57:25.928558Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7491636919025288691:2453], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1
2025-04-10T10:57:25.928604Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1
2025-04-10T10:57:25.928708Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T10:57:25.929202Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] }
2025-04-10T10:57:25.929270Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288691:2453], row count: 0, finished: 0
2025-04-10T10:57:25.930539Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T10:57:25.934672Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] }
2025-04-10T10:57:25.934756Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288691:2453], row count: 0, finished: 0
2025-04-10T10:57:25.935984Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T10:57:25.938805Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-04-10T10:57:25.938918Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491636919025288691:2453], row count: 1, finished: 0
2025-04-10T10:57:25.939673Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7491636919025288691:2453], owner: [31:7491636919025288687:2451], scan id: 0, table id: [72057594046644480:1:0:auth_permissions]
2025-04-10T10:57:25.944029Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282645918, txId: 281474976715689] shutting down
2025-04-10T10:57:25.944676Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7491636846010842413:2193], database# , query hash# 15123460272068726277, cpu time# 244146
2025-04-10T10:57:25.995530Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33
2025-04-10T10:57:25.996522Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T10:57:26.003410Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32
2025-04-10T10:57:26.004441Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T10:57:26.005537Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34
2025-04-10T10:57:26.006014Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T10:57:26.015461Z node 34 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:57:25.996988Z node 33 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:57:26.032848Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35
2025-04-10T10:57:26.034103Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T10:57:26.047196Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7491636846681983843:2100], Type=268959746
2025-04-10T10:57:26.047255Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7491636846681983843:2100], Type=268959746
2025-04-10T10:57:26.047291Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7491636846681983843:2100], Type=268959746
2025-04-10T10:57:26.062592Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
2025-04-10T10:57:26.062650Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
2025-04-10T10:57:26.062677Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
2025-04-10T10:57:26.062703Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
2025-04-10T10:57:26.062728Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
2025-04-10T10:57:26.062759Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7491636851113487600:2105], Type=268959746
>> BasicUsage::WriteSessionNoAvailableDatabase [GOOD]
>> BasicUsage::WriteSessionSwitchDatabases
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD]
>> BasicUsage::WriteSessionWriteInHandlers [GOOD]
>> TestKinesisHttpProxy::CreateDeleteStream
>> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD]
>> TestKinesisHttpProxy::DoubleCreateStream [GOOD]
|68.7%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|68.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD]
Test command err:
2025-04-10T10:52:59.946537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T10:52:59.946715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:52:59.946880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00178f/r3tmp/tmpbqdmn5/pdisk_1.dat
2025-04-10T10:53:00.271620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.271708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.271750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-04-10T10:53:00.271914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480}
2025-04-10T10:53:00.271951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-04-10T10:53:00.392663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480
2025-04-10T10:53:00.392877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.393048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0
2025-04-10T10:53:00.393289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-10T10:53:00.393393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.393503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:53:00.394125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480
2025-04-10T10:53:00.394300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root
2025-04-10T10:53:00.394346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:53:00.394375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0
2025-04-10T10:53:00.394519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.394555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.394609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.394657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480
2025-04-10T10:53:00.394689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:53:00.394719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:53:00.394815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:53:00.395166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:53:00.395198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0
2025-04-10T10:53:00.395315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.395341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.395384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.395421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480
2025-04-10T10:53:00.395464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:53:00.395524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:53:00.395798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:53:00.395819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0
2025-04-10T10:53:00.395885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.395911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-04-10T10:53:00.395941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.395963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480
2025-04-10T10:53:00.395996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480
2025-04-10T10:53:00.396026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:53:00.396072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:53:00.398913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:53:00.399313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:53:00.399347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:53:00.399537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
2025-04-10T10:53:00.400517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-04-10T10:53:00.400567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-04-10T10:53:00.400618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480
2025-04-10T10:53:00.400740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1
2025-04-10T10:53:00.401075Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.401138Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.401170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-04-10T10:53:00.401288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1
2025-04-10T10:53:00.401312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion
2025-04-10T10:53:00.401380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480
2025-04-10T10:53:00.401424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true
2025-04-10T10:53:00.401486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480
2025-04-10T10:53:00.453018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true }
2025-04-10T10:53:00.453147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" }
2025-04-10T10:53:00.453215Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:53:00.453959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop
2025-04-10T10:53:00.454035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { }
2025-04-10T10:53:00.494064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:53:00.494237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:53:00.505996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:53:00.583320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1
2025-04-10T10:53:00.584069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.584118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:53:00.584153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-04-10T10:53:00.584322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480}
2025-04-10T10:53:00.584356Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T10:53:00.584457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-10T10:53:00.584624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 4046644480
2025-04-10T10:55:45.929466Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:3 ProgressState
2025-04-10T10:55:45.929548Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:55:45.929580Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7
2025-04-10T10:55:45.929608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7
2025-04-10T10:55:45.929641Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7
2025-04-10T10:55:45.929676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7
2025-04-10T10:55:45.929708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 5/7, is published: true
2025-04-10T10:55:45.930039Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-04-10T10:55:45.930077Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-04-10T10:55:45.930184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:6, at schemeshard: 72057594046644480
2025-04-10T10:55:45.930231Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:6 ProgressState
2025-04-10T10:55:45.930311Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:55:45.930341Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7
2025-04-10T10:55:45.930371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7
2025-04-10T10:55:45.930403Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7
2025-04-10T10:55:45.930431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7
2025-04-10T10:55:45.930461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 6/7, is published: true
2025-04-10T10:55:45.930790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-04-10T10:55:45.930829Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-04-10T10:55:45.930879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:4, at schemeshard: 72057594046644480
2025-04-10T10:55:45.930914Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:4 ProgressState
2025-04-10T10:55:45.930982Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-10T10:55:45.931009Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7
2025-04-10T10:55:45.931037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7
2025-04-10T10:55:45.931083Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7
2025-04-10T10:55:45.931114Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7
2025-04-10T10:55:45.931184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 7/7, is published: true
2025-04-10T10:55:45.931295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1255:2962] message: TxId: 281474976715668
2025-04-10T10:55:45.931376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7
2025-04-10T10:55:45.931475Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0
2025-04-10T10:55:45.931525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0
2025-04-10T10:55:45.931639Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 17] was 2
2025-04-10T10:55:45.931695Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1
2025-04-10T10:55:45.931721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1
2025-04-10T10:55:45.931762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 18] was 2
2025-04-10T10:55:45.931789Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2
2025-04-10T10:55:45.931813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:2
2025-04-10T10:55:45.931845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 3
2025-04-10T10:55:45.931871Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:3
2025-04-10T10:55:45.931895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:3
2025-04-10T10:55:45.932013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3
2025-04-10T10:55:45.932056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3
2025-04-10T10:55:45.932132Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:4
2025-04-10T10:55:45.932158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:4
2025-04-10T10:55:45.932236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 21] was 3
2025-04-10T10:55:45.932267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3
2025-04-10T10:55:45.932301Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:5
2025-04-10T10:55:45.932325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:5
2025-04-10T10:55:45.932388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 22] was 3
2025-04-10T10:55:45.932418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3
2025-04-10T10:55:45.932447Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:6
2025-04-10T10:55:45.932473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:6
2025-04-10T10:55:45.932593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 23] was 3
2025-04-10T10:55:45.932625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3
2025-04-10T10:55:45.933293Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:55:45.933567Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:55:45.933677Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:55:45.933840Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T10:55:45.933970Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1255:2962] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480
2025-04-10T10:55:45.934549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1262:2968], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-04-10T10:55:45.934598Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-04-10T10:55:45.934628Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480
2025-04-10T10:55:46.068836Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [3:1556:3203], serverId# [3:1557:3204], sessionId# [0:0:0]
2025-04-10T10:55:46.068977Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfnevrq9tkfbyexr9tvzfnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjYwNDZkODQtOThhZmZhZjUtYjI5NTQ1YzctMTNkNGM3MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } 2025-04-10T10:55:46.219643Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:1585:3220], serverId# [3:1586:3221], sessionId# [0:0:0] 2025-04-10T10:55:46.219831Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfnevwy38hvk6q3nyzb7ys6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDU1MjRlNGEtM2ZhZGY2MGYtNTA1MzIzZmUtNGNmZWYzNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 11 } items { uint32_value: 101 } }, { items { uint32_value: 21 } items { uint32_value: 201 } }, { items { uint32_value: 31 } items { uint32_value: 301 } }, { items { uint32_value: 41 } items { uint32_value: 401 } }, { items { uint32_value: 51 } items { uint32_value: 501 } } 2025-04-10T10:55:46.362532Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1614:3237], serverId# [3:1615:3238], sessionId# [0:0:0] 2025-04-10T10:55:46.362683Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfnew1nf1690ws13e1qtayq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVmYTc1YmQtODAxM2Q3YmYtNmJlNTkyYTMtYzJlYzU3YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 12 } items { uint32_value: 102 } }, { items { uint32_value: 22 } items { uint32_value: 202 } }, { items { uint32_value: 32 } items { uint32_value: 302 } }, { items { uint32_value: 42 } items { uint32_value: 402 } }, { items { uint32_value: 52 } items { uint32_value: 502 } } 2025-04-10T10:55:46.537413Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:1643:3254], serverId# [3:1644:3255], sessionId# [0:0:0] 2025-04-10T10:55:46.537638Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfnew63d8h85z2xsta3ds8d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmMwZDkyM2QtYWJjN2U2N2EtY2UzODk1Y2ItNTMzYjkxYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 13 } items { uint32_value: 103 } }, { items { uint32_value: 23 } items { uint32_value: 203 } }, { items { uint32_value: 33 } items { uint32_value: 303 } }, { items { uint32_value: 43 } items { uint32_value: 403 } }, { items { uint32_value: 53 } items { uint32_value: 503 } } |68.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |68.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestKinesisHttpProxy::ListShards >> TestYmqHttpProxy::TestSendMessageFifoQueue |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |68.8%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-04-10T10:57:19.446249Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1744282639446204 2025-04-10T10:57:20.042777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636896231661289:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:20.042833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:20.260029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636895263179972:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:20.543291Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:20.557117Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:20.557641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00149d/r3tmp/tmpQ2r0Q8/pdisk_1.dat 2025-04-10T10:57:21.169703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:21.182989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:21.189085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:21.189218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:21.195900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:21.195993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:21.209691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-10T10:57:21.222315Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:21.223412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7277, node 1 2025-04-10T10:57:21.414034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00149d/r3tmp/yandexip3OAK.tmp 2025-04-10T10:57:21.414070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00149d/r3tmp/yandexip3OAK.tmp 2025-04-10T10:57:21.414489Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00149d/r3tmp/yandexip3OAK.tmp 2025-04-10T10:57:21.414685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:21.517726Z INFO: TTestServer started on Port 10505 GrpcPort 7277 TClient is connected to server localhost:10505 PQClient connected to localhost:7277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:22.151699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:25.043780Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636896231661289:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:25.043867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:25.226957Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636895263179972:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:25.227034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:25.801088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636917706498806:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:25.801213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:25.802520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636917706498821:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:25.807146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:25.816679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636917706498860:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:25.816763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:25.855599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636917706498828:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-10T10:57:26.110299Z node 1 :TX_PROXY ERROR: Actor# [1:7491636917706498902:2695] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:26.161565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:26.221018Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636921032983941:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:26.221719Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjlhMzgzNTgtZjM2MjE2ZTctMjMwMzgwYjItNzNkMDVlOTI=, ActorId: [2:7491636916738016620:2310], ActorState: ExecuteState, TraceId: 01jrfnhxf84z17rd9ge3bm02m0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:26.222401Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636922001466223:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:26.223140Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBiZWYxYzgtNjdhMGNiNDktODE4ODUyNTktMjE1NWNiMDY=, ActorId: [1:7491636917706498803:2338], ActorState: ExecuteState, TraceId: 01jrfnhx8jfgzxhpxvqz948vdv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:26.223906Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:26.224928Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:26.468404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:26.659904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7277", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:26.995985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfnhy94c91cknx3a6razd6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczYjYxMWMtZjBlOWE2ZTYtZGE0YjE1NGYtZDBlMjc0Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636926296433931:3003] === CheckClustersList. Ok 2025-04-10T10:57:32.509534Z node 1 :FLAT_TX_SCHEMESHARD WAR ... src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 12000000 } min_queue_wait_time { nanos: 100000000 } max_queue_wait_time { nanos: 100000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-10T10:57:34.693276Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2025-04-10T10:57:34.693322Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-04-10T10:57:34.693661Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler 2025-04-10T10:57:34.694295Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: try to update token 2025-04-10T10:57:34.694344Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 === AcksHandler has written a message, closing the session 2025-04-10T10:57:34.695056Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T10:57:34.695331Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T10:57:34.698553Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:34.698600Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:34.698711Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-10T10:57:34.703229Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-10T10:57:34.703927Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:34.703974Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:34.704037Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-04-10T10:57:34.704257Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-04-10T10:57:34.705674Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-04-10T10:57:34.706380Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1744282654706 2025-04-10T10:57:34.706509Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:57:34.706525Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:57:34.706543Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:57:34.706559Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write 
----------------- 2025-04-10T10:57:34.706578Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-04-10T10:57:34.706588Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-04-10T10:57:34.706598Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:57:34.706617Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:57:34.706635Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:57:34.706694Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:57:34.706781Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-04-10T10:57:34.710817Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7491636946802788285:2412] 2025-04-10T10:57:34.710921Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:57:34.710970Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:57:34.711014Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-10T10:57:34.711217Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-10T10:57:34.711637Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 2025-04-10T10:57:34.711984Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-10T10:57:34.714434Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-10T10:57:34.714640Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 7000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-10T10:57:34.714679Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-04-10T10:57:34.714713Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-04-10T10:57:34.715436Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-04-10T10:57:34.715579Z :INFO: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-04-10T10:57:34.715624Z :INFO: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session will now close 2025-04-10T10:57:34.715688Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: aborting 2025-04-10T10:57:34.716194Z :WARNING: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-10T10:57:34.716244Z :DEBUG: [/Root] TraceId [] SessionId [src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0] MessageGroupId [src_id] Write session: destroy 2025-04-10T10:57:34.717695Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0 grpc read done: success: 0 data: 2025-04-10T10:57:34.717726Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0 grpc read failed 2025-04-10T10:57:34.717763Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0 grpc closed 2025-04-10T10:57:34.717790Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|dc5c8ed3-82594ed4-fc73f3fc-9f480803_0 is DEAD 2025-04-10T10:57:34.718732Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:57:34.719966Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491636956361205945:2530] destroyed 2025-04-10T10:57:34.720027Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:57:35.388895Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491636960656173301:2536] TxId: 281474976715689. Ctx: { TraceId: 01jrfnj65y3w804e0cbs4srbbx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY4MzdjMTctOWUwZjk3MmEtNTg3YmMxYWItNWQxMTVjOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-10T10:57:35.389481Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636960656173311:2546], TxId: 281474976715689, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDY4MzdjMTctOWUwZjk3MmEtNTg3YmMxYWItNWQxMTVjOTA=. TraceId : 01jrfnj65y3w804e0cbs4srbbx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491636960656173301:2536], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:57:35.389814Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491636960656173310:2545], TxId: 281474976715689, task: 2. Ctx: { TraceId : 01jrfnj65y3w804e0cbs4srbbx. SessionId : ydb://session/3?node_id=1&id=ZDY4MzdjMTctOWUwZjk3MmEtNTg3YmMxYWItNWQxMTVjOTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491636960656173301:2536], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:57:35.775127Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715690. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:35.775279Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7491636960656173324:2547] TxId: 281474976715690. Ctx: { TraceId: 01jrfnj6vdejjg1b3fqk3bst6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNlNzc4OWItYzFjNGI4YzYtMTViMTZmZGQtZTA1NTFiOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:35.775528Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNlNzc4OWItYzFjNGI4YzYtMTViMTZmZGQtZTA1NTFiOTE=, ActorId: [1:7491636960656173321:2547], ActorState: ExecuteState, TraceId: 01jrfnj6vdejjg1b3fqk3bst6a, Create QueryResponse for error on request, msg: 2025-04-10T10:57:35.777702Z node 1 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnj6ve4e0yvcj5chjdxs3k" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TestYmqHttpProxy::TestReceiveMessage >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> KqpScanArrowFormat::SingleKey >> TestKinesisHttpProxy::GoodRequestGetRecords >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |68.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |68.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |68.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |68.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> SystemView::ShowCreateTableKeyBloomFilter [FAIL] >> SystemView::ShowCreateTableTtlSettings >> TestYmqHttpProxy::TestGetQueueAttributes >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |68.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |68.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> KqpService::RangeCache-UseCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 25104, MsgBus: 11618 2025-04-10T10:56:39.002663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636716466810628:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:39.003287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159d/r3tmp/tmpBpYwlO/pdisk_1.dat 2025-04-10T10:56:39.905951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:39.918391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:39.918860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:39.931004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25104, node 1 2025-04-10T10:56:40.090320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:40.090368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:40.090378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:40.090565Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11618 TClient is connected to server localhost:11618 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:40.750004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.770805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:40.783948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.032501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.335606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:41.454516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.660692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636737941648758:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.660838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.954459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636716466810628:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:43.954544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:44.017159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.063255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.131046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.173061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.249081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.347417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.441496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636742236616578:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.441659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.442094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636742236616583:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.450535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:44.466498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636742236616585:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:44.560186Z node 1 :TX_PROXY ERROR: Actor# [1:7491636742236616640:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13549, MsgBus: 10540 2025-04-10T10:56:46.758460Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636750635542373:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:46.758553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159d/r3tmp/tmpWdIbM8/pdisk_1.dat 2025-04-10T10:56:47.017234Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13549, node 2 2025-04-10T10:56:47.067901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:47.068050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:47.074913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:47.106637Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:47.106658Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:47.106663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:47.106778Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10540 TClient is connected to server localhost:10540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:47.604930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:47.615441Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:56:47.622399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:47.711489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:47.878976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.097073Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:14.102955Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:17.750390Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282637776, txId: 281474976710696] shutting down 2025-04-10T10:57:18.158804Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282638196, txId: 281474976710699] shutting down 2025-04-10T10:57:18.186319Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 758db260-1c0b97d0-ce8bd5f5-6d95c94c, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=Y2U3YzgwMjQtMjM0Y2FlZWUtNTAyZGJiYTctM2QwNDVlNzM=, TxId: Trying to start YDB, gRPC: 27907, MsgBus: 2703 2025-04-10T10:57:21.950580Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491636900620726113:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:21.955165Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00159d/r3tmp/tmpnLecRD/pdisk_1.dat 2025-04-10T10:57:22.109923Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:22.147613Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:22.147728Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:22.155010Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27907, node 5 2025-04-10T10:57:22.275328Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:22.275357Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:22.275368Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:22.275544Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2703 TClient is connected to server localhost:2703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:23.125529Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:23.149058Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:23.165941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:23.319473Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:23.714971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:23.838904Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:26.922629Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491636900620726113:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:26.922719Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:28.359095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636930685498885:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:28.359214Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:28.437023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.502825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.569960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.661994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.721522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.833568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.990504Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636930685499425:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:28.990685Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:28.991053Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491636930685499433:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:28.996992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:29.029735Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491636930685499435:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:29.119711Z node 5 :TX_PROXY ERROR: Actor# [5:7491636934980466785:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:30.679693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:30.692694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:30.695648Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:34.730677Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: da446a37-2446b4a6-702984de-bd5d4e5b, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=Y2Y5NGUwZWItMWJiYjRhMmEtZDNiODFmMTItNTBkYWU5YTQ=, TxId: 2025-04-10T10:57:36.415986Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: da446a37-2446b4a6-702984de-bd5d4e5b, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NjIzOWIzZmYtNTM4NTEwYmYtOThhZGQwMGEtYWYyMzJhYWQ=, TxId: 2025-04-10T10:57:37.102271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:37.102326Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:37.136309Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: da446a37-2446b4a6-702984de-bd5d4e5b, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-10T10:57:37.290475Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: da446a37-2446b4a6-702984de-bd5d4e5b, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=Y2Q4MzIxNTYtYWFhYjYwYjEtMWIwNGJhMzEtNGRkNTM4YTI=, TxId: 2025-04-10T10:57:37.290644Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: da446a37-2446b4a6-702984de-bd5d4e5b, check lease failed 2025-04-10T10:57:38.088246Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: da446a37-2446b4a6-702984de-bd5d4e5b, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=NjcwNmRhOGEtOWIwOTk2MTMtNWYyZDBiYWYtZWQwNjhlZTc=, TxId:
>> KqpScanArrowInChanels::AggregateNoColumn
>> KqpScanArrowFormat::AllTypesColumns
>> KqpScanArrowInChanels::AllTypesColumns
>> TestYmqHttpProxy::BillingRecordsForJsonApi
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex
>> TestYmqHttpProxy::TestCreateQueueWithTags
>> TestKinesisHttpProxy::TestListStreamConsumers
>> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD]
>> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD]
>> BasicUsage::WriteSessionCloseIgnoresWrites
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD]
Test command err:
Trying to start YDB, gRPC: 19092, MsgBus: 15549
2025-04-10T10:57:00.602616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636808555266607:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:00.602774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157d/r3tmp/tmpnwtjGm/pdisk_1.dat
2025-04-10T10:57:01.224004Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:57:01.234324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:57:01.234417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:57:01.246245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19092, node 1
2025-04-10T10:57:01.493231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:57:01.493256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:57:01.493263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:57:01.493372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15549
TClient is connected to server localhost:15549
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:02.371811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.395179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.673619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.952534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:03.061907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.163042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636830030104871:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.163153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.463214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.548009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.593803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.597330Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636808555266607:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.597377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:05.663468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.737936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.822114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.871869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636830030105399:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.871959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.872125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636830030105404:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.883158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:05.899696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636830030105406:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:05.962781Z node 1 :TX_PROXY ERROR: Actor# [1:7491636830030105459:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:07.042533Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-04-10T10:57:07.042620Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-04-10T10:57:07.058555Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-10T10:57:07.143253Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-10T10:57:07.143326Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-10T10:57:07.189571Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-10T10:57:07.189634Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-04-10T10:57:07.189672Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-04-10T10:57:07.189717Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTUxMjhiZC00ZjRiZGNlZi02MTIzZWRlZC01ZTQyNjA1Yw==, ActorId: [1:7491636838620040347:2500], ActorState: ExecuteState, TraceId: 01jrfnhaz1abfghfpqy8njx3dk, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 18228, MsgBus: 12824 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157d/r3tmp/tmp96ZQFP/pdisk_1.dat 2025-04-10T10:57:08.509930Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:08.630001Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:08.672064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:08.672157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:08.677531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18228, node 2 2025-04-10T10:57:08.867604Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:08.867636Z node 2 :NET_CLASSIFIER WARN: will try to initialize ... essionId: ydb://session/3?node_id=2&id=ODM0M2MzZjYtODQyYmYwYTAtYjYxMzRlZDMtYmI3YmU2YmE=, ActorId: [2:7491636873790378172:2561], ActorState: ExecuteState, TraceId: 01jrfnhkm9607ef26eyn8b1h9b, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-04-10T10:57:16.119512Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjY0ZGY1NjQtZTkxMzM2NS0zY2UxMmVmNy1iMjgyMjQ2Zg==, ActorId: [2:7491636878085345502:2573], ActorState: ExecuteState, TraceId: 01jrfnhktp3nryj9y47j09hhfh, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-10T10:57:16.119611Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjY0ZGY1NjQtZTkxMzM2NS0zY2UxMmVmNy1iMjgyMjQ2Zg==, ActorId: [2:7491636878085345502:2573], ActorState: ExecuteState, TraceId: 01jrfnhktp3nryj9y47j09hhfh, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-10T10:57:16.119664Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjY0ZGY1NjQtZTkxMzM2NS0zY2UxMmVmNy1iMjgyMjQ2Zg==, ActorId: [2:7491636878085345502:2573], ActorState: ExecuteState, TraceId: 01jrfnhktp3nryj9y47j09hhfh, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-10T10:57:16.120701Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjY0ZGY1NjQtZTkxMzM2NS0zY2UxMmVmNy1iMjgyMjQ2Zg==, ActorId: [2:7491636878085345502:2573], ActorState: ExecuteState, TraceId: 01jrfnhktp3nryj9y47j09hhfh, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-10T10:57:16.253806Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTg1Y2ZjZGYtNTg5MjFiNmEtNDdlN2Q0ZWUtMjhmMTdiOGE=, ActorId: [2:7491636878085345532:2585], ActorState: ExecuteState, TraceId: 01jrfnhkywbrymmkh9j3296sw8, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-10T10:57:16.253864Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTg1Y2ZjZGYtNTg5MjFiNmEtNDdlN2Q0ZWUtMjhmMTdiOGE=, ActorId: [2:7491636878085345532:2585], ActorState: ExecuteState, TraceId: 01jrfnhkywbrymmkh9j3296sw8, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-10T10:57:16.254557Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTg1Y2ZjZGYtNTg5MjFiNmEtNDdlN2Q0ZWUtMjhmMTdiOGE=, ActorId: [2:7491636878085345532:2585], ActorState: ExecuteState, TraceId: 01jrfnhkywbrymmkh9j3296sw8, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-04-10T10:57:16.435614Z node 2 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=2&id=ZWFjZDQxNzMtMTg4MTU2NjItZmRjMjQ3Y2ItNTNmNWQxMGI=, ActorId: [2:7491636878085345558:2595], ActorState: ExecuteState, TraceId: 01jrfnhm4jaejew9qarqb720eg, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-10T10:57:16.436155Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWFjZDQxNzMtMTg4MTU2NjItZmRjMjQ3Y2ItNTNmNWQxMGI=, ActorId: [2:7491636878085345558:2595], ActorState: ExecuteState, TraceId: 01jrfnhm4jaejew9qarqb720eg, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-10T10:57:16.606772Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWY4YzAwMmMtMTllNDZhM2QtZjRlZGZkMDUtMzE0ZGEyMWI=, ActorId: [2:7491636878085345586:2605], ActorState: ExecuteState, TraceId: 01jrfnhm9v7azgk8q991k8s0zm, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 4036, MsgBus: 18632 2025-04-10T10:57:17.928154Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636881878952871:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.928203Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157d/r3tmp/tmp0uNAla/pdisk_1.dat 2025-04-10T10:57:18.384518Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:18.437089Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.437173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.439060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4036, node 3 2025-04-10T10:57:18.682769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:18.682797Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:18.682806Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:18.682942Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18632 TClient is connected to server localhost:18632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:57:19.690481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.707007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.819195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.142290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.246048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:22.930350Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636881878952871:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.930425Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.309971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636907648758284:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.310069Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.361488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.417414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.466326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.562205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.617039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.698501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.795625Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636907648758801:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.795718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.796109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636907648758806:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.800643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:23.827116Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T10:57:23.827400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636907648758808:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T10:57:23.917471Z node 3 :TX_PROXY ERROR: Actor# [3:7491636907648758863:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:33.364779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T10:57:33.364819Z node 3 :IMPORT WARN: Table profiles were not loaded
took: 15.071409s
took: 15.055648s
took: 15.046351s
took: 15.098121s
took: 15.123564s
took: 15.080501s
took: 15.136825s
took: 15.103807s
took: 15.114950s
took: 15.191578s
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords
>> TestKinesisHttpProxy::CreateDeleteStream [GOOD]
>> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD]
>> KqpScanArrowFormat::AggregateCountStar
>> TestYmqHttpProxy::TestSendMessageWithAttributes
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer
>> TProxyActorTest::TestDisconnectWhileAttaching
>> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink
>> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD]
>> TestYmqHttpProxy::TestReceiveMessage [GOOD]
>> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
>> TestKinesisHttpProxy::ListShards [GOOD]
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes
>> Viewer::SimpleFeatureFlags [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
Test command err:
... waiting for blocked registrations
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
... waiting for blocked registrations (done)
2025-04-10T10:57:47.969393Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2
... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
Test command err:
2025-04-10T10:57:33.068547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636952103890265:2292];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpwnyy7V/pdisk_1.dat
2025-04-10T10:57:33.434752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:57:34.045549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:57:34.045662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:57:34.056825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:57:34.116902Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11543, node 1
2025-04-10T10:57:34.317863Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T10:57:34.333295Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T10:57:34.501827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:57:34.592999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:57:34.593024Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:57:34.593038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:57:34.593172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2557
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:57:35.169845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/"Create temporary directory "/Root/~backup_20250410T105735" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir"Create directory "/Root/~backup_20250410T105735/dir" in database2025-04-10T10:57:35.436160Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir/permissions.pb"Remove directory "/Root/~backup_20250410T105735/dir"2025-04-10T10:57:35.585669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250410T105735" in database2025-04-10T10:57:35.643934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T10:57:35.689709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir"Restore empty directory "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpVvlfMf/dir/permissions.pb"2025-04-10T10:57:35.855545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T10:57:39.479824Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636977503588721:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:39.480743Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpBVYCFt/pdisk_1.dat 2025-04-10T10:57:39.870942Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:39.935694Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:39.935805Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:39.952076Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24851, node 4 2025-04-10T10:57:40.070511Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:40.070532Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:40.070540Z node 4 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-04-10T10:57:40.070701Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:40.470242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:43.226744Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636994683458904:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.226867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.480192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.749832Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636994683459078:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.749913Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.966588Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7491636994683459263:2360] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T10:57:44.076479Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636998978426672:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.076587Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.177177Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7491636998978426854:2381] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T10:57:44.218466Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636998978426951:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.218710Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.325959Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7491636998978427132:2403] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } GetChangefeedAndTopicDescriptions: 2025-04-10T10:57:44.470493Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636977503588721:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:44.470563Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/"Create temporary directory "/Root/~backup_20250410T105744" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T105744/table" }Backup table "/Root/~backup_20250410T105744/table" to "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table"Describe table "/Root/~backup_20250410T105744/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/scheme.pb"Describe table "/Root/table"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a"Write changefeed into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a/topic_description.pb"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b"Write changefeed into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b/topic_description.pb"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c"Write changefeed into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c/topic_description.pb"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/permissions.pb"Read table "/Root/~backup_20250410T105744/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/data_00.csv"Drop table "/Root/~backup_20250410T105744/table"2025-04-10T10:57:45.094488Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037895 not found Remove temporary directory "/Root/~backup_20250410T105744" in database2025-04-10T10:57:45.127664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T10:57:45.171760Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637003273394978:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:45.171844Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:45.186193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710668:2, at schemeshard: 72057594046644480 2025-04-10T10:57:45.241509Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-10T10:57:45.241544Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-04-10T10:57:45.241555Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-04-10T10:57:45.241567Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-04-10T10:57:45.244979Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037894 not found 2025-04-10T10:57:45.245020Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-04-10T10:57:45.255047Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-10T10:57:45.255105Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-10T10:57:45.255138Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-04-10T10:57:45.342648Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table" to "/Root/table"2025-04-10T10:57:45.409941Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/data_00.csv"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c"Read changefeed from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c/topic_description.pb"2025-04-10T10:57:45.653061Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7491637003273395539:2485] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:13:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/c"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a"Read changefeed from 
"/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a/topic_description.pb"2025-04-10T10:57:45.773905Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7491637003273395797:2500] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:15:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/a"Process "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b"Read changefeed from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b/topic_description.pb"2025-04-10T10:57:45.890703Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7491637003273396059:2517] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/b"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d42/r3tmp/tmpf2gAw0/table/permissions.pb"2025-04-10T10:57:45.939247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 Restore completed successfully >> TestKinesisHttpProxy::ListShardsEmptyFields >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TestYmqHttpProxy::TestDeleteMessage |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TExternalTableTest::CreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-04-10T10:57:46.056250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:46.056500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:46.056698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ae4/r3tmp/tmpLOGgoX/pdisk_1.dat 2025-04-10T10:57:46.546773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:57:46.638133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:46.700124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:46.700272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:46.716041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:46.830226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.322736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.322921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:944:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.323019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.329211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T10:57:49.359570Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:57:49.637032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-10T10:57:49.874725Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:50.983665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnjm88ekh94fnaf05pd35m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmVmMDg1ZC02OGIyZTc5OC05ZGU4ZGQ4MC05ZDI2NWFmOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:57:51.246672Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnjnyg358yg434xwxe6ah7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgxYzNkNjktZDc0NWI3NzQtOWRmMTM1YzYtMzM5Nzc1OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:57:51.600900Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnjp611zr64p821h6h8z7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ0ZTIyN2YtYjhiNTFlNjktNjg5ZmM0MS04ZTdlOWM1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
|68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|68.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
>> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD]
>> KqpScanArrowInChanels::AllTypesColumns [GOOD]
>> KqpScanArrowInChanels::SingleKey
>> KqpScanArrowFormat::AllTypesColumns [GOOD]
>> KqpScanArrowFormat::AllTypesColumnsCellvec
>> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD]
>> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink
>> TestYmqHttpProxy::TestDeleteQueue
>> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD]
>> TestYmqHttpProxy::TestSetQueueAttributes
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD]
>> TestKinesisHttpProxy::TestWrongStream
>> TExternalTableTest::ParallelCreateSameExternalTable
>> BasicUsage::CloseWriteSessionImmediately [GOOD]
>> TExternalTableTest::CreateExternalTable [GOOD]
>> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag
>> TExternalTableTest::DropTableTwice
>> KqpQueryServiceScripts::TestTruncatedBySize [GOOD]
>> SystemView::ShowCreateTableTtlSettings [FAIL]
>> SystemView::ShowCreateTableTemporary
>> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD]
>> TExternalTableTest::ParallelCreateSameExternalTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD]
Test command err:
2025-04-10T10:57:16.231107Z :BasicWriteSession INFO: Random seed for debugging is 1744282636231066
2025-04-10T10:57:17.034335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636884120445431:2279];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:57:17.034442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:17.134068Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636880900157257:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.134116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014d5/r3tmp/tmppp0ruM/pdisk_1.dat 2025-04-10T10:57:17.435130Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:17.454545Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:17.953090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:17.953274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:17.956736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:17.956801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:17.965627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:17.978858Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:17.982716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64063, node 1 2025-04-10T10:57:18.158893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.172059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:18.180307Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:18.180335Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:18.198825Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.246699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0014d5/r3tmp/yandexhdA0Ou.tmp 2025-04-10T10:57:18.246742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0014d5/r3tmp/yandexhdA0Ou.tmp 2025-04-10T10:57:18.246958Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0014d5/r3tmp/yandexhdA0Ou.tmp 2025-04-10T10:57:18.247113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:18.371120Z INFO: TTestServer started on Port 19329 GrpcPort 64063 TClient is connected to server localhost:19329 PQClient connected to localhost:64063 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:18.746469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:22.036268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636884120445431:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.036369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:22.133868Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636880900157257:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.133935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:22.313189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636902374994059:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.313469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636902374994040:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.313597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:22.330642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T10:57:22.413764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636902374994064:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T10:57:22.499667Z node 2 :TX_PROXY ERROR: Actor# [2:7491636902374994092:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:22.833710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:57:22.847914Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636902374994107:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:22.850316Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTMyZWViMGUtYmVhNWY5OGEtYTJiYzMwNzItYzc2ZDk0OWI=, ActorId: [2:7491636902374994023:2308], ActorState: ExecuteState, TraceId: 01jrfnhsw035ms3c1192y5e6q1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:22.848722Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636905595282805:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:22.850397Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDNkOGRlMjQtYzMzMmVjMGMtNzVjNjM4MzAtYjczMmY3Zjc=, ActorId: [1:7491636905595282740:2338], ActorState: ExecuteState, TraceId: 01jrfnhsyc6ednprkkqvxcyk2z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:22.852603Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:22.860796Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:23.085904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.261825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:64063", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:23.709693Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfnhv218en127cqy5rnrbbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY1ZTcyNzgtY2VjMWFiNDktODE3YjU3Ny1iYmUyZTRhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636909890250531:2996] === CheckClustersList. Ok 2025-04-10T10:57:30.551394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting ... 
059Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637015565353613:2503] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-10T10:57:49.281626Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637019860320959:2503] disconnected; active server actors: 1 2025-04-10T10:57:49.281678Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637019860320959:2503] disconnected no session 2025-04-10T10:57:49.535353Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637015565353613:2503] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T10:57:49.535401Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637015565353613:2503] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-10T10:57:49.535420Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637015565353613:2503] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T10:57:49.535459Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:57:49.542672Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7491637019860321016:2503], now have 1 active actors on pipe 2025-04-10T10:57:49.546464Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-10T10:57:49.550265Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:49.550318Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:49.550435Z node 4 :PERSQUEUE INFO: new Cookie src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T10:57:49.550583Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T10:57:49.550652Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:49.551726Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:49.551756Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:49.551842Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:49.552388Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 2025-04-10T10:57:49.553979Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282669553 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:49.554203Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T10:57:49.554501Z :INFO: [] MessageGroupId [src] SessionId [src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0] Write session: close. 
Timeout = 0 ms 2025-04-10T10:57:49.554556Z :INFO: [] MessageGroupId [src] SessionId [src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0] Write session will now close 2025-04-10T10:57:49.554611Z :DEBUG: [] MessageGroupId [src] SessionId [src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0] Write session: aborting 2025-04-10T10:57:49.555402Z :INFO: [] MessageGroupId [src] SessionId [src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:57:49.555465Z :DEBUG: [] MessageGroupId [src] SessionId [src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0] Write session: destroy 2025-04-10T10:57:49.560857Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 grpc read done: success: 0 data: 2025-04-10T10:57:49.560893Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 grpc read failed 2025-04-10T10:57:49.560934Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 grpc closed 2025-04-10T10:57:49.560960Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|909c60ed-84e101e0-4aa6b759-a0ca9cd4_0 is DEAD 2025-04-10T10:57:49.562077Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:57:49.563803Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637019860321016:2503] destroyed 2025-04-10T10:57:49.563879Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created 2025-04-10T10:57:50.367240Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:57:50.403158Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:50.459036Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:50.526442Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:50.626981Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:50.779684Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:51.030377Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:51.182254Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710686. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:51.182411Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637028450255711:2522] TxId: 281474976710686. Ctx: { TraceId: 01jrfnjn814zwd15d9e0fdtpnq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmJkZDY0MTItMTA5YjZiYzktOWIzODIwYjYtMTdjODQyMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:51.182660Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmJkZDY0MTItMTA5YjZiYzktOWIzODIwYjYtMTdjODQyMDE=, ActorId: [3:7491637024155288345:2522], ActorState: ExecuteState, TraceId: 01jrfnjn814zwd15d9e0fdtpnq, Create QueryResponse for error on request, msg: 2025-04-10T10:57:51.184268Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnjnwy057re3ppsbdd85a5" } } YdbStatus: UNAVAILABLE ConsumedRu: 433 } 2025-04-10T10:57:51.375077Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:51.375112Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:51.680734Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:52.471190Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:53.178284Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710688. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:53.178449Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637032745223107:2532] TxId: 281474976710688. Ctx: { TraceId: 01jrfnjq213m34q51pt4bfsha0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2JiY2M2ZTUtZDc4YTQ2MjAtZjNiZGFkNzQtNzliYTBkYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:53.178698Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2JiY2M2ZTUtZDc4YTQ2MjAtZjNiZGFkNzQtNzliYTBkYzE=, ActorId: [3:7491637032745223073:2532], ActorState: ExecuteState, TraceId: 01jrfnjq213m34q51pt4bfsha0, Create QueryResponse for error on request, msg: 2025-04-10T10:57:53.180283Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnjqtyebgwxdsg2syp6yas" } } YdbStatus: UNAVAILABLE ConsumedRu: 525 } 2025-04-10T10:57:53.435052Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:54.459307Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [3:7491637024155288356:2524]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:57:54.460388Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637024155288356:2524], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jrfnjn8d5j8pp0a7cmwrav0k. SessionId : ydb://session/3?node_id=3&id=NDJjMzU4Y2MtZmRkMmIxMjMtNjc0YTJhNGYtMzQwZTdlYWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Too many retries for shard 72075186224037890 } 2025-04-10T10:57:54.460479Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637024155288356:2524], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jrfnjn8d5j8pp0a7cmwrav0k. SessionId : ydb://session/3?node_id=3&id=NDJjMzU4Y2MtZmRkMmIxMjMtNjc0YTJhNGYtMzQwZTdlYWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: UNAVAILABLE DEFAULT_ERROR: {
: Error: Too many retries for shard 72075186224037890 }. 2025-04-10T10:57:54.461237Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDJjMzU4Y2MtZmRkMmIxMjMtNjc0YTJhNGYtMzQwZTdlYWI=, ActorId: [3:7491637024155288349:2524], ActorState: ExecuteState, TraceId: 01jrfnjn8d5j8pp0a7cmwrav0k, Create QueryResponse for error on request, msg: 2025-04-10T10:57:54.463348Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Too many retries for shard 72075186224037890" severity: 1 } TxMeta { id: "01jrfnjn8ka93svdsh3p7fbexz" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |68.9%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T10:57:55.271181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:57:55.271275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:55.271315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:57:55.271351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:57:55.271400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:57:55.271448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:57:55.271508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:55.271597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:57:55.271977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:57:55.535720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" 
AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T10:57:55.535802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:55.591356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:57:55.591809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:57:55.592074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:57:55.607750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:57:55.608029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:57:55.608819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:55.614626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:57:55.617744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:55.619409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:57:55.619482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:55.619706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:57:55.619780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:57:55.619834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:57:55.619985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:57:55.647788Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T10:57:56.818357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:57:56.818707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.819021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:57:56.819358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:57:56.819457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.830560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:56.830794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:57:56.831101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.831230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:57:56.831308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:57:56.831361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:57:56.848419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.848506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:57:56.848549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:57:56.864410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.864506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.864567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:56.864628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:57:56.869953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:57:56.891321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:57:56.891641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:57:56.902895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:56.903111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:57:56.903178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:56.903607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:57:56.903678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:56.903899Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:57:56.904091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:57:56.910961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:57:56.911040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:57:56.911341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:56.911407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:57:56.911895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:56.911984Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:57:56.912117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:57:56.912157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:56.912204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:57:56.912287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:56.912339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:57:56.912395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:56.912439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:57:56.912492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:57:56.912605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:57:56.922289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:57:56.922401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:57:56.924968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T10:57:59.098033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T10:57:59.098055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T10:57:59.099353Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.099442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.099491Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:57:59.099545Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T10:57:59.099590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T10:57:59.100889Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.100958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.100981Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:57:59.101021Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T10:57:59.101051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:57:59.101633Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.101704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:57:59.101731Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:57:59.101769Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T10:57:59.101809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:57:59.101866Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T10:57:59.103863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:57:59.105578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:57:59.105674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T10:57:59.105936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T10:57:59.106007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T10:57:59.106499Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T10:57:59.106600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:57:59.106637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:333:2324] TestWaitNotification: OK eventTxId 102 2025-04-10T10:57:59.107102Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:57:59.107315Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 240us result status StatusSuccess 2025-04-10T10:57:59.107614Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-10T10:57:59.110962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: 
"/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:57:59.111319Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-10T10:57:59.111427Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-04-10T10:57:59.111583Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T10:57:59.114710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-04-10T10:57:59.114897Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T10:57:59.115229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:57:59.115286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:57:59.116020Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:57:59.116144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:57:59.116190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:341:2332] TestWaitNotification: OK eventTxId 103 2025-04-10T10:57:59.116807Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:57:59.117026Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 250us result status StatusSuccess 2025-04-10T10:57:59.117306Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::DropTableTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T10:57:58.563427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:57:58.563533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:58.563576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:57:58.563630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:57:58.563713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:57:58.563761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:57:58.563843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:58.563952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:57:58.564349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:57:58.683913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { 
AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T10:57:58.684011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:58.733376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:57:58.733780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:57:58.734011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:57:58.753938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:57:58.754403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:57:58.756006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:58.756527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:57:58.767262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:58.769164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:57:58.769256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:58.769529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:57:58.769618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:57:58.769683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:57:58.769847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:57:58.795249Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T10:57:59.147428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:57:59.147700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.147970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:57:59.148219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:57:59.148288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.151619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:59.151804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:57:59.152021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.152099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:57:59.152152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:57:59.152198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:57:59.159290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.159393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:57:59.159438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:57:59.168031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.168113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.168164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:59.168220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:57:59.172509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:57:59.185679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:57:59.185963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:57:59.188479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:59.188657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:57:59.188721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:59.189101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-10T10:57:59.189174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:57:59.189377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:57:59.189480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:57:59.194945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:57:59.195007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:57:59.195247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:59.196282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:57:59.196748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.196810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:57:59.196918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:57:59.196953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:59.196998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:57:59.197031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:59.197097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:57:59.197152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:57:59.197190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:57:59.197228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:57:59.197415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:57:59.197483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:57:59.197523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:57:59.201170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
t reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:57:59.322203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-04-10T10:57:59.324550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-10T10:57:59.326494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-10T10:57:59.327541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-04-10T10:57:59.328206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:57:59.328621Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 295us result status StatusSuccess 2025-04-10T10:57:59.328981Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:57:59.329542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:57:59.329676Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 138us result status StatusSuccess 2025-04-10T10:57:59.329941Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-04-10T10:57:59.330455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-04-10T10:57:59.330506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-04-10T10:57:59.330597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-04-10T10:57:59.330620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-04-10T10:57:59.330663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-04-10T10:57:59.330682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-04-10T10:57:59.331229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-10T10:57:59.331371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-10T10:57:59.331431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:342:2333] 2025-04-10T10:57:59.331656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-10T10:57:59.331796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-10T10:57:59.331827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:342:2333] 2025-04-10T10:57:59.331929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-10T10:57:59.331975Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-10T10:57:59.331994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:342:2333] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-10T10:57:59.332522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:57:59.332756Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 239us result status StatusSuccess 2025-04-10T10:57:59.333057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-04-10T10:57:59.336180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:57:59.336524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-10T10:57:59.336602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable 
Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-04-10T10:57:59.336722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T10:57:59.339647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-04-10T10:57:59.339805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128
>> KqpScanArrowInChanels::AggregateNoColumn [GOOD]
>> KqpScanArrowInChanels::AggregateNoColumnNoRemaps
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD]
Test command err: BASE_PERF = 4.47230389 2025-04-10T10:55:57.291623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:57.291996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:57.292077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 10633, node 1 TClient is connected to server localhost:22240 2025-04-10T10:56:11.237859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:11.239020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:11.239148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 23341, node 2 TClient is connected to server localhost:23674 2025-04-10T10:56:26.664720Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:26.665203Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:26.665479Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17084, node 3 TClient is connected to server localhost:21628 2025-04-10T10:56:41.482345Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:41.482907Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:41.484138Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 20774, node 4 TClient is connected to server localhost:17768 2025-04-10T10:56:54.774360Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:54.774894Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:54.775117Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21576, node 5 TClient is connected to server localhost:1537 2025-04-10T10:57:10.339628Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:10.340253Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:10.340573Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21254, node 6 TClient is connected to server localhost:26026 2025-04-10T10:57:30.584040Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:30.584466Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:30.584675Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 15731, node 7 TClient is connected to server localhost:5100 2025-04-10T10:57:39.419198Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491636976283715118:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:39.419321Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:57:39.946729Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:39.959549Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:39.959719Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:39.967849Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30612, node 8 2025-04-10T10:57:40.074532Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:40.074570Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:40.074589Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:40.074832Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1903 2025-04-10T10:57:44.422627Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7491636976283715118:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:44.422740Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD]
>> TestYmqHttpProxy::TestChangeMessageVisibility
>> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD]
Test command err: Trying to start YDB, gRPC: 29348, MsgBus: 62675 2025-04-10T10:56:44.171886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636743263211484:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:44.172227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001590/r3tmp/tmprylh5d/pdisk_1.dat 2025-04-10T10:56:44.967929Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:44.968012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:44.976729Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:44.978112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29348, node 1 2025-04-10T10:56:45.177304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:45.177335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:45.177341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:45.177483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62675 TClient is connected to server localhost:62675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:46.068966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.133030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.384001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.593742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.694124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:48.962890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636760443082324:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:48.963003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.173715Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636743263211484:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:49.173773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:49.325539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.385906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.452021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.539236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.590033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.663139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:49.740285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636764738050136:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.740432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.740681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636764738050141:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:49.744672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:49.763254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636764738050143:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:49.834920Z node 1 :TX_PROXY ERROR: Actor# [1:7491636764738050198:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5034, MsgBus: 15279 2025-04-10T10:56:52.004162Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636771633662897:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:52.031821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001590/r3tmp/tmpBC02X0/pdisk_1.dat 2025-04-10T10:56:52.129301Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5034, node 2 2025-04-10T10:56:52.171469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:52.171545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:52.189399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:52.301788Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:52.301819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:52.301826Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:52.301987Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15279 TClient is connected to server localhost:15279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:52.743607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:52.772843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:56:52.926795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:56:53.141727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:53.258934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.130288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... idx:1798|>,<|idx:1799|>,<|idx:1800|>,<|idx:1801|>,<|idx:1802|>,<|idx:1803|>,<|idx:1804|>,<|idx:1805|>,<|idx:1806|>,<|idx:1807|>,<|idx:1808|>,<|idx:1809|>,<|idx:1810|>,<|idx:1811|>,<|idx:1812|>,<|idx:1813|>,<|idx:1814|>,<|idx:1815|>,<|idx:1816|>,<|idx:1817|>,<|idx:1818|>,<|idx:1819|>,<|idx:1820|>,<|idx:1821|>,<|idx:1822|>,<|idx:1823|>,<|idx:1824|>,<|idx:1825|>,<|idx:1826|>,<|idx:1827|>,<|idx:1828|>,<|idx:1829|>,<|idx:1830|>,<|idx:1831|>,<|idx:1832|>,<|idx:1833|>,<|idx:1834|>,<|idx:1835|>,<|idx:1836|>,<|idx:1837|>,<|idx:1838|>,<|idx:1839|>,<|idx:1840|>,<|idx:1841|>,<|idx:1842|>,<|idx:1843|>,<|idx:1844|>,<|idx:1845|>,<|idx:1846|>,<|idx:1847|>,<|idx:1848|>,<|idx:1849|>,<|idx:1850|>,<|idx:1851|>,<|idx:1852|>,<|idx:1853|>,<|idx:1854|>,<|idx:1855|>,<|idx:1856|>,<|idx:1857|>,<|idx:1858|>,<|idx:1859|>,<|idx:1860|>,<|idx:1861|>,<|idx:1862|>,<|idx:1863|>,<|idx:1864|>,<|idx:1865|>,<|idx:1866|>,<|idx:1867|>,<|idx:1868|>,<|idx:1869|>,<|idx:1870|>,<|idx:1871|>,<|idx:1872|>,<|idx:1873|>,<|idx:1874|>,<|idx:1875|>,<|idx:1876|>,<|idx:1877|>,<|idx:1878|>,<|idx:1879|>,<|idx:1880|>,<|idx:1881|>,<|idx:1882|>,<|idx:1883|>,<|idx:1884|>,<|idx:1885|>,<|idx:1886|>,<|idx:1887|>,<|idx:1888|>,<|idx:1889|>,<|idx:1890|>,<|idx:1891|>,<|idx:1892|>,<|idx:1893|>,<|idx:1894|>,<|idx:1895|>,<|idx:1896|>,<|idx:1897|>,<|idx:1898|>,<|idx:1899|>,<|idx:1900|>,<|idx:1901|>,<|idx:1902|>,<|idx:1903|>,<|idx:1904|>,<|idx:1905|>,<|idx:1906|>,<|idx:1907|>,<|idx:1908|>,<|idx:1909|>,<|idx:1910|>,<|idx:1911|>,<|idx:1912|>,<|idx:1913|>,<|idx:1914|>,<|idx:1915|>,<|idx:1916|>,<|idx:1917|>,<|idx:1918|>,<|idx:1919|>,<|idx:1920|>,<|idx:1921|>,<|idx:1922|>,<|idx:1923|>,<|idx:1924|>,<|idx:1925|>,<|idx:1926|>,<|idx:1927|>,<|idx:1928|>,<|idx:1929|>,<|idx:1930|>,<|idx:1931|>,<|idx:1932|>,<|idx:1933|>,<|idx:1934|>,<|idx:1935|>,<|idx:1936|>,<|idx:1937|>,<|idx:1938|>,<|idx:1939|>,<|idx:1940|>,<|idx:1941|>,<|idx:1942|>,<|idx:1943|>,<|idx:1944|>,<|idx:1945|>,<|idx:1946|>,<|idx:1947|>,<|idx:1948|>,<|idx:1949|>,<|idx:1950|>,<|idx:1951|>,<|idx:1952|>,<|idx:1953|>,<|idx:1954|>,<|idx:1955|>,<|idx:1956|>,<|idx:1957|>,<|idx:1958|>,<|idx:1959|>,<|idx:1960|>,<|idx:1961|>,<|idx:1962|>,<|idx:1963|>,<|idx:1964|>,<|idx:1965|>,<|idx:1966|>,<|idx:1967|>,<|idx:1968|>,<|idx:1969|>,<|idx:1970|>,<|idx:1971|>,<|idx:1972|>,<|idx:1973|>,<|idx:1974|>,<|idx:1975|>,<|idx:1976|>,<|idx:1977|>,<|idx:1978|>,<|idx:1979|>,<|idx:1980|>,<|idx:1981|>,<|idx:1982|>,<|idx:1983|>,<|idx:1984|>,<|idx:1985|>,<|idx:1986|>,<|idx:1987|>,<|idx:1988|>,<|idx:1989|>,<|idx:1990|>,<|idx:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-04-10T10:57:31.410290Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744282651314, txId: 281474976716154] shutting down Trying to start YDB, gRPC: 24355, MsgBus: 64236 2025-04-10T10:57:33.846648Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491636949748328948:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:33.847886Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001590/r3tmp/tmpmDCR58/pdisk_1.dat 2025-04-10T10:57:34.197716Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:34.225273Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:34.225387Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:34.227370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24355, node 3 2025-04-10T10:57:34.384772Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:34.384800Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:34.384812Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:34.384981Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64236 TClient is connected to server localhost:64236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:35.646222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.654810Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:35.670020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.817488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:36.099261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:36.223755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:38.850377Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491636949748328948:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:38.850481Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:43.454523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636992698003729:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.454867Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:43.500222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.567804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.655228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.714396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.772348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:43.850616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:44.059808Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636996992971554:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.059993Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.062704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491636996992971559:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:44.069393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:44.110001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491636996992971562:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:44.198127Z node 3 :TX_PROXY ERROR: Actor# [3:7491636996992971620:3480] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:46.254810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:57:46.257684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:57:46.276260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.135430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:49.135470Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:56.375919Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282676395, txId: 281474976710753] shutting down
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T10:57:59.604016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:57:59.604120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:59.604161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:57:59.604218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:57:59.604280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:57:59.604311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:57:59.604370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:57:59.604463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:57:59.604865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:57:59.781475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T10:57:59.781568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:59.808501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:57:59.808897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:57:59.809097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:57:59.830765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:57:59.831096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:57:59.831858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:57:59.832129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:57:59.839010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:59.840664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:57:59.840737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:57:59.840954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:57:59.841019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:57:59.841070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:57:59.841205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:57:59.859277Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T10:58:00.071293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:00.071572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.071863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:00.072150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-10T10:58:00.072224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.076590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:00.076800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:00.077081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.077174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:00.077230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:00.077269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:00.082112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.082224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:00.082265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:00.085038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.085102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.085152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:00.085210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:00.101738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:00.104873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:00.105157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:00.106379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:00.106558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:00.106623Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:00.106974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:00.107042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:00.107247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:00.107345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:00.110006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:00.110079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:00.110355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:00.110407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:00.110773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:00.110828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:00.110939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:00.110979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:00.111024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:00.111071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:00.111127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:00.111179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:00.111241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:00.111282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:00.111374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:00.111431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:00.111464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:00.113482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-10T10:58:00.222611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:00.222722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:00.222783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-10T10:58:00.222965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:58:00.223051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-10T10:58:00.223233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:00.223294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:58:00.223332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:58:00.226993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:58:00.227168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-04-10T10:58:00.227822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:00.227878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:00.228060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:58:00.228196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:58:00.228319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:00.228378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T10:58:00.228424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T10:58:00.228449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T10:58:00.228803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at 
schemeshard: 72057594046678944 2025-04-10T10:58:00.228849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T10:58:00.228963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:58:00.229010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:58:00.229069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T10:58:00.229123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:58:00.229166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T10:58:00.229241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T10:58:00.229303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T10:58:00.229338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T10:58:00.229448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:58:00.229491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:58:00.229532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-04-10T10:58:00.229569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T10:58:00.229602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T10:58:00.229642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-10T10:58:00.230285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.230382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.230420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:58:00.230481Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T10:58:00.230528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:58:00.231037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:58:00.231082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:58:00.231163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
3 2025-04-10T10:58:00.232077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.232150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.232177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:58:00.232207Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T10:58:00.232240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:00.233381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.233518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T10:58:00.233549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T10:58:00.233578Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T10:58:00.233619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:58:00.233693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T10:58:00.237490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:58:00.238089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T10:58:00.238359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T10:58:00.238627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T10:58:00.238995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T10:58:00.239041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T10:58:00.239477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T10:58:00.239616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T10:58:00.239661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:364:2355] 
TestWaitNotification: OK eventTxId 103 2025-04-10T10:58:00.240283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:58:00.240536Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 249us result status StatusPathDoesNotExist 2025-04-10T10:58:00.240733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |68.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |68.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> KqpQueryService::ClosedSessionRemovedFromPool [GOOD] >> KqpQueryService::CloseConnection >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |69.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar |69.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |69.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TExternalTableTest::ReplaceExternalTableIfNotExists |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> YdbIndexTable::OnlineBuild >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> 
BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-04-10T10:57:22.197028Z :FallbackToSingleDb INFO: Random seed for debugging is 1744282642196973 2025-04-10T10:57:22.707895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636906375671868:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.708039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:22.756286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636904210267409:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.756326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:23.020503Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:23.024689Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00148c/r3tmp/tmpqQcDfQ/pdisk_1.dat 2025-04-10T10:57:23.674280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:23.693477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:23.693688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:23.734546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:23.734643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:23.745109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:23.761762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:23.770498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:23.777999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:23.778702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2014, node 1 2025-04-10T10:57:24.016392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00148c/r3tmp/yandexMvghyV.tmp 2025-04-10T10:57:24.016443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00148c/r3tmp/yandexMvghyV.tmp 2025-04-10T10:57:24.016610Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00148c/r3tmp/yandexMvghyV.tmp 2025-04-10T10:57:24.016766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-04-10T10:57:24.129359Z INFO: TTestServer started on Port 1709 GrpcPort 2014 TClient is connected to server localhost:1709 PQClient connected to localhost:2014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:24.622768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:24.752778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:27.680054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636927850509209:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:27.680182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:27.680556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636927850509230:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:27.691107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:27.708928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636906375671868:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:27.709001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:27.766225Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636904210267409:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:27.766305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:27.853748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636927850509232:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:57:28.118403Z node 1 :TX_PROXY ERROR: Actor# [1:7491636927850509328:2686] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:28.168504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.179646Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636932145476656:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:28.181743Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjAwOWE1NDktYzJlODljNWYtMzA2ODFhZWYtN2FhNGQ0YQ==, ActorId: [1:7491636927850509206:2336], ActorState: ExecuteState, TraceId: 01jrfnhz2g8pt3kwcfzbwgvbav, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:28.182415Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636925685104223:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:28.182801Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmJiOWYzNzAtM2E4MjYzZS03ZTgzNmY5Yi0yNTRmOGQ3NQ==, ActorId: [2:7491636925685104191:2309], ActorState: ExecuteState, TraceId: 01jrfnhz593zgr27edzegh2h40, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:28.184685Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:28.184590Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:28.375120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:28.580919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2014", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:29.098419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnj098d4m7ngapwzwh89df, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcwZmI1N2MtNTFmMmE4OGQtZGZjNjA5MDYtYTUwYzhlYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636936440444362:3000] === CheckClustersList. Ok 2025-04-10T10:57:34.803199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... PQ Client: crea ... 
TE_PROXY INFO: session v1 cookie: 1 sessionId: src|5865c90c-21404a71-b33a51d7-5999817b_0 grpc closed 2025-04-10T10:57:55.229362Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5865c90c-21404a71-b33a51d7-5999817b_0 is DEAD 2025-04-10T10:57:55.230067Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:57:55.231831Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637046458203879:2511] destroyed 2025-04-10T10:57:55.231892Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. PORTS 15520 23195 2025-04-10T10:57:55.277373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:55.277407Z node 3 :IMPORT WARN: Table profiles were not loaded Session was created >>> Ready to answer: ok 2025-04-10T10:57:56.323847Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-04-10T10:57:56.324000Z :INFO: [/Root] [] [3065359-c3bb2377-1b5758c4-9073a4e2] Open read subsessions to databases: { name: , endpoint: localhost:23195, path: /Root } 2025-04-10T10:57:56.324214Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Starting read session 2025-04-10T10:57:56.324281Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Starting single session 2025-04-10T10:57:56.325372Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-10T10:57:56.325450Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-10T10:57:56.325518Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] Reconnecting session to cluster in 0.000000s 2025-04-10T10:57:56.325931Z :ERROR: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:23195
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23195. 2025-04-10T10:57:56.326022Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-10T10:57:56.326094Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-10T10:57:56.326284Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:23195" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:23195
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23195. " } 2025-04-10T10:57:56.326637Z :NOTICE: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:57:56.326690Z :DEBUG: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:23195" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:23195
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23195. " } 2025-04-10T10:57:56.326844Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Closing read session. Close timeout: 0.010000s 2025-04-10T10:57:56.326901Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:57:56.326949Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:56.326991Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Closing read session. Close timeout: 0.000000s 2025-04-10T10:57:56.327043Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:57:56.327078Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:56.327116Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Closing read session. Close timeout: 0.000000s 2025-04-10T10:57:56.327165Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:57:56.327209Z :INFO: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:56.327283Z :NOTICE: [/Root] [/Root] [d7f5b14c-741d5d48-564bc29c-2f5287da] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:57:56.796626Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491637050753171254:2530] TxId: 281474976715686. Ctx: { TraceId: 01jrfnjv1w05jxnk88ycsdcrnz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmZmMGVmNi05ZjkzZjlhYi1jMDM2MGYwNS1lN2E2ZDYyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-10T10:57:56.797441Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637050753171261:2538], TxId: 281474976715686, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnjv1w05jxnk88ycsdcrnz. SessionId : ydb://session/3?node_id=3&id=NmZmMGVmNi05ZjkzZjlhYi1jMDM2MGYwNS1lN2E2ZDYyNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7491637050753171254:2530], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:57:56.797778Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637050753171263:2539], TxId: 281474976715686, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=NmZmMGVmNi05ZjkzZjlhYi1jMDM2MGYwNS1lN2E2ZDYyNA==. TraceId : 01jrfnjv1w05jxnk88ycsdcrnz. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491637050753171254:2530], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:57:57.185397Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715687. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:57.185532Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637055048138573:2541] TxId: 281474976715687. Ctx: { TraceId: 01jrfnjvrc8dv2784n55bnrxvg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA1MjU3MTAtMmI1OGNiZWUtY2VmOTEzZWItZGU2YjU4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:57.185741Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA1MjU3MTAtMmI1OGNiZWUtY2VmOTEzZWItZGU2YjU4NzA=, ActorId: [3:7491637055048138570:2541], ActorState: ExecuteState, TraceId: 01jrfnjvrc8dv2784n55bnrxvg, Create QueryResponse for error on request, msg: 2025-04-10T10:57:57.187159Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnjvrc8dv2784n58jz1kta" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T10:57:57.801544Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmZmMGVmNi05ZjkzZjlhYi1jMDM2MGYwNS1lN2E2ZDYyNA==, ActorId: [3:7491637050753171229:2530], ActorState: ExecuteState, TraceId: 01jrfnjv1w05jxnk88ycsdcrnz, Create QueryResponse for error on request, msg: 2025-04-10T10:57:57.807073Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } TxMeta { id: "01jrfnjvhj9ex2vv7ksav4k65t" } } YdbStatus: UNAVAILABLE ConsumedRu: 332 } 2025-04-10T10:57:59.594266Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715690. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:59.594420Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637063638073271:2553] TxId: 281474976715690. Ctx: { TraceId: 01jrfnjy2d64w0tj4zw6pratzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjEzN2M4OWMtYjBjY2JkNGEtMTFmNTE1MWUtOTQ1NDY4MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:57:59.594673Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjEzN2M4OWMtYjBjY2JkNGEtMTFmNTE1MWUtOTQ1NDY4MzU=, ActorId: [3:7491637063638073268:2553], ActorState: ExecuteState, TraceId: 01jrfnjy2d64w0tj4zw6pratzj, Create QueryResponse for error on request, msg: 2025-04-10T10:57:59.596330Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnjy2j6942ry6t9ptyh1k6" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T10:58:00.032779Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715692. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:00.032924Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637063638073318:2545] TxId: 281474976715692. Ctx: { TraceId: 01jrfnjxh16mr1hz5ntdehv6ed, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY1Y2EyMzctZGUzYjI5YS0yYWI1MzVhMS02MTgzYmY3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:00.033169Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY1Y2EyMzctZGUzYjI5YS0yYWI1MzVhMS02MTgzYmY3, ActorId: [3:7491637059343105949:2545], ActorState: ExecuteState, TraceId: 01jrfnjxh16mr1hz5ntdehv6ed, Create QueryResponse for error on request, msg: 2025-04-10T10:58:00.034957Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnjyheb2dx6xdy2myncxb9" } } YdbStatus: UNAVAILABLE ConsumedRu: 681 } |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> RemoteTopicReader::ReadTopic >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T10:58:03.778126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:03.778248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:03.778308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:03.778350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:03.778405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:03.778438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:03.778567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:03.778676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:03.779064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:03.937329Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T10:58:03.937444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:03.958665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:03.959077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:03.959258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:03.969931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:03.970185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:03.970973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:03.971219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:03.974602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:03.976002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:03.976086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:03.976289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:03.976366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:03.976417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:03.976536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:03.987148Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T10:58:04.186987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:04.187281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.187583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:04.187871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:04.187976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.196742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:04.196953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:04.197212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.197296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:04.197371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:04.197452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:04.200723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.200858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:04.200917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:04.203858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.203922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.203973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:04.204027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:04.208271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:04.213321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:04.213654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:04.214898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:04.215071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:04.215150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:04.215539Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:04.215605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:04.215793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:04.215880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:04.219613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:04.219682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:04.219906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:04.219967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:04.220343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.220399Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:04.220512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:04.220552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:04.220592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:04.220627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:04.220696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:04.220751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:04.220796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:04.220836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:04.220934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:04.220980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:04.221015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:04.223195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
04, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T10:58:04.395531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T10:58:04.397407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.397506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.397539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:58:04.397572Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-10T10:58:04.397624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T10:58:04.397706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T10:58:04.399273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T10:58:04.399428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-10T10:58:04.400675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:04.400802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:04.400861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-04-10T10:58:04.401009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T10:58:04.401246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:04.401343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:58:04.402391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:58:04.405022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T10:58:04.406407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:04.406445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:04.406643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:58:04.406748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:58:04.406886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:04.406956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T10:58:04.406998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:58:04.407026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:58:04.407449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:58:04.407499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T10:58:04.407606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:58:04.407643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:58:04.407692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:58:04.407738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:58:04.407782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T10:58:04.407825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:58:04.407876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:58:04.407908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:58:04.408012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T10:58:04.408056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:58:04.408092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T10:58:04.408127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T10:58:04.408154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-04-10T10:58:04.408901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.409005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.409040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:58:04.409081Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T10:58:04.409118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T10:58:04.410399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.410493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:58:04.410529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:58:04.410560Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-10T10:58:04.410588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:58:04.410657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T10:58:04.414014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:58:04.415535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:58:04.416000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:58:04.416053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:58:04.416604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:58:04.416730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:58:04.416779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:390:2381] TestWaitNotification: OK eventTxId 104 2025-04-10T10:58:04.417542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:58:04.417831Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 444us result status StatusSuccess 2025-04-10T10:58:04.418253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-04-10T10:57:32.135139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636946331797072:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:32.135197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmptb1fIw/pdisk_1.dat 2025-04-10T10:57:33.000425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:33.000544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:33.009863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:33.015102Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14223, node 1 2025-04-10T10:57:33.357864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:33.357898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:33.357905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:33.358019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:33.878406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:37.135509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636946331797072:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:37.135608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:37.647088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636967806634444:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:37.647216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:37.968846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T10:57:38.233946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636972101601911:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:38.234060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:38.234769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636972101601916:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:38.239142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T10:57:38.278619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636972101601918:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:57:38.347366Z node 1 :TX_PROXY ERROR: Actor# [1:7491636972101601995:2814] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:38.564832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnj9dq514rbdac2g8hcage, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiNWY1NDAtOGM1MjczMjItYzFmMTVhMTctZTkxYzRkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:38.789259Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfnj9s4783zw2zbbmbtm2s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiNWY1NDAtOGM1MjczMjItYzFmMTVhMTctZTkxYzRkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/"Create temporary directory "/Root/~backup_20250410T105738" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T105738/table" }Backup table "/Root/~backup_20250410T105738/table" to "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table"Describe table "/Root/~backup_20250410T105738/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/permissions.pb"Read table "/Root/~backup_20250410T105738/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/data_00.csv"Drop table "/Root/~backup_20250410T105738/table"2025-04-10T10:57:39.523531Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250410T105738" in database2025-04-10T10:57:39.569547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/" to "/Root"2025-04-10T10:57:39.814674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table" to "/Root/table"2025-04-10T10:57:39.869351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/data_00.csv"2025-04-10T10:57:40.183552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. 
Ctx: { TraceId: 01jrfnjb401bbmye1thw67qebn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg4YTk5ZDktZmJlOWE0YmUtZWJjYTFjNDUtYWEzZWVlNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpmgbSRQ/table/permissions.pb"2025-04-10T10:57:40.241227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-10T10:57:40.491386Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfnjbe60b5grr6z1xcyebzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiNWY1NDAtOGM1MjczMjItYzFmMTVhMTctZTkxYzRkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:42.697393Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636991278310585:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:42.698249Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpE2hl3j/pdisk_1.dat 2025-04-10T10:57:43.131927Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:43.142456Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:43.142578Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:43.148642Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14969, node 4 2025-04-10T10:57:43.340307Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:43.340333Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:43.340345Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:43.340522Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersi ... 
/001d6b/r3tmp/tmpys4jp6/table" to "/Root/table"2025-04-10T10:57:49.029869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpys4jp6/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-10T10:57:49.233884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T10:57:49.398652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpys4jp6/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpys4jp6/table/permissions.pb"2025-04-10T10:57:49.702167Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T10:57:52.827775Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637032232552910:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:52.828872Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpqdncl4/pdisk_1.dat 2025-04-10T10:57:53.107125Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:53.164527Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:53.164654Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:53.172113Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2329, node 7 2025-04-10T10:57:53.432055Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:53.432092Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:53.432108Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:53.432395Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:53.847300Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:57.831635Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637032232552910:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:57.831730Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:59.307502Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637062297325075:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:59.307621Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:59.330458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T10:57:59.509347Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637062297325301:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:59.509440Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:59.509821Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637062297325306:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:59.515318Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T10:57:59.629600Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491637062297325308:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:57:59.711722Z node 7 :TX_PROXY ERROR: Actor# [7:7491637062297325379:2843] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:59.997042Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnjy6mb0krmex26qtbr2cn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZWVkZDRiNmYtYzhlOGRiNmItMmI3OTJjNDktMzFkNjE5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:00.260851Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfnjyqvaa2qx50mrfdtcc83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZWVkZDRiNmYtYzhlOGRiNmItMmI3OTJjNDktMzFkNjE5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/"Create temporary directory "/Root/~backup_20250410T105800" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T105800/table" }2025-04-10T10:58:00.389972Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710664:1, at schemeshard: 72057594046644480 Backup table "/Root/~backup_20250410T105800/table" to "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table"Describe table "/Root/~backup_20250410T105800/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/permissions.pb"Read table "/Root/~backup_20250410T105800/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/data_00.csv"Drop table "/Root/~backup_20250410T105800/table"2025-04-10T10:58:00.958657Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710667:1, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250410T105800" in database2025-04-10T10:58:01.097450Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-10T10:58:01.104479Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T10:58:01.205104Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710669:1, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/" to "/Root"2025-04-10T10:58:01.314007Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/" to "/Root"Process 
"/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table" to "/Root/table"2025-04-10T10:58:01.379076Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/data_00.csv"2025-04-10T10:58:01.653777Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jrfnk074dcgefqqf2rnh4k6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTg2MjljYWUtYTkyZjhhN2YtODQwMDdkMmItNzA5ZDZiMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d6b/r3tmp/tmpblKT3Q/table/permissions.pb"2025-04-10T10:58:01.715352Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-10T10:58:01.988492Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfnk0cx7ns4jyjpe0c6fdcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZWVkZDRiNmYtYzhlOGRiNmItMmI3OTJjNDktMzFkNjE5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TTicketParserTest::LoginBad ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-04-10T10:57:23.085948Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1744282643085907 2025-04-10T10:57:23.938772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636910386040303:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:23.938845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001498/r3tmp/tmpo1tZbd/pdisk_1.dat 2025-04-10T10:57:24.608618Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:24.622210Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:24.793128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:24.966746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:25.384639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:25.394457Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:25.394605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:25.403322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3778, node 1 2025-04-10T10:57:25.575365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:25.582305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:25.634706Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:25.650422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:25.799091Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001498/r3tmp/yandex3jOOYk.tmp 2025-04-10T10:57:25.799129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001498/r3tmp/yandex3jOOYk.tmp 2025-04-10T10:57:25.814287Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001498/r3tmp/yandex3jOOYk.tmp 2025-04-10T10:57:25.815836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:25.896687Z INFO: TTestServer started on Port 20932 GrpcPort 3778 TClient is connected to server localhost:20932 PQClient connected to localhost:3778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:26.323755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:28.943087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636910386040303:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:28.943183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:29.634523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636936155844886:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:29.634742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:29.635136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636936155844921:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:57:29.639615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-10T10:57:29.667361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636936155844923:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-10T10:57:30.098683Z node 1 :TX_PROXY ERROR: Actor# [1:7491636936155845020:2705] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:57:30.155968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:57:30.153084Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636934136203676:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T10:57:30.154499Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY4MWUzYjktZGE1OTY2YjctNjVlZjU1OGMtZGVhYWEzZmM=, ActorId: [2:7491636934136203641:2309], ActorState: ExecuteState, TraceId: 01jrfnj12w3dmvecv8wegpkkmh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-10T10:57:30.156888Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-04-10T10:57:30.169659Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636940450812336:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:30.174699Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTNmYzY2YTktZTkzZGVlZTItMzc5M2Y3ODQtNGQ4OTA3ZDE=, ActorId: [1:7491636936155844883:2338], ActorState: ExecuteState, TraceId: 01jrfnj1006rqdvdmgn6f1nk82, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:30.175088Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:30.517397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:30.801193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:3778", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:31.384417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnj2bvcbx3zszw498jaahm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVkZDAwODMtNGU3NTM4YzYtZjVjZjliMWItNzM2YmRhOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636944745780057:3013] === CheckClustersList. Ok 2025-04-10T10:57:37.430005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:3778 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T10:57:37.724138Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3778 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: ... 
Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2025-04-10T10:57:57.108570Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC request to localhost:23588 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2025-04-10T10:57:57.615895Z node 3 :PERSQUEUE INFO: proxy answer Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-10T10:57:57.632786Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-10T10:57:57.643204Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-10T10:57:57.643287Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:23588 2025-04-10T10:57:57.658495Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-10T10:57:57.662488Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T10:57:57.662535Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-10T10:57:57.666259Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-10T10:57:57.666398Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:36764 2025-04-10T10:57:57.666422Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:36764 proto=v1 topic=test-topic durationSec=0 2025-04-10T10:57:57.666433Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:57:57.668406Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-10T10:57:57.668528Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-10T10:57:57.668539Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:57:57.668547Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE 
$AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-10T10:57:57.668564Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T10:57:57.675711Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-10T10:57:57.864303Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-10T10:57:57.864683Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637055792823726:2507] connected; active server actors: 1 2025-04-10T10:57:57.864766Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-10T10:57:57.864795Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-10T10:57:57.865215Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637055792823726:2507] disconnected; active server actors: 1 2025-04-10T10:57:57.865247Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637055792823726:2507] disconnected no session 2025-04-10T10:57:57.990552Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T10:57:57.990598Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-10T10:57:57.990623Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637055792823687:2507] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T10:57:57.990663Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:57:57.993800Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7491637055792823754:2507], now have 1 active actors on pipe 2025-04-10T10:57:57.994108Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-10T10:57:57.994617Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:57.994660Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:57.994767Z node 4 :PERSQUEUE INFO: new Cookie src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T10:57:57.994932Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-10T10:57:57.995009Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:57.995927Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:57.995962Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:57.996050Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:57.998103Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 2025-04-10T10:57:58.002464Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282678002 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:58.002631Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T10:57:58.006287Z :INFO: [] MessageGroupId [src] SessionId [src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0] Write session: close. Timeout = 0 ms 2025-04-10T10:57:58.006364Z :INFO: [] MessageGroupId [src] SessionId [src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0] Write session will now close 2025-04-10T10:57:58.006416Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0] Write session: aborting 2025-04-10T10:57:58.006998Z :INFO: [] MessageGroupId [src] SessionId [src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:57:58.007047Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0] Write session: destroy 2025-04-10T10:57:58.010267Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 grpc read done: success: 0 data: 2025-04-10T10:57:58.010309Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 grpc read failed 2025-04-10T10:57:58.010358Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 grpc closed 2025-04-10T10:57:58.010386Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec97277c-5e7aaf41-a66a8d98-52b42b15_0 is DEAD 2025-04-10T10:57:58.011457Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:57:58.015819Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637055792823754:2507] destroyed 2025-04-10T10:57:58.015898Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-04-10T10:57:58.105704Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: 2025-04-10T10:57:59.671647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:59.671682Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:01.465527Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:58:01.500767Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:01.560277Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:01.633631Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:01.748135Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:01.937108Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:02.275114Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:02.735638Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720691, task: 1, CA Id [3:7491637072972693129:2546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-04-10T10:56:32.397993Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T10:56:32.563326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T10:56:32.589682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T10:56:32.589990Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T10:56:32.599283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:32.599519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:32.599789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:32.599935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:32.600055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:32.600169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:32.600300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:32.600422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:32.600561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:32.600687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.600801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:32.600908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:32.632572Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:56:32.632822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:32.632880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:32.633104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:32.633310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:32.633397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:32.633459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:32.633626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:56:32.633706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:32.633758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:32.633793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:32.633987Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:32.634068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:56:32.634117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:56:32.634200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:56:32.634305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:56:32.634360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:56:32.634412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:56:32.634444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:56:32.634521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:56:32.634563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:56:32.634590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:56:32.634641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:56:32.634682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:56:32.634712Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:56:32.635098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-10T10:56:32.635180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-10T10:56:32.635286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=61; 2025-04-10T10:56:32.635389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T10:56:32.635581Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:56:32.635646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:56:32.635697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:56:32.635932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:56:32.635977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.636007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.636160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:56:32.636210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:56:32.636241Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T10:56:32.636414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:56:32.636456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:56:32.636490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T10:56:32.636621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:56:32.636665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:56:32.636711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
on;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=ac3e01d0-15fa11f0-8bb4cd08-71e163a,;}; 2025-04-10T10:58:03.328328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-04-10T10:58:03.328405Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T10:58:03.328513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T10:58:03.328594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T10:58:03.328651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T10:58:03.328767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.414000s; 2025-04-10T10:58:03.328840Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T10:58:03.604697Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-04-10T10:58:03.688419Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-04-10T10:58:03.688760Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-04-10T10:58:03.706422Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-04-10T10:58:03.706628Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=270;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=ac3e01d0-15fa11f0-8bb4cd08-71e163a,;}; 2025-04-10T10:58:03.708045Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:1:574112:0]; 2025-04-10T10:58:03.708178Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:2:85:3:0:5870200:0] Added portions: 151 152 2025-04-10T10:58:03.750102Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T10:58:03.750394Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[271] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T10:58:03.754166Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch 
GenStep: 2:88 Blob count: 2 2025-04-10T10:58:03.754309Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T10:58:03.769393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-10T10:58:03.769488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;fline=with_appended.cpp:65;portions=75,76,;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a; 2025-04-10T10:58:03.769771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::ac3e01d0-15fa11f0-8bb4cd08-71e163a; 2025-04-10T10:58:03.769836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T10:58:03.769901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:58:03.769944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-04-10T10:58:03.769994Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T10:58:03.770079Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T10:58:03.770160Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T10:58:03.770205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T10:58:03.770283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.402500s; 2025-04-10T10:58:03.770335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T10:58:03.770440Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:87:2:0:5870200:0] 2025-04-10T10:58:03.770496Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-04-10T10:58:03.771780Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=ac3e01d0-15fa11f0-8bb4cd08-71e163a;mem=5963210;cpu=0; 2025-04-10T10:58:03.776771Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T10:58:03.777637Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-04-10T10:58:03.777741Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] execute at tablet 9437184 2025-04-10T10:58:03.778100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-10T10:58:03.778438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:212;event=finished_tx;tx_id=1043; 2025-04-10T10:58:03.798842Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] complete at tablet 9437184 2025-04-10T10:58:03.798959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-10T10:58:03.799073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200; 2025-04-10T10:58:03.799191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::ad0fdf02-15fa11f0-84ce7827-2140717a; 2025-04-10T10:58:03.799231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-10T10:58:03.799301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a; 2025-04-10T10:58:03.799349Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=ad0fdf02-15fa11f0-84ce7827-2140717a; 2025-04-10T10:58:03.799459Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;type=CS::INDEXATION;priority=0;; 2025-04-10T10:58:03.799630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;type=CS::INDEXATION;priority=0;; 2025-04-10T10:58:03.799662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;mem=5963210;cpu=0; 2025-04-10T10:58:03.799692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;task_id=46;mem=5963210;cpu=0; 2025-04-10T10:58:03.799813Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a; Added portions: 153 154 2025-04-10T10:58:04.388795Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=ad0fdf02-15fa11f0-84ce7827-2140717a;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-10T10:58:04.388984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TestYmqHttpProxy::TestDeleteMessage [GOOD] |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TestYmqHttpProxy::TestTagQueue >> TestYmqHttpProxy::TestDeleteMessageBatch >> TSchemeShardAuditSettings::AlterSubdomain >> TestKinesisHttpProxy::TestWrongStream2 >> DataStreams::TestUpdateStorage >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> DataStreams::TestNonChargeableUser >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> DataStreams::TestDeleteStream >> DataStreams::TestPutRecordsOfAnauthorizedUser >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> DataStreams::TestGetRecordsStreamWithSingleShard >> TestYmqHttpProxy::TestListQueues >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] |69.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |69.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> 
DataShardVolatile::DistributedWriteAsymmetricExecute |69.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |69.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> BasicUsage::ReadMirrored [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn |69.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |69.0%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> SystemView::ShowCreateTableTemporary [FAIL] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-04-10T10:57:30.606862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636939079289003:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:30.606972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d6f/r3tmp/tmpGvtiHM/pdisk_1.dat 2025-04-10T10:57:31.659528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:31.673120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:31.673205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:31.724903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:31.789803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16887, node 1 2025-04-10T10:57:31.905089Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:31.914422Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:32.081266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:32.081292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:32.081299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:32.081458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5132 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:32.721196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.605789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636939079289003:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:35.605875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:36.860784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636964849093918:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:36.860913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:37.354244Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] Handle TEvProposeTransaction 2025-04-10T10:57:37.354285Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] TxId# 281474976715658 ProcessProposeTransaction 2025-04-10T10:57:37.354337Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7491636969144061253:2657] 2025-04-10T10:57:37.467870Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-10T10:57:37.467938Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:57:37.468351Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:57:37.468423Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:57:37.468572Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:57:37.468724Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:57:37.468790Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-10T10:57:37.468984Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 HANDLE EvClientConnected 2025-04-10T10:57:37.475050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:57:37.481027Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-04-10T10:57:37.481116Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061253:2657] txid# 281474976715658 SEND to# [1:7491636969144061252:2347] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-04-10T10:57:37.822389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636969144061403:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:37.822644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:37.824166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636969144061408:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:37.825598Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] Handle TEvProposeTransaction 2025-04-10T10:57:37.825632Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] TxId# 281474976715659 ProcessProposeTransaction 2025-04-10T10:57:37.825676Z node 1 :TX_PROXY DEBUG: actor# [1:7491636939079289259:2133] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7491636969144061413:2778] 2025-04-10T10:57:37.829604Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-10T10:57:37.829662Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:57:37.829690Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-10T10:57:37.831952Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:57:37.832035Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:57:37.832240Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:57:37.832403Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:57:37.832451Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-04-10T10:57:37.832593Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 HANDLE EvClientConnected 2025-04-10T10:57:37.846416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T10:57:37.853196Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 
281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-10T10:57:37.853247Z node 1 :TX_PROXY DEBUG: Actor# [1:7491636969144061413:2778] txid# 281474976715659 SEND to# [1:7491636969144061412:2361] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-04-10T10:57:37.884392Z node 1 :KQP_WORKLOAD_SERVICE WARN: ... a256;x-amz-date, Signature=8914fa53b5ba5ec3f83503f4fe207cddaa63104eb5307a61eacc0ee979a0a54f content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250410T105811Z 2025-04-10T10:58:11.868949Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7491637114044889320:2197], result# No response body. REQUEST: HEAD /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:9667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 27DC61A5-45A7-43A5-BF3F-A798052E29C5 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250410/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=58513ffb531121e4bf5a724cdb8e05348c62ceaf863b25796a5b431aa6249a5b content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250410T105811Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-04-10T10:58:11.884344Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7491637114044889320:2197], result# HeadObjectResult { ETag: 54623f53d68141118383b3390c4965d5 ContentLength: 165 } REQUEST: GET /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:9667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 561EAE56-CD2B-4789-B46B-0AB688E90AB7 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250410/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=4b49665d7c2a3125e8d17af3f925882f8e6f7c46d68765c01bab74e7cf59ef5a content-type: application/xml range: bytes=0-164 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250410T105811Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-04-10T10:58:11.889213Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [10:7491637114044889320:2197], result# 54623f53d68141118383b3390c4965d5 REQUEST: HEAD /test_bucket/view/permissions.pb HTTP/1.1 HEADERS: Host: localhost:9667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F4E17968-0D04-434F-81CB-F06A61E4912C amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250410/us-east-1/s3/aws4_request, 
SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=9bed5c8436a5e9d6e814baa568cac221f2f6a5b98d58c5038f7270aa8cdae7df content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250410T105811Z 2025-04-10T10:58:11.957366Z node 10 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [10:7491637114044889320:2197], result# No response body. 2025-04-10T10:58:12.000995Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856627:2373] [0] Resolve database: name# /Root 2025-04-10T10:58:12.001557Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856627:2373] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:58:12.001579Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856627:2373] [0] Send request: schemeShardId# 72057594046644480 2025-04-10T10:58:12.018127Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856627:2373] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:9667" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1744282691 } } REQUEST: GET /test_bucket?prefix=view HTTP/1.1 HEADERS: Host: localhost:9667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 49729804-6E8F-4ECD-A598-2DF68B1C6218 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250410/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=9390cdd8ec00a29f6338324ee3f41d1dded6544f49cc1b1b0835a2d59878279c content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250410T105812Z S3_MOCK::HttpServeList: view 2025-04-10T10:58:12.030354Z node 10 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [10:7491637114044889320:2197], result# ListObjectsResult { } 2025-04-10T10:58:12.030415Z node 10 :IMPORT INFO: Reply: self# [10:7491637114044889320:2197], success# 1, error# 2025-04-10T10:58:12.030544Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T10:58:12.030563Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715664, itemIdx# 0, success# 1 2025-04-10T10:58:12.066911Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T10:58:12.115374Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor 
HandleCompileResponse, self: [10:7491637118339856632:2833], status: SUCCESS 2025-04-10T10:58:12.115472Z node 10 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [10:7491637118339856632:2833], status: SUCCESS 2025-04-10T10:58:12.115775Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [10:7491637118339856632:2833], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"view\" QueryText: \"SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-04-10T10:58:12.116035Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T10:58:12.116066Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976715664, itemIdx# 0, status# SUCCESS, error# 2025-04-10T10:58:12.116269Z node 10 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-10T10:58:12.118901Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T10:58:12.119115Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T10:58:12.119133Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 281474976715664 2025-04-10T10:58:12.119212Z node 10 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710758 2025-04-10T10:58:12.119331Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T10:58:12.125002Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T10:58:12.125028Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-04-10T10:58:12.125181Z node 10 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 8] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710758 Issue: '' } 2025-04-10T10:58:12.131198Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T10:58:12.141458Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T10:58:12.141490Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-04-10T10:58:12.144687Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T10:58:12.430142Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856674:2375] [0] Resolve database: name# /Root 2025-04-10T10:58:12.430910Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856674:2375] [0] Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:58:12.430949Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856674:2375] [0] Send request: schemeShardId# 72057594046644480 2025-04-10T10:58:12.432050Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7491637118339856674:2375] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:9667" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1744282691 } EndTime { seconds: 1744282692 } } 2025-04-10T10:58:12.662563Z node 10 :TX_PROXY DEBUG: actor# [10:7491637088275084269:2131] Handle TEvExecuteKqpTransaction 2025-04-10T10:58:12.662617Z node 10 :TX_PROXY DEBUG: actor# [10:7491637088275084269:2131] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-10T10:58:12.663279Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfnkav1djbnkp447hp60z0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YzdmNzc4OTYtYWRmMjA1MmItZDU3ODFlOGQtZDE3NTNhOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:09.455633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:09.455739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:09.455793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:09.455833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:09.455875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:09.455905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:09.455993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:09.456103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:09.456492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:09.606086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:09.611099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:09.656282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:09.656615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:09.656793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:09.664544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:09.664810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:09.665486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:09.665892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:09.668062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:09.669394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:09.669475Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:09.669693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:09.669743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:09.669782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:09.669936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.677702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:09.964848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:09.965104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.965347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:09.965643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:09.965708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.979312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:09.979473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:09.979665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.979751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:09.979798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:09.979838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:09.982304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.982362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:09.982404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:09.984581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.984633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T10:58:09.984669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:09.984721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:09.988407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:09.990721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:09.990888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:09.991875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:09.992032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:09.992104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:09.992477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:09.992538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:09.992709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:09.992792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:09.996158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:09.996204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:09.996405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:09.996446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:09.996684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:09.996732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:09.996822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:09.996855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:09.996894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:09.996944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:09.997011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:09.997048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:09.997085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:09.997115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:09.997185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:09.997224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:09.997257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:09.999517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:09.999645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:09.999688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.296166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.296201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T10:58:15.296245Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-10T10:58:15.296292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:15.297301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.297388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.297453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T10:58:15.297491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-10T10:58:15.297531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 26] was 3 2025-04-10T10:58:15.297614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-10T10:58:15.301013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-10T10:58:15.301155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-10T10:58:15.302484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:15.302593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:15.302643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-10T10:58:15.302684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:15.302712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-10T10:58:15.302815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-04-10T10:58:15.303013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:15.303091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-10T10:58:15.304431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T10:58:15.304765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-04-10T10:58:15.306012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:15.306050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:15.306205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-10T10:58:15.306344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:15.306393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-10T10:58:15.306432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-10T10:58:15.306525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T10:58:15.306562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-10T10:58:15.306630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T10:58:15.306661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T10:58:15.306690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T10:58:15.306714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T10:58:15.306745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-04-10T10:58:15.306783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T10:58:15.306815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-10T10:58:15.306840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-10T10:58:15.306925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-10T10:58:15.306962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-04-10T10:58:15.306990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 2025-04-10T10:58:15.307015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-04-10T10:58:15.308321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.308416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.308451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-04-10T10:58:15.308496Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-10T10:58:15.308527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:15.309724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.309813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T10:58:15.309840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-04-10T10:58:15.309895Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-10T10:58:15.309927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-10T10:58:15.310005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-04-10T10:58:15.310691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:58:15.310733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T10:58:15.310815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-10T10:58:15.311182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T10:58:15.311220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T10:58:15.311289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:15.313592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T10:58:15.315380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T10:58:15.315496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T10:58:15.315577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-10T10:58:15.316855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-10T10:58:15.316890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-10T10:58:15.318413Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-10T10:58:15.318570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-10T10:58:15.318604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2470:4461] TestWaitNotification: OK eventTxId 175 >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-04-10T10:57:17.665400Z :PropagateSessionClosed INFO: Random seed for debugging is 1744282637665354 2025-04-10T10:57:18.183755Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636889037592190:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.183810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:18.290498Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636887164133549:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.290552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014c4/r3tmp/tmpQ1ZX33/pdisk_1.dat 2025-04-10T10:57:18.571458Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:18.590690Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:19.002534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:19.002698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:19.004380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:19.004477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:19.012923Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:19.013167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:19.016343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:19.026368Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5874, node 1 2025-04-10T10:57:19.173518Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:19.173569Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:19.238617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0014c4/r3tmp/yandexwBIeSE.tmp 2025-04-10T10:57:19.238649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0014c4/r3tmp/yandexwBIeSE.tmp 2025-04-10T10:57:19.238837Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0014c4/r3tmp/yandexwBIeSE.tmp 2025-04-10T10:57:19.238980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:19.312411Z INFO: TTestServer started on Port 9544 GrpcPort 5874 TClient is connected to server localhost:9544 PQClient connected to localhost:5874 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:19.786213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:23.157647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636910512429653:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.157812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.161131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636910512429680:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.165136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:23.172810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636910512429714:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.172878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.184962Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636889037592190:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:23.185023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.235946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636910512429682:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:57:23.293229Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636887164133549:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:23.293304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.482371Z node 1 :TX_PROXY ERROR: Actor# [1:7491636910512429766:2691] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:23.522026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.524229Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636908638970356:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:23.524428Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636910512429779:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:23.526047Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDc5ZDliNWMtMzQ0ZTQxYjItY2I4NmUxYjEtNGI3YTNlYzE=, ActorId: [1:7491636910512429650:2337], ActorState: ExecuteState, TraceId: 01jrfnhtpfew4x3v6b6wr3nr5z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:23.528320Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:23.529273Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q1ZTY5MmEtYWM3ODU1MmMtNTliMDNiNzYtNzIzN2Y=, ActorId: [2:7491636908638970315:2308], ActorState: ExecuteState, TraceId: 01jrfnhtsebts6s64wjm6qm9dr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:23.529720Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:23.755999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:23.995819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5874", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:24.528778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnhvsp0yq7bgftgtjgf0jp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWVjYjM5Yy02M2U1ZjcyNC0xNTNmN2Q2Ny1lODhjNThkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636914807397570:3054] === CheckClustersList. Ok 2025-04-10T10:57:30.747369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... 
= 1568 to budget 2025-04-10T10:58:13.364028Z :DEBUG: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] [] In ContinueReadingDataImpl, ReadSizeBudget = 1568, ReadSizeServerDelta = 8387040 2025-04-10T10:58:13.364322Z :DEBUG: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2025-04-10T10:58:13.364399Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2025-04-10T10:58:13.364449Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (2-2) 2025-04-10T10:58:13.364489Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (3-3) 2025-04-10T10:58:13.364518Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (4-4) >>> event from dataHandler: DataReceived { Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2025-04-10T10:58:13.279000Z WriteTime: 2025-04-10T10:58:13.305000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:58352", "server": "ipv6:[::1]:58352", "logtype": "unknown" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-04-10T10:58:13.279000Z WriteTime: 2025-04-10T10:58:13.312000Z MessageGroupId: "src_id" Meta: { "logtype": "unknown", "server": "ipv6:[::1]:58352", "_ip": "ipv6:[::1]:58352", "ident": "unknown" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-04-10T10:58:13.279000Z WriteTime: 2025-04-10T10:58:13.312000Z MessageGroupId: "src_id" Meta: { "logtype": "unknown", "server": "ipv6:[::1]:58352", "_ip": "ipv6:[::1]:58352", "ident": "unknown" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-04-10T10:58:13.279000Z WriteTime: 2025-04-10T10:58:13.312000Z MessageGroupId: "src_id" Meta: { "logtype": "unknown", "server": "ipv6:[::1]:58352", "_ip": "ipv6:[::1]:58352", "ident": "unknown" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-04-10T10:58:13.366586Z :DEBUG: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] [] The application data is transferred to the client. 
Number of messages 4, size 1180 bytes 2025-04-10T10:58:13.366316Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1568 } } 2025-04-10T10:58:13.366502Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 got read request: guid# 53c32c53-412c831c-3b26d99c-a3074911 2025-04-10T10:58:13.366640Z :DEBUG: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] [] Returning serverBytesSize = 0 to budget 2025-04-10T10:58:13.381289Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|38051cb5-2c80c698-c42a04dc-72c23552_0] Write session will now close 2025-04-10T10:58:13.381385Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|38051cb5-2c80c698-c42a04dc-72c23552_0] Write session: aborting 2025-04-10T10:58:13.382012Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|38051cb5-2c80c698-c42a04dc-72c23552_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-04-10T10:58:13.382083Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|38051cb5-2c80c698-c42a04dc-72c23552_0] Write session: destroy 2025-04-10T10:58:13.382284Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Closing read session. Close timeout: 18446744073709.551615s 2025-04-10T10:58:13.382408Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-04-10T10:58:13.382462Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 737 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:13.383069Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:13.383140Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-04-10T10:58:13.383187Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 738 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:13.383237Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:13.383284Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-04-10T10:58:13.383329Z :INFO: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 738 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:13.383430Z :NOTICE: [/Root] [/Root] [9475865c-11340703-f8cdde42-36c43bdb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:58:13.391830Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|38051cb5-2c80c698-c42a04dc-72c23552_0 grpc read done: success: 0 data: 2025-04-10T10:58:13.391865Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|38051cb5-2c80c698-c42a04dc-72c23552_0 grpc read failed 2025-04-10T10:58:13.391904Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|38051cb5-2c80c698-c42a04dc-72c23552_0 grpc closed 2025-04-10T10:58:13.391927Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|38051cb5-2c80c698-c42a04dc-72c23552_0 is DEAD 2025-04-10T10:58:13.392834Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:58:13.392997Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 grpc read done: success# 0, data# { } 2025-04-10T10:58:13.393010Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 grpc read failed 2025-04-10T10:58:13.393035Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 grpc closed 2025-04-10T10:58:13.393088Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15256370234535955024_v1 is DEAD 2025-04-10T10:58:13.396691Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7491637118800692086:2614] disconnected; active server actors: 1 2025-04-10T10:58:13.396731Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7491637118800692086:2614] client user disconnected session shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.396839Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637118800692087:2614] disconnected; active server actors: 1 2025-04-10T10:58:13.396855Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637118800692087:2614] client user disconnected session shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.396900Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7491637118800692088:2614] disconnected; active server actors: 1 2025-04-10T10:58:13.396917Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7491637118800692088:2614] client user disconnected session shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.397852Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7491637123095659554:2633] destroyed 2025-04-10T10:58:13.397890Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.397912Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7491637118800692097:2617] destroyed 2025-04-10T10:58:13.397933Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.397950Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7491637118800692095:2619] destroyed 2025-04-10T10:58:13.397969Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_15256370234535955024_v1 
2025-04-10T10:58:13.397986Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637118800692098:2618] destroyed 2025-04-10T10:58:13.398032Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:58:13.398123Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.398140Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:13.398180Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_15256370234535955024_v1 2025-04-10T10:58:14.113375Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491637127390626898:2638] TxId: 281474976720706. Ctx: { TraceId: 01jrfnkbxkbdwdwtcwq1p3yzb5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjVmMmFkYTItODdhMGZhNWUtMWQ4YTYwNTMtMTQ2MzM4NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-10T10:58:14.114259Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637127390626908:2647], TxId: 281474976720706, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=NjVmMmFkYTItODdhMGZhNWUtMWQ4YTYwNTMtMTQ2MzM4NmU=. TraceId : 01jrfnkbxkbdwdwtcwq1p3yzb5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491637127390626898:2638], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.116301Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637127390626907:2646], TxId: 281474976720706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NjVmMmFkYTItODdhMGZhNWUtMWQ4YTYwNTMtMTQ2MzM4NmU=. TraceId : 01jrfnkbxkbdwdwtcwq1p3yzb5. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491637127390626898:2638], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> TestKinesisHttpProxy::ListShardsTimestamp >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> RemoteTopicReader::ReadTopic [GOOD] >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 21639, MsgBus: 16393 2025-04-10T10:57:42.666953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636990783547778:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:42.667409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001850/r3tmp/tmpGDppLS/pdisk_1.dat 2025-04-10T10:57:43.383281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:43.383380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:43.390334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:43.392385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21639, node 1 2025-04-10T10:57:43.690830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:43.690849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:43.690856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:43.690963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16393 TClient is connected to server localhost:16393 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:45.111281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.152983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.385377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:45.734747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:57:45.897953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:47.664063Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636990783547778:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:47.687347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:48.295543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637016553353184:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:48.296149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.269981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.353538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.455090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.530044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.633583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.758243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:49.934090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637020848321019:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.934206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.934597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637020848321024:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.940729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:49.962225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637020848321026:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:50.066345Z node 1 :TX_PROXY ERROR: Actor# [1:7491637025143288377:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:52.365883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2025-04-10T10:57:53.261315Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282673273, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 19869, MsgBus: 28078 2025-04-10T10:57:55.190007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637047842765071:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001850/r3tmp/tmpJgAcXE/pdisk_1.dat 2025-04-10T10:57:55.310547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:55.472422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:55.481396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:55.481501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:55.491491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19869, node 2 2025-04-10T10:57:55.670868Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:55.670914Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:55.670923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:55.671085Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28078 TClient is connected to server localhost:28078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:57:56.808132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:56.837264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:57.025660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:57.268472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:00.808792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:00.905928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:01.056851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:01.170704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:01.253932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:01.342295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:01.465210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637073612570988:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:01.465282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:01.465672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637073612570993:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:01.475039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:01.501916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637073612570995:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:01.557757Z node 2 :TX_PROXY ERROR: Actor# [2:7491637073612571050:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:03.830139Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282683864, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 5682, MsgBus: 2715 2025-04-10T10:58:05.158401Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637083019188584:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001850/r3tmp/tmpskjxUD/pdisk_1.dat 2025-04-10T10:58:05.246757Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:58:05.487988Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:05.517663Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:05.517752Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:05.526094Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5682, node 3 2025-04-10T10:58:05.674509Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:05.674536Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:05.674545Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:05.674688Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2715 TClient is connected to server localhost:2715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:06.600862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:06.611186Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:06.629537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:06.845682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:07.200920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:07.384050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:10.014409Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637083019188584:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.014574Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:12.242761Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637117378928587:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:12.242928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:12.324347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:12.446056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:12.585406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:12.724995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:12.842450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:12.940195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.120163Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637121673896426:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.120261Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.120687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637121673896431:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.125842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:13.176615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637121673896433:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:13.261453Z node 3 :TX_PROXY ERROR: Actor# [3:7491637121673896492:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:15.589703Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282695617, txId: 281474976710671] shutting down 2025-04-10T10:58:15.987028Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282696009, txId: 281474976710673] shutting down >> TestYmqHttpProxy::TestTagQueue [GOOD] >> KqpQueryService::CloseConnection [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-04-10T10:58:05.032172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637084766505863:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:05.032215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010be/r3tmp/tmpg3eqZf/pdisk_1.dat 2025-04-10T10:58:05.611100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:05.626233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:05.666715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:05.674499Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30741 TServer::EnableGrpc on GrpcPort 24355, node 1 2025-04-10T10:58:06.230593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:06.230627Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:06.230635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:06.230782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:07.450300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:08.185532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T10:58:10.033307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637084766505863:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.033403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:10.643398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637110536310565:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.643542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.645084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637110536310576:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.645190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.645505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637110536310584:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.645536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637110536310585:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.650747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-10T10:58:10.656535Z node 1 :TX_PROXY ERROR: Actor# [1:7491637110536310591:2456] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:58:10.669449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:58:10.669760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637110536310588:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:58:10.670782Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:58:10.671032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637110536310589:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:58:10.742010Z node 1 :TX_PROXY ERROR: Actor# [1:7491637110536310638:2487] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:10.764405Z node 1 :TX_PROXY ERROR: Actor# [1:7491637110536310656:2495] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:12.893985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.739166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T10:58:14.556640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.415975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.949150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T10:58:17.111141Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Handshake: worker# [1:7491637097651408372:2297] 2025-04-10T10:58:17.122822Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Create read session: session# [1:7491637140601082524:2296] 2025-04-10T10:58:17.146355Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:58:17.190628Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-04-10T10:58:17.003000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-10T10:58:17.193404Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:58:17.462507Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082523:2859] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-04-10T10:58:17.350000Z MessageGroupId: producer ProducerId: producer }] } } 2025-04-10T10:58:17.558740Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082624:2897] Handshake: worker# 
[1:7491637097651408372:2297] 2025-04-10T10:58:17.560329Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082624:2897] Create read session: session# [1:7491637140601082625:2296] 2025-04-10T10:58:17.560773Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082624:2897] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-04-10T10:58:17.605152Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7491637140601082624:2897] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-04-10T10:58:17.350000Z MessageGroupId: producer ProducerId: producer }] } } >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> KqpErrors::ResolveTableError >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TestKinesisHttpProxy::TestWrongRequest >> TestYmqHttpProxy::TestUntagQueue >> KqpErrors::ProposeError >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TSyncBrokerTests::ShouldEnqueue >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-04-10T10:58:21.796904Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:21.797067Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-04-10T10:58:21.951329Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:21.951504Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-04-10T10:58:21.951589Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CloseConnection [GOOD] Test command err: Trying to start YDB, gRPC: 6329, MsgBus: 9360 2025-04-10T10:56:35.243542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636701088185112:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:35.243584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b1/r3tmp/tmppC6OwR/pdisk_1.dat 2025-04-10T10:56:36.219844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:36.231908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T10:56:36.232010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:36.271257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:36.302588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 6329, node 1 2025-04-10T10:56:36.437978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:36.438000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:36.438008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:36.438141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9360 TClient is connected to server localhost:9360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:37.438353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.485450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.683906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:37.954343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:38.140758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:56:40.250347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636701088185112:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:40.250418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:40.485179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636722563023380:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.485298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:40.989140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.052469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.126743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.176866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.249246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.285279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:41.408430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636726857991204:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.408545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.409087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636726857991209:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:41.414105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:41.432184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636726857991211:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:41.531421Z node 1 :TX_PROXY ERROR: Actor# [1:7491636726857991267:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28055, MsgBus: 15729 2025-04-10T10:56:45.050635Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636745517579467:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:45.051414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b1/r3tmp/tmpgkCBQv/pdisk_1.dat 2025-04-10T10:56:45.356671Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:45.357391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:45.357479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:45.365289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28055, node 2 2025-04-10T10:56:45.564263Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:45.564290Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:45.564298Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:45.564413Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15729 TClient is connected to server localhost:15729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T10:56:46.259654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:46.271800Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:46.284826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.381114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:46.665678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281 ... on/3?node_id=4&id=M2M1ZGM5MDctNjFmMmUxNi0yYWQ3YTViYS03MDdkMTk2Mg==. CustomerSuppliedId : . TraceId : 01jrfnkccn3rc9ncgakxykaf8b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637129953331146:2579], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.138468Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331153:2582], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnkccn3rc9ncgakxykaf8b. SessionId : ydb://session/3?node_id=4&id=M2M1ZGM5MDctNjFmMmUxNi0yYWQ3YTViYS03MDdkMTk2Mg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491637129953331146:2579], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.147162Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.148168Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331156:2585], TxId: 281474976710672, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2M1ZGM5MDctNjFmMmUxNi0yYWQ3YTViYS03MDdkMTk2Mg==. TraceId : 01jrfnkccn3rc9ncgakxykaf8b. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491637129953331146:2579], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.148876Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2M1ZGM5MDctNjFmMmUxNi0yYWQ3YTViYS03MDdkMTk2Mg==, ActorId: [4:7491637129953331143:2579], ActorState: ExecuteState, TraceId: 01jrfnkccn3rc9ncgakxykaf8b, Create QueryResponse for error on request, msg: 2025-04-10T10:58:14.173652Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.218340Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.218965Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637129953331202:2594] TxId: 281474976710674. Ctx: { TraceId: 01jrfnkchd6e2h5ytsbacmkrgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:58:14.219126Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331210:2597], TxId: 281474976710674, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=. TraceId : 01jrfnkchd6e2h5ytsbacmkrgf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637129953331202:2594], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.220076Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331209:2596], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jrfnkchd6e2h5ytsbacmkrgf. SessionId : ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491637129953331202:2594], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.220329Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331211:2598], TxId: 281474976710674, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=. TraceId : 01jrfnkchd6e2h5ytsbacmkrgf. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637129953331202:2594], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.220583Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331212:2599], TxId: 281474976710674, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=. TraceId : 01jrfnkchd6e2h5ytsbacmkrgf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637129953331202:2594], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.220809Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331213:2600], TxId: 281474976710674, task: 5. Ctx: { TraceId : 01jrfnkchd6e2h5ytsbacmkrgf. SessionId : ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491637129953331202:2594], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.221343Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjI5ZWVkMTgtZjRmMDFjYmMtZWE3Y2RjZTgtZTZjOTUyZTY=, ActorId: [4:7491637129953331199:2594], ActorState: ExecuteState, TraceId: 01jrfnkchd6e2h5ytsbacmkrgf, Create QueryResponse for error on request, msg: 2025-04-10T10:58:14.222592Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637125658363828:2574], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTk0ZjU2YTAtOWZiMjk3MmMtMzY5YzdlYjMtZWE5NTBiMzM=. TraceId : 01jrfnkcb7fdyb48xyymp7pmg3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637125658363819:2571], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.223024Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTk0ZjU2YTAtOWZiMjk3MmMtMzY5YzdlYjMtZWE5NTBiMzM=, ActorId: [4:7491637125658363816:2571], ActorState: ExecuteState, TraceId: 01jrfnkcb7fdyb48xyymp7pmg3, Create QueryResponse for error on request, msg: 2025-04-10T10:58:14.267644Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.355849Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.356319Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637129953331304:2618] TxId: 281474976710679. Ctx: { TraceId: 01jrfnkcne0x92magp0rtfv4h6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Mzk1ODQyNzgtZjdmZDRkZDItOTI3NGRlMGQtODQ4YTU1YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:58:14.356568Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mzk1ODQyNzgtZjdmZDRkZDItOTI3NGRlMGQtODQ4YTU1YzE=, ActorId: [4:7491637129953331301:2618], ActorState: ExecuteState, TraceId: 01jrfnkcne0x92magp0rtfv4h6, Create QueryResponse for error on request, msg: 2025-04-10T10:58:14.450365Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:14.451076Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637129953331371:2636] TxId: 281474976710682. Ctx: { TraceId: 01jrfnkcr3fdveyt2xxepm93f4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWRjOWEzZTYtNmQ3ZjdiZTUtZWVjMTY1ZDktOWUzNWZhNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:58:14.487189Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637129953331379:2640], TxId: 281474976710682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=MWRjOWEzZTYtNmQ3ZjdiZTUtZWVjMTY1ZDktOWUzNWZhNWY=. CustomerSuppliedId : . TraceId : 01jrfnkcr3fdveyt2xxepm93f4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637129953331371:2636], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:14.488475Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWRjOWEzZTYtNmQ3ZjdiZTUtZWVjMTY1ZDktOWUzNWZhNWY=, ActorId: [4:7491637129953331368:2636], ActorState: ExecuteState, TraceId: 01jrfnkcr3fdveyt2xxepm93f4, Create QueryResponse for error on request, msg: 2025-04-10T10:58:15.596011Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:15.596280Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637134248299482:2852] TxId: 281474976710707. Ctx: { TraceId: 01jrfnkdv7a2wnd4f3983a265p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2IxYzg1YWMtOGQxMjkyNzktY2Y4YjkzZDEtZmY4MTliZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:58:15.596425Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637134248299494:2858], TxId: 281474976710707, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnkdv7a2wnd4f3983a265p. SessionId : ydb://session/3?node_id=4&id=Y2IxYzg1YWMtOGQxMjkyNzktY2Y4YjkzZDEtZmY4MTliZWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637134248299482:2852], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:15.596735Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637134248299493:2857], TxId: 281474976710707, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=Y2IxYzg1YWMtOGQxMjkyNzktY2Y4YjkzZDEtZmY4MTliZWE=. CustomerSuppliedId : . TraceId : 01jrfnkdv7a2wnd4f3983a265p. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491637134248299482:2852], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:15.597085Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2IxYzg1YWMtOGQxMjkyNzktY2Y4YjkzZDEtZmY4MTliZWE=, ActorId: [4:7491637134248299479:2852], ActorState: ExecuteState, TraceId: 01jrfnkdv7a2wnd4f3983a265p, Create QueryResponse for error on request, msg: 2025-04-10T10:58:16.074394Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T10:58:16.075725Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637138543267046:2919] TxId: 281474976710716. Ctx: { TraceId: 01jrfnke9v3nm93x680t1rhagj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjBmMDdiOGEtZWEzZDg2ZDktYWVjNDQ1NWEtNTc0ZjhjODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T10:58:16.075862Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637138543267058:2924], TxId: 281474976710716, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnke9v3nm93x680t1rhagj. SessionId : ydb://session/3?node_id=4&id=YjBmMDdiOGEtZWEzZDg2ZDktYWVjNDQ1NWEtNTc0ZjhjODQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637138543267046:2919], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:16.151093Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491637138543267059:2925], TxId: 281474976710716, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=YjBmMDdiOGEtZWEzZDg2ZDktYWVjNDQ1NWEtNTc0ZjhjODQ=. CustomerSuppliedId : . TraceId : 01jrfnke9v3nm93x680t1rhagj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491637138543267046:2919], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T10:58:16.151474Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491637138543267046:2919] TxId: 281474976710716. Ctx: { TraceId: 01jrfnke9v3nm93x680t1rhagj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjBmMDdiOGEtZWEzZDg2ZDktYWVjNDQ1NWEtNTc0ZjhjODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-10T10:58:16.151736Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjBmMDdiOGEtZWEzZDg2ZDktYWVjNDQ1NWEtNTc0ZjhjODQ=, ActorId: [4:7491637134248299745:2919], ActorState: ExecuteState, TraceId: 01jrfnke9v3nm93x680t1rhagj, Create QueryResponse for error on request, msg: 2025-04-10T10:58:17.949027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:58:17.949066Z node 4 :IMPORT WARN: Table profiles were not loaded >> TestYmqHttpProxy::TestListQueues [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> TestKinesisHttpProxy::GoodRequestCreateStream >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |69.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TestYmqHttpProxy::TestPurgeQueue >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-04-10T10:57:04.344535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636826265792259:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:04.344606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00112a/r3tmp/tmpQZoQmz/pdisk_1.dat 2025-04-10T10:57:04.911320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:04.951166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:04.951312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:04.954466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4643, node 1 2025-04-10T10:57:05.067012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:05.067041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:05.067075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:05.067243Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:32184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:05.518565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.547788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:32184 2025-04-10T10:57:05.913592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.922848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:05.925417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:05.953477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:05.964469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.165184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:06.220546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:06.226712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:57:06.282012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:06.287971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.342383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.395679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.442974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.492753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.539664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.575723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:08.349913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636843445662886:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.350033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.350300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636843445662898:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.355258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:08.378422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636843445662900:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:08.478327Z node 1 :TX_PROXY ERROR: Actor# [1:7491636843445662953:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:08.976112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhc7tc3d63fq156a47mvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNkMzc5ZGEtYWVhZjhlMGUtNjg2NDM0ODItM2YwYjI3N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:09.030334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.091129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.148982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.220174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.325334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.344994Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636826265792259:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:09.345083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:09.420510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.499765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:09.552792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T10:57:09.616566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:09.692269Z node 1 :HTTP INFO: Listening on http://127.0.0.1:15853 2025-04-10T10:57:10.716869Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:10.717324Z node 1 :HTTP INFO: Listening on http://[::]:63680 2025-04-10T10:57:10.721935Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:10.722087Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:10.723488Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:10.744784Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:10.744830Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:10.744883Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:5 ... ACE: HandleSqsResponse DeleteMessageBatch { RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-10T10:58:20.903916Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7491637155077535174:2551]: DeleteMessageBatch { RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-10T10:58:20.903988Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7491637155077535175:3821]. Found: 1 2025-04-10T10:58:20.906644Z node 7 :SQS TRACE: Request [281cf39c-161e6d21-cb4e6da7-30d9151] HandleResponse: { DeleteMessageBatch { RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-10T10:58:20.906725Z node 7 :SQS DEBUG: Request [281cf39c-161e6d21-cb4e6da7-30d9151] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "281cf39c-161e6d21-cb4e6da7-30d9151" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-10T10:58:20.909711Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [281cf39c-161e6d21-cb4e6da7-30d9151] Got succesfult GRPC response. Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-04-10T10:58:20.909941Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [281cf39c-161e6d21-cb4e6da7-30d9151] reply ok 2025-04-10T10:58:20.910116Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [281cf39c-161e6d21-cb4e6da7-30d9151] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 221 SourceAddress: 782b:9700:6050:0:602b:9700:6050:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-04-10T10:58:20.910967Z node 7 :HTTP DEBUG: (#38,[::1]:46762) <- (200 ) 2025-04-10T10:58:20.911052Z node 7 :HTTP DEBUG: (#38,[::1]:46762) connection closed 2025-04-10T10:58:20.912817Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-10T10:58:20.912844Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 11ms 2025-04-10T10:58:20.913021Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-10T10:58:20.913051Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-04-10T10:58:20.913149Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 11ms 2025-04-10T10:58:20.913407Z node 7 :HTTP DEBUG: (#38,[::1]:46774) incoming connection opened 2025-04-10T10:58:20.913483Z node 7 :HTTP DEBUG: (#38,[::1]:46774) -> (POST /Root) 2025-04-10T10:58:20.913523Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-04-10T10:58:20.913640Z node 7 :SQS DEBUG: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/0] 2025-04-10T10:58:20.913943Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78c0:fe00:6050:0:60c0:fe00:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 198ae44e-25509044-a4dfe142-b74b09df 2025-04-10T10:58:20.914403Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] got new request from [78c0:fe00:6050:0:60c0:fe00:6050:0] 2025-04-10T10:58:20.914795Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-10T10:58:20.914811Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:20.915345Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 198ae44e-25509044-a4dfe142-b74b09df 2025-04-10T10:58:20.915454Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-04-10T10:58:20.915464Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Request proxy started 2025-04-10T10:58:20.915616Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-04-10T10:58:20.915698Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Get configuration duration: 0ms 2025-04-10T10:58:20.915845Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-04-10T10:58:20.915867Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-04-10T10:58:20.915893Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Got leader node for queue response. Node id: 7. Status: 0 2025-04-10T10:58:20.915986Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" 2025-04-10T10:58:20.916056Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" 2025-04-10T10:58:20.916099Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Request started. Actor: [7:7491637155077535209:3849] 2025-04-10T10:58:20.916131Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7491637155077535209:3849] 2025-04-10T10:58:20.916149Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-04-10T10:58:20.916179Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Get configuration duration: 0ms 2025-04-10T10:58:20.916195Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] Got configuration. Root url: http://ghrun-x5vqckr7qy.auto.internal:8771, Shards: 4, Fail: 0 2025-04-10T10:58:20.916219Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-04-10T10:58:20.916232Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] DoRoutine 2025-04-10T10:58:20.916287Z node 7 :SQS TRACE: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2025-04-10T10:58:20.916304Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Received empty result from shard 3 infly. 
Infly capacity: 0. Messages count: 0 2025-04-10T10:58:20.916314Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] No known messages in this shard. Skip attempt to add messages to infly 2025-04-10T10:58:20.916323Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Already tried to add messages to infly 2025-04-10T10:58:20.916369Z node 7 :SQS TRACE: Decrement active message requests for [[cloud4/000000000000000101v0/3]. ActiveMessageRequests: 0 2025-04-10T10:58:20.916431Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } } 2025-04-10T10:58:20.916512Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] Sending sqs response: { ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-10T10:58:20.916647Z node 7 :SQS DEBUG: Request ReceiveMessage working duration: 0ms 2025-04-10T10:58:20.916710Z node 7 :SQS TRACE: HandleSqsResponse ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-10T10:58:20.916755Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7491637155077535208:2556]: ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-10T10:58:20.916795Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7491637155077535209:3849]. Found: 1 2025-04-10T10:58:20.917371Z node 7 :SQS TRACE: Request [198ae44e-25509044-a4dfe142-b74b09df] HandleResponse: { ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-10T10:58:20.917456Z node 7 :SQS DEBUG: Request [198ae44e-25509044-a4dfe142-b74b09df] Sending reply from proxy actor: { ReceiveMessage { RequestId: "198ae44e-25509044-a4dfe142-b74b09df" } RequestId: "198ae44e-25509044-a4dfe142-b74b09df" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-10T10:58:20.917746Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] Got succesfult GRPC response. 2025-04-10T10:58:20.917814Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] reply ok 2025-04-10T10:58:20.917919Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [198ae44e-25509044-a4dfe142-b74b09df] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 179 SourceAddress: 78c0:fe00:6050:0:60c0:fe00:6050:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-04-10T10:58:20.918012Z node 7 :HTTP DEBUG: (#38,[::1]:46774) <- (200 ) 2025-04-10T10:58:20.918101Z node 7 :HTTP DEBUG: (#38,[::1]:46774) connection closed Http output full {} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-04-10T10:57:06.962756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636835664458167:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:06.997726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ff/r3tmp/tmpSSuflQ/pdisk_1.dat 2025-04-10T10:57:07.742383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:07.745724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:07.745798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:07.751914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30026, node 1 2025-04-10T10:57:08.110898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:08.110943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:08.110958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:08.111125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:08.694032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:08.720343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:26263 2025-04-10T10:57:09.141415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.152282Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:09.163634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.203437Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:09.221752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.482638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:09.566709Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:09.578291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:09.671300Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:09.676427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.747236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.809007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.874318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.957719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.022899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:10.073696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.950635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636835664458167:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:11.950748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:12.185157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636861434263246:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.185304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.185762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636861434263258:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:12.194920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:12.214569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636861434263260:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:12.297722Z node 1 :TX_PROXY ERROR: Actor# [1:7491636861434263311:2924] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:13.044500Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhfzp0h231xzfrh6y0t1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2YmIwYTYtMjdmYzc5NDQtMzVjMTE1ZjUtNGI1MzYzNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:13.087292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.148192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:13.206451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T10:57:13.248054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.298349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.386511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.469228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.515417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.588387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:13.725845Z node 1 :HTTP INFO: Listening on http://127.0.0.1:24033 2025-04-10T10:57:14.727799Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:14.729563Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:14.731463Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:14.731822Z node 1 :HTTP INFO: Listening on http://[::]:14672 2025-04-10T10:57:14.750376Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:14.785622Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:14.785674Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:14.797458Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). M ... 4\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } } 2025-04-10T10:58:21.099248Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE_AND_EXEC 2025-04-10T10:58:21.099316Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Serializing params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2025-04-10T10:58:21.099621Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O&\nFlags\010Name\010Args\016Payload\022Parameter\016Account CreatedTimestamp\036CustomQueueName\016DlqName\022FifoQueue\020FolderId\034MasterTabletId\022QueueName\024QueueState\014Shards\030TablesFormat\016Version\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\030\207\203\001H\207\203\010\207\203\001H\207\203\001H\207\203\014\207\203\001H\207\203\010\207\203\001H\207\203\010\207\203\010\207\203\004\207\203\010\026\032\036\"&*.26:>BJ\000\003?\006\014queues\t\211\004?:\205\004?:\203\014\020List$Truncated\203\004F\000\t\211\026?B\203\005\004\200\205\030\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\026\032\036\"&*.26:>B\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?H \000\001\205\000\000\000\000\001\003\000\000\000\000\000\000\000?8\005?b\003?J\002\003?L\016\003?N\026\003?P\034\003?R\n\003?T\030\003?V\024\003?X\004\003?Z\010\003?\\\020\003?^\036\003?`\032\377\017\013?h\t\351\000?d\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\236\003?\240\022FROM_USER\003\022\000\t\351\000?f\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { Bin: 
"\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2025-04-10T10:58:21.463342Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:21.463379Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 363ms 2025-04-10T10:58:21.463836Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { 
Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:21.463875Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-10T10:58:21.464005Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 363ms 2025-04-10T10:58:21.464555Z node 7 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:21.683923Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7491637156268456294:2443]: Pool not found 2025-04-10T10:58:21.684212Z node 7 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-10T10:58:21.812222Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7491637156268456292:2442]: Pool not found 2025-04-10T10:58:21.813232Z node 7 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-10T10:58:21.816675Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637156268456460:2466], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:21.816758Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7491637156268456461:2467], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-10T10:58:21.816811Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:21.986362Z node 7 :HTTP DEBUG: (#37,[::1]:35320) incoming connection opened 2025-04-10T10:58:21.986462Z node 7 :HTTP DEBUG: (#37,[::1]:35320) -> (POST /Root) 2025-04-10T10:58:21.986589Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b8c0:3700:6050:0:a0c0:3700:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 38d5209b-593508a6-41937875-f99bd936 2025-04-10T10:58:21.986881Z node 7 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [38d5209b-593508a6-41937875-f99bd936] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-04-10T10:58:21.987058Z node 7 :HTTP DEBUG: (#37,[::1]:35320) <- (400 InvalidAction) 2025-04-10T10:58:21.987112Z node 7 :HTTP DEBUG: (#37,[::1]:35320) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-04-10T10:58:21.987151Z node 7 :HTTP DEBUG: (#37,[::1]:35320) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 38d5209b-593508a6-41937875-f99bd936 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-10T10:58:21.987248Z node 7 :HTTP DEBUG: (#37,[::1]:35320) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} |69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |69.1%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T10:55:29.484534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:55:29.484638Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:29.484678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:55:29.484719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:55:29.484763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:55:29.484796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:55:29.484854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:55:29.484931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:55:29.485308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:55:29.561999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T10:55:29.562059Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T10:55:29.578130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:55:29.578564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:55:29.578736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:55:29.586063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:55:29.586459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:55:29.587184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.587822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:55:29.591177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.592687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:29.592751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.592949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:55:29.593008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:29.593053Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:55:29.593230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T10:55:29.604924Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T10:55:29.747936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:55:29.748284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.748562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:55:29.748824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:55:29.748902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.751859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.752056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:55:29.752293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.752347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:55:29.752386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:55:29.752429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:55:29.754953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.755026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:55:29.755067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:55:29.757639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.757701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.757771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T10:55:29.757872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.761783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:55:29.771209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:55:29.771474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:55:29.772391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:55:29.772570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:55:29.772625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:29.772924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:55:29.772977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:55:29.773158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:55:29.773291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:55:29.775651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:55:29.775693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:55:29.775875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:55:29.775919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:55:29.776168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:55:29.776211Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:55:29.776313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:55:29.776347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:55:29.776378Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 678944, message: Source { RawX1: 328 RawX2: 416611830027 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.820230Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-10T10:58:21.820376Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 416611830027 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.820430Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:58:21.820527Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 416611830027 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.820595Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:21.820641Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-10T10:58:21.838204Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T10:58:21.848656Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.849322Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1646 } } 2025-04-10T10:58:21.849380Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-10T10:58:21.849586Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1646 } } 2025-04-10T10:58:21.849709Z node 97 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1646 } } 2025-04-10T10:58:21.851092Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.851144Z 
node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-10T10:58:21.851281Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.851334Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T10:58:21.851431Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 432 RawX2: 416611830113 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-10T10:58:21.851500Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:21.851540Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.851586Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:58:21.851637Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:58:21.851674Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-10T10:58:21.855562Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.856208Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.862545Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.862668Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-04-10T10:58:21.862743Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T10:58:21.862787Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-10T10:58:21.862884Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-10T10:58:21.862935Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-04-10T10:58:21.869580Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T10:58:21.869657Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-10T10:58:21.869777Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T10:58:21.869819Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready 
parts: 1/1 2025-04-10T10:58:21.869869Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T10:58:21.869908Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T10:58:21.869951Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-10T10:58:21.870008Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T10:58:21.870052Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-10T10:58:21.870086Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-10T10:58:21.870287Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T10:58:21.870337Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-10T10:58:21.882547Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-10T10:58:21.882630Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-10T10:58:21.883133Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-10T10:58:21.883252Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-10T10:58:21.883308Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:532:2492] TestWaitNotification: OK eventTxId 1003 2025-04-10T10:58:21.883856Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:58:21.884116Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 311us result status StatusSuccess 2025-04-10T10:58:21.884657Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSyncBrokerTests::ShouldProcessAfterRelease >> TSyncBrokerTests::ShouldReturnToken >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldReleaseToken [GOOD] |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-04-10T10:58:25.846494Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:25.969942Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:25.970080Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, waiting: 0 >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-04-10T10:58:25.667006Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:25.667210Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-04-10T10:58:25.667295Z node 1 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-04-10T10:58:25.667366Z node 1 :BS_SYNCER DEBUG: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-04-10T10:58:25.810422Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-04-10T10:58:25.810559Z node 2 
:BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-04-10T10:58:25.810608Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TSyncNeighborsTests::SerDes2 [GOOD] |69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |69.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> KqpScanArrowInChanels::AggregateByColumn [GOOD] |69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |69.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> TestKinesisHttpProxy::TestCounters [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-04-10T10:57:16.130272Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1744282636130235 2025-04-10T10:57:16.990575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636877470862399:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:16.990626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:17.045895Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636881356745116:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.045942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/0014db/r3tmp/tmpwb4w2X/pdisk_1.dat 2025-04-10T10:57:17.482410Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:17.487223Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:18.073287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.107405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.114025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.114123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.126562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:18.128239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.128299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.137469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:18.140825Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:18.141160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26200, node 1 2025-04-10T10:57:18.382736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0014db/r3tmp/yandexMcWezZ.tmp 2025-04-10T10:57:18.382759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0014db/r3tmp/yandexMcWezZ.tmp 2025-04-10T10:57:18.382880Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0014db/r3tmp/yandexMcWezZ.tmp 2025-04-10T10:57:18.382980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:18.584066Z INFO: TTestServer started on Port 31178 GrpcPort 26200 TClient is connected to server localhost:31178 PQClient connected to localhost:26200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:57:19.317101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:21.993377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636877470862399:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:21.993458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:22.046420Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636881356745116:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.046469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.633052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636907535634510:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.633167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.633662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636907535634522:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.637767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:23.650249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636907535634562:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.650346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.677737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636907535634524:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:57:23.762661Z node 1 :TX_PROXY ERROR: Actor# [1:7491636907535634603:2701] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:24.337066Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636911421516537:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.339137Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzY4N2U1Y2YtOWUwMmQxZWQtMmFiNDMzZmMtZjE0NDUzNA==, ActorId: [2:7491636911421516496:2314], ActorState: ExecuteState, TraceId: 01jrfnhvq0294agfpgg38p8hdd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.341213Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.355220Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636907535634614:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.356733Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjI1N2U4NWUtNjRjMmIwOTktMzg3Yjg1NzAtNTk5MzY3Y2E=, ActorId: [1:7491636907535634484:2340], ActorState: ExecuteState, TraceId: 01jrfnhv4g05c72s4vw4q9a0za, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.357133Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.382405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.642174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.834573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:26200", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:25.387968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnhwjkeekhz1shabp6tvxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA0NGUxZTYtOWYxNjhhZjMtZTA4OTQwOWItNDdjNDgxYjM=, CurrentExecutionId: , CustomerSu ... 025-04-10T10:57:54.502508Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637039638184200:2510] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T10:57:54.502537Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:57:54.515086Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7491637043933151582:2510], now have 1 active actors on pipe 2025-04-10T10:57:54.518415Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-10T10:57:54.519803Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:54.519854Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:54.519972Z node 4 :PERSQUEUE INFO: new Cookie src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T10:57:54.520077Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-10T10:57:54.520125Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:54.523341Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:57:54.523380Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:57:54.523461Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:57:54.528549Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282674528 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:57:54.528708Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|1cf3ab91-9fbc6464-5c66b469-5f349809_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T10:57:54.527358Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 2025-04-10T10:57:54.530803Z :INFO: [] MessageGroupId [src] SessionId [src|1cf3ab91-9fbc6464-5c66b469-5f349809_0] Write session: close. Timeout = 0 ms 2025-04-10T10:57:54.530853Z :INFO: [] MessageGroupId [src] SessionId [src|1cf3ab91-9fbc6464-5c66b469-5f349809_0] Write session will now close 2025-04-10T10:57:54.530905Z :DEBUG: [] MessageGroupId [src] SessionId [src|1cf3ab91-9fbc6464-5c66b469-5f349809_0] Write session: aborting 2025-04-10T10:57:54.531430Z :INFO: [] MessageGroupId [src] SessionId [src|1cf3ab91-9fbc6464-5c66b469-5f349809_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:57:54.531471Z :DEBUG: [] MessageGroupId [src] SessionId [src|1cf3ab91-9fbc6464-5c66b469-5f349809_0] Write session: destroy 2025-04-10T10:57:54.532724Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 grpc read done: success: 0 data: 2025-04-10T10:57:54.532754Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 grpc read failed 2025-04-10T10:57:54.532778Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 grpc closed 2025-04-10T10:57:54.532793Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|1cf3ab91-9fbc6464-5c66b469-5f349809_0 is DEAD 2025-04-10T10:57:54.533534Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:57:54.534054Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637043933151582:2510] destroyed 2025-04-10T10:57:54.534112Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-10T10:57:54.554237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:54.554265Z node 3 :IMPORT WARN: Table profiles were not loaded ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-04-10T10:57:54.633641Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:57:56.639322Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:57:58.646371Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available Test retry state: get retry delay 2025-04-10T10:58:00.651192Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:02.654460Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:04.657975Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:06.662468Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:08.680615Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:10.684979Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:12.689489Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:14.698332Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:16.706337Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:18.710360Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-10T10:58:20.712478Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-10T10:58:22.314410Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-10T10:58:22.314502Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-04-10T10:58:22.318680Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-10T10:58:22.319910Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-10T10:58:22.320442Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 Test retry state: get retry delay 2025-04-10T10:58:22.713417Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair >>> Ready to answer: ok === Closing the session 2025-04-10T10:58:24.714727Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:13660" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:13660" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:13660" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } 2025-04-10T10:58:24.730700Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-04-10T10:58:24.731169Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-04-10T10:58:24.732989Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-04-10T10:58:24.733042Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-04-10T10:58:24.733099Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-04-10T10:58:24.733213Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-04-10T10:58:24.733273Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-04-10T10:58:25.255681Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:58:25.295016Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:25.356222Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:25.416318Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:25.522538Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:25.699621Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710733, task: 1, CA Id [3:7491637177077139384:2780]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:25.742350Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710734. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:25.742571Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637177077139420:2774] TxId: 281474976710734. Ctx: { TraceId: 01jrfnkq3k67z20py7m4rnpywx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGIwOGM2NzQtOTBmOGI2OTUtNmMyNDVhNDYtNDRhMzBhNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:25.743007Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGIwOGM2NzQtOTBmOGI2OTUtNmMyNDVhNDYtNDRhMzBhNzc=, ActorId: [3:7491637177077139362:2774], ActorState: ExecuteState, TraceId: 01jrfnkq3k67z20py7m4rnpywx, Create QueryResponse for error on request, msg: 2025-04-10T10:58:25.745085Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnkqmk1aryf36eh1vzg8hc" } } YdbStatus: UNAVAILABLE ConsumedRu: 354 } >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-04-10T10:57:05.226423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636829500164552:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.282863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001115/r3tmp/tmp8GmpeO/pdisk_1.dat 2025-04-10T10:57:05.832279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:05.832404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:05.836466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:05.897390Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4564, node 1 2025-04-10T10:57:06.023066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:06.023115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:06.023129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:06.023246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25243 WaitRootIsUp 'Root'... 
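The PreferredDatabaseNoFallback run above polls federation state on a fixed two-second cadence ("Retry to update federation state in 2.000000s", repeated) until the discovery response finally lists dc2 as AVAILABLE. A hedged sketch of such a fixed-delay retry loop; `updateFederationState` and `isUsable` stand in for whatever the SDK actually calls and are assumptions, not the real API:

```cpp
#include <chrono>
#include <thread>

// Fixed-delay retry state matching the "in 2.000000s" cadence in the log.
struct TFixedDelayRetryState {
    std::chrono::milliseconds Delay{2000};
    std::chrono::milliseconds GetNextRetryDelay() const { return Delay; }
};

// Re-fetch a federation-state snapshot until it becomes usable.
// Each loop iteration corresponds to one "Retry to update federation
// state" line in the trace above.
template <typename TFetch, typename TUsable>
auto RetryUntilUsable(TFetch updateFederationState, TUsable isUsable) {
    TFixedDelayRetryState state;
    auto snapshot = updateFederationState();
    while (!isUsable(snapshot)) {
        std::this_thread::sleep_for(state.GetNextRetryDelay());
        snapshot = updateFederationState();
    }
    return snapshot;
}
```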
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:06.533054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.545675Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:25243 2025-04-10T10:57:07.032437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.039371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:07.041005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.052453Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:07.058742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.327268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:07.433810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:07.439201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:07.483475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:07.488309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:07.541274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.622912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.685991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.740777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.803619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.861623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.218302Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636829500164552:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:10.218429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:10.219387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636850975002341:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:10.219513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:10.219618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636850975002353:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:10.224174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:10.244242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636850975002355:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:10.342320Z node 1 :TX_PROXY ERROR: Actor# [1:7491636850975002406:2921] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:10.805339Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhe26bdg0qmx2zmy9902c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ3MWJkN2UtOGZjY2JmZjUtZTkzOWM5YTQtY2IxNzI3ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:10.878615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.927133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.994316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.042402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.084058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.143614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.180600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.237463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.271742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:11.385835Z node 1 :HTTP INFO: Listening on http://127.0.0.1:24414 2025-04-10T10:57:12.387947Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:12.388065Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:12.389347Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:12.389643Z node 1 :HTTP INFO: Listening on http://[::]:28590 2025-04-10T10:57:12.392646Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:12.422370Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:12.422422Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:12.422463Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:5 ... 101v0/0]. ActiveMessageRequests: 1 2025-04-10T10:58:26.255651Z node 7 :SQS DEBUG: Request [67cca9cf-86f8a6eb-307d8ea1-8b0f420d] Sending execute request for query(idx=CHANGE_VISIBILITY_ID) to queue leader 2025-04-10T10:58:26.255682Z node 7 :SQS DEBUG: Request [67cca9cf-86f8a6eb-307d8ea1-8b0f420d] Executing compiled query(idx=CHANGE_VISIBILITY_ID) 2025-04-10T10:58:26.255818Z node 7 :SQS DEBUG: Request [67cca9cf-86f8a6eb-307d8ea1-8b0f420d] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). Mode: COMPILE_AND_EXEC 2025-04-10T10:58:26.255978Z node 7 :SQS TRACE: Request [67cca9cf-86f8a6eb-307d8ea1-8b0f420d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "NOW": 1744282706254, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1744282706051, "Offset": 1, "NewVisibilityDeadline": 1744282707254}, {"LockTimestamp": 1744282706101, "Offset": 2, "NewVisibilityDeadline": 1744282708254}]} 2025-04-10T10:58:26.256645Z node 7 :SQS TRACE: Request [67cca9cf-86f8a6eb-307d8ea1-8b0f420d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> TestKinesisHttpProxy::ListShardsToken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command 
err: Trying to start YDB, gRPC: 11374, MsgBus: 12381 2025-04-10T10:57:40.022799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636981761859685:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:40.027124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001873/r3tmp/tmp1JowtN/pdisk_1.dat 2025-04-10T10:57:41.139147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:41.139253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:41.141150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:41.148394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:41.150212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11374, node 1 2025-04-10T10:57:41.494651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:41.494677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:41.494687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:41.494809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12381 TClient is connected to server localhost:12381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:42.570829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:42.627174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:42.636434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:42.924278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:43.196857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:43.317295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.026389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636981761859685:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:45.026497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:46.928714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637007531665109:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:46.928845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:47.409829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.492267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.576605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.665647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.721882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.827527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:47.957328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637011826632945:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:47.957408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:47.957781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637011826632950:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:47.966119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:47.982720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637011826632952:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:48.069795Z node 1 :TX_PROXY ERROR: Actor# [1:7491637016121600306:3474] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:50.191652Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282670102, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 26631, MsgBus: 28535 2025-04-10T10:57:51.359462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637028116037910:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:51.359526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001873/r3tmp/tmpyp1ceH/pdisk_1.dat 2025-04-10T10:57:51.669153Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:51.697446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:51.706388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:51.713968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26631, node 2 2025-04-10T10:57:51.910749Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:51.910771Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:51.910780Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:51.910911Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28535 TClient is connected to server localhost:28535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:52.869769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:52.895729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:53.043257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 wait ... pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:08.284394Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637082319472727:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:08.284462Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:08.288894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.391563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.476620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.580106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.673579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.773022Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:08.903380Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637103794311332:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:08.903476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:08.903880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637103794311337:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:08.908858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:08.959842Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637103794311339:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:09.057975Z node 3 :TX_PROXY ERROR: Actor# [3:7491637108089278694:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:14.295859Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282692747, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 1720, MsgBus: 11880 2025-04-10T10:58:15.857763Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637132663350537:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:15.857922Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001873/r3tmp/tmphZy3as/pdisk_1.dat 2025-04-10T10:58:16.367028Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:16.399480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:16.399587Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:16.411576Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1720, node 4 2025-04-10T10:58:16.610916Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:16.610938Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:16.610947Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:16.611099Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11880 TClient is connected to server localhost:11880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:17.816715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:17.841897Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:18.012715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:18.409018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:18.576661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:20.858358Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637132663350537:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:20.858443Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:22.609608Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637162728123255:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:22.609740Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:22.790296Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:22.891739Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:22.988030Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:23.040219Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:23.117896Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:23.200959Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:23.332290Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637167023091082:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:23.332737Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:23.333301Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637167023091088:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:23.339965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:23.365031Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637167023091091:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:23.445606Z node 4 :TX_PROXY ERROR: Actor# [4:7491637167023091152:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:26.658960Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282705669, txId: 281474976710671] shutting down |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> TestYmqHttpProxy::TestListQueueTags |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestoreS3::PrefixedVectorIndex >> BasicUsage::SimpleHandlers [GOOD] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> AsyncIndexChangeCollector::InsertSingleRow >> Yq_1::ModifyConnections >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> Yq_1::DeleteConnections >> Yq_1::CreateConnection_With_Existing_Name >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> PrivateApi::PingTask >> Yq_1::CreateQuery_With_Idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 21886, MsgBus: 61430 2025-04-10T10:57:42.928967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636992263474082:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:42.938442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001868/r3tmp/tmpg8UlAe/pdisk_1.dat 2025-04-10T10:57:43.969557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:44.002041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:44.002131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:44.013802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:44.027403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21886, node 1 2025-04-10T10:57:44.326854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:44.326874Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:44.326881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:44.327000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61430 TClient is connected to server localhost:61430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:45.878034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.905046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:45.929178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:46.145303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:46.334675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:46.486647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:47.910682Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636992263474082:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:47.910760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:49.140237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637022328246793:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.140339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.016914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.089056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.248769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.342284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.404714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.486458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.603164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637026623214615:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.603288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.606173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637026623214620:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.619358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:50.642433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T10:57:50.644652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637026623214622:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:50.743024Z node 1 :TX_PROXY ERROR: Actor# [1:7491637026623214680:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:52.421599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2025-04-10T10:57:53.460309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282673476, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 26683, MsgBus: 23068 2025-04-10T10:57:54.946249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637039950320737:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:55.077220Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001868/r3tmp/tmpLjsGtr/pdisk_1.dat 2025-04-10T10:57:55.336437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:55.379376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:55.379484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:55.383450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26683, node 2 2025-04-10T10:57:55.579669Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:55.579693Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:55.579702Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:55.579844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23068 TClient is connected to server localhost:23068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
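
Editor's note: the four-step sequence just above (KQP_WORKLOAD_SERVICE "Failed to fetch pool info, NOT_FOUND", then ESchemeOpCreateResourcePool, then "Scheduled retry for error: ... Transaction ... completed, doublechecking", then TX_PROXY ERROR "path exist, request accepts it") recurs for every node in this log. It is the first-use bootstrap of /Root/.metadata/workload_manager/pools/default, not a test failure. A minimal triage sketch, assuming plain console log lines on stdin; the script and its regexes are our own assumption and are not part of the ya test tooling:

import re
import sys

# Messages copied verbatim from this log; together they form the benign
# default-resource-pool bootstrap sequence.
BENIGN_PATTERNS = [
    re.compile(r"Resource pool default not found or you don't have access permissions"),
    re.compile(r"Scheduled retry for error: .*doublechecking"),
    re.compile(r"path exist, request accepts it"),
]

def is_benign(line: str) -> bool:
    return any(p.search(line) for p in BENIGN_PATTERNS)

for line in sys.stdin:
    if is_benign(line):
        continue  # expected bootstrap chatter, skip it
    if "ERROR" in line:
        sys.stdout.write(line)  # anything else marked ERROR deserves a look
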
2025-04-10T10:57:57.533173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:57.557591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operat ... 13.070350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637122717294156:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.070508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.154516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.240690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.319344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.384488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.492351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.612298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:13.726888Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637122717294708:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.727088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.727551Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637122717294715:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:13.734750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:13.771270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637122717294717:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:58:13.829378Z node 3 :TX_PROXY ERROR: Actor# [3:7491637122717294772:3467] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:19.077872Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282696352, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 2620, MsgBus: 28317 2025-04-10T10:58:20.346698Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637153179222325:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:20.346781Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001868/r3tmp/tmpVi0ONd/pdisk_1.dat 2025-04-10T10:58:20.630569Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:20.665771Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:20.665904Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:20.667785Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2620, node 4 2025-04-10T10:58:20.862981Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:20.863015Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:20.863025Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:20.863202Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28317 TClient is connected to server localhost:28317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:21.787424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:21.816854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:21.918980Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:22.236429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:22.393170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:25.350429Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637153179222325:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:25.350540Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:26.988039Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637178949027771:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:26.988210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:27.083515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.161625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.222864Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.326420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.396000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.503140Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:27.618415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637183243995600:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:27.618529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:27.618729Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637183243995605:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:27.626444Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:27.643716Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637183243995607:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:27.725744Z node 4 :TX_PROXY ERROR: Actor# [4:7491637183243995672:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:30.241842Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282709673, txId: 281474976710671] shutting down >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::WaitTxIds >> KqpErrors::ResolveTableError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-04-10T10:57:05.110559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636829932700778:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.153706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001103/r3tmp/tmpF1q99u/pdisk_1.dat 2025-04-10T10:57:05.763283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:05.770830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:05.770922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:05.776637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1222, node 1 2025-04-10T10:57:05.910800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:05.910817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:05.910824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:05.910963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:57:06.393773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24415 2025-04-10T10:57:06.686274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.701421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:06.707653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.727453Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:06.737076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:06.983091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:07.042540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:07.047208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:07.140416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:07.146483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.188877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.261420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:07.309117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting...2025-04-10T10:57:07.346962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T10:57:07.385669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:07.477424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.296832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636847112571281:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:09.297026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:09.297407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636847112571293:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:09.302321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:09.326653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636847112571295:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:09.409416Z node 1 :TX_PROXY ERROR: Actor# [1:7491636847112571347:2925] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:09.817997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhd5bbmvqa4aj8za8s8dw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQzOGM2NzAtMzQ1ZjNkMjEtMmVlZGRlMjMtNmFhYTY1YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:09.860512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.909087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:09.983641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.024195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.084666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.085022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636829932700778:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:10.085096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-10T10:57:10.134560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T10:57:10.196061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.271444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.345161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:10.502430Z node 1 :HTTP INFO: Listening on http://127.0.0.1:28718 2025-04-10T10:57:11.509388Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:11.509539Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:11.509974Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:11.510291Z node 1 :HTTP INFO: Listening on http://[::]:8745 2025-04-10T10:57:11.510644Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:11.545588Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:11.545637Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:11.548502Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:57:11.548565Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-10T10:57:11.552670Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx ... SER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 43ms 2025-04-10T10:58:29.832454Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:29.832488Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-10T10:58:29.832569Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 46ms 2025-04-10T10:58:29.833052Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:29.833073Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 26ms 2025-04-10T10:58:29.833488Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:29.833521Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-10T10:58:29.833628Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 26ms 2025-04-10T10:58:29.833993Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } 
Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:29.835289Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:30.112806Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637191229509786:2449]: Pool not found 2025-04-10T10:58:30.113612Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-10T10:58:30.458494Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637191229509740:2444]: Pool not found 2025-04-10T10:58:30.459899Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-10T10:58:30.463082Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491637195524477175:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:30.463162Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7491637195524477176:2465], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-10T10:58:30.463213Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:30.754449Z node 8 :HTTP DEBUG: (#37,[::1]:47698) incoming connection opened 2025-04-10T10:58:30.754531Z node 8 :HTTP DEBUG: (#37,[::1]:47698) -> (POST /) 2025-04-10T10:58:30.755008Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [d8e5:d600:6050:0:c0e5:d600:6050:0] request [CreateStream] url [/] database [] requestId: 9ee73225-437406d0-3e50c014-8918bca5 2025-04-10T10:58:30.755666Z node 8 :HTTP_PROXY WARN: http request [CreateStream] requestId [9ee73225-437406d0-3e50c014-8918bca5] got new request with incorrect json from [d8e5:d600:6050:0:c0e5:d600:6050:0] database '' 2025-04-10T10:58:30.755874Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [9ee73225-437406d0-3e50c014-8918bca5] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-04-10T10:58:30.756088Z node 8 :HTTP DEBUG: (#37,[::1]:47698) <- (400 InvalidArgumentException) 2025-04-10T10:58:30.756143Z node 8 :HTTP DEBUG: (#37,[::1]:47698) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 0 2025-04-10T10:58:30.756180Z node 8 :HTTP DEBUG: (#37,[::1]:47698) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 9ee73225-437406d0-3e50c014-8918bca5 x-amz-crc32: 3053902336 Content-Type: application/x-amz-json-1.1 Content-Length: 135 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-04-10T10:58:30.756318Z node 8 :HTTP DEBUG: (#37,[::1]:47698) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-04-10T10:58:30.907978Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637195524477173:2463]: Pool not found 2025-04-10T10:58:30.908831Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-04-10T10:57:16.937643Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1744282636937581 2025-04-10T10:57:17.719102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636882535178829:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.719333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:17.776787Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636882161745287:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.776829Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:18.202586Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:18.207900Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014c7/r3tmp/tmp96HP6V/pdisk_1.dat 2025-04-10T10:57:18.801764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.801908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:18.949609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.949716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.951101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.951158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.953293Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:18.953402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:18.954482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:19.022061Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15732, node 1 2025-04-10T10:57:19.213491Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:19.310925Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:19.375875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0014c7/r3tmp/yandexWP42ZL.tmp 2025-04-10T10:57:19.375896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0014c7/r3tmp/yandexWP42ZL.tmp 2025-04-10T10:57:19.376144Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0014c7/r3tmp/yandexWP42ZL.tmp 2025-04-10T10:57:19.376242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:19.525956Z INFO: TTestServer started on Port 10005 GrpcPort 15732 TClient is connected to server localhost:10005 PQClient connected to localhost:15732 WaitRootIsUp 'Root'... 
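
Editor's note: in the TestKinesisHttpProxy::TestWrongRequest output above, the proxy rejects a CreateStream body carrying the unexpected key "WrongStreamName" with 400 InvalidArgumentException. As an illustration only (the endpoint URL is hypothetical; the headers and body are copied from the logged request, which showed the proxy listening on http://[::]:8745 in that run), the exchange can be reproduced like this:

import requests  # assumption: the 'requests' library is available; any HTTP client works

ENDPOINT = "http://localhost:8745/"  # hypothetical, after "Listening on http://[::]:8745"

body = {
    "ShardCount": 5,
    "StreamName": "testtopic",
    "WrongStreamName": "WrongStreamName",  # the key the proxy rejects
}

resp = requests.post(
    ENDPOINT,
    json=body,
    headers={"X-Amz-Target": "kinesisApi.CreateStream"},  # action header, as in the log
)

print(resp.status_code)       # 400
print(resp.json()["__type"])  # "InvalidArgumentException"

Removing the extra key (leaving only ShardCount and StreamName) should make the body valid for this call, since the error message names only the unexpected key.
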
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:20.249173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:20.351681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:22.694402Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636882535178829:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.694466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:22.776859Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636882161745287:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.776935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:24.291648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636912599950778:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.291753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.294562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636912599950790:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.309882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:24.322336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636912599950824:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.322667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.345989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636912599950792:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:57:24.414112Z node 1 :TX_PROXY ERROR: Actor# [1:7491636912599950880:2697] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:24.965667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:25.005299Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636912599950891:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:25.006494Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjg5OWNjZmEtZjc0NmRkMjctOGZhMjQ5NTQtNjhkNjkzMGU=, ActorId: [1:7491636912599950752:2340], ActorState: ExecuteState, TraceId: 01jrfnhvs19raj04q3d64w3byf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:25.007752Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636912226516705:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:25.009784Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:25.009258Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTA4MzZkZDItMTdmNDY3MTYtYWM0ZDFmNDYtMzRjMmU3NWU=, ActorId: [2:7491636912226516663:2310], ActorState: ExecuteState, TraceId: 01jrfnhvw54ft7hh5cnmheep4f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:25.009882Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:25.261956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:25.494797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:15732", true, true, 1000 ... 
2cc0c5-e86152ba-361bc77a_0] Write session will now close 2025-04-10T10:58:29.844150Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|50969987-e22cc0c5-e86152ba-361bc77a_0] Write session: aborting 2025-04-10T10:58:29.844500Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|50969987-e22cc0c5-e86152ba-361bc77a_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:58:29.844546Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|50969987-e22cc0c5-e86152ba-361bc77a_0] Write session: destroy 2025-04-10T10:58:29.849191Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_15245353226342783037_v1 grpc read done: success# 0, data# { } 2025-04-10T10:58:29.849242Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_15245353226342783037_v1 grpc read failed 2025-04-10T10:58:29.849275Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_15245353226342783037_v1 grpc closed 2025-04-10T10:58:29.849309Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_15245353226342783037_v1 is DEAD 2025-04-10T10:58:29.850384Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2198520144845563990_v1 grpc read done: success# 0, data# { } 2025-04-10T10:58:29.850398Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2198520144845563990_v1 grpc read failed 2025-04-10T10:58:29.850418Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2198520144845563990_v1 grpc closed 2025-04-10T10:58:29.850453Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2198520144845563990_v1 is DEAD 2025-04-10T10:58:29.850502Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_3415971748627505650_v1 grpc read done: success# 0, data# { } 2025-04-10T10:58:29.850519Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_3415971748627505650_v1 grpc read failed 2025-04-10T10:58:29.850546Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_3415971748627505650_v1 grpc closed 2025-04-10T10:58:29.850564Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_3415971748627505650_v1 is DEAD 2025-04-10T10:58:29.851013Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|50969987-e22cc0c5-e86152ba-361bc77a_0 grpc read done: success: 0 data: 2025-04-10T10:58:29.851028Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|50969987-e22cc0c5-e86152ba-361bc77a_0 grpc read failed 2025-04-10T10:58:29.851059Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|50969987-e22cc0c5-e86152ba-361bc77a_0 grpc closed 2025-04-10T10:58:29.851079Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|50969987-e22cc0c5-e86152ba-361bc77a_0 is DEAD 2025-04-10T10:58:29.851685Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:58:29.852108Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083047:2539] disconnected; active server actors: 1 2025-04-10T10:58:29.852136Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083047:2539] client user 
disconnected session shared/user_3_3_15245353226342783037_v1 2025-04-10T10:58:29.852198Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-10T10:58:29.852243Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083048:2538] disconnected; active server actors: 1 2025-04-10T10:58:29.852259Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083048:2538] client user disconnected session shared/user_3_2_3415971748627505650_v1 2025-04-10T10:58:29.852281Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083051:2537] disconnected; active server actors: 1 2025-04-10T10:58:29.852296Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637183800083051:2537] client user disconnected session shared/user_3_1_2198520144845563990_v1 2025-04-10T10:58:29.854568Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637183800083091:2540] destroyed 2025-04-10T10:58:29.854640Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_2198520144845563990_v1 2025-04-10T10:58:29.854668Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637183800083061:2549] destroyed 2025-04-10T10:58:29.854712Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:58:29.854812Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_2198520144845563990_v1 2025-04-10T10:58:29.869392Z :INFO: [/Root] [/Root] [4b8e2906-ab6c734d-f93e873d-62044483] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:29.869520Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:58:29.869585Z :INFO: [/Root] [/Root] [4b8e2906-ab6c734d-f93e873d-62044483] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2022 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.869647Z :INFO: [/Root] [/Root] [9b14ee3b-99d85bd6-35f11219-b912feec] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:29.869696Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-10T10:58:29.869736Z :INFO: [/Root] [/Root] [9b14ee3b-99d85bd6-35f11219-b912feec] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.869767Z :INFO: [/Root] [/Root] [bc88f1f1-424d8b9b-8e198165-828bda8b] Closing read session. 
Close timeout: 0.000000s 2025-04-10T10:58:29.869804Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:58:29.869839Z :INFO: [/Root] [/Root] [bc88f1f1-424d8b9b-8e198165-828bda8b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1996 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.869879Z :INFO: [/Root] [/Root] [bc88f1f1-424d8b9b-8e198165-828bda8b] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:29.869926Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:58:29.869986Z :INFO: [/Root] [/Root] [bc88f1f1-424d8b9b-8e198165-828bda8b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1996 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.870120Z :NOTICE: [/Root] [/Root] [bc88f1f1-424d8b9b-8e198165-828bda8b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:58:29.876471Z :INFO: [/Root] [/Root] [9b14ee3b-99d85bd6-35f11219-b912feec] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:29.876546Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-10T10:58:29.876590Z :INFO: [/Root] [/Root] [9b14ee3b-99d85bd6-35f11219-b912feec] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2007 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.876683Z :NOTICE: [/Root] [/Root] [9b14ee3b-99d85bd6-35f11219-b912feec] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:58:29.877291Z :INFO: [/Root] [/Root] [4b8e2906-ab6c734d-f93e873d-62044483] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:29.877325Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:58:29.877352Z :INFO: [/Root] [/Root] [4b8e2906-ab6c734d-f93e873d-62044483] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2029 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:29.877404Z :NOTICE: [/Root] [/Root] [4b8e2906-ab6c734d-f93e873d-62044483] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:58:30.405219Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:58:30.438723Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:30.493552Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:30.556968Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:30.639033Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:58:30.718943Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720694. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:30.719075Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7491637196684985212:2575] TxId: 281474976720694. Ctx: { TraceId: 01jrfnkvve6nv84f508ths90nn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTM4NzU2ZjYtZjA2ZGVjNmEtODM3YTk2ZjEtMzNhZDY1MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T10:58:30.719295Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTM4NzU2ZjYtZjA2ZGVjNmEtODM3YTk2ZjEtMzNhZDY1MWQ=, ActorId: [3:7491637192390017853:2575], ActorState: ExecuteState, TraceId: 01jrfnkvve6nv84f508ths90nn, Create QueryResponse for error on request, msg: 2025-04-10T10:58:30.720930Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfnkwfa3jf7av0g05gdnhtd" } } YdbStatus: UNAVAILABLE ConsumedRu: 417 } 2025-04-10T10:58:30.824382Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720693, task: 1, CA Id [3:7491637196684985193:2583]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TBlobStorageProxyTest::TestPartialGetBlock >> DataStreams::TestUnsupported [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-04-10T10:58:29.189906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:29.191728Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c11/r3tmp/tmpBUC9fZ/pdisk_1.dat 2025-04-10T10:58:29.864591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:30.077499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.172444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:30.172593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:30.180535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:30.180652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:30.212119Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:58:30.212696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:30.213133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:30.534168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 
72057594046644480 2025-04-10T10:58:32.046033Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2025-04-10T10:58:32.046361Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-04-10T10:58:32.046497Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T10:58:32.046728Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-04-10T10:58:32.047028Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-04-10T10:58:32.047252Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:58:32.047416Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-04-10T10:58:32.047590Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] create compute task: 1 2025-04-10T10:58:32.047747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-10T10:58:32.047812Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T10:58:32.048351Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Collect channels updates for task: 1 at actor [1:1562:2950] 2025-04-10T10:58:32.048414Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1562:2950], channels: 0 2025-04-10T10:58:32.048518Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T10:58:32.048566Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-04-10T10:58:32.048611Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Collect channels updates for task: 1 at actor [1:1562:2950] 2025-04-10T10:58:32.048657Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1562:2950], channels: 0 2025-04-10T10:58:32.048756Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1562:2950], 2025-04-10T10:58:32.048833Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1562:2950], 2025-04-10T10:58:32.048893Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T10:58:32.061534Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:1562:2950], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T10:58:32.061663Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1562:2950], 2025-04-10T10:58:32.061731Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1562:2950], 2025-04-10T10:58:32.065191Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:1562:2950], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2429 Tasks { TaskId: 1 CpuTimeUs: 1551 FinishTimeMs: 1744282712064 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 1478 HostName: "ghrun-x5vqckr7qy" NodeId: 1 CreateTimeMs: 1744282712049 } MaxMemoryUsage: 1048576 } 2025-04-10T10:58:32.065333Z node 1 :KQP_EXECUTER INFO: TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [1:1562:2950] 2025-04-10T10:58:32.065440Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send Commit to BufferActor=[1:1558:2950] 2025-04-10T10:58:32.065512Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.002429s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T10:58:32.134881Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 
2025-04-10T10:58:32.134981Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2950] TxId: 281474976715658. Ctx: { TraceId: 01jrfnkxm396cvb8h68xv6g0bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNDY0OS01ZmVkMzQ5ZC1hZjAyZWJiZi1iMDYwNzMxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-04-10T10:58:32.201771Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1580:2969], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-04-10T10:58:32.203684Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RkYmI5ODUtNDgzZWFmMjktOWVjODIzOGYtZjRkMTJiMWU=, ActorId: [1:1578:2967], ActorState: ExecuteState, TraceId: 01jrfnky2c41gj7230vjnh253p, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TBlobStorageProxyTest::TestSingleFailureMirror >> TBlobStorageProxyTest::TestGetMultipart >> TestYmqHttpProxy::TestPurgeQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-04-10T10:58:09.386621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637105095334191:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:09.386668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e53/r3tmp/tmpW9SQnX/pdisk_1.dat 2025-04-10T10:58:10.478017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:10.482134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:10.518223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:10.518333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:10.529743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14046, node 1 2025-04-10T10:58:11.022976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:11.023003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:11.023012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:11.023156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:58:12.090134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.363312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25470 2025-04-10T10:58:13.140453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:13.642702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e53/r3tmp/tmp10kpNL/pdisk_1.dat 2025-04-10T10:58:19.353013Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637149135534992:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:19.385554Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:58:19.525819Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:19.584101Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:19.584209Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:19.588974Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7706, node 4 2025-04-10T10:58:19.731449Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:19.731469Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:19.731500Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:19.731647Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:58:20.392107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:20.518820Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5342 2025-04-10T10:58:20.869823Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:21.209893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.283916Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } 
records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } rec ... d_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } 2025-04-10T10:58:24.092348Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637149135534992:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:24.092485Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { 
sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282701147-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282701,"finish":1744282701},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282701}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282701246-3","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282701,"finish":1744282701},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282701}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282701322-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744282701,"finish":1744282702},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282702}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282702389-5","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744282702,"finish":1744282703},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282703}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282703462-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744282703,"finish":1744282704},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282704}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1744282704568-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744282704,"finish":1744282705},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282705}' 2025-04-10T10:58:29.836279Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637190721524525:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e53/r3tmp/tmpWKIEcr/pdisk_1.dat 2025-04-10T10:58:30.014754Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:58:30.209111Z node 7 :IMPORT WARN: Table profiles were not 
loaded 2025-04-10T10:58:30.248344Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:30.248445Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:30.255533Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27921, node 7 2025-04-10T10:58:30.438553Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:30.438582Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:30.438592Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:30.438757Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:30.825487Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:30.905595Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12714 2025-04-10T10:58:31.176408Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
>> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TestYmqHttpProxy::TestSendMessageBatch >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit >> TBlobStorageProxyTest::TestNormal >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-04-10T10:57:11.303639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636857775551112:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:11.342089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010f3/r3tmp/tmpBdmDs1/pdisk_1.dat 2025-04-10T10:57:11.920758Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:11.925031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:11.925108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:11.930356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13462, node 1 2025-04-10T10:57:12.138966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:12.138993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:12.139007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:12.139136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:57:12.471437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.495505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:30179 2025-04-10T10:57:12.787487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.792488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:12.795565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.806299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:12.813712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.977391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:13.058342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:13.069865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:13.211141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.287956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:13.375333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T10:57:13.411376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.455236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:13.522138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:13.563750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.450841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636874955421614:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.450950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.454263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636874955421626:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:15.463774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:15.490368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636874955421628:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:15.552114Z node 1 :TX_PROXY ERROR: Actor# [1:7491636874955421679:2922] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:15.991558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhk5r8m8qtsayq6pwpzdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM2NzY0ZWItZWYyNDMxZjQtZDcyZDQyNWItOWYyNTZjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:16.039287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.093311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.177657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.232359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.287630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636857775551112:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:16.287746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:16.323065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.388227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.457972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.501004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.550524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
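The warning sequence above is the usual idempotent bootstrap of the default workload-manager resource pool: the fetch reports NOT_FOUND, an ESchemeOpCreateResourcePool is proposed, the creator schedules a "doublechecking" retry, and a racing create fails benignly with "path exist, request accepts it". A minimal sketch of that create-if-missing flow, assuming hypothetical helpers fetch_pool/create_pool in place of the real TPoolFetcherActor/TPoolCreatorActor interactions:

# Sketch of the idempotent "ensure default pool" bootstrap implied by the
# log above. fetch_pool/create_pool are HYPOTHETICAL stand-ins for the
# actor interactions; the tolerated AlreadyExists mirrors the
# "path exist, request accepts it" message seen in the log.
import time

class NotFound(Exception): pass
class AlreadyExists(Exception): pass

_pools = {}  # simulated scheme state

def fetch_pool(name):
    if name not in _pools:
        raise NotFound(f"Resource pool {name} not found")
    return _pools[name]

def create_pool(name):
    if name in _pools:
        raise AlreadyExists("path exist, request accepts it")
    _pools[name] = {"name": name}

def ensure_default_pool(name="default", retries=3, backoff=0.1):
    for _ in range(retries):
        try:
            return fetch_pool(name)
        except NotFound:
            try:
                create_pool(name)   # may race with another session
            except AlreadyExists:
                pass                # benign: someone else created it first
            time.sleep(backoff)     # "Scheduled retry ... doublechecking"
    return fetch_pool(name)

print(ensure_default_pool())  # -> {'name': 'default'}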
2025-04-10T10:57:16.658620Z node 1 :HTTP INFO: Listening on http://127.0.0.1:31673 2025-04-10T10:57:17.667954Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:17.668111Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:17.669597Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:17.670274Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:17.670541Z node 1 :HTTP INFO: Listening on http://[::]:7661 2025-04-10T10:57:17.728915Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:57:17.728968Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-10T10:57:17.728972Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:17.729013Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:17.743540Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(i ... Authorized successfully 2025-04-10T10:58:34.373584Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [56862e9c-125db2cb-26bafbdc-e7593a57] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:34.376585Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7491637215160355867:2535], now have 1 active actors on pipe 2025-04-10T10:58:34.376643Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7491637215160355868:2536], now have 1 active actors on pipe 2025-04-10T10:58:34.376683Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7491637215160355864:2532], now have 1 active actors on pipe 2025-04-10T10:58:34.376726Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7491637215160355865:2533], now have 1 active actors on pipe 2025-04-10T10:58:34.376776Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7491637215160355866:2534], now have 1 active actors on pipe Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744282714,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744282714,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-04-10T10:58:34.379893Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [56862e9c-125db2cb-26bafbdc-e7593a57] reply ok 2025-04-10T10:58:34.380709Z node 8 :HTTP DEBUG: 
(#37,[::1]:42734) <- (200 ) 2025-04-10T10:58:34.380806Z node 8 :HTTP DEBUG: (#37,[::1]:42734) connection closed 2025-04-10T10:58:34.382793Z node 8 :HTTP DEBUG: (#40,[::1]:42742) incoming connection opened 2025-04-10T10:58:34.383630Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7491637215160355864:2532] destroyed 2025-04-10T10:58:34.383679Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7491637215160355865:2533] destroyed 2025-04-10T10:58:34.383708Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7491637215160355866:2534] destroyed 2025-04-10T10:58:34.383736Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7491637215160355867:2535] destroyed 2025-04-10T10:58:34.383770Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7491637215160355868:2536] destroyed 2025-04-10T10:58:34.383962Z node 8 :HTTP DEBUG: (#40,[::1]:42742) -> (POST /Root) 2025-04-10T10:58:34.384150Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [588c:1601:6050:0:408c:1601:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: abe9d267-9a21899a-f772584a-9157f066 2025-04-10T10:58:34.384629Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [abe9d267-9a21899a-f772584a-9157f066] got new request from [588c:1601:6050:0:408c:1601:6050:0] database '/Root' stream 'testtopic' 2025-04-10T10:58:34.390523Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [abe9d267-9a21899a-f772584a-9157f066] [auth] Authorized successfully 2025-04-10T10:58:34.390778Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [abe9d267-9a21899a-f772584a-9157f066] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744282.714,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1744282.714,"StreamName":"testtopic"}} 2025-04-10T10:58:34.393199Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [abe9d267-9a21899a-f772584a-9157f066] reply ok 2025-04-10T10:58:34.393754Z node 8 :HTTP DEBUG: (#40,[::1]:42742) <- (200 ) 2025-04-10T10:58:34.393906Z node 8 :HTTP DEBUG: (#40,[::1]:42742) connection closed 2025-04-10T10:58:34.396070Z node 8 :HTTP DEBUG: (#37,[::1]:42744) incoming connection opened 2025-04-10T10:58:34.396167Z node 8 :HTTP DEBUG: (#37,[::1]:42744) -> (POST /Root) 2025-04-10T10:58:34.396309Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [18fc:7d00:6050:0:fc:7d00:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: 17ecd5f7-a94807f4-ae1ffab3-8f3ac42b 2025-04-10T10:58:34.396767Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [17ecd5f7-a94807f4-ae1ffab3-8f3ac42b] got new request from [18fc:7d00:6050:0:fc:7d00:6050:0] database '/Root' stream 'testtopic' Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1744282714,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-04-10T10:58:34.397849Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [17ecd5f7-a94807f4-ae1ffab3-8f3ac42b] [auth] Authorized successfully 2025-04-10T10:58:34.397990Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [17ecd5f7-a94807f4-ae1ffab3-8f3ac42b] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:34.399129Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7491637215160355897:2548], now have 1 active actors on pipe 2025-04-10T10:58:34.399158Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7491637215160355893:2544], now have 1 active actors on pipe 2025-04-10T10:58:34.399183Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7491637215160355894:2545], now have 1 active actors on pipe 2025-04-10T10:58:34.399207Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7491637215160355895:2546], now have 1 active actors on pipe 2025-04-10T10:58:34.399232Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7491637215160355896:2547], now have 1 active actors on pipe 2025-04-10T10:58:34.400514Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [17ecd5f7-a94807f4-ae1ffab3-8f3ac42b] reply ok 2025-04-10T10:58:34.400904Z node 8 :HTTP DEBUG: (#37,[::1]:42744) <- (200 ) 2025-04-10T10:58:34.400996Z node 8 :HTTP DEBUG: (#37,[::1]:42744) connection closed 2025-04-10T10:58:34.401119Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7491637215160355894:2545] destroyed 2025-04-10T10:58:34.401150Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7491637215160355895:2546] destroyed 2025-04-10T10:58:34.401177Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe 
[8:7491637215160355896:2547] destroyed 2025-04-10T10:58:34.401202Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7491637215160355897:2548] destroyed 2025-04-10T10:58:34.401227Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7491637215160355893:2544] destroyed 2025-04-10T10:58:34.482839Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637215160355478:2471]: Pool not found 2025-04-10T10:58:34.483373Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete |69.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |69.3%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover >> LocalTableWriter::WaitTxIds [GOOD] >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-04-10T10:58:33.277646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637210399515205:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.277723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ad6/r3tmp/tmpRWm1RZ/pdisk_1.dat 2025-04-10T10:58:33.772540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:33.772657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:33.776187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:33.803252Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6361 TServer::EnableGrpc on GrpcPort 26312, node 1 2025-04-10T10:58:34.061057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:34.061095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:34.061104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:34.061267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6361 WaitRootIsUp 'Root'... 
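The DescribeStream responses above list five shards whose HashKeyRange boundaries split the 128-bit hash-key space evenly. A quick check of that arithmetic; the even-split formula is inferred from the numbers in the response rather than from documented behaviour, and the constants come straight from the log:

# Recompute the five shard boundaries printed in the DescribeStream JSON
# above. SPACE and SHARDS come from the log (MinActivePartitions: 5); the
# floor-division split rule is an inference from those numbers.
SPACE = 2 ** 128   # hash-key space: 0 .. 2**128 - 1
SHARDS = 5

for i in range(SHARDS):
    start = i * SPACE // SHARDS
    end = (i + 1) * SPACE // SHARDS - 1
    print(f"shard-{i:06d}: {start} .. {end}")

# shard-000000 ends at 68056473384187692692674921486353642290 and
# shard-000004 ends at 340282366920938463463374607431768211455,
# matching the EndingHashKey values in the response.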
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:34.593980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:34.613014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:34.616798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282714734 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-10T10:58:34.770247Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handshake: worker# [1:7491637214694483201:2356] 2025-04-10T10:58:34.770529Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:58:34.770789Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T10:58:34.770830Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Send handshake: worker# [1:7491637214694483201:2356] 2025-04-10T10:58:34.772765Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T10:58:34.783503Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-10T10:58:34.783679Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-10T10:58:34.783858Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637214694483204:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T10:58:34.783894Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:58:34.783959Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637214694483204:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-10T10:58:34.797160Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637214694483204:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:58:34.797274Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:58:34.797326Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637214694483200:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 4876, MsgBus: 21752 2025-04-10T10:57:42.349394Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636989022801835:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:42.349793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00185c/r3tmp/tmpxkqI0g/pdisk_1.dat 2025-04-10T10:57:43.232856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:43.293070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:43.293185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:43.300391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4876, node 1 2025-04-10T10:57:43.555667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:43.560969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:43.561014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:43.561170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21752 TClient is connected to server localhost:21752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:44.824497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:44.890254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.290867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:45.978338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:46.121976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:47.338405Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636989022801835:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:47.338470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:49.235387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637019087574564:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:49.235497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.480918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.555189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.615357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.660010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.709806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.772524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.852480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637023382542383:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.852615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.856949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637023382542388:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:50.862450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:50.881037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637023382542390:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:50.955608Z node 1 :TX_PROXY ERROR: Actor# [1:7491637023382542456:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:58.184787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:58.184845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:59.586740Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282674631, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 24930, MsgBus: 24795 2025-04-10T10:58:01.119676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637073272483246:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:01.123499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00185c/r3tmp/tmpxm3KhD/pdisk_1.dat 2025-04-10T10:58:01.533469Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:01.573014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:01.573113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:01.577757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24930, node 2 2025-04-10T10:58:01.754101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:01.754125Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:01.754133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:01.754294Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24795 TClient is connected to server localhost:24795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:58:02.890321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:02.912738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:03.057662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:03.323749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 7 ... led to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:21.646515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:21.793044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.851732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.910870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.992602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:22.083875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:22.221053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:22.323644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637162713717031:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:22.323788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:22.324267Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637162713717037:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:22.329150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:22.342473Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T10:58:22.342758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637162713717039:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:22.432988Z node 3 :TX_PROXY ERROR: Actor# [3:7491637162713717095:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:25.734033Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282704864, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11035, MsgBus: 16236 2025-04-10T10:58:27.384168Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637181731919540:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:27.554857Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00185c/r3tmp/tmp16Rx7o/pdisk_1.dat 2025-04-10T10:58:27.787524Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:27.787621Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:27.791771Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:27.806749Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11035, node 4 2025-04-10T10:58:28.018002Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:28.018030Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:28.018039Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:28.018200Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16236 TClient is connected to server localhost:16236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:28.964666Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:29.009042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.133687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.499166Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.622936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:32.378684Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637181731919540:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:32.378778Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:33.115406Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637207501724990:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.115473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.225603Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.300455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.366439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.438325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.492756Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.558689Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.658754Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637207501725512:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.658859Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.659377Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637207501725517:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.663729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:33.683120Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637207501725519:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:33.765723Z node 4 :TX_PROXY ERROR: Actor# [4:7491637207501725573:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:36.441965Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282716015, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-04-10T10:58:06.238839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637093793425153:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:06.239884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171f/r3tmp/tmpFVWDvl/pdisk_1.dat 2025-04-10T10:58:07.192313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:07.221320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:07.221419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:07.233363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3352, node 1 2025-04-10T10:58:07.574024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:07.574051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:07.574060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:07.574178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:08.321035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:08.933566Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T10:58:08.933889Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T10:58:08.933919Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T10:58:08.934522Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-04-10T10:58:08.934544Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-04-10T10:58:08.934568Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2025-04-10T10:58:14.238103Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637129853499510:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171f/r3tmp/tmpAdLgWk/pdisk_1.dat 2025-04-10T10:58:14.307837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:58:14.435173Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:14.468215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:14.468301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:14.483443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12205, node 2 2025-04-10T10:58:14.611246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:14.611267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:14.611273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:14.611374Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:14.995925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
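[editor's note] The TICKET_PARSER lines above record a *permanent* error ("Token is not in correct format"), while the node 5 output further down marks gRPC Status 14 (Service Unavailable) and Status 1 (CANCELLED) as "retryable: 1" and re-issues the request. A minimal sketch of that classification, assuming a simplified error type; `TError`, `IsRetryable`, and the constants here are illustrative stand-ins, not the actual YDB API:

```cpp
#include <string>

// Illustrative only: a reduced model of the permanent-vs-retryable
// distinction visible in the TICKET_PARSER log lines above and below.
struct TError {
    int GrpcStatus = -1;   // -1 when the error did not come from gRPC
    std::string Message;
};

// gRPC status codes as defined by the gRPC spec.
constexpr int GRPC_CANCELLED = 1;
constexpr int GRPC_UNAVAILABLE = 14;

bool IsRetryable(const TError& err) {
    switch (err.GrpcStatus) {
        case GRPC_CANCELLED:      // "Status 1 CANCELLED" in the node 5 log
        case GRPC_UNAVAILABLE:    // "Status 14 Service Unavailable"
            return true;          // transient: the request is re-sent
        default:
            // e.g. "Token is not in correct format" -- cached as a
            // permanent error against the ticket, never retried.
            return false;
    }
}
```

A permanent error is cached for the ticket ("has now permanent error message"), whereas a retryable one leads to the repeated BulkAuthorizeRequest attempts seen in the node 5 block later in this section.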
2025-04-10T10:58:15.039176Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:58:15.049600Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T10:58:15.049636Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T10:58:15.049646Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T10:58:15.049789Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-10T10:58:15.049859Z node 2 :GRPC_CLIENT DEBUG: [517000023108] Connect to grpc://localhost:23283 2025-04-10T10:58:15.058420Z node 2 :GRPC_CLIENT DEBUG: [517000023108] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-10T10:58:15.124729Z node 2 :GRPC_CLIENT DEBUG: [517000023108] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-10T10:58:15.125134Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-10T10:58:15.125244Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T10:58:15.125912Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T10:58:15.125934Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T10:58:15.125941Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T10:58:15.126007Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-10T10:58:15.126222Z node 2 :GRPC_CLIENT DEBUG: [517000023108] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-10T10:58:15.128969Z node 2 :GRPC_CLIENT DEBUG: [517000023108] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-10T10:58:15.129173Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-10T10:58:15.129234Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access 
Denied' 2025-04-10T10:58:20.259796Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637154652763862:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:20.259836Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171f/r3tmp/tmpyGuDNK/pdisk_1.dat 2025-04-10T10:58:20.708290Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:20.728708Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:20.728798Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:20.740312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22956, node 3 2025-04-10T10:58:20.936443Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:20.936465Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:20.936472Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:20.936576Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" P ... ACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T10:58:21.614613Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171f/r3tmp/tmp1qylHI/pdisk_1.dat 2025-04-10T10:58:27.345274Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:27.431785Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:27.471893Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:27.486337Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:27.491186Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4631, node 4 2025-04-10T10:58:27.662193Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:27.662214Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:27.662224Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:27.662350Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:28.107625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:28.122415Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:28.130534Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T10:58:28.130567Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T10:58:28.130582Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T10:58:28.130720Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-04-10T10:58:28.130761Z node 4 :GRPC_CLIENT DEBUG: [517000061308] Connect to grpc://localhost:4755 2025-04-10T10:58:28.131762Z node 4 :GRPC_CLIENT DEBUG: [517000061308] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-04-10T10:58:28.160621Z node 4 :GRPC_CLIENT DEBUG: [517000061308] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-04-10T10:58:28.160877Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-04-10T10:58:28.160894Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-04-10T10:58:28.160907Z node 4 
:TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-04-10T10:58:28.160925Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-04-10T10:58:28.160945Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T10:58:28.161112Z node 4 :GRPC_CLIENT DEBUG: [517000060f88] Connect to grpc://localhost:8338 2025-04-10T10:58:28.162060Z node 4 :GRPC_CLIENT DEBUG: [517000060f88] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-10T10:58:28.178553Z node 4 :GRPC_CLIENT DEBUG: [517000060f88] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-10T10:58:28.181375Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-10T10:58:32.678466Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637206087596708:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:32.678544Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171f/r3tmp/tmpdDEnf1/pdisk_1.dat 2025-04-10T10:58:32.949605Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:32.952045Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:32.952138Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:32.958560Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62103, node 5 2025-04-10T10:58:33.144772Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:33.144797Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:33.144805Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:33.144931Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
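[editor's note] With `result_filter: ALL_FAILED`, the BulkAuthorizeResponse messages above enumerate only the *denied* permissions, and the ticket still resolves to a valid token when at least one requested permission was not denied (node 2: something.write denied, yet "has now valid token of user1@as"; node 4: four of five permissions denied, token still valid). A hedged sketch of that interpretation, with invented container types; the real response handling in the ticket parser is more involved:

```cpp
#include <set>
#include <string>

// Sketch under the ALL_FAILED contract seen in the log: the response
// lists only failed items, so anything absent from `denied` was granted.
// Types and the function name are illustrative, not YDB's actual API.
bool TicketStillValid(const std::set<std::string>& requested,
                      const std::set<std::string>& denied) {
    for (const auto& permission : requested) {
        if (denied.count(permission) == 0) {
            return true;   // at least one requested permission granted
        }
    }
    return false;          // every requested permission was denied
}
```

The excerpt never shows a response in which *every* permission is denied, so the `false` branch is an assumption rather than observed behavior.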
2025-04-10T10:58:33.672396Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:33.687290Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:33.691956Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T10:58:33.691987Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T10:58:33.691996Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T10:58:33.692094Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-10T10:58:33.692142Z node 5 :GRPC_CLIENT DEBUG: [517000004008] Connect to grpc://localhost:2266 2025-04-10T10:58:33.693088Z node 5 :GRPC_CLIENT DEBUG: [517000004008] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-10T10:58:33.710454Z node 5 :GRPC_CLIENT DEBUG: [517000004008] Status 14 Service Unavailable 2025-04-10T10:58:33.713592Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T10:58:33.713626Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T10:58:33.713681Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T10:58:33.713797Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-10T10:58:33.714125Z node 5 :GRPC_CLIENT DEBUG: [517000004008] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-10T10:58:33.717049Z node 5 :GRPC_CLIENT DEBUG: [517000004008] Status 1 CANCELLED 2025-04-10T10:58:33.719943Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-04-10T10:58:33.719974Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-04-10T10:58:33.720008Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> TestKinesisHttpProxy::ListShardsToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-04-10T10:58:33.794503Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637209621860042:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.795135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000abb/r3tmp/tmpTJdDUE/pdisk_1.dat 2025-04-10T10:58:34.512646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:34.512748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:34.530586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:34.586196Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24188 TServer::EnableGrpc on GrpcPort 3075, node 1 2025-04-10T10:58:35.026800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:35.026825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:35.026834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:35.026938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:35.817313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:35.870549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
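[editor's note] The WaitTxIds exchange below illustrates the writer's ordering contract: a change record carries a version step and can only be applied once a TEvTxIdResult maps that step range to a transaction id. In the log, `Version { Step: 10 } TxId: 1` covers the record at Step 1, while the record at Step 11 is buffered until `Version { Step: 20 } TxId: 2` arrives. A minimal sketch of that lookup, assuming a sorted map from exclusive upper-bound step to TxId; the class and method names are illustrative:

```cpp
#include <cstdint>
#include <map>
#include <optional>

// Sketch of the version -> TxId resolution implied by the log: an entry
// { Version { Step: S } TxId: T } assigns T to every record whose own
// step is below S. The map key is that exclusive upper-bound step.
class TTxIdResolver {
public:
    void AddResult(uint64_t upperBoundStep, uint64_t txId) {
        Known[upperBoundStep] = txId;
    }

    // Returns the TxId for a record at `recordStep`, or nullopt if the
    // writer must keep buffering until the next TEvTxIdResult arrives.
    std::optional<uint64_t> Resolve(uint64_t recordStep) const {
        auto it = Known.upper_bound(recordStep);  // first bound > recordStep
        if (it == Known.end()) {
            return std::nullopt;
        }
        return it->second;
    }

private:
    std::map<uint64_t, uint64_t> Known;
};
```

With `AddResult(10, 1)` the record at step 1 resolves to TxId 1; step 11 resolves only after `AddResult(20, 2)`, matching the two TEvTxIdResult messages and the two apply/remove cycles in the trace below.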
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282715994 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-10T10:58:36.043258Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handshake: worker# [1:7491637222506762501:2356] 2025-04-10T10:58:36.043590Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:58:36.043904Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T10:58:36.043939Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Send handshake: worker# [1:7491637222506762501:2356] 2025-04-10T10:58:36.044782Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T10:58:36.050254Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds 
{ Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-10T10:58:36.050391Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-10T10:58:36.050557Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637222506762505:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T10:58:36.050593Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:58:36.050663Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637222506762505:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-10T10:58:36.056360Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637222506762505:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:58:36.056529Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:58:36.056621Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-10T10:58:37.048916Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-10T10:58:37.049098Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-04-10T10:58:37.049252Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637222506762505:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-10T10:58:37.053374Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491637222506762505:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T10:58:37.053492Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T10:58:37.053560Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491637222506762500:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> 
KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 31957, MsgBus: 18312 2025-04-10T10:57:46.021323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637007320961279:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:46.025834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00184a/r3tmp/tmpeSCgSA/pdisk_1.dat 2025-04-10T10:57:46.934764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:47.010261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:47.010353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:47.026026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31957, node 1 2025-04-10T10:57:47.366832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:47.366856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:47.366863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:47.366993Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18312 TClient is connected to server localhost:18312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:48.773293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:48.807683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:57:48.819460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:49.251296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:57:49.834486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T10:57:50.160807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:51.022363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637007320961279:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:51.022442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:54.292479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637041680701341:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:54.292588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:55.204851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.274214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.333084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.396118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.460848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.561662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:55.682364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637045975669173:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:55.682470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:55.686438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637045975669178:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:55.696331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:55.750052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637045975669180:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:57:55.840180Z node 1 :TX_PROXY ERROR: Actor# [1:7491637045975669248:3472] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:00.493476Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282679482, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 63830, MsgBus: 1770 2025-04-10T10:58:02.203544Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637078435762714:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00184a/r3tmp/tmpxVLyVI/pdisk_1.dat 2025-04-10T10:58:02.323200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:58:02.591716Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:02.624075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:02.624176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:02.628172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63830, node 2 2025-04-10T10:58:02.926931Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:02.926958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:02.926967Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:02.927103Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1770 TClient is connected to server localhost:1770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:03.738796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
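[editor's note] The sequence above is a recurring create-if-missing race that shows up in nearly every test block here: the pool fetcher reports NOT_FOUND for `/Root/.metadata/workload_manager/pools/default`, a creator actor proposes ESchemeOpCreateResourcePool, a concurrent attempt then fails with "path exist, request accepts it", and the creator schedules a retry to double-check ("Transaction ... completed, doublechecking"). A hedged sketch of that idempotent-create pattern; the status values and names are simplified stand-ins for the scheme-shard API:

```cpp
// Simplified outcome model for a scheme operation; not the real YDB enums.
enum class ESchemeStatus {
    Ok,
    AlreadyExists,   // "path exist, request accepts it" -- success for us
    Retryable        // e.g. "Transaction ... completed, doublechecking"
};

// Illustrative create-if-missing loop: losing the creation race to
// another actor is treated as success, transient outcomes are retried.
template <typename TCreateFn, typename TSleepFn>
bool EnsureDefaultPool(TCreateFn create, TSleepFn sleep, int maxAttempts) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (create("/Root/.metadata/workload_manager/pools/default")) {
            case ESchemeStatus::Ok:
            case ESchemeStatus::AlreadyExists:
                return true;      // the pool exists either way
            case ESchemeStatus::Retryable:
                sleep(attempt);   // scheduled retry, as in the log
                break;
        }
    }
    return false;
}
```

This is why the TX_PROXY "path exist" errors throughout this section are expected noise rather than test failures: the racing creator accepts the existing path and the test proceeds.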
2025-04-10T10:58:03.861819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:04.031098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:04.269616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046 ... ailed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:19.912659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:19.992658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.052698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.131783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.199366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.260067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.360950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:20.480784Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637152628692865:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:20.480880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:20.481271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637152628692870:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:20.487448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:20.510607Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637152628692872:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:58:20.586525Z node 3 :TX_PROXY ERROR: Actor# [3:7491637152628692927:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:27.477077Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282703359, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 5445, MsgBus: 25578 2025-04-10T10:58:28.894189Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637189221026448:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:28.894258Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00184a/r3tmp/tmpyl6j39/pdisk_1.dat 2025-04-10T10:58:29.100307Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:29.134811Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:29.134925Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:29.136258Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5445, node 4 2025-04-10T10:58:29.326871Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:29.326905Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:29.326914Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:29.327082Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25578 TClient is connected to server localhost:25578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:30.480781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:30.489006Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:30.501320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:30.609261Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:30.943161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:58:31.084214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.470324Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637210695864705:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.470466Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:33.538467Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.628542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.684543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.733677Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.837659Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.905452Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637189221026448:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.905988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:33.934633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.037705Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637214990832521:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.037842Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.038194Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637214990832526:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.045181Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:34.070407Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637214990832528:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:34.157271Z node 4 :TX_PROXY ERROR: Actor# [4:7491637214990832585:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:37.383206Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282716659, txId: 281474976710671] shutting down >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TTxAllocatorClientTest::ZeroRange [GOOD] >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-04-10T10:58:36.277195Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d3c/r3tmp/tmpopYEOl//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:58:36.300863Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 
0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |69.3%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestBlockPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-04-10T10:57:13.473545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636865275295642:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:13.473720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010df/r3tmp/tmpr3Wfbl/pdisk_1.dat 2025-04-10T10:57:14.232782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:14.232924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:14.240159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:14.326851Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4690, node 1 2025-04-10T10:57:14.599095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:14.599144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:14.599154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:14.599270Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27783 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:15.296638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27783 2025-04-10T10:57:15.766197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.776249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:15.783207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.802748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:15.808840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.967156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:16.037047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:16.122592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:16.132148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.201825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:16.260917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.333996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:16.414056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T10:57:16.488208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:16.557754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.430560Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636865275295642:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.430688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:18.968330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636886750133362:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:18.968453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:18.969135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636886750133374:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:18.978648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:18.991925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636886750133376:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:19.080817Z node 1 :TX_PROXY ERROR: Actor# [1:7491636891045100723:2922] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:19.639713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhpkn4cgse1vgt85xncv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFkYmY2NWMtNDRiMDQzNjAtYTk1MGQ1ZC01MjZkZTg3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:19.685100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.751059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:19.830973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T10:57:19.879873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:19.947952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.025737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.070247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.155200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.192188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:20.267185Z node 1 :HTTP INFO: Listening on http://127.0.0.1:8062 2025-04-10T10:57:21.274184Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:21.274321Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:21.274328Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:21.275836Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:21.285170Z node 1 :HTTP INFO: Listening on http://[::]:10223 2025-04-10T10:57:21.312561Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:21.312607Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:21.316655Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-10T10:57:21.316915Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:57:21.316943Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-10T10:57:21.317344Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode ... onse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.564128Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 33ms 2025-04-10T10:58:37.564427Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.564474Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-10T10:58:37.564577Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 33ms 2025-04-10T10:58:37.564947Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data 
{ Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.567222Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.567259Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 33ms 2025-04-10T10:58:37.567720Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { 
Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.567761Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-10T10:58:37.567914Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 33ms 2025-04-10T10:58:37.568474Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-10T10:58:37.960190Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637228003184917:2447]: Pool not found 2025-04-10T10:58:37.961143Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-10T10:58:38.262847Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7491637228003184913:2444]: Pool not found 2025-04-10T10:58:38.266948Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 
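[editor's note] The ExecutionEngineEvaluatedResponse entries above pair a type tree (Struct/Optional/List/Data members) with a value tree, and the executor then logs the flattened form ("Minikql data response: {"queues": [], "truncated": false}"). A minimal sketch of that flattening for just the two shapes visible in this log — the member names and the empty-Optional-over-List convention are read off the log output, not from YDB's real API:

    # Shapes transcribed from the log's Type/Value trees (assumptions, not YDB headers):
    #   Member "queues"    -> Optional(List(Struct(...)))  with Value: Optional{}        (empty)
    #   Member "truncated" -> Optional(Data Bool)          with Value: Optional{Bool: false}
    members = [("queues", "list"), ("truncated", "data")]
    values  = [None, False]                  # empty Optional, then Optional{Bool: false}

    def flatten(members, values):
        out = {}
        for (name, kind), val in zip(members, values):
            if val is None:                  # empty Optional over a List renders as []
                out[name] = [] if kind == "list" else None
            else:                            # populated Optional renders as its payload
                out[name] = val
        return out

    print(flatten(members, values))          # -> {'queues': [], 'truncated': False}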
2025-04-10T10:58:38.272576Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491637232298152332:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.272691Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7491637232298152333:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-10T10:58:38.272756Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.470651Z node 8 :HTTP DEBUG: (#37,[::1]:47458) incoming connection opened 2025-04-10T10:58:38.470750Z node 8 :HTTP DEBUG: (#37,[::1]:47458) -> (POST /Root) 2025-04-10T10:58:38.470901Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [1847:f300:6050:0:47:f300:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 2e1674cf-8589ee99-cfa57d3d-79c2da22 2025-04-10T10:58:38.471558Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [2e1674cf-8589ee99-cfa57d3d-79c2da22] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-04-10T10:58:38.471889Z node 8 :HTTP DEBUG: (#37,[::1]:47458) <- (400 MissingParameter) 2025-04-10T10:58:38.471949Z node 8 :HTTP DEBUG: (#37,[::1]:47458) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-04-10T10:58:38.471998Z node 8 :HTTP DEBUG: (#37,[::1]:47458) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 2e1674cf-8589ee99-cfa57d3d-79c2da22 x-amz-crc32: 851558042 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-04-10T10:58:38.472109Z node 8 :HTTP DEBUG: (#37,[::1]:47458) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} >> KqpExplain::ComplexJoin [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TBlobStorageProxyTest::TestBlock |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-04-10T10:56:28.099097Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:56:28.100086Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:56:28.101041Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:56:28.103172Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.103765Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:56:28.115180Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.115398Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.115508Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.115594Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:56:28.115774Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.115878Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:56:28.116011Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:56:28.116824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-04-10T10:56:28.117523Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.117607Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:56:28.117711Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-10T10:56:28.117761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> DataStreams::TestListShards1Shard [GOOD] >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> DataStreams::TestPutRecords [GOOD] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestVPutVGetPersistence |69.3%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-04-10T10:57:09.513772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636847330665502:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:09.513973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010f4/r3tmp/tmpSc6HqN/pdisk_1.dat 2025-04-10T10:57:10.069425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:10.069572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:10.076684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:10.146778Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2543, node 1 2025-04-10T10:57:10.350375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:10.350416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:10.350426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:10.350557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:10.922110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:10.954903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:13878 2025-04-10T10:57:11.399161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
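[editor's note] Referring back to the TestEmptyHttpBody exchange above (the POST /Root with X-Amz-Target: kinesisApi.CreateStream whose chunked body decodes to the four bytes "null"): a top-level JSON null is not a map, so the proxy answers 400 MissingParameter. A minimal sketch reproducing that request by hand, assuming a proxy bound to the port the test prints in its "Listening on http://127.0.0.1:<port>" line and that this test setup accepts the empty Authorization header:

    import http.client, json

    # Hypothetical port: substitute whatever your run prints (8062 / 16962 above).
    conn = http.client.HTTPConnection("127.0.0.1", 8062)
    conn.request("POST", "/Root", body="null",   # top-level JSON null, not a map
                 headers={"X-Amz-Target": "kinesisApi.CreateStream",
                          "Content-Type": "application/json"})
    resp = conn.getresponse()
    print(resp.status, resp.reason)              # expect: 400 MissingParameter
    print(json.loads(resp.read()))               # {"__type": "MissingParameter", "message": ...}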
2025-04-10T10:57:11.405405Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:11.407366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.419570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:11.428224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:11.575588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.669252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:11.730226Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.735461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:11.775127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.824294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.891799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:11.963237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.053336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:12.105342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.101026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636868805503438:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:14.101110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636868805503446:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:14.101158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:14.105894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:14.120969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636868805503452:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:14.181090Z node 1 :TX_PROXY ERROR: Actor# [1:7491636868805503503:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:14.513922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636847330665502:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:14.514020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:14.555759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhhvg2dccha97j8znm52r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5ZTE4MDktNjY1ZWNmM2QtNzM5NjU3YTUtYmFlNTdiOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:14.627840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.677050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.721616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.759687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.808219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.890016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.970997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.027591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.098567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:15.302053Z node 1 :HTTP INFO: Listening on http://127.0.0.1:16962 2025-04-10T10:57:16.305568Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:16.305727Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:16.306239Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:16.306833Z node 1 :HTTP INFO: Listening on http://[::]:8955 2025-04-10T10:57:16.310602Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:16.336406Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:16.336459Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:16.341106Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-10T10:57:16.341219Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser ( ... CUTED FrontTxId 281474976715690 2025-04-10T10:58:39.452795Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T10:58:39.452808Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715690, NewState WAIT_RS_ACKS 2025-04-10T10:58:39.452820Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715690 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T10:58:39.452845Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976715690] PredicateAcks: 0/0 2025-04-10T10:58:39.452853Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T10:58:39.452866Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976715690] PredicateAcks: 0/0 2025-04-10T10:58:39.452879Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] add an TxId 281474976715690 to the list for deletion 2025-04-10T10:58:39.452894Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715690, NewState DELETING 2025-04-10T10:58:39.452913Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] delete key for TxId 281474976715690 2025-04-10T10:58:39.452955Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T10:58:39.452991Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2025-04-10T10:58:39.453413Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2025-04-10T10:58:39.453654Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [8c8ef0c2-24a9fea9-9f6f0a32-18384395] reply ok 2025-04-10T10:58:39.453710Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2025-04-10T10:58:39.453826Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-04-10T10:58:39.453891Z node 8 :HTTP DEBUG: (#37,[::1]:40512) <- (200 ) 2025-04-10T10:58:39.453920Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2025-04-10T10:58:39.454004Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T10:58:39.454011Z node 8 :HTTP DEBUG: (#37,[::1]:40512) connection closed 2025-04-10T10:58:39.454018Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T10:58:39.454019Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Try execute txs with state DELETING 2025-04-10T10:58:39.454033Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715690, State DELETING 2025-04-10T10:58:39.454033Z node 8 :PERSQUEUE DEBUG: 
[PQ: 72075186224037909] Try execute txs with state DELETING 2025-04-10T10:58:39.454046Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] TxId 281474976715690, State DELETING 2025-04-10T10:58:39.454048Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] delete TxId 281474976715690 2025-04-10T10:58:39.454060Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] delete TxId 281474976715690 2025-04-10T10:58:39.454988Z node 8 :HTTP DEBUG: (#40,[::1]:40528) incoming connection opened 2025-04-10T10:58:39.455069Z node 8 :HTTP DEBUG: (#40,[::1]:40528) -> (POST /Root) 2025-04-10T10:58:39.455224Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [f8e5:4400:6050:0:e0e5:4400:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 9663629d-aa894aef-256096a0-a4450aae E0000 00:00:1744282719.456173 1162948 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn Http output full {"NextToken":"COGp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"COGp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} E0000 00:00:1744282719.460615 1162947 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:39.455676Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [9663629d-aa894aef-256096a0-a4450aae] got new request from [f8e5:4400:6050:0:e0e5:4400:6050:0] database '/Root' stream 'teststream' 2025-04-10T10:58:39.456050Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [9663629d-aa894aef-256096a0-a4450aae] [auth] Authorized successfully 2025-04-10T10:58:39.456108Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [9663629d-aa894aef-256096a0-a4450aae] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"NextToken":"COWp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]}2025-04-10T10:58:39.457649Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7491637234985286495:2525], now have 1 active actors on pipe 2025-04-10T10:58:39.457649Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7491637234985286494:2524], now have 1 active actors on pipe 2025-04-10T10:58:39.458178Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] 
server disconnected, pipe [8:7491637234985286494:2524] destroyed 2025-04-10T10:58:39.458200Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7491637234985286495:2525] destroyed 2025-04-10T10:58:39.458444Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [9663629d-aa894aef-256096a0-a4450aae] reply ok 2025-04-10T10:58:39.458563Z node 8 :HTTP DEBUG: (#40,[::1]:40528) <- (200 ) 2025-04-10T10:58:39.458644Z node 8 :HTTP DEBUG: (#40,[::1]:40528) connection closed 200 {"NextToken":"COWp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-10T10:58:39.459730Z node 8 :HTTP DEBUG: (#37,[::1]:40536) incoming connection opened 2025-04-10T10:58:39.459778Z node 8 :HTTP DEBUG: (#37,[::1]:40536) -> (POST /Root) 2025-04-10T10:58:39.459878Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [782f:801:6050:0:602f:801:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 4c1f8931-1e7c675a-29fa8f38-f3f8a62a 2025-04-10T10:58:39.460198Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [4c1f8931-1e7c675a-29fa8f38-f3f8a62a] got new request from [782f:801:6050:0:602f:801:6050:0] database '/Root' stream 'teststream' 2025-04-10T10:58:39.460509Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [4c1f8931-1e7c675a-29fa8f38-f3f8a62a] [auth] Authorized successfully 2025-04-10T10:58:39.460570Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [4c1f8931-1e7c675a-29fa8f38-f3f8a62a] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:39.461356Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7491637234985286506:2529], now have 1 active actors on pipe 2025-04-10T10:58:39.461363Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7491637234985286507:2530], now have 1 active actors on pipe 2025-04-10T10:58:39.461931Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7491637234985286506:2529] destroyed 2025-04-10T10:58:39.461953Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7491637234985286507:2530] destroyed 2025-04-10T10:58:39.462037Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [4c1f8931-1e7c675a-29fa8f38-f3f8a62a] reply ok 2025-04-10T10:58:39.462164Z node 8 :HTTP DEBUG: (#37,[::1]:40536) <- (200 ) 2025-04-10T10:58:39.462250Z node 8 :HTTP DEBUG: (#37,[::1]:40536) connection closed E0000 00:00:1744282719.464326 1162948 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn Http output full {"NextToken":"COmp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-10T10:58:39.463414Z node 8 :HTTP DEBUG: 
(#40,[::1]:40544) incoming connection opened 200 {"NextToken":"COmp6PrhMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-10T10:58:39.463473Z node 8 :HTTP DEBUG: (#40,[::1]:40544) -> (POST /Root) 2025-04-10T10:58:39.463577Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [9815:e01:6050:0:8015:e01:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 1cdae097-31ab0fa4-3373b3b2-d4e05f76 2025-04-10T10:58:39.463899Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1cdae097-31ab0fa4-3373b3b2-d4e05f76] got new request from [9815:e01:6050:0:8015:e01:6050:0] database '/Root' stream 'teststream' 2025-04-10T10:58:39.464209Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [1cdae097-31ab0fa4-3373b3b2-d4e05f76] [auth] Authorized successfully 2025-04-10T10:58:39.464266Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1cdae097-31ab0fa4-3373b3b2-d4e05f76] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:39.465010Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7491637234985286519:2535], now have 1 active actors on pipe 2025-04-10T10:58:39.465034Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7491637234985286518:2534], now have 1 active actors on pipe 2025-04-10T10:58:39.465446Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7491637234985286518:2534] destroyed 2025-04-10T10:58:39.465469Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7491637234985286519:2535] destroyed 2025-04-10T10:58:39.465723Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [1cdae097-31ab0fa4-3373b3b2-d4e05f76] reply ok 2025-04-10T10:58:39.465834Z node 8 :HTTP DEBUG: (#40,[::1]:40544) <- (200 ) 2025-04-10T10:58:39.465909Z node 8 :HTTP DEBUG: (#40,[::1]:40544) connection closed >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> BackupRestoreS3::PrefixedVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2025-04-10T10:58:10.238847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637110126974316:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.239472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3f/r3tmp/tmp6zpmBm/pdisk_1.dat 2025-04-10T10:58:11.275065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:11.465182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:11.483659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
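[editor's note] The E0000 messages above name the four required fields of NKikimrPQ.TYdsNextToken (CreationTimestamp, MaxResults, AlreadyRead, StreamArn), and the NextToken values in the ListShards replies are base64-encoded protobuf wire format. A sketch that walks the first token from the log with plain varint/length-delimited decoding and no YDB schema assumed:

    import base64

    token = "COGp6PrhMhACGAIiCnRlc3RzdHJlYW0="     # NextToken from the reply above
    raw = base64.b64decode(token)

    def read_varint(buf, i):
        """Decode a protobuf varint starting at buf[i]; return (value, next_index)."""
        shift = value = 0
        while True:
            b = buf[i]; i += 1
            value |= (b & 0x7F) << shift
            if not b & 0x80:
                return value, i
            shift += 7

    i = 0
    while i < len(raw):
        key, i = read_varint(raw, i)
        field, wire = key >> 3, key & 7
        if wire == 0:                              # varint field
            val, i = read_varint(raw, i)
        elif wire == 2:                            # length-delimited field
            ln, i = read_varint(raw, i)
            val = raw[i:i + ln]; i += ln
        else:
            break
        print(field, wire, val)

This prints field 1 as a large varint, fields 2 and 3 as the value 2, and field 4 as b'teststream' — which lines up with the required-field list in the E0000 message, assuming the fields are numbered in that order.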
2025-04-10T10:58:11.483755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:11.497632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16594, node 1 2025-04-10T10:58:11.895015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:11.895048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:11.895057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:11.895181Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:13.093384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:13.489705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:31569 2025-04-10T10:58:13.779360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:13.801359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T10:58:14.467179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-04-10T10:58:14.619418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:14.851947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:14.915381Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T10:58:14.915416Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T10:58:14.915432Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:58:15.181322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637110126974316:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:15.181405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744282694276-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744282694276-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1744282694747-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1744282694747-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037890","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744282694748-5","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744282694748-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1744282694276-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1744282694276-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744282694,"finish":1744282694},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744282694}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1744282694747-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1744282694,"finish":1744282694},"labels":{"datastreams_s ... 4480 waiting... 2025-04-10T10:58:29.253688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20074 2025-04-10T10:58:29.619416Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:29.643020Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1744282710.043471 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.043687 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.056266 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.056430 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.077320 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.077537 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.094757 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.094893 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:30.145469Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.283075Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 E0000 00:00:1744282710.388870 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.389007 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:30.411980Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 E0000 00:00:1744282710.536886 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.537011 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:30.559462Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 E0000 00:00:1744282710.750632 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.750811 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.777023 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282710.777149 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:30.897324Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.983282Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-04-10T10:58:30.983323Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-10T10:58:30.983344Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-10T10:58:30.983360Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-10T10:58:30.983371Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-10T10:58:30.990667Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-04-10T10:58:31.006261Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-04-10T10:58:31.006373Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-10T10:58:31.006428Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-10T10:58:31.006493Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-10T10:58:31.006530Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-10T10:58:31.006559Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1744282711.036260 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282711.036385 1162124 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-10T10:58:35.014440Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637216603411634:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:35.014520Z node 10 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3f/r3tmp/tmpxvSnQT/pdisk_1.dat 2025-04-10T10:58:35.616212Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:35.740938Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:35.741041Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:35.758853Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22371, node 10 2025-04-10T10:58:36.046319Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:36.046345Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:36.046355Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:36.046544Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:36.447152Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:36.611164Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29788 2025-04-10T10:58:37.039349Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:37.062096Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 E0000 00:00:1744282717.375649 1164973 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282717.396009 1164973 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282717.423668 1164973 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282717.442550 1164973 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1744282717.456525 1164973 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-04-10T10:58:10.344859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637110411349297:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.344918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e58/r3tmp/tmp0PYR7l/pdisk_1.dat 2025-04-10T10:58:11.327591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:11.354620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:11.366458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:11.366600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:11.402396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30486, node 1 2025-04-10T10:58:11.790736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:11.790761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:11.790767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:11.790923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2570 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:12.425590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.600418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2570 2025-04-10T10:58:12.964847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.987325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T10:58:17.342667Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637142657228808:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:17.342828Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e58/r3tmp/tmpCZYorX/pdisk_1.dat 2025-04-10T10:58:17.894566Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:18.046139Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:18.049593Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:18.064540Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10959, node 4 2025-04-10T10:58:18.389550Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:18.389579Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:18.389586Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:18.389743Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13587 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:18.827850Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:18.955217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13587 2025-04-10T10:58:19.257583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:19.712849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } 
records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2025-04-10T10:58:19.828308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:19.962928Z :INFO: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] Starting read session 2025-04-10T10:58:19.963014Z :DEBUG: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] Starting session to cluster null (localhost:10959) 2025-04-10T10:58:19.964622Z :DEBUG: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:58:19.964663Z :DEBUG: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:58:19.973630Z :DEBUG: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] [null] Reconnecting session to cluster null in 0.000000s 2025-04-10T10:58:20.066618Z node 4 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-10T10:58:20.066648Z node 4 :PQ_READ_PROXY DEBUG: new session created cookie 1 2025-04-10T10:58:20.070393Z :DEBUG: [/Root/] [/Root/] [6e25c5e4-1b595551-a364ac8-65a23b1d] [null] Successfully connected. Initializing session 2025-04-10T10:58:20.091111Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-10T10:58:20.091455Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_7824726736030708237_v1 read init: from# ipv6:[::1]:48772, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-10T10:58:20.091671Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_7824726736030708237_v1 auth for : user1 2025-04-10T10:58:20.101350Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_7824726736030708237_v1 Handle describe topics response 2025-04-10T10:58:20.101597Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_7824726736030708237_v1 auth is DEAD 2025-04-10T10:58:20.101727Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_7824726736030708237_v1 auth ok: topics# 1, initDone# 0 2025-04-10T10:58:20.103031Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_782472673603 ... ffset 0 clientReadOffset 0 2025-04-10T10:58:32.354617Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15559045454565015753_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-10T10:58:32.356220Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:58:32.356429Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-04-10T10:58:32.356580Z :DEBUG: [/Root/] Take Data. Partition 4. 
Read: {0, 0} (0-0) 2025-04-10T10:58:32.356645Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-04-10T10:58:32.356691Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-10T10:58:32.509576Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:58:32.510128Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-04-10T10:58:32.510274Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-04-10T10:58:32.510360Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-04-10T10:58:32.510412Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 1} (1-1) 2025-04-10T10:58:32.511382Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (2-2) 2025-04-10T10:58:32.511422Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (3-3) 2025-04-10T10:58:32.511471Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2025-04-10T10:58:32.512210Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-04-10T10:58:32.512275Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2025-04-10T10:58:32.512329Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-10T10:58:32.512394Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-04-10T10:58:32.512450Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-04-10T10:58:32.512506Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-04-10T10:58:32.512541Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-04-10T10:58:32.512607Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-04-10T10:58:32.512657Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2025-04-10T10:58:32.512734Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-10T10:58:32.512765Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 1} (1-1) 2025-04-10T10:58:32.512796Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-10T10:58:32.512941Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-04-10T10:58:32.513925Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2025-04-10T10:58:32.516018Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-04-10T10:58:32.517050Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-04-10T10:58:32.521180Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2025-04-10T10:58:32.522052Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-04-10T10:58:32.523002Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-04-10T10:58:32.524036Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-04-10T10:58:32.536747Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2025-04-10T10:58:32.536821Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] The application data is transferred to the client. 
Number of messages 9, size 8388611 bytes 2025-04-10T10:58:32.540039Z :INFO: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] Closing read session. Close timeout: 0.000000s 2025-04-10T10:58:32.540132Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:3:5:3:0 null:stream_TestPutRecordsCornerCases:2:4:0:0 null:stream_TestPutRecordsCornerCases:1:3:8:0 null:stream_TestPutRecordsCornerCases:0:2:1:0 null:stream_TestPutRecordsCornerCases:4:1:1:0 2025-04-10T10:58:32.540192Z :INFO: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 237 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:58:32.540335Z :NOTICE: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T10:58:32.540404Z :DEBUG: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] [null] Abort session to cluster 2025-04-10T10:58:32.541064Z :NOTICE: [/Root/] [/Root/] [ffc4422c-b644a885-706b3310-b1b1bac4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:58:32.547918Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15559045454565015753_v1 grpc read failed 2025-04-10T10:58:32.547969Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15559045454565015753_v1 grpc closed 2025-04-10T10:58:32.548023Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_15559045454565015753_v1 is DEAD 2025-04-10T10:58:34.907318Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637215819893342:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:34.922498Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e58/r3tmp/tmppcK88K/pdisk_1.dat 2025-04-10T10:58:35.427208Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:35.516207Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:35.516347Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:35.524993Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3452, node 10 2025-04-10T10:58:35.855372Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:35.855405Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:35.855414Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:35.855628Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:36.275899Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:36.499750Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22462 2025-04-10T10:58:37.015588Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:37.600004Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-04-10T10:58:37.813021Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-04-10T10:57:13.343160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636865753560738:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:13.343201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010e5/r3tmp/tmpwG5AfI/pdisk_1.dat 2025-04-10T10:57:13.938425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:13.938549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:13.943445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:14.001763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9861, node 1 2025-04-10T10:57:14.123046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-10T10:57:14.123069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:14.123075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:14.123216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:14.552658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.574762Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17101 2025-04-10T10:57:14.843968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.854974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:14.863313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:14.883088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:14.891548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.159890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:15.262724Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T10:57:15.267202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:57:15.338753Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-10T10:57:15.347866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.401424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.470169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.541821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:15.618308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T10:57:15.665795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:15.764945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:17.945921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636882933431361:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:17.946075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:17.948288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636882933431373:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:17.952332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:17.967285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636882933431375:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:18.065128Z node 1 :TX_PROXY ERROR: Actor# [1:7491636887228398723:2921] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:18.343592Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636865753560738:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.343655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:18.399729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnhnkq33cp9emjk2kskqnq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIyNjQyOWItYmM2Nzk4MGItYjQ5YzU4YmItZGIwMGM0OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:18.450256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.497153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.540612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.580920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.618023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.661040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.699284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.732115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.771103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:18.877607Z node 1 :HTTP INFO: Listening on http://127.0.0.1:16225 2025-04-10T10:57:19.880354Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:19.880388Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:19.880549Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:19.899185Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:19.903904Z node 1 :HTTP INFO: Listening on http://[::]:12390 2025-04-10T10:57:19.917829Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:19.917871Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:19.917944Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:5 ... Z\t\351\000?V\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\200\003?\202(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?X\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\226\003?\230\036QUEUE_ID_NUMBER\003\022\000\000\003?\\\000\037\000\001\t\211\004\202\203\005@?8V\000\003?\260\nattrs?\252\001\t\211\004\202\203\005@\203\001HV\000\003?\270\010tags\t\211\006?\272\203\014?\272\203\001H\"\000\t\211\002?\300?\300\014Not\000\t\211\002?\300?8R\000?\252\000\000\003?\272\004{}\t\211\006?\302\203\014?\302\203\001H\"\000\t\211\006?\320\203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\003\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-04-10T10:57:29.162797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636934326344181:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:29.162852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001078/r3tmp/tmplNpXYq/pdisk_1.dat 2025-04-10T10:57:29.719631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3995, node 1 2025-04-10T10:57:29.759542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:29.759641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:29.761931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:29.819831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:29.819854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:29.819866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:29.819996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:30.347899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23975 2025-04-10T10:57:30.759855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.771806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:30.773845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.794829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:30.802516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:57:31.101188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:31.289897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:31.374293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.463402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.525539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.613294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.701008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.793053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:31.875840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:33.993291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636951506214818:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:33.993453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:33.993929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636951506214830:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:33.999738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:34.017664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636951506214832:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:34.085055Z node 1 :TX_PROXY ERROR: Actor# [1:7491636955801182179:2920] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:34.170376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636934326344181:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:34.194567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:34.694217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnj596fcmh2dqbc8aykftb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBmYWY1ZGMtNWRiMWEzYWEtMjMzYjkzZTQtOTdlN2I3ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:34.749185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.806219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.857513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.897058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.964618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.057234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.155297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.200168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.236646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
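At this point the YMQ control-plane tables exist and the proxy is about to serve SQS-compatible HTTP traffic; the `Untag queue query failed, conflicting query in parallel` failure further down in this block is what TestTagQueueMultipleQueriesInflight provokes with concurrent tag operations on one queue. A hedged sketch of equivalent client-side traffic, assuming boto3 (which speaks the same SQS wire protocol the proxy implements) with a placeholder endpoint and dummy credentials:

```python
import boto3
from concurrent.futures import ThreadPoolExecutor

# Sketch only: endpoint URL, region, and credentials are placeholders for
# whatever the HTTP proxy in this run actually listens on.
sqs = boto3.client(
    "sqs",
    endpoint_url="http://localhost:64339",
    region_name="ru-central1",
    aws_access_key_id="dummy",
    aws_secret_access_key="dummy",
)

queue_url = sqs.create_queue(QueueName="ExampleQueue")["QueueUrl"]

def mutate_tags(i: int) -> None:
    # Concurrent TagQueue/UntagQueue calls against one queue are the kind of
    # load that can surface the "conflicting query in parallel" failure below.
    sqs.tag_queue(QueueUrl=queue_url, Tags={f"k{i}": "v"})
    sqs.untag_queue(QueueUrl=queue_url, TagKeys=[f"k{i}"])

with ThreadPoolExecutor(max_workers=8) as pool:
    list(pool.map(mutate_tags, range(32)))

print(sqs.list_queue_tags(QueueUrl=queue_url).get("Tags", {}))
```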
2025-04-10T10:57:35.358044Z node 1 :HTTP INFO: Listening on http://127.0.0.1:64339 2025-04-10T10:57:36.359931Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:36.362544Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: false EnableDeadLetterQueues: true } 2025-04-10T10:57:36.363725Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:36.366193Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:36.366540Z node 1 :HTTP INFO: Listening on http://[::]:31570 2025-04-10T10:57:36.384973Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:36.385021Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:36.389462Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-10T10:57:36.389497Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-04-10T10:57:36.389577Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromName (Parameter 'FROM_NAME (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) ... ame: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-04-10T10:58:42.114316Z node 7 :SQS DEBUG: Request [fc18772f-dd4399eb-dcf588a8-99dfc5bc] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 114ms Http output full {"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} Http output full 
{"__type":"InternalFailure","message":"Untag queue query failed, conflicting query in parallel"} 2025-04-10T10:58:42.119818Z node 7 :HTTP DEBUG: (#37,[::1]:50936) incoming connection opened 2025-04-10T10:58:42.119925Z node 7 :HTTP DEBUG: (#37,[::1]:50936) -> (POST /Root) 2025-04-10T10:58:42.119991Z node 7 :SQS TRACE: Request [fc18772f-dd4399eb-dcf588a8-99dfc5bc] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715925 Step: 1744282722070 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-04-10T10:58:42.120137Z node 7 :SQS TRACE: Request [fc18772f-dd4399eb-dcf588a8-99dfc5bc] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2025-04-10T10:58:42.120152Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d81d:0:6050:0:c01d:0:6050:0] request [ListQueueTags] url [/Root] database [/Root] requestId: 9ec71fc8-959a3bab-f29f2170-b2918cd6 2025-04-10T10:58:42.120355Z node 7 :SQS DEBUG: 
Request [fc18772f-dd4399eb-dcf588a8-99dfc5bc] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 123ms 2025-04-10T10:58:42.120527Z node 7 :SQS DEBUG: Request [fc18772f-dd4399eb-dcf588a8-99dfc5bc] Sending executed reply 2025-04-10T10:58:42.120623Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] got new request from [d81d:0:6050:0:c01d:0:6050:0] 2025-04-10T10:58:42.121528Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-10T10:58:42.121564Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-10T10:58:42.121773Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 9ec71fc8-959a3bab-f29f2170-b2918cd6 2025-04-10T10:58:42.121902Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2025-04-10T10:58:42.121919Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Request proxy started 2025-04-10T10:58:42.121984Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-04-10T10:58:42.122043Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Get configuration duration: 0ms 2025-04-10T10:58:42.122157Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-04-10T10:58:42.122175Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-04-10T10:58:42.122201Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Got leader node for queue response. Node id: 7. Status: 0 2025-04-10T10:58:42.122317Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" 2025-04-10T10:58:42.122477Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" 2025-04-10T10:58:42.122552Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Request started. Actor: [7:7491637246070196937:5541] 2025-04-10T10:58:42.122599Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7491637246070196937:5541] 2025-04-10T10:58:42.122621Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2025-04-10T10:58:42.122668Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Get configuration duration: 0ms 2025-04-10T10:58:42.122691Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Got configuration. 
Root url: http://ghrun-x5vqckr7qy.auto.internal:8771, Shards: 1, Fail: 0 2025-04-10T10:58:42.122709Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] DoRoutine 2025-04-10T10:58:42.122787Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] SendReplyAndDie from action actor { ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } } 2025-04-10T10:58:42.122884Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Sending sqs response: { ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-10T10:58:42.122971Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7491637246070196937:5541]. Found: 1 2025-04-10T10:58:42.123035Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-10T10:58:42.123098Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7491637246070196936:2774]: ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-04-10T10:58:42.123226Z node 7 :SQS TRACE: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] HandleResponse: { ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-04-10T10:58:42.123307Z node 7 :SQS DEBUG: Request [9ec71fc8-959a3bab-f29f2170-b2918cd6] Sending reply from proxy actor: { ListQueueTags { RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" } RequestId: "9ec71fc8-959a3bab-f29f2170-b2918cd6" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-04-10T10:58:42.123484Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] Got successful GRPC response. 2025-04-10T10:58:42.123545Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] reply ok 2025-04-10T10:58:42.123714Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [9ec71fc8-959a3bab-f29f2170-b2918cd6] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 179 SourceAddress: d81d:0:6050:0:c01d:0:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-04-10T10:58:42.123863Z node 7 :HTTP DEBUG: (#37,[::1]:50936) <- (200 ) 2025-04-10T10:58:42.123978Z node 7 :HTTP DEBUG: (#37,[::1]:50936) connection closed Http output full {} >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD] Test command err: 2025-04-10T10:55:58.319895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636544888299910:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:58.320264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001787/r3tmp/tmpMUxjhh/pdisk_1.dat 2025-04-10T10:55:59.423968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:59.450317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:59.457345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:59.457436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:59.474163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28696, node 1 2025-04-10T10:55:59.766316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:59.766341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:59.766353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:59.766483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:55:59.874214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25188 2025-04-10T10:56:00.371071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:00.437948Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636551947539293:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:00.562657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:00.566536Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T10:56:00.578299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:00.578377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:00.596109Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:56:00.601081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:00.710928Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711057Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711119Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711175Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711251Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711302Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711393Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711441Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711505Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:56:00.711645Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:00.711713Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:00.739949Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:01.033428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:01.052180Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T10:56:01.052249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T10:56:01.097575Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T10:56:01.097649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T10:56:01.098730Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T10:56:01.098763Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T10:56:01.098800Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T10:56:01.098842Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T10:56:01.098892Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T10:56:01.098924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T10:56:01.099369Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T10:56:01.108203Z node 2 :STATISTICS DEBUG: [72075186224037894] 
EvServerConnected, pipe server id = [2:7491636551947539562:2204] 2025-04-10T10:56:01.162756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T10:56:01.189733Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T10:56:01.189752Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T10:56:01.189789Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T10:56:01.191814Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:56:01.191857Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7491636556242507079:2325], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:56:01.236237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T10:56:01.237868Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7491636556242507136:2376] 2025-04-10T10:56:01.243883Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7491636556242507136:2376], schemeshard id = 72075186224037897 2025-04-10T10:56:01.256613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T10:56:01.290216Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T10:56:01.290272Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T10:56:01.768774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T10:56:01.860158Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T10:56:03.310384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636544888299910:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:03.310445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:05.111179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636574953072128:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.111288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:05.414259Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636551947539293:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:05.414325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:05.798685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T10:56:06.458318Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:06.458500Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:06.458785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:06.458926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:06.459036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:06.459140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:06.459239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:06.459346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491636577717344004:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:06.459469Z node 2 :TX_C ... :2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:23.463788Z node 5 :TX_PROXY ERROR: Actor# [5:7491637168483244949:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"App\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 19721, MsgBus: 11786 2025-04-10T10:58:28.154916Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491637188766670819:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:28.154997Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001787/r3tmp/tmphEJee2/pdisk_1.dat 2025-04-10T10:58:28.542339Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:28.578211Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:28.578334Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:28.590111Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19721, node 6 2025-04-10T10:58:28.798923Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:28.798950Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:28.798960Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:28.799124Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11786 TClient is connected to server localhost:11786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:29.734755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.745798Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:58:29.769272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.895648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:30.330278Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
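The nested JSON printed above (`Plan` / `Plans` / `Operators` / `Node Type`) is the EXPLAIN artifact that KqpExplain::ComplexJoin inspects. Below is a small standard-library sketch of walking such a plan to list its operators; reading it from a `plan.json` file is an assumption made for illustration:

```python
import json

def walk(node: dict, depth: int = 0) -> None:
    # Each plan node may carry a "Node Type", a list of "Operators",
    # and child nodes under "Plans" -- recurse through all of them.
    ops = [op.get("Name") for op in node.get("Operators", [])]
    print("  " * depth + f"{node.get('Node Type', '?')} {ops}")
    for child in node.get("Plans", []):
        walk(child, depth + 1)

with open("plan.json") as f:   # e.g. the JSON dumped above, saved to a file
    plan = json.load(f)
walk(plan["Plan"])             # "Plan" is the top-level key, as in the log
```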
2025-04-10T10:58:30.487612Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:33.155586Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491637188766670819:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.155685Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:34.076308Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491637214536476360:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.076437Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.143115Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.227377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.319167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.387407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.437947Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.519902Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:34.626071Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491637214536476884:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.626234Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.626485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491637214536476889:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:34.636049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:34.669364Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491637214536476891:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:58:34.767115Z node 6 :TX_PROXY ERROR: Actor# [6:7491637214536476949:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:36.342164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:36.802598Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T10:58:36.878123Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::PrefixedVectorIndex [GOOD] Test command err: 2025-04-10T10:57:05.048751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636833312069659:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:05.048808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmp0CtzfO/pdisk_1.dat 2025-04-10T10:57:05.690767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:05.694436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:05.694505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:05.701913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1527, node 1 2025-04-10T10:57:05.858501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:05.858526Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:05.858534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:05.858631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22351 WaitRootIsUp 'Root'... 
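The trio that recurs across these runs, `Resource pool default not found`, `Scheduled retry ... doublechecking`, and the TX_PROXY error `path exist, request accepts it` (seen again just above), is a single pattern: on first use, each workload-service actor races to create `/Root/.metadata/workload_manager/pools/default`, and whoever loses the race receives an already-exists answer that the request deliberately accepts. A toy Python sketch of that create-if-absent convention (purely illustrative, not YDB code):

```python
import threading

_catalog: set[str] = set()
_lock = threading.Lock()

def ensure_pool(path: str) -> str:
    """Create-if-absent; losing the creation race is treated as success."""
    with _lock:
        if path in _catalog:
            # Mirrors the log's "path exist, request accepts it": an error
            # status the caller deliberately interprets as OK.
            return "path exist, request accepts it"
        _catalog.add(path)
        return "created"

threads = [
    threading.Thread(
        target=lambda: print(
            ensure_pool("/Root/.metadata/workload_manager/pools/default")
        )
    )
    for _ in range(4)
]
for t in threads:
    t.start()
for t in threads:
    t.join()
```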
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:06.452385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:08.971218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636846196972405:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.971383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.971822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636846196972417:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:08.976497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:57:09.018675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636846196972419:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:57:09.080627Z node 1 :TX_PROXY ERROR: Actor# [1:7491636850491939792:2688] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:09.955393Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnhdgcetw0t8j2gkjzy5st, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FiZjQ0ODktOWY3YTZiNGUtM2UzZTUwNDgtNzUzMzBjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/"Create temporary directory "/Root/~backup_20250410T105710" in database2025-04-10T10:57:10.054277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636833312069659:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:10.054333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Process "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view"Write view into "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view/permissions.pb"Remove temporary directory "/Root/~backup_20250410T105710" in database2025-04-10T10:57:10.263799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view"Restore view "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpTIJzHP/view/permissions.pb"2025-04-10T10:57:10.607890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-10T10:57:10.819287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfnheg31enjvdyrz602epj7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FiZjQ0ODktOWY3YTZiNGUtM2UzZTUwNDgtNzUzMzBjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T10:57:13.318594Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491636865780644068:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:13.318658Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d78/r3tmp/tmpnhVGjx/pdisk_1.dat 2025-04-10T10:57:13.730853Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:13.751656Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:13.751912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:13.759490Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65313, node 4 2025-04-10T10:57:13.994330Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:13.994354Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:13.994362Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:13.994503Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:14.675696Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:18.319938Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491636865780644068:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:18.320029Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:18.462300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491636887255481612:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:18.462614Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:18.494095Z node 4 :TX_PROXY DEBUG: actor# [4:7491636865780644085:2137] Handle TEvProposeTransaction 2025-04-10T10:57:18.494135Z node 4 :TX_PROXY DEBUG: actor# [4:7491636865780644085:2137] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T10:57:18.494210Z node 4 :TX_PROXY DEBUG: actor# [4:7491636865780644085:2137] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [4:7491636887255481633:2635] 2025-04-10T10:57:18.628389Z node 4 :TX_PROXY DEBUG: Actor# [4:7491636887255481633:2635] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: ... vateTable: false } 2025-04-10T10:58:41.827529Z node 22 :TX_PROXY DEBUG: Actor# [22:7491637244011017431:4728] Handle TEvDescribeSchemeResult Forward to# [22:7491637244011017429:2453] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1744282720726 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 10 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Group" KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |69.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |69.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |69.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |69.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscover |69.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |69.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TBlobStorageProxyTest::TestProxyLongTailDiscover |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |69.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> TBlobStorageProxyTest::TestDoubleFailure >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |69.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-04-10T10:57:28.341831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636928770594032:2238];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:28.350385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010d6/r3tmp/tmpiRtTnw/pdisk_1.dat 2025-04-10T10:57:28.913522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:28.913625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:28.915678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:28.959062Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29603, node 1 2025-04-10T10:57:29.129110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:29.129139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:29.129145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:29.129254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:29.558195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:29616 2025-04-10T10:57:29.840881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:29.848027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T10:57:29.849806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:29.866394Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T10:57:29.873044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
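The TEvDescribeSchemeResult dump earlier in this excerpt describes `/Root/table` carrying a global vector index (`value_idx`, type EIndexTypeGlobalVectorKmeansTree) over key columns `Group`, `Value`, with settings metric: SIMILARITY_INNER_PRODUCT, vector_type: VECTOR_TYPE_FLOAT, vector_dimension: 768, clusters: 80, levels: 2. As a rough sketch of how such an index can be declared from the Python SDK — the WITH option names below mirror the Settings block in the dump but are assumptions about the YQL syntax, not verified against this build, and the endpoint/database are placeholders:

    # Sketch only: declaring a vector kmeans-tree index like `value_idx` above.
    # Option names (similarity, vector_type, vector_dimension, levels, clusters)
    # follow the Settings block in the dump; endpoint/database are placeholders.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    pool.execute_with_retries("""
        ALTER TABLE `table`
        ADD INDEX `value_idx`
        GLOBAL USING vector_kmeans_tree
        ON (`Group`, `Value`)
        WITH (similarity=inner_product,
              vector_type="float",
              vector_dimension=768,
              levels=2,
              clusters=80);
    """)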
2025-04-10T10:57:30.060157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:30.171849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:57:30.287259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.375379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.461617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.547108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.638892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.738575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:30.819347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:33.338326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636928770594032:2238];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:33.338402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:34.096345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636954540399085:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:34.096529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:34.096967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636954540399097:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:34.101031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-10T10:57:34.127614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636954540399099:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-10T10:57:34.214605Z node 1 :TX_PROXY ERROR: Actor# [1:7491636954540399150:2927] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:34.884254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnj5c6f44ny491vah7djrc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU0YWQ1NWItZmUzNWQ4OGMtYzVmMDBhYzAtOGY2ZDgwZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:57:34.935144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.983014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.057573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.145107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.207578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.254659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.312758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.382505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:35.462010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
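The repeated `Resource pool default not found or you don't have access permissions` warnings above are the expected first-use path on a fresh database: the workload service reports NOT_FOUND until KQP creates `.metadata/workload_manager/pools/default` itself, which is exactly the ESchemeOpCreateResourcePool operation, the `Transaction ... completed, doublechecking` retry, and the `path exist, request accepts it` race seen in this log. A minimal sketch of declaring a pool up front instead, assuming the documented CREATE RESOURCE POOL syntax; the pool name and setting values are illustrative, not taken from this log:

    # Sketch only: pre-creating a resource pool so the WorkloadService
    # NOT_FOUND warnings above do not fire on first query. The `default`
    # pool itself is auto-created by KQP, as the log shows; `test_pool`
    # and its limits are hypothetical.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholders
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    pool.execute_with_retries("""
        CREATE RESOURCE POOL test_pool WITH (
            CONCURRENT_QUERY_LIMIT = 10,
            QUEUE_SIZE = 100
        );
    """)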
2025-04-10T10:57:35.583460Z node 1 :HTTP INFO: Listening on http://127.0.0.1:29191 2025-04-10T10:57:36.587827Z node 1 :SQS INFO: Start SQS service actor 2025-04-10T10:57:36.587942Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-10T10:57:36.594229Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-10T10:57:36.594663Z node 1 :HTTP INFO: Listening on http://[::]:8231 2025-04-10T10:57:36.595323Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-10T10:57:36.619269Z node 1 :SQS INFO: Request SQS users list 2025-04-10T10:57:36.619314Z node 1 :SQS DEBUG: Request SQS queues list 2025-04-10T10:57:36.619354Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-10T10:57:36.619384Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-04-10T10:57:36.644989Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-04-10T10:57:36.645141Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromName (Parameter 'FROM_NAME (DataType 'Utf8String))) (let ba ... f2f0-92252c3c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-04-10T10:58:45.843655Z node 7 :SQS DEBUG: Request SendMessageBatch working duration: 189ms 2025-04-10T10:58:45.843819Z node 7 :SQS TRACE: HandleSqsResponse SendMessageBatch { RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "4918b299-29515da2-10953c19-7e50813a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9972b41-816ac3ae-b88cfa7b-5f7d6b1a" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-10T10:58:45.843961Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7491637260600033724:2504]: SendMessageBatch { RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "4918b299-29515da2-10953c19-7e50813a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9972b41-816ac3ae-b88cfa7b-5f7d6b1a" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-10T10:58:45.844065Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7491637260600033725:3630]. 
Found: 1 2025-04-10T10:58:45.844115Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program: ( (let queueIdNumber (Parameter 'QUEUE_ID_NUMBER (DataType 'Uint64))) (let queueIdNumberHash (Parameter 'QUEUE_ID_NUMBER_HASH (DataType 'Uint64))) (let timeFrom (Parameter 'TIME_FROM (DataType 'Uint64))) (let sentTsIdx '/Root/SQS/.FIFO/SentTimestampIdx) (let sentIdxRange '( '('QueueIdNumberHash queueIdNumberHash queueIdNumberHash) '('QueueIdNumber queueIdNumber queueIdNumber) '('SentTimestamp timeFrom (Uint64 '18446744073709551615)) '('Offset (Uint64 '0) (Uint64 '18446744073709551615)))) (let sentIdxSelect '( 'SentTimestamp 'Offset)) (let selectResult (SelectRange sentTsIdx sentIdxRange sentIdxSelect '('('"ItemsLimit" (Uint64 '1))))) (let messages (Member selectResult 'List)) (return (Extend (AsList (SetResult 'messages messages)) )) ) 2025-04-10T10:58:45.844207Z node 7 :SQS TRACE: Request [a05260f0-a038f64d-a047f2f0-92252c3c] HandleResponse: { SendMessageBatch { RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "4918b299-29515da2-10953c19-7e50813a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9972b41-816ac3ae-b88cfa7b-5f7d6b1a" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-04-10T10:58:45.844407Z node 7 :SQS DEBUG: Request [a05260f0-a038f64d-a047f2f0-92252c3c] Sending reply from proxy actor: { SendMessageBatch { RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "4918b299-29515da2-10953c19-7e50813a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9972b41-816ac3ae-b88cfa7b-5f7d6b1a" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "a05260f0-a038f64d-a047f2f0-92252c3c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-04-10T10:58:45.845649Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [a05260f0-a038f64d-a047f2f0-92252c3c] Got succesfult GRPC response. 2025-04-10T10:58:45.845924Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [a05260f0-a038f64d-a047f2f0-92252c3c] reply ok 2025-04-10T10:58:45.846091Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [a05260f0-a038f64d-a047f2f0-92252c3c] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 644 SourceAddress: 789a:b200:6050:0:609a:b200:6050:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-04-10T10:58:45.846346Z node 7 :HTTP DEBUG: (#37,[::1]:58400) <- (200 ) 2025-04-10T10:58:45.846496Z node 7 :HTTP DEBUG: (#37,[::1]:58400) connection closed 2025-04-10T10:58:45.849482Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-04-10T10:58:45.849533Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 4ms 2025-04-10T10:58:45.849764Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-04-10T10:58:45.849779Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-04-10T10:58:45.849907Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). Mode: COMPILE_AND_EXEC 2025-04-10T10:58:45.849987Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-04-10T10:58:45.850384Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: 2025-04-10T10:58:29.447149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:29.448382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:29.448491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:29.451197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:29.451595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:29.451999Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c30/r3tmp/tmpY7A9Z0/pdisk_1.dat 2025-04-10T10:58:30.084061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:30.337333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.471027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:30.471196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:30.488545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:30.488680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:30.507619Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:58:30.508226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:30.508676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:30.837646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:58:32.065013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1592:2958], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.065188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1602:2963], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.065286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.072377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:58:32.852236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1606:2966], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:58:33.125079Z node 1 :TX_PROXY ERROR: Actor# [1:1760:3054] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:33.510763Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-10T10:58:33.511098Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-10T10:58:33.511246Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T10:58:33.511426Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-10T10:58:33.511753Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-10T10:58:33.511931Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:58:33.512152Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-04-10T10:58:33.512296Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-04-10T10:58:33.512439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:33.512516Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T10:58:33.512928Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1789:2956] 2025-04-10T10:58:33.512999Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1789:2956], channels: 0 2025-04-10T10:58:33.513077Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T10:58:33.513127Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-10T10:58:33.513201Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1789:2956] 2025-04-10T10:58:33.513254Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1789:2956], channels: 0 2025-04-10T10:58:33.513336Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1789:2956], 2025-04-10T10:58:33.513385Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. 
Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1789:2956], 2025-04-10T10:58:33.513469Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1ZjUwYzktNDNkZmZhOGEtNzA3ZjFiZjQtM2E2YWVjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T10:58:33.526715Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkxzx5an8dajez29h1f74, Database: , DatabaseId: /Root, SessionId: ydb:// ... itSnapshotState, immediate tx, become ExecuteState 2025-04-10T10:58:46.366769Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1848:3100], task: 2, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T10:58:46.366869Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CT 1, CA [3:1848:3100], 2025-04-10T10:58:46.366930Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:1848:3100], 2025-04-10T10:58:46.367535Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing task: 1 on compute actor: [4:1850:2482] 2025-04-10T10:58:46.367619Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [4:1850:2482] 2025-04-10T10:58:46.367711Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Task: 1, output channelId: 1, dst task: 2, at actor [3:1848:3100] 2025-04-10T10:58:46.367809Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715663. 
Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [4:1850:2482], channels: 1 2025-04-10T10:58:46.367864Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:1848:3100], channels: 1 2025-04-10T10:58:46.368223Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1850:2482], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T10:58:46.368287Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1850:2482], CA [3:1848:3100], 2025-04-10T10:58:46.368357Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1850:2482], CA [3:1848:3100], 2025-04-10T10:58:46.369012Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1850:2482], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 611 Tasks { TaskId: 1 CpuTimeUs: 360 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 352 HostName: "ghrun-x5vqckr7qy" NodeId: 4 CreateTimeMs: 1744282726365 } MaxMemoryUsage: 1048576 } 2025-04-10T10:58:46.369142Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1850:2482], CA [3:1848:3100], 2025-04-10T10:58:46.369191Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1850:2482], CA [3:1848:3100], 2025-04-10T10:58:46.376683Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1840:3100] TxId: 281474976715663. 
Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1849:3100], finished: 0 2025-04-10T10:58:46.376796Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 1, to: [3:1849:3100] 2025-04-10T10:58:46.384418Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1849:3100], finished: 1 2025-04-10T10:58:46.384489Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 2, to: [3:1849:3100] 2025-04-10T10:58:46.385503Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1848:3100], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1416 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 821 FinishTimeMs: 1744282726385 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 209 BuildCpuTimeUs: 612 HostName: "ghrun-x5vqckr7qy" NodeId: 3 CreateTimeMs: 1744282726364 } MaxMemoryUsage: 1048576 } 2025-04-10T10:58:46.385638Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1848:3100] 2025-04-10T10:58:46.385742Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1850:2482], 2025-04-10T10:58:46.385801Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:1850:2482], 2025-04-10T10:58:46.386227Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. 
Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1850:2482], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1614 DurationUs: 9000 Tasks { TaskId: 1 CpuTimeUs: 516 FinishTimeMs: 1744282726384 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 164 BuildCpuTimeUs: 352 WaitInputTimeUs: 8700 HostName: "ghrun-x5vqckr7qy" NodeId: 4 StartTimeMs: 1744282726375 CreateTimeMs: 1744282726365 } MaxMemoryUsage: 1048576 } 2025-04-10T10:58:46.386301Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [4:1850:2482] 2025-04-10T10:58:46.386543Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T10:58:46.386616Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-10T10:58:46.386680Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1840:3100] TxId: 281474976715663. Ctx: { TraceId: 01jrfnmbsvb6nnxxmm6d8dgzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTYzNDMtZWE1MzE4MzMtNTA2MjE2YjQtYzcwNDA0YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003030s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-04-10T10:56:14.759685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636610681438132:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:14.759724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:56:15.267741Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d32/r3tmp/tmpbEjSwq/pdisk_1.dat 2025-04-10T10:56:15.882870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:15.882990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:15.909324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:15.944448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:15.973834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13344, node 1 2025-04-10T10:56:16.283034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000d32/r3tmp/yandexgvzMsp.tmp 2025-04-10T10:56:16.283075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000d32/r3tmp/yandexgvzMsp.tmp 2025-04-10T10:56:16.283319Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000d32/r3tmp/yandexgvzMsp.tmp 2025-04-10T10:56:16.283500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:56:16.358320Z INFO: TTestServer started on Port 14917 GrpcPort 13344 TClient is connected to server localhost:14917 PQClient connected to localhost:13344 WaitRootIsUp 'Root'... 
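The executer trace above walks through the channel protocol between the KQP executer and its compute actors: each data chunk arrives on a channel with a monotonically increasing seqNo and a finished flag ("Got result, channelId: 2 ... finished: 0"), the executer acks every chunk back to the producer ("Send ack to channelId: 2, seqNo: 1"), and only after every channel delivers finished: 1 does it collect the final per-task stats, report resource usage, and become ZombieState; the three { items { uint32_value: ... } } pairs are the returned rows. A minimal sketch of that per-channel bookkeeping, using hypothetical names rather than YDB's actual classes:

#include <cassert>
#include <cstdint>
#include <map>
#include <stdexcept>

// Hypothetical model of the ack loop seen in the trace: the consumer tracks
// the last acknowledged seqNo per channel and whether the producer signalled
// end-of-stream via the finished flag.
struct TChannelState {
    uint64_t LastAckedSeqNo = 0;
    bool Finished = false;
};

class TResultChannelTracker {
public:
    // Mirrors "Got result, channelId: N ... Send ack ... seqNo: K".
    // Returns the seqNo to acknowledge back to the producing compute actor.
    uint64_t OnData(uint64_t channelId, uint64_t seqNo, bool finished) {
        TChannelState& ch = Channels[channelId];
        if (seqNo != ch.LastAckedSeqNo + 1) {
            throw std::runtime_error("out-of-order chunk on channel");
        }
        ch.LastAckedSeqNo = seqNo;
        ch.Finished = finished;
        return seqNo;
    }

    // True once every known channel has delivered its finished: 1 chunk.
    bool AllDrained() const {
        for (const auto& [id, ch] : Channels) {
            if (!ch.Finished) {
                return false;
            }
        }
        return true;
    }

private:
    std::map<uint64_t, TChannelState> Channels;
};

int main() {
    TResultChannelTracker tracker;
    assert(tracker.OnData(2, 1, false) == 1); // first chunk, finished: 0
    assert(!tracker.AllDrained());
    assert(tracker.OnData(2, 2, true) == 2);  // second chunk, finished: 1
    assert(tracker.AllDrained());
    return 0;
}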
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:16.946124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:16.964831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:16.988208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:56:17.197877Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:17.218361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T10:56:19.762429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636610681438132:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:19.762534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:20.441607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636636451242720:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:20.441772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:20.446196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636636451242756:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:20.451980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:56:20.469313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636636451242758:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:56:20.767841Z node 1 :TX_PROXY ERROR: Actor# [1:7491636636451242822:2458] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:20.809759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:20.913130Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636636451242832:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:56:20.915222Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWM5OWIyNGQtNmUxMzRiYWMtMzU0OTc3ZmQtNWRkYTYzMTM=, ActorId: [1:7491636636451242718:2339], ActorState: ExecuteState, TraceId: 01jrfnfxe47p09t0ykj767d0hq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:56:20.918800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:20.919378Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:56:21.012770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491636640746210432:2644] === CheckClustersList. 
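The "=== Init DC:" step above seeds the cluster registry with a plain data query. A hedged sketch of the same UPSERT issued through the YDB C++ SDK — the include path and the pre-built TTableClient are assumptions, while GetSession and ExecuteDataQuery with a serializable transaction are the standard table-client API:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h> // include path varies by SDK version

using namespace NYdb;
using namespace NYdb::NTable;

// Seed /Root/PQ/Config/V2/Cluster the way the test harness does, assuming
// `client` is already connected to the test endpoint. Error handling for
// session creation is elided for brevity.
TStatus SeedClusters(TTableClient& client) {
    const TString query = R"(
        UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
        VALUES ("dc1", "localhost", true, true, 1000),
               ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
    )";
    auto session = client.GetSession().GetValueSync().GetSession();
    return session.ExecuteDataQuery(
        query,
        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
    ).GetValueSync();
}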
Ok 2025-04-10T10:56:26.669349Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:56:26.754504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491636662221047190:2794], Recipient [1:7491636614976405841:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:26.754554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:26.754577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:56:26.754981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491636662221047186:2791], Recipient [1:7491636614976405841:2185]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-10T10:56:26.755011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:56:26.824215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:56:26.824618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T10:56:26.824763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, schema: Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false, at schemeshard: 72057594046644480 2025-04-10T10:56:26.825652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: origin, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:56:26.825702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:56:26.825731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:56:26.825747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:56:26.825758Z node 1 :FLAT_TX_SCHEMESHARD D ... 
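The CreateTable propose above creates `origin` pre-split into 64 uniform shards (UniformPartitionsCount: 64) and pins the count through the partitioning policy (MinPartitionsCount/MaxPartitionsCount: 64). The equivalent DDL, sketched through the same assumed table client as the previous snippet — UNIFORM_PARTITIONS and the AUTO_PARTITIONING_*_PARTITIONS_COUNT options are standard YQL table settings, the helper itself is illustrative:

// DDL equivalent of the CreateTable propose in the trace above.
// UNIFORM_PARTITIONS pre-splits by the first key column; MIN/MAX pin it at 64.
NYdb::TStatus CreateOriginTable(NYdb::NTable::TTableClient& client) {
    const TString ddl = R"(
        CREATE TABLE `/Root/origin` (
            id Uint64,
            `order` Uint64,
            value Utf8,
            PRIMARY KEY (id, `order`)
        ) WITH (
            UNIFORM_PARTITIONS = 64,
            AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 64,
            AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 64
        );
    )";
    auto session = client.GetSession().GetValueSync().GetSession();
    return session.ExecuteSchemeQuery(ddl).GetValueSync();
}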
ition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2221 partNo 0 2025-04-10T10:57:37.095763Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2221 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5015218 count 20772 nextOffset 58828 batches 12 2025-04-10T10:57:37.095785Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2223 LocalSeqNo=2221 InitialSeqNo=(NULL) 2025-04-10T10:57:37.095800Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2223 partNo 0 2025-04-10T10:57:37.095823Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2223 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5015493 count 20773 nextOffset 58829 batches 12 2025-04-10T10:57:37.095845Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2225 LocalSeqNo=2223 InitialSeqNo=(NULL) 2025-04-10T10:57:37.095858Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2225 partNo 0 2025-04-10T10:57:37.095878Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2225 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5015768 count 20774 nextOffset 58830 batches 12 2025-04-10T10:57:37.095903Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2227 LocalSeqNo=2225 InitialSeqNo=(NULL) 2025-04-10T10:57:37.095922Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2227 partNo 0 2025-04-10T10:57:37.095944Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2227 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5016043 count 20775 nextOffset 58831 batches 12 2025-04-10T10:57:37.095968Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2229 LocalSeqNo=2227 InitialSeqNo=(NULL) 2025-04-10T10:57:37.095982Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2229 partNo 0 2025-04-10T10:57:37.096015Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 
'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2229 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5016318 count 20776 nextOffset 58832 batches 12 2025-04-10T10:57:37.096044Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2232 LocalSeqNo=2229 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096069Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2232 partNo 0 2025-04-10T10:57:37.096092Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2232 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5016593 count 20777 nextOffset 58833 batches 12 2025-04-10T10:57:37.096118Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2233 LocalSeqNo=2232 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096131Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2233 partNo 0 2025-04-10T10:57:37.096153Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2233 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5016868 count 20778 nextOffset 58834 batches 12 2025-04-10T10:57:37.096178Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2236 LocalSeqNo=2233 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096193Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2236 partNo 0 2025-04-10T10:57:37.096215Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2236 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5017143 count 20779 nextOffset 58835 batches 12 2025-04-10T10:57:37.096238Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2238 LocalSeqNo=2236 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096253Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2238 partNo 0 2025-04-10T10:57:37.096274Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2238 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5017418 count 20780 nextOffset 58836 batches 12 2025-04-10T10:57:37.096297Z node 1 
:PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2240 LocalSeqNo=2238 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096314Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2240 partNo 0 2025-04-10T10:57:37.096335Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2240 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5017693 count 20781 nextOffset 58837 batches 12 2025-04-10T10:57:37.096358Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2244 LocalSeqNo=2240 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096374Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2244 partNo 0 2025-04-10T10:57:37.096395Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2244 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5017968 count 20782 nextOffset 58838 batches 12 2025-04-10T10:57:37.096417Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2245 LocalSeqNo=2244 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096432Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2245 partNo 0 2025-04-10T10:57:37.096451Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2245 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5018243 count 20783 nextOffset 58839 batches 12 2025-04-10T10:57:37.096476Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2246 LocalSeqNo=2245 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096491Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2246 partNo 0 2025-04-10T10:57:37.096510Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2246 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5018518 count 20784 nextOffset 58840 batches 12 2025-04-10T10:57:37.096533Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2252 LocalSeqNo=2246 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096548Z node 
1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2252 partNo 0 2025-04-10T10:57:37.096567Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2252 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5018793 count 20785 nextOffset 58841 batches 12 2025-04-10T10:57:37.096591Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 process write for '\00072075186224037919' DisableDeduplication=0 SeqNo=2253 LocalSeqNo=2252 InitialSeqNo=(NULL) 2025-04-10T10:57:37.096603Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob processing sourceId '\00072075186224037919' seqNo 2253 partNo 0 2025-04-10T10:57:37.096626Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Topic 'origin/feed/streamImpl' partition 1 part blob complete sourceId '\00072075186224037919' seqNo 2253 partNo 0 FormedBlobsCount 0 NewHead: Offset 38056 PartNo 0 PackedSize 5019068 count 20786 nextOffset 58842 batches 12 |69.5%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TBlobStorageProxyTest::TestDoubleEmptyGet ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-04-10T10:58:10.583986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637110642740499:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.584035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6a/r3tmp/tmp9OKU43/pdisk_1.dat 2025-04-10T10:58:11.691708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:11.705258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:11.705353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:11.710424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:11.736703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2892, node 1 2025-04-10T10:58:12.098992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:12.099029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:12.099037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:12.099260Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:13.126911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:13.406744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12345 2025-04-10T10:58:13.999185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:14.599550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:14.655231Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:58:14.655270Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T10:58:14.655282Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T10:58:14.655296Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T10:58:19.162756Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637148077932996:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:19.197831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6a/r3tmp/tmpaIPMfq/pdisk_1.dat 2025-04-10T10:58:19.686618Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:19.739527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:19.739608Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:19.754420Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29270, node 4 2025-04-10T10:58:19.986341Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:19.986363Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:19.986371Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:19.986502Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:20.552974Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:20.716896Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10692 2025-04-10T10:58:21.014791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:21.395951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.511642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:21.551637Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-04-10T10:58:21.551659Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-04-10T10:58:21.551671Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-10T10:58:21.551685Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-10T10:58:27.821723Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637184767434303:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:27.821773Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6a/r3tmp/tmpUoUDll/pdisk_1.dat 2025-04-10T10:58:28.330899Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:28.433150Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:28.433249Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:28.451574Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1859, node 7 2025-04-10T10:58:28.742988Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:28.743013Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:28.743022Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:28.743168Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14481 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:29.139895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.284690Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14481 2025-04-10T10:58:29.624436Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:29.967453Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.093680Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.181929Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:30.211260Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-10T10:58:30.211308Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-10T10:58:30.211884Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-10T10:58:30.211905Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-10T10:58:34.688905Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637212494091452:2108];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:34.689191Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6a/r3tmp/tmpSsigSa/pdisk_1.dat 2025-04-10T10:58:34.978638Z node 10 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63103, node 10 2025-04-10T10:58:35.056349Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:35.056459Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:35.058338Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:35.086867Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:35.086902Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:35.086915Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:35.087080Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:35.511502Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:35.679198Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23453 2025-04-10T10:58:36.135268Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
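Each node's setup above repeats the same readiness handshake: connect a client, poll until "WaitRootIsUp 'Root' success.", then start issuing scheme operations. A sketch of such a poll using the scheme client — DescribePath is the real SDK call; the loop, timing, and helper name are assumptions about what the test's WaitRootIsUp does:

#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h> // include path varies by SDK version
#include <chrono>
#include <thread>

// Poll the scheme service until the root entry resolves, as the repeated
// "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." lines suggest.
bool WaitRootIsUp(NYdb::TDriver& driver, const TString& path, int attempts = 50) {
    NYdb::NScheme::TSchemeClient scheme(driver);
    for (int i = 0; i < attempts; ++i) {
        auto result = scheme.DescribePath(path).GetValueSync();
        if (result.IsSuccess()) {
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
    }
    return false;
}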
2025-04-10T10:58:39.682297Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637212494091452:2108];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:39.682393Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> TBlobStorageProxyTest::TestQuadrupleGroups |69.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |69.5%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> KqpErrors::ProposeErrorEvWrite [GOOD] >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: 2025-04-10T10:58:30.438004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:30.438647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:30.438723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:30.440356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:30.440646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:30.440984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c12/r3tmp/tmpCCBbv5/pdisk_1.dat 2025-04-10T10:58:30.913909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:31.137734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:58:31.237516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:31.237660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:31.243701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:31.243798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:31.262839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:58:31.263359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:31.263719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:31.577848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:58:32.835829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1592:2958], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.835953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1602:2963], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.836049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:32.842507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:58:33.651579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1606:2966], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:58:33.951065Z node 1 :TX_PROXY ERROR: Actor# [1:1760:3054] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:34.428029Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-10T10:58:34.428122Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-10T10:58:34.428183Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T10:58:34.428250Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-10T10:58:34.428341Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-10T10:58:34.431233Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-10T10:58:34.440557Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.453809s, cancelAfter: (empty maybe) 2025-04-10T10:58:34.440631Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-04-10T10:58:34.440704Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T10:58:34.440776Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-10T10:58:34.440832Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-10T10:58:34.441436Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-04-10T10:58:34.441692Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-10T10:58:34.441990Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-10T10:58:34.442053Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T10:58:34.442275Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-10T10:58:34.442604Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-10T10:58:34.442779Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T10:58:34.442953Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-10T10:58:34.443226Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-04-10T10:58:34.443335Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1786:2956] TxId: 281474976715660. 
Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-10T10:58:34.443788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnkyr15dz81zj6h79zybqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljMDhhNjUtZjc5MjM4ZDQtODUyZDNjY2QtOTc2MTViMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:34.4 ... ( (return (lambda '() (block '( (let $1 (Just (Uint32 '5))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1))))) )))) ) 2025-04-10T10:58:50.384057Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-04-10T10:58:50.384206Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:50.384280Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T10:58:50.384658Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2053:3205] 2025-04-10T10:58:50.384736Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2053:3205], channels: 0 2025-04-10T10:58:50.384823Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T10:58:50.384889Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2050:3205] TxId: 281474976715672. 
Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-10T10:58:50.384951Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2053:3205] 2025-04-10T10:58:50.385052Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2053:3205], channels: 0 2025-04-10T10:58:50.385156Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2053:3205], 2025-04-10T10:58:50.385260Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2053:3205], 2025-04-10T10:58:50.385343Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T10:58:50.386663Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2053:3205], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T10:58:50.386767Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2053:3205], 2025-04-10T10:58:50.386848Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2053:3205], 2025-04-10T10:58:50.388125Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. 
Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2053:3205], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 943 Tasks { TaskId: 1 CpuTimeUs: 159 FinishTimeMs: 1744282730387 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 29 BuildCpuTimeUs: 130 HostName: "ghrun-x5vqckr7qy" NodeId: 3 CreateTimeMs: 1744282730385 } MaxMemoryUsage: 1048576 } 2025-04-10T10:58:50.388272Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:2053:3205] 2025-04-10T10:58:50.388367Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[3:2049:3205] 2025-04-10T10:58:50.388459Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000943s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T10:58:50.406536Z node 3 :KQP_COMPUTE WARN: SelfId: [3:2056:3205], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:2040:3205]Got OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2056:3205]. Ignored this error. 2025-04-10T10:58:50.406741Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2049:3205], SessionActorId: [3:2040:3205], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2040:3205]. isRollback=0 2025-04-10T10:58:50.407343Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, ActorId: [3:2040:3205], ActorState: ExecuteState, TraceId: 01jrfnmft4838p949qzv78pbhb, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2050:3205] from: [3:2049:3205] 2025-04-10T10:58:50.407569Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-10T10:58:50.407694Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-10T10:58:50.407825Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-10T10:58:50.408049Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 943 } } , to ActorId: [3:2040:3205] 2025-04-10T10:58:50.408122Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-10T10:58:50.408297Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T10:58:50.408380Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2050:3205] TxId: 281474976715672. Ctx: { TraceId: 01jrfnmft4838p949qzv78pbhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-10T10:58:50.408898Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTRmMWNkYWQtMTlmNGJjM2EtNjc5NjBkNWQtZjgzODVkNGE=, ActorId: [3:2040:3205], ActorState: ExecuteState, TraceId: 01jrfnmft4838p949qzv78pbhb, Create QueryResponse for error on request, msg: >> TBackupTests::BackupUuidColumn[Zstd] >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery |69.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TPQCachingProxyTest::TestPublishAndForget |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:53.934723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:53.934871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:53.934910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:53.934948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:53.934991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:53.935022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:53.935073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:53.935185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:53.935518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:54.095060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:54.095137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:54.171201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:54.171637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:54.171852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:54.205448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:54.205850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:54.206736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.207339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:54.215596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.217471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:54.217564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.217785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:54.217846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.217891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:54.218112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.245437Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:54.497987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:54.498481Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.498821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:54.499272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:54.499389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.505605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.505803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:54.506056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.506244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:54.506303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:54.506367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:54.511781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.511882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:54.511929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:54.514758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.514851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.514914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.514977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.518675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:54.533622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:54.533905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:54.535028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.535227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:54.535293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.535627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:54.535679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.535842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:54.535950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:54.539159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:54.539233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.539483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.539547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:54.539831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.539880Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:54.539982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.540017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.540052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.540082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.540121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:54.540156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.540190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:54.540224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:54.540293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:54.540349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:54.540405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:54.547043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.547192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.547251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... xId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:58:55.078574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.078667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:58:55.079245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.079301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:55.080212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.080343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.080388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:58:55.080425Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:58:55.080490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:58:55.080595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5D9A6275-C7E9-4AC0-8AF1-7A021E8CEF90 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:18107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 44831699-CB49-4D41-8255-25CFEF284F1A amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:58:55.083847Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# 
[1:480:2439], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-10T10:58:55.083920Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:485:2442], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-04-10T10:58:55.083984Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:485:2442], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:58:55.084158Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:479:2438] 2025-04-10T10:58:55.084240Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:480:2439], sender# [1:479:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-10T10:58:55.084337Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:484:2440], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:58:55.099227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F96BA8AA-9550-4A47-9720-91DD848BDFDA amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-10T10:58:55.105592Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2439], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-10T10:58:55.105654Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:480:2439], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:58:55.105830Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:479:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:58:55.131933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.132017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:58:55.132242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.132378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.132452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 
102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.132642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.133267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.133307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T10:58:55.133452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.133534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.133577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.133612Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.133658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:58:55.133698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T10:58:55.133725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:58:55.133889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.138341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.138538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.139005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.139062Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:58:55.139174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:58:55.139214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.139259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:58:55.139292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.139339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:58:55.139449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:373:2340] message: TxId: 102 2025-04-10T10:58:55.139507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.139564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:58:55.139596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:58:55.139753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:58:55.142689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:58:55.142749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:456:2416] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:54.062116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:54.062256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.062301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:54.062335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:54.062377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:54.062401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:54.062458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.062543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-10T10:58:54.062881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:54.166108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:54.166226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:54.183559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:54.183896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:54.184063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:54.194129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:54.194397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:54.195184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.195637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:54.197888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.199417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:54.199500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.199698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:54.199752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.199795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:54.199998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.213019Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:54.469670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:54.469971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.470260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:54.470551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:54.470624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.479470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.479665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:54.479886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.479957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:54.480002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:54.480047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:54.489594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.489678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:54.489718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:54.495072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.495151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.495202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.495278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.504586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:54.515105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:54.515386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:54.516451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.516628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:54.516709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.517003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:54.517066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.517237Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:54.517343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:54.527343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:54.527425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.527631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.527686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:54.527968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.528028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:54.528133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.528170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.528215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.528248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.528286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:54.528332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.528373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:54.528405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:54.528482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:54.528529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:54.528583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:54.531758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.531885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.531922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:55.079385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.079555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:55.081782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:58:55.081932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:58:55.083290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.083440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:55.083507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:58:55.083633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:58:55.083756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:58:55.170870Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2384], attempt# 0 2025-04-10T10:58:55.215151Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2384], sender# [1:413:2382] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:31342 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 637A78D5-917F-4144-ABD4-34F8E66EE77A amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T10:58:55.225349Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:58:55.227497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:55.227549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 
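
A brief aside on the S3 traffic interleaved above: each backup test uploads a fixed set of objects per table — metadata.json, scheme.pb, and one data_NN.csv per shard (compressed to data_NN.csv.zst in the [Zstd] variants) — which is exactly the sequence of PUT requests the mock server logs. A minimal sketch of listing such an export with boto3 follows; the endpoint port, bucket name, and credentials are hypothetical (the mock binds a random localhost port per run, visible in the "Host:" headers above):

import boto3

# Endpoint, bucket, and credentials are stand-ins for the S3 mock.
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:31342",
    aws_access_key_id="mock",
    aws_secret_access_key="mock",
)

# Expected layout of a YDB table export, per the PUTs in this log:
#   metadata.json       -- export metadata
#   scheme.pb           -- serialized table scheme
#   data_00.csv[.zst]   -- one data file per shard (00, 01, ...)
for obj in s3.list_objects_v2(Bucket="export-bucket").get("Contents", []):
    print(obj["Key"], obj["Size"])
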
2025-04-10T10:58:55.227816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.227870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:58:55.227972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.228033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:58:55.229345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.229478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.229531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:58:55.229580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:58:55.229627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:58:55.229707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:31342 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 24A6F966-CE86-4BA9-AF3A-E14FF9F42F3F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-10T10:58:55.234751Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-10T10:58:55.235168Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-10T10:58:55.235376Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2384], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-10T10:58:55.238213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:31342 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F70B598A-57D0-4C1B-AB30-51D3D809EB22 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-10T10:58:55.241334Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-10T10:58:55.241397Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2384], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:58:55.241568Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:58:55.255720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.255800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:58:55.255988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.256101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:58:55.256157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.256210Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.256247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:58:55.256282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:58:55.256513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.258854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.259056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.259103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:58:55.259225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:58:55.259262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.259328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
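
For orientation in these FLAT_TX_SCHEMESHARD traces: each operation advances through numbered txstate values, surfaced above as "Change state for txid" records. The small reading aid below is reconstructed from this log only — state numbers and sub-operation names as printed, not the authoritative state machine:

# Transitions printed as "Change state for txid" in the trace above:
#   txid 1 (ALTER DATABASE //MyRoot): TCreateParts, TConfigureParts,
#     then TPropose handles TEvOperationPlan and the op completes.
#   txid 102 (TBackup): 128 -> 129 on TEvOperationPlan (plan step
#     assigned), 129 -> 240 once every shard acks TEvSchemaChanged.
OBSERVED = {
    1:   [(2, 3), (3, 128), (128, 240)],
    102: [(128, 129), (129, 240)],
}
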
2025-04-10T10:58:55.259369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.259407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:58:55.259490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:58:55.259561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.259615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:58:55.259653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:58:55.259784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:58:55.264703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:58:55.264778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:398:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-04-10T10:58:49.912906Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce5/r3tmp/tmpoqYuhS//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:58:49.913736Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce5/r3tmp/tmpoqYuhS//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-10T10:58:49.974314Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:49.974613Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:54.118511Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce5/r3tmp/tmptl80Nh//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:58:54.119418Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce5/r3tmp/tmptl80Nh//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-10T10:58:54.150752Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# 
true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:54.151034Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-04-10T10:58:56.945469Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:58:56.945593Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:58:56.965983Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:58:56.966099Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-10T10:58:56.966218Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-10T10:58:56.966329Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-10T10:58:56.966459Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:54.952415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:54.952518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.952564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:54.952610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:54.952656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:54.952684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:54.952753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.952855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:54.953193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:55.041751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:55.041812Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T10:58:55.084330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:55.084752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:55.084971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:55.098821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:55.099213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:55.100888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.101511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:55.105513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.108966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:55.109096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.109532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:55.109778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.109910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:55.110313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.119897Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:55.305637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:55.307343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.307662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:55.308043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:55.308159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.311759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.311977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:55.312211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.312312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:55.312367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:55.312413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:55.319829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.319959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:55.320019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:55.323198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.323268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.323341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:55.323425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:55.327412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:55.329958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:55.330254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:55.331493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.331736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:55.331824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:55.332190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:55.332258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:55.332458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:55.332561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:55.335284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:55.335333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.335536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.335605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:55.335882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.335972Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:55.336079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:55.336119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:55.336159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:55.336193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:55.336235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:55.336282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:55.336322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:55.336352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:55.336431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:55.336476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:55.336533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:55.339124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:55.339242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:55.339274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:55.733533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.733703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:55.735764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:58:55.735886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:58:55.737147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.737294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:55.737361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:58:55.737500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:58:55.737632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:58:55.748537Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2384], attempt# 0 2025-04-10T10:58:55.796256Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2384], sender# [1:413:2382] 2025-04-10T10:58:55.800218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:55.800290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:58:55.800553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:55.800624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:58:55.801108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.801161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:55.802056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.802142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:58:55.802202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:58:55.802245Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:58:55.802322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:58:55.802445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:58:55.804876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:28449 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 40B33E64-D395-4545-B120-3BF46FC57692 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T10:58:55.813987Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:28449 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5A1BA43F-222D-449E-BC88-3B97F7D014E1 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-10T10:58:55.826390Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-10T10:58:55.826535Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-10T10:58:55.826815Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2384], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:28449 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1424EE50-A2BE-4D8F-9D0B-26C8A1E94483 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-04-10T10:58:55.842466Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-04-10T10:58:55.842588Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2384], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:58:55.842812Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:58:55.859874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:58:55.859964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:58:55.860151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:58:55.860269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:58:55.860361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:55.860440Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.860486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:58:55.860529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:58:55.860765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:55.863864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.864318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:58:55.864374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:58:55.864493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:58:55.864534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.864578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-10T10:58:55.864640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.864691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:58:55.864780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:58:55.864853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:58:55.864915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:58:55.864973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:58:55.865130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:58:55.867536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:58:55.867591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:398:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-04-10T10:58:35.923133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:35.923332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.923464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00103d/r3tmp/tmpYkwQY6/pdisk_1.dat 2025-04-10T10:58:36.365474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:58:36.441137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:36.482201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:36.482336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:36.493899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:36.604726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:58:36.700260Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T10:58:36.700535Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:36.795134Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T10:58:36.795367Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:36.806416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:36.806585Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:58:36.808428Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:58:36.808509Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:58:36.808561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:58:36.808965Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:58:36.809178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:58:36.809264Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T10:58:36.809634Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:36.809740Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:58:36.811589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T10:58:36.811654Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T10:58:36.811698Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T10:58:36.811966Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:58:36.812068Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:58:36.812128Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T10:58:36.825483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:58:36.857156Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:58:36.857372Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:58:36.857538Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T10:58:36.857589Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:58:36.857627Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:58:36.857659Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:36.857757Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:58:36.857789Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T10:58:36.857836Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:58:36.857882Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T10:58:36.857902Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T10:58:36.857938Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T10:58:36.857959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:36.858610Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:58:36.858732Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:58:36.858794Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T10:58:36.858874Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T10:58:36.858981Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:36.859060Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:36.859139Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:58:36.859185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:36.859233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:36.859263Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:36.859316Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T10:58:36.859348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:36.859438Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T10:58:36.859584Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
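
Every record in these captures follows one shape — ISO-8601 timestamp, node number, :COMPONENT, severity, message — so a capture can be split back into structured records even after the runner concatenates them into long physical lines. A rough triage sketch (the file name is an assumption; framework lines such as FAKE_COORDINATOR and REQUEST carry no timestamp and simply fold into the preceding record's message):

```python
import re
from collections import Counter

# Records look like: "2025-04-10T10:58:36.859825Z node 1 :TX_DATASHARD DEBUG: ..."
REC = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) :(?P<component>\w+) (?P<severity>[A-Z]+):"
)

def records(text):
    """Yield (ts, node, component, severity, message) for each log record."""
    marks = list(REC.finditer(text))
    for cur, nxt in zip(marks, marks[1:] + [None]):
        end = nxt.start() if nxt else len(text)
        yield (*cur.group("ts", "node", "component", "severity"),
               text[cur.end():end].strip())

with open("ya_test_err.txt") as f:   # assumed: a saved copy of this capture
    counts = Counter((r[2], r[3]) for r in records(f.read()))
for (component, severity), n in counts.most_common(10):
    print(f"{component:>22} {severity:<6} {n:>5}")
```
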
2025-04-10T10:58:36.859825Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:58:36.859930Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T10:58:36.862316Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:36.873054Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T10:58:36.873166Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T10:58:36.924306Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T10:58:36.924516Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:58:36.924703Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:58:36.924787Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:58:36.925324Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:36.947504Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:58:36.947601Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:58:37.078663Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T10:58:37.078802Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T10:58:37.085698Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:58:37.085789Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:37.086187Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:58:37.086224Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:37.086419Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:37.086468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:37.086508Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:58:37.095110Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:37.095317Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T10:58:37.095529Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:37.095566Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:37.095605Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T10:58:37.095814Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:37.095920Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:58:37.096114Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:37.096176Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T10:58:37.104993Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... WaitScheme 2025-04-10T10:58:55.343360Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:740:2618], serverId# [4:743:2621], sessionId# [0:0:0] 2025-04-10T10:58:55.343551Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:741:2619], serverId# [4:744:2622], sessionId# [0:0:0] 2025-04-10T10:58:55.343904Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:58:55.343965Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.344526Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:55.344591Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:55.344655Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T10:58:55.345024Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:55.345216Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:58:55.345694Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:58:55.345742Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:55.345871Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:55.345952Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:58:55.347591Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 
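
To follow a single distributed transaction — such as txId 281474976715657 above — through its Propose / Planned / Complete records on each datashard, it is usually enough to split the capture at record timestamps and grep for the id. A rough sketch under the same assumed file name (the tablet-id pattern is only a heuristic for 72…-style ids like 72075186224037888):

```python
import re

# Split the capture at record timestamps, then keep the records that
# mention the transaction of interest.
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")
TABLET = re.compile(r"\b(7\d{13,})\b")  # heuristic: tablet/schemeshard ids

def trace(text, tx_id):
    for rec in re.split(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)", text):
        if tx_id in rec:
            ts = TS.match(rec)
            tablets = sorted(set(TABLET.findall(rec)))
            print(ts.group(0) if ts else "?", tablets, rec.strip()[:100])

with open("ya_test_err.txt") as f:   # same assumed capture as above
    trace(f.read(), "281474976715657")
```
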
2025-04-10T10:58:55.348150Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:55.350049Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:55.350106Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:55.350314Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:58:55.350642Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:55.350816Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:58:55.350995Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-10T10:58:55.351050Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.351312Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:55.351379Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T10:58:55.351761Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:58:55.352086Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:55.367636Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-10T10:58:55.367917Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:55.371471Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:55.371578Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T10:58:55.371641Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-10T10:58:55.371745Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:58:55.371825Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:58:55.371947Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.372970Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T10:58:55.373041Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:55.380021Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T10:58:55.380139Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:55.382080Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:55.382164Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:58:55.382219Z 
node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:58:55.382304Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:58:55.382383Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:58:55.382492Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:55.384392Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.384530Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.388639Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-10T10:58:55.388750Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:58:55.389470Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:58:55.395112Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T10:58:55.395215Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:58:55.395776Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:58:55.410124Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.410276Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.410365Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.417727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:58:55.431989Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.432202Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.618856Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.619021Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.623259Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:58:55.660465Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:55.743708Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnmmsf1gtxbmbn765qwa3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTFkMzJkODMtZjI3ZGVjMmQtNTU4MDM4ZDYtNGUwYjI4NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:55.747116Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:949:2744], serverId# [4:950:2745], sessionId# [0:0:0] 2025-04-10T10:58:55.747589Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-10T10:58:55.747901Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744282735747787 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T10:58:55.748134Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-10T10:58:55.759945Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T10:58:55.803400Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.809276Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:956:2750], serverId# [4:957:2751], sessionId# [0:0:0] 2025-04-10T10:58:55.816623Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:958:2752], serverId# [4:959:2753], sessionId# [0:0:0] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-04-10T10:58:35.231589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:35.231804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.231994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001061/r3tmp/tmpNjmt51/pdisk_1.dat 2025-04-10T10:58:35.806541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:58:35.889090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:35.929500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:35.929630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:35.941148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:36.047023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:58:36.115827Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T10:58:36.116083Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:36.203633Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T10:58:36.203861Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:36.227461Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:36.227604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:58:36.229343Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:58:36.229413Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:58:36.229490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:58:36.229878Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:58:36.230073Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:58:36.234237Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T10:58:36.234706Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:36.234796Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:58:36.236118Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T10:58:36.236191Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T10:58:36.236232Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T10:58:36.236500Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:58:36.236595Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:58:36.236660Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T10:58:36.250837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:58:36.293080Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:58:36.293310Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:58:36.293436Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T10:58:36.293494Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:58:36.293537Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:58:36.293587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:36.293703Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:58:36.293741Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T10:58:36.293793Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:58:36.293852Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T10:58:36.293874Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T10:58:36.293907Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T10:58:36.293929Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:36.294600Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:58:36.294704Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:58:36.294759Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T10:58:36.294813Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T10:58:36.294880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:36.294938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:36.294990Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:58:36.295032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:36.295077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:36.295101Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:36.295140Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T10:58:36.295170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:36.295265Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T10:58:36.295438Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
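The node-1 bootstrap above shows each datashard starting in the WaitScheme state ("Switched to work state WaitScheme tabletId ..."), where it declines to activate its change sender or register with the time cast until processing params arrive; only after the schema transaction completes does "Change sender activated" appear. A small sketch, under the same placeholder assumptions as above, that lists those transitions per tablet:

```python
import re

# Verbatim phrases from the TX_DATASHARD records above; these are
# log-scraping heuristics, not a stable interface.
SWITCH = re.compile(r"Switched to work state (\w+) tabletId (\d+)")
ACTIVE = re.compile(r"Change sender activated: at tablet: (\d+)")

def transitions(path):
    """Yield (tablet, event) pairs in the order they appear in the log."""
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            for m in SWITCH.finditer(line):
                yield m.group(2), f"state={m.group(1)}"
            for m in ACTIVE.finditer(line):
                yield m.group(1), "change sender active"

if __name__ == "__main__":
    for tablet, event in transitions("ya_test.log"):
        print(tablet, event)
```

For this run the expected output per tablet is state=WaitScheme followed later by "change sender active", matching the records above.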
2025-04-10T10:58:36.295712Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T10:58:36.295828Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T10:58:36.298128Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:36.310823Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T10:58:36.310947Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T10:58:36.356278Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T10:58:36.356441Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:58:36.356639Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:58:36.356728Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:58:36.357183Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:36.370756Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:58:36.370851Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:58:36.516437Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T10:58:36.516584Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T10:58:36.529714Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:58:36.529802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:36.534588Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T10:58:36.534680Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:36.535017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:36.535076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:36.535141Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:58:36.535424Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:36.535616Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T10:58:36.535755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T10:58:36.535783Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:36.535812Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T10:58:36.536011Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:36.536109Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:58:36.536254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:58:36.536326Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T10:58:36.538595Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... 8:55.226425Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:58:55.226466Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037890 2025-04-10T10:58:55.226696Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:58:55.226824Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:58:55.227174Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-10T10:58:55.227250Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:55.227334Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T10:58:55.227403Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-04-10T10:58:55.227796Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:58:55.228137Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:55.230865Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T10:58:55.230911Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:55.231436Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T10:58:55.231496Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:55.232753Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T10:58:55.232829Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T10:58:55.232889Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-10T10:58:55.232965Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 
281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:58:55.233024Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:58:55.233151Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.235613Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-04-10T10:58:55.235657Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:58:55.236192Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:58:55.236235Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:58:55.236284Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:58:55.236348Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:58:55.236396Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:58:55.236471Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:58:55.236583Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-10T10:58:55.236632Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:58:55.237752Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.237890Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.237952Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T10:58:55.238794Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T10:58:55.238839Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T10:58:55.238879Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-10T10:58:55.238935Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:58:55.238976Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:58:55.239074Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T10:58:55.253039Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:58:55.253479Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-10T10:58:55.253571Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got 
TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T10:58:55.253906Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:58:55.257749Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:58:55.258227Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T10:58:55.258280Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:58:55.258835Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-10T10:58:55.258894Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-10T10:58:55.283125Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.283234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:848:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.283333Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:55.299695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:58:55.318774Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.319285Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.319365Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T10:58:55.516056Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:58:55.516177Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T10:58:55.516236Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T10:58:55.519534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:852:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:58:55.555497Z node 4 :TX_PROXY ERROR: Actor# [4:934:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:55.652722Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnmmnhbvsftnz039n4n2c6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzQ5Y2U4ZGYtZWNkNjI4NTAtNWQ2NzFhMGItMjFkODM0MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:55.661107Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1039:2799], serverId# [4:1040:2800], sessionId# [0:0:0] 2025-04-10T10:58:55.661482Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-10T10:58:55.661786Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744282735661678 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T10:58:55.661989Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744282735661678 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T10:58:55.662116Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-10T10:58:55.673283Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T10:58:55.673382Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T10:58:55.678097Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1046:2805], serverId# [4:1047:2806], sessionId# [0:0:0] 2025-04-10T10:58:55.684283Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1048:2807], serverId# [4:1049:2808], sessionId# [0:0:0] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TTxDataShardMiniKQL::ReadConstant >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> 
SystemView::ShowCreateTableTemporary [FAIL] Test command err: 2025-04-10T10:55:10.019590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636336657381964:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:10.019643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb2/r3tmp/tmpNxHJ83/pdisk_1.dat 2025-04-10T10:55:10.540286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:10.540387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:10.551980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:55:10.559710Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26968, node 1 2025-04-10T10:55:10.600536Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:55:10.600565Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:55:10.774359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:10.774385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:10.774394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:10.774507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3831 TClient is connected to server localhost:3831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:12.136545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
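Both the change-collector test above and the failure dump below repeat the same bootstrap trio: KQP_WORKLOAD_SERVICE warns that "Resource pool default not found", the pool creator schedules a retry with "completed, doublechecking", and TX_PROXY reports "path exist, request accepts it" once a concurrent session has already created /Root/.metadata/workload_manager/pools/default. Read at face value these look like a benign creation race, so when triaging a log like this it helps to count them separately from unexplained warnings and errors. A sketch under the same placeholder assumptions; it reads the whole file so that records whose issue text continues on the next physical line stay in one chunk.

```python
import re
from collections import Counter

# Bootstrap noise that recurs throughout this log (taken at face value).
BENIGN = [re.compile(p) for p in (
    r"Resource pool default not found or you don't have access permissions",
    r"completed, doublechecking",
    r"error: path exist, request accepts it",
)]
# Records are often fused onto one physical line; split before each
# timestamp (zero-width split needs Python 3.7+).
RECORD = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})")

def triage(path):
    with open(path, encoding="utf-8", errors="replace") as f:
        text = f.read()  # fine for logs of this size
    counts, unexplained = Counter(), []
    for rec in RECORD.split(text):
        if " ERROR: " not in rec and " WARN: " not in rec:
            continue
        hit = next((p.pattern for p in BENIGN if p.search(rec)), None)
        if hit:
            counts[hit] += 1
        else:
            unexplained.append(" ".join(rec.split())[:160])
    return counts, unexplained

if __name__ == "__main__":
    counts, unexplained = triage("ya_test.log")
    print(counts)
    print(*unexplained[:20], sep="\n")
```

The "unexplained" bucket still contains ordinary test-harness noise (Table profiles, HIVE state changes), but genuine failures such as the KQP_PROXY deadline errors below surface there rather than being drowned in pool-bootstrap warnings.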
2025-04-10T10:55:14.994581Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T10:55:14.994625Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T10:55:15.024222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636336657381964:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:15.024296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:15.063276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636358132219420:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:15.063439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:15.067165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636358132219432:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:15.072214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:55:15.110375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636358132219434:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:55:15.193104Z node 1 :TX_PROXY ERROR: Actor# [1:7491636358132219512:2768] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:15.199454Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T10:55:15.199623Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0001B3EA8 2025-04-10T10:55:15.199676Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491636353837252085:2339], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfndxkbej0ajnv98qgefszr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM2MzhkMC00YTEyM2UwMC1hMTg3NzM3MC05Y2FlZTlkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T10:55:15.199847Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T10:55:15.199927Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491636353837252085:2339], queueSize: 1 2025-04-10T10:55:15.200732Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491636353837252085:2339], compileActor: [1:7491636358132219523:2352] 
2025-04-10T10:55:15.673880Z node 1 :KQP_YQL INFO: TraceId: 01jrfndxkbej0ajnv98qgefszr, SessionId: CompileActor 2025-04-10 10:55:15.648 INFO ydb-core-sys_view-ut(pid=1100713, tid=0x00007FA81D4BB640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T10:55:15.675601Z node 1 :KQP_YQL TRACE: TraceId: 01jrfndxkbej0ajnv98qgefszr, SessionId: CompileActor 2025-04-10 10:55:15.674 TRACE ydb-core-sys_view-ut(pid=1100713, tid=0x00007FA81D4BB640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T10:55:15.676133Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfndxkbej0ajnv98qgefszr, SessionId: CompileActor 2025-04-10 10:55:15.676 DEBUG ydb-core-sys_view-ut(pid=1100713, tid=0x00007FA81D4BB640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 420us 2025-04-10T10:55:15.677009Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfndxkbej0ajnv98qgefszr, SessionId: CompileActor 2025-04-10 10:55:15.676 DEBUG ydb-core-sys_view-ut(pid=1100713, tid=0x00007FA81D4BB640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T10:55:15.679184Z node 1 : ... : fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:25.631662Z node 16 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T10:57:25.631721Z node 16 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T10:57:25.694873Z node 16 :KQP_PROXY ERROR: TraceId: "01jrfnhr0m7xdfjb66c8hz3e5v", Request deadline has expired for 0.301664s seconds (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession 2025-04-10T10:57:29.273617Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7491636936598145652:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:29.273696Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb2/r3tmp/tmpvTuBQ9/pdisk_1.dat 2025-04-10T10:57:30.160332Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:30.276802Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:30.276979Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:30.293412Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23736, node 21 2025-04-10T10:57:30.582765Z node 21 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:30.582791Z node 21 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:30.582806Z node 21 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:30.583021Z node 21 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27223 TClient is connected to server localhost:27223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:32.456728Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:34.275012Z node 21 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[21:7491636936598145652:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:34.275099Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:37.557755Z node 21 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T10:57:37.557831Z node 21 :KQP_COMPILE_SERVICE INFO: Updated config (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession 2025-04-10T10:57:42.334424Z node 26 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[26:7491636989396741392:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:42.353029Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb2/r3tmp/tmptIdkkt/pdisk_1.dat 2025-04-10T10:57:43.035457Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:43.173043Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:43.173183Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:43.225929Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6282, node 26 2025-04-10T10:57:43.779162Z node 26 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:43.779193Z node 26 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:43.779208Z node 26 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:43.779492Z node 26 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24313 TClient is connected to server localhost:24313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:46.046449Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
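The SystemView failure above is not a crash in the system under test: KQP_PROXY reports "Request deadline has expired", the test then calls TCreateSessionResult::GetSession on that unsuccessful result, and the C++ SDK raises NYdb::Dev::TContractViolation ("Attempt to use result with not successfull status" is the SDK's own wording; the usual fix on the caller's side is to check the result status before extracting the session). When scanning a log for the root cause it is useful to pair each violation with the last deadline expiry before it; a heuristic sketch under the same placeholder assumptions:

```python
import re

DEADLINE = re.compile(r"Request deadline has expired for ([\d.]+)s seconds")
VIOLATION = "(NYdb::Dev::TContractViolation)"

def pair_violations(path):
    """For each SDK contract violation, report the most recent deadline
    expiry seen before it. Proximity is a heuristic, not proven causality."""
    last = None
    with open(path, encoding="utf-8", errors="replace") as f:
        for n, line in enumerate(f, 1):
            m = DEADLINE.search(line)
            if m:
                last = (n, m.group(1))
            if VIOLATION in line:
                yield n, last

if __name__ == "__main__":
    for line_no, cause in pair_violations("ya_test.log"):
        print(f"line {line_no}: TContractViolation; last deadline expiry: {cause}")
```

In this run every violation follows an expired CreateSession request on the same node, which is consistent with the deadline, not the session logic, being the underlying problem.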
2025-04-10T10:57:47.274455Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7491636989396741392:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:47.274555Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; GRpc shutdown warning: left infly: 1, spent: 3.620285 sec 2025-04-10T10:57:55.256009Z node 26 :KQP_PROXY ERROR: TraceId: "01jrfnjh6173hmxbza904f2c6j", Request deadline has expired for 4.079812s seconds 2025-04-10T10:57:55.256552Z node 26 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T10:57:55.256628Z node 26 :KQP_COMPILE_SERVICE INFO: Updated config (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession 2025-04-10T10:58:00.486251Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7491637067040112744:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:00.660726Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb2/r3tmp/tmp8PkbFo/pdisk_1.dat 2025-04-10T10:58:01.260802Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:01.344591Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:01.344745Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:01.362631Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25448, node 31 2025-04-10T10:58:01.913215Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:01.913248Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:01.913267Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:01.913577Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16060 TClient is connected to server localhost:16060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
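Interleaved with these failure dumps, the report keeps emitting verdict markers (">> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD]", ">> SystemView::ShowCreateTableTemporary [FAIL]") alongside queue lines that carry no verdict yet. A last sketch, same placeholder assumptions, that summarizes verdicts across the whole file; GOOD and FAIL occur in this log, while TIMEOUT and CRASHED are assumed possibilities, not observed here.

```python
import re
from collections import Counter

# ">>" markers with a bracketed verdict; the marker and the test name can be
# split across physical lines in this log, hence the whole-file read.
VERDICT = re.compile(r">>\s+(\S+)\s+\[(GOOD|FAIL|TIMEOUT|CRASHED)\]")

def summarize(path):
    with open(path, encoding="utf-8", errors="replace") as f:
        text = f.read()
    counts, failed = Counter(), []
    for name, status in VERDICT.findall(text):
        counts[status] += 1
        if status != "GOOD":
            failed.append(name)
    return counts, failed

if __name__ == "__main__":
    counts, failed = summarize("ya_test.log")
    print(dict(counts))
    print("non-GOOD:", *failed, sep="\n")
```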
2025-04-10T10:58:03.796793Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:05.456608Z node 31 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[31:7491637067040112744:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:05.456744Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; GRpc shutdown warning: left infly: 1, spent: 3.520759 sec 2025-04-10T10:58:13.251286Z node 31 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T10:58:13.251361Z node 31 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T10:58:13.259695Z node 31 :KQP_PROXY ERROR: TraceId: "01jrfnk2ge3ebkn83em2s7p4ba", Request deadline has expired for 4.355499s seconds (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TTxDataShardMiniKQL::ReadSpecialColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-04-10T10:58:50.272716Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001cf0/r3tmp/tmpuUzATe//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-10T10:58:50.295917Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:53.931809Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001cf0/r3tmp/tmpuUzATe//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-10T10:58:53.942116Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:55.367191Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001cf0/r3tmp/tmpuUzATe//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-10T10:58:55.377579Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 
HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:57.039240Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001cf0/r3tmp/tmpuUzATe//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-10T10:58:57.055273Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 
GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:58.399127Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001cf0/r3tmp/tmpuUzATe//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-10T10:58:58.433128Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 
BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
>> TTxDataShardMiniKQL::WriteEraseRead
>> TTxDataShardMiniKQL::Write
|69.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|69.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log}
|69.7%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
>> TTxDataShardMiniKQL::ReadConstant [GOOD]
>> TTxDataShardMiniKQL::ReadAfterWrite
>> TSchemeShardTopicSplitMergeTest::Boot
>> Yq_1::CreateQuery_With_Idempotency [GOOD]
>> Yq_1::CreateQuery_Without_Connection
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD]
Test command err:
2025-04-10T10:58:57.239767Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce0/r3tmp/tmpqfU5Mh//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:58:57.240237Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce0/r3tmp/tmpqfU5Mh//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-10T10:58:57.280554Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# 
true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:57.302546Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:58:59.422797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:58:59.422893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:58:59.422960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:58:59.423002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:58:59.423056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:58:59.423100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:58:59.423165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:58:59.423230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:58:59.423546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:58:59.525884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:58:59.525964Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:58:59.542291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:58:59.542575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T10:58:59.542764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T10:58:59.550072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T10:58:59.550331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:58:59.550964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T10:58:59.551333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T10:58:59.553327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:58:59.554661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard:
72057594046678944 2025-04-10T10:58:59.554722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.554901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:59.554947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.554986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:59.555187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.562393Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:59.716721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:59.716973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.717185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:59.717512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:59.717584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.720420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.720574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:59.720838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.720892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:59.720931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:59.720963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:59.723359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.723423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:59.723481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:59.725378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.725440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.725482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.725537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.729490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:59.731859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:59.732050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:59.733045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.733179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:59.733243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.733571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:59.733636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.733835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:59.733928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:59.736307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.736354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.736552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.736604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:59.736864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.736922Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:59.737012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.737049Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.737114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.737170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.737225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:59.737274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.737313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:59.737352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:59.737442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:59.737491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:59.737523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:59.739937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.740054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.740088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-10T10:59:00.083794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-10T10:59:00.083847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:00.095263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:59:00.095583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:59:00.095644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:59:00.096031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:59:00.096074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T10:59:00.096141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:59:00.135660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:00.135759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.135800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-10T10:59:00.135874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:00.177752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-10T10:59:00.177910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-10T10:59:00.177977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-10T10:59:00.178030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.178067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T10:59:00.178261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T10:59:00.178437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:59:00.178648Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:00.181217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.181608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:00.181668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:59:00.181882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:59:00.182202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.182247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-10T10:59:00.182288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:59:00.182362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.182401Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T10:59:00.182502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:59:00.182534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:00.182574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:59:00.182607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:00.182660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T10:59:00.182701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:00.182759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:59:00.182787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:59:00.182919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:59:00.182980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-10T10:59:00.183016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T10:59:00.183043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T10:59:00.184449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:00.184569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-04-10T10:59:00.184639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:59:00.184695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T10:59:00.184733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:59:00.185336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:00.185408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:00.185450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:59:00.185478Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T10:59:00.185507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:00.185568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-10T10:59:00.185631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:408:2375] 2025-04-10T10:59:00.192334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:59:00.192533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:59:00.192652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:59:00.192700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:546:2482] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-04-10T10:59:00.204692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.204952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.205157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-04-10T10:59:00.210361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:00.210560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1
TestModificationResult got TxId: 105, wait until txId: 105
TestWaitNotification wait txId: 105
2025-04-10T10:59:00.210915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion
2025-04-10T10:59:00.210966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105
2025-04-10T10:59:00.211415Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944
2025-04-10T10:59:00.211517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-04-10T10:59:00.211550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:642:2567]
TestWaitNotification: OK eventTxId 105
>> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::EnableSplitMerge
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:58:59.323844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:58:59.323954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:58:59.324127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:58:59.324173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:58:59.324225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:58:59.324274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:58:59.324348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:58:59.324430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T10:58:59.324813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:58:59.423038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:58:59.423100Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:58:59.464221Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:59.464588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:59.464783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:59.483987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:59.484274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:59.485022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.485488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:59.488517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.489983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.490044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.490261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:59.490309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.490350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:59.490513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.499090Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:59.637862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:59.638138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.638399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:59.638680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:59.638740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.641243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.641394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:59.641640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T10:58:59.641704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:59.641740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:59.641773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:59.643923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.643986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:59.644041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:59.645981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.646028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.646076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.646122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.649879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:59.651693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:59.651903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:59.652987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.653100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:59.653138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.653368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:59.653432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.653579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:59.653651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-10T10:58:59.655403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.655443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.655601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.655646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:59.655907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.655953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:59.656232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.656284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.656343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.656412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.656457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:59.656508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.656565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:59.656596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:59.656656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:59.656684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:59.656707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:59.658631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.658729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.658757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.113619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T10:59:00.113947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:59:00.114010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:59:00.114505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:00.114608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:59:00.114646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:642:2567] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-04-10T10:59:00.120307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.120602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.120851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-10T10:59:00.125845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.126068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T10:59:00.126447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T10:59:00.126500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T10:59:00.127018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T10:59:00.127131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T10:59:00.127181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:649:2574] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-04-10T10:59:00.130948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.131209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.131454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-10T10:59:00.133949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.134124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-10T10:59:00.134482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-10T10:59:00.134529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-10T10:59:00.135017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-10T10:59:00.135112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T10:59:00.135151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:656:2581] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-04-10T10:59:00.138564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.138792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.139098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-10T10:59:00.141827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.142013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-10T10:59:00.142501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-10T10:59:00.142552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-10T10:59:00.142991Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-10T10:59:00.143109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T10:59:00.143151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:663:2588] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-04-10T10:59:00.148541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.148834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.149087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-10T10:59:00.152394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:00.152626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1
TestModificationResult got TxId: 109, wait until txId: 109
TestWaitNotification wait txId: 109
2025-04-10T10:59:00.153045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion
2025-04-10T10:59:00.153095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109
2025-04-10T10:59:00.153622Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944
2025-04-10T10:59:00.153733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult
2025-04-10T10:59:00.153778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:670:2595]
TestWaitNotification: OK eventTxId 109
>> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD]
>> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD]
>> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD]
Test command err:
2025-04-10T10:58:49.006271Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
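The five rejections above (txIds 105-109) all exercise one split-boundary rule: for an ESchemeOpAlterPersQueueGroup with a Split, the SplitBoundary must be non-empty, strictly greater than the partition's FromBound, and strictly less than its ToBound, compared byte-wise. The sketch below is a minimal illustration of that rule as implied by the StatusInvalidParameter reasons in this output; it is not YDB's actual TAlterPQ code, and TKeyRange/ValidateSplitBoundary are assumed names. The reason strings (including the log's verbatim "less or equals" / "greate or equals" wording) are copied from the output above.

// Minimal sketch (assumed names, not YDB's real implementation) of the
// boundary validation implied by txIds 105-109 above.
#include <iostream>
#include <optional>
#include <string>

struct TKeyRange {
    std::string FromBound; // empty = partition unbounded from below
    std::string ToBound;   // empty = partition unbounded from above
};

// Returns std::nullopt if the boundary splits the partition into two
// non-empty halves; otherwise returns a rejection reason as in the log.
std::optional<std::string> ValidateSplitBoundary(const TKeyRange& range,
                                                 const std::string& boundary) {
    if (boundary.empty()) {
        return "Split boundary is empty";                              // txId 105
    }
    // std::string relational operators compare byte-wise (memcmp
    // semantics), matching the hex dumps in the log.
    if (!range.FromBound.empty() && boundary <= range.FromBound) {
        return "Split boundary less or equals FromBound of partition"; // txIds 106, 107
    }
    if (!range.ToBound.empty() && boundary >= range.ToBound) {
        return "Split boundary greate or equals ToBound of partition"; // txIds 108, 109
    }
    return std::nullopt;
}

int main() {
    // Partition 1 as shown in the DescribePath output further below:
    // FromBound 55..54 ("UUUUUUUUUUUUUUUT"), ToBound AA..A9.
    TKeyRange partition1{std::string(15, 'U') + "T",
                         std::string(15, '\xAA') + "\xA9"};
    for (const std::string& b : {std::string(),             // rejected, as txId 105
                                 std::string(1, '\x01'),    // rejected, as txId 106
                                 std::string(1, '\x7F')}) { // accepted: strictly inside
        auto err = ValidateSplitBoundary(partition1, b);
        std::cout << (err ? *err : std::string("OK")) << '\n';
    }
    return 0;
}

Under these assumptions the program prints the two rejection reasons and then OK; the accepted case matches the "\177" (0x7F) boundary that does split partition 1 successfully in the DescribePath output further below.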
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:58:49.037574Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:50.766077Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-10T10:58:50.800913Z :BS_LOCALRECOVERY CRIT: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:52.712937Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-10T10:58:52.773179Z :BS_LOCALRECOVERY CRIT: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 
HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:54.740968Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-04-10T10:58:54.764211Z :BS_LOCALRECOVERY CRIT: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 
GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:56.832043Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-04-10T10:58:56.874830Z :BS_LOCALRECOVERY CRIT: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 
BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T10:58:58.628056Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001d01/r3tmp/tmpsfa0Oc//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-04-10T10:58:58.637579Z :BS_LOCALRECOVERY CRIT: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 
LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
>> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD]
>> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD]
>> TTxDataShardMiniKQL::SelectRange
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageMultiShard
>> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
>> TTxDataShardMiniKQL::WriteEraseRead [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards
>> TTxDataShardMiniKQL::ReadAfterWrite [GOOD]
>> TTxDataShardMiniKQL::ReadNonExisting
>> TSchemeShardTopicSplitMergeTest::MargePartitions
>> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD]
>> TTxDataShardMiniKQL::WriteValueTooLarge
>> TTxDataShardMiniKQL::Write [GOOD]
>> TTxDataShardMiniKQL::TableStats
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T10:58:59.927202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T10:58:59.927322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T10:58:59.927399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T10:58:59.927448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T10:58:59.927510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T10:58:59.927544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T10:58:59.927644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured:
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.927735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:59.928114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:00.015099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:00.015168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:00.032302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:00.032661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:00.032910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:00.077338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:00.077637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:00.078469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:00.078950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:00.081194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.082860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:00.082937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.083166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:00.083227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:00.083269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:00.083461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.091881Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:00.257782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.258215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.258458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:00.258783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:00.258861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.261634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:00.261775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:00.261966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.262040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:00.262075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:00.262108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:00.264107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.264155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:00.264190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:00.265827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.265866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.265918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:00.265962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.270503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:00.272599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:00.272797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:00.274031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:00.274170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.274233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T10:59:00.274962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:00.275055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:00.275238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:00.275322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:00.277635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:00.277674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:00.277839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.277881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:00.278202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.278256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:00.278350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:00.278382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.278418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:00.278510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.278558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:00.278600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.278644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:00.278682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:00.278738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:00.278772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:00.278801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:00.281238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:00.281339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:00.281375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
y Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.938801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:760:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:762:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:764:2058] recipient: [1:763:2672] Leader for TabletID 72057594046678944 is [1:765:2673] sender: [1:766:2058] recipient: [1:763:2672] 2025-04-10T10:59:00.998908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:00.999040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:00.999098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:00.999147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:00.999191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:00.999242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:00.999310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:00.999388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:00.999779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.018851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.020645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:01.020936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:01.021078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:01.021111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:01.021435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:01.022134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-10T10:59:01.022294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:59:01.022341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:59:01.022442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.022591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 
72057594046678944 2025-04-10T10:59:01.022812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:59:01.023157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.023237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T10:59:01.023500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.023610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.023716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-10T10:59:01.023783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:59:01.023816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:01.023833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T10:59:01.023847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:59:01.023935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.024027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.024262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-10T10:59:01.024485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:59:01.024911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.025051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.025529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.025629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.025921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.026021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.026165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.026397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.026506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.026753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2025-04-10T10:59:01.027053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.027259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.027317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.027408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.038070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:01.038216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:01.038630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:01.038686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:01.038728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:01.038842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:59.695227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:59.695359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.695408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:59.695451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:59.695492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:59.695524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:59.695583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.695667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T10:58:59.695996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:59.817102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:59.817162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:59.836017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:59.836335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:59.836516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:59.846463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:59.846789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:59.847473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.847895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:59.850100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.851417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.851474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.851656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:59.851710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.851750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:59.851900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.864187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:00.020954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:00.021197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.021444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:00.021685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:00.021744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.031167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T10:59:00.031341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:00.031548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.031607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:00.031668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:00.031711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:00.034421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.034482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:00.034520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:00.038379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.038439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.038486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:00.038531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.042638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:00.045175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:00.045400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:00.046493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:00.046620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.046676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:00.046983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:00.047045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:00.047243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:00.047338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:00.049543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:00.049587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:00.049757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.049817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:00.050092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.050179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:00.050278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:00.050312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.050351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:00.050408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.050460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:00.050507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:00.050542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:00.050573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:00.050641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:00.050695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:00.050727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:00.052943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:00.053063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:00.053098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
} } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:00.740677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:742:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:745:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:746:2058] recipient: [1:744:2660] Leader for TabletID 72057594046678944 is [1:747:2661] sender: [1:748:2058] recipient: [1:744:2660] 2025-04-10T10:59:00.909454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:00.909586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:00.909640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:00.909682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:00.909742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:00.909779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:00.909839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:00.909908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:00.910295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:00.960283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:00.961969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:00.962198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:00.962443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:00.962481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:00.962606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:00.968462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-10T10:59:00.968637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:59:00.968694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:59:00.968781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at 
schemeshard: 72057594046678944 2025-04-10T10:59:00.968860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.969161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:59:00.969447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.969542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T10:59:00.969810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.969922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.970035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-10T10:59:00.970074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:59:00.970108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:00.970128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T10:59:00.970164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:59:00.970251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.970339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.970574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-10T10:59:00.970768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:59:00.971121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.971238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.971665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.971747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.971988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.972086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.972197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.972407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.972483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-04-10T10:59:00.972685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.972916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.973070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.977931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.978049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:00.992877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:00.992942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:00.993286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:00.993356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:00.993398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:00.994422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:747:2661] sender: [1:806:2058] recipient: [1:15:2062] 2025-04-10T10:59:01.072062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:59:01.072336Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 292us result status StatusSuccess 2025-04-10T10:59:01.072944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:59.182750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:59.182859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.182916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:59.182954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:59.183015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:59.183044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:59.183110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.183184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:59.183521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:59.285110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:59.285180Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-04-10T10:58:59.306298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:59.306640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:59.306839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:59.333366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:59.333677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:59.334500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.334987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:59.337724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.339459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.339531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.339767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:59.339825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.339885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:59.340094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.349183Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:59.498879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:59.499171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.499434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:59.499685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:59.499742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.503985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.504140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:59.504321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.504366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:59.504394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:59.504420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:59.506094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.506175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:59.506223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:59.509541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.509594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.509639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.509686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.513227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:59.515270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:59.515536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:59.516672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.516813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:59.516867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.517174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:59.517232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.517467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:59.517586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:59.523594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.523646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.523879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.523923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:59.524191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.524245Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:59.524360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.524407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.524461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.524492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.524528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:59.524575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.524614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:59.524645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:59.524715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:59.524747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:59.524776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:59.527119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.527237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.527274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2057594046678944 2025-04-10T10:59:01.500924Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.501053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-10T10:59:01.501233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-04-10T10:59:01.503509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-04-10T10:59:01.503636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-04-10T10:59:01.503746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-04-10T10:59:01.504109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-10T10:59:01.504312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-10T10:59:01.504395Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-10T10:59:01.504450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-10T10:59:01.504502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:01.506747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T10:59:01.507094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:59:01.507164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:59:01.507663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:01.507710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-10T10:59:01.507752Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:01.549802Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:01.549990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-10T10:59:01.550062Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-10T10:59:01.550113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:01.596155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-10T10:59:01.596400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-10T10:59:01.596497Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-10T10:59:01.596556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.596599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T10:59:01.596836Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-10T10:59:01.597052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:01.600615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.601324Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:01.601382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:59:01.601768Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:01.601824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-10T10:59:01.602284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.602362Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-10T10:59:01.602489Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:01.602531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:01.602584Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:01.602619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:01.602661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-10T10:59:01.602709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:01.602752Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T10:59:01.602797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T10:59:01.602958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:59:01.603006Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-10T10:59:01.603042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T10:59:01.604126Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:01.604249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:01.604302Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:59:01.604347Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T10:59:01.604387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:01.604472Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-10T10:59:01.604518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-10T10:59:01.610227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T10:59:01.610378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:59:01.610426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:672:2593] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-04-10T10:59:01.621944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:01.622286Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.622536Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-04-10T10:59:01.625801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:01.626021Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T10:59:01.630618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T10:59:01.630685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T10:59:01.631253Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T10:59:01.631406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T10:59:01.631447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:765:2674] TestWaitNotification: OK eventTxId 106 >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TPQCachingProxyTest::OutdatedSession >> TPQCachingProxyTest::MultipleSessions >> TPQCachingProxyTest::TestDeregister >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TPQCachingProxyTest::MultipleSessions [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:58.503376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:58.503488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:58.503574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:58.503627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-04-10T10:58:58.503679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:58.503716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:58.503805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:58.503881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:58.504272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:58.586980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:58.587045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:58.606543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:58.606857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:58.607036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:58.617277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:58.617615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:58.618398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:58.618828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:58.621974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:58.623365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:58.623426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:58.623604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:58.623658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:58.623699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:58.623871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:58.633607Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:58.851596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:58.851875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T10:58:58.852105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T10:58:58.852382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T10:58:58.852456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.855883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T10:58:58.856119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T10:58:58.856332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.856422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T10:58:58.856467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:58:58.856505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:58:58.863146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.863242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:58:58.863285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:58:58.871258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.871362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.871409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:58:58.871467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:58:58.883722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:58:58.888492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:58:58.888755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:58:58.889886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:58:58.890040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:58:58.890096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:58:58.890430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:58:58.890489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:58:58.890714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:58:58.890834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:58:58.897965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:58:58.898037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:58:58.898271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:58:58.898325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:58:58.898602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:58:58.898664Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:58:58.898797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:58:58.898837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:58:58.898905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:58:58.898974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:58:58.899026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:58:58.899071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:58:58.899110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:58:58.899151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:58:58.899225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:58:58.899268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:58:58.899313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:58:58.902031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:58:58.906264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:58:58.906357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... arts: 1/1
2025-04-10T10:59:02.243642Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-04-10T10:59:02.243684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-04-10T10:59:02.243742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false
2025-04-10T10:59:02.243803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-04-10T10:59:02.243850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0
2025-04-10T10:59:02.243894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0
2025-04-10T10:59:02.244077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-04-10T10:59:02.244129Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0
2025-04-10T10:59:02.244173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3
2025-04-10T10:59:02.244851Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105
2025-04-10T10:59:02.244958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105
2025-04-10T10:59:02.244997Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105
2025-04-10T10:59:02.245047Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3
2025-04-10T10:59:02.245097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-04-10T10:59:02.245215Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0
2025-04-10T10:59:02.251540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
TestModificationResult got TxId: 105, wait until txId: 105
TestWaitNotification wait txId: 105
2025-04-10T10:59:02.260290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion
2025-04-10T10:59:02.260355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105
2025-04-10T10:59:02.260945Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944
2025-04-10T10:59:02.261057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-04-10T10:59:02.261105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:756:2667]
TestWaitNotification: OK eventTxId 105
2025-04-10T10:59:02.922921Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:02.923240Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 371us result status StatusSuccess
2025-04-10T10:59:02.923909Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:03.023086Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-10T10:59:03.023413Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 381us result status StatusSuccess
2025-04-10T10:59:03.024121Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } }
TestModificationResults wait txId: 106
2025-04-10T10:59:03.028334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:59:03.028589Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944
2025-04-10T10:59:03.028752Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944
2025-04-10T10:59:03.035225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:03.035624Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-04-10T10:59:03.036082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-04-10T10:59:03.036133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-04-10T10:59:03.036718Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-04-10T10:59:03.036836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-04-10T10:59:03.036883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:771:2681]
TestWaitNotification: OK eventTxId 106
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD]
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD]
Test command err:
2025-04-10T10:59:00.002359Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot
2025-04-10T10:59:00.004057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:59:00.004131Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:00.043302Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored
2025-04-10T10:59:00.044027Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154]
2025-04-10T10:59:00.044430Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:59:00.126739Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T10:59:00.153481Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:59:00.155414Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T10:59:00.157491Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-10T10:59:00.157605Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-10T10:59:00.157662Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-10T10:59:00.158215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T10:59:00.159551Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T10:59:00.159683Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2
2025-04-10T10:59:00.326386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T10:59:00.365745Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-10T10:59:00.366057Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T10:59:00.366730Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213]
2025-04-10T10:59:00.366805Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-10T10:59:00.366856Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-10T10:59:00.366905Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:00.367245Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:00.367321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:00.367722Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-10T10:59:00.367863Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-10T10:59:00.367953Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:00.368006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:59:00.368067Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-10T10:59:00.368112Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-10T10:59:00.368182Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-10T10:59:00.368238Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-10T10:59:00.368292Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T10:59:00.368402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:00.368451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:00.368521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0]
2025-04-10T10:59:00.377490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-10T10:59:00.377595Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:59:00.377728Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-10T10:59:00.377941Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-10T10:59:00.378002Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-10T10:59:00.378078Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-10T10:59:00.378138Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:00.378202Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-10T10:59:00.378266Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-10T10:59:00.378325Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:00.378725Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-10T10:59:00.378769Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-10T10:59:00.378870Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-10T10:59:00.378911Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:00.378970Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-10T10:59:00.379006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-10T10:59:00.379049Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-10T10:59:00.379108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:00.379140Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-10T10:59:00.399105Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-10T10:59:00.399238Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:00.399290Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:00.399343Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-10T10:59:00.399461Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-10T10:59:00.400227Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:00.400300Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:00.400356Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0]
2025-04-10T10:59:00.400535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-10T10:59:00.400570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T10:59:00.400745Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:00.400796Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:00.400861Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-10T10:59:00.400908Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-10T10:59:00.409671Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 }
2025-04-10T10:59:00.409777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:00.410073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:00.410124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:00.410217Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:00.410266Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-10T10:59:00.410307Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-10T10:59:00.410366Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184
2025-04-10T10:59:00.410403Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue
2025-04-10T10:59:00.410455Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:00.410508Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue
2025-04-10T10:59:00.410571Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails
2025-04-10T10:59:00.410615Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails
2025-04-10T10:59:00.410829Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0
2025-04-10T10:59:00.410868Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:00.410893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails
2025-04-10T10:59:00.410930Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-10T10:59:00.410957Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-10T10:59:00.411030Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:00.411060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-10T10:59:00.411093Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-10T10:59:00.411131Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-10T10:59:00.411200Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184
2025-04-10T10:59:00.411260Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184
2025-04-10T10:59:00.411308Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184
2025-04-10T10:59:00.411372Z node 1 :TX_DATA ... tateInit, received event# 268828673, Sender [3:230:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvRestored
2025-04-10T10:59:03.064440Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:230:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T10:59:03.094197Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:232:2225]
2025-04-10T10:59:03.094545Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:59:03.101086Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo
2025-04-10T10:59:03.143118Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:59:03.143285Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T10:59:03.145604Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-10T10:59:03.145691Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-10T10:59:03.145756Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-10T10:59:03.146214Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T10:59:03.146452Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T10:59:03.146524Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:275:2225] in generation 3
2025-04-10T10:59:03.159464Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T10:59:03.159623Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184
2025-04-10T10:59:03.159750Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0
2025-04-10T10:59:03.159926Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2
2025-04-10T10:59:03.160210Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:280:2264]
2025-04-10T10:59:03.160277Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-10T10:59:03.160344Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184
2025-04-10T10:59:03.160389Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:03.160570Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute
2025-04-10T10:59:03.160711Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete
2025-04-10T10:59:03.161096Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:232:2225], Recipient [3:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:03.161157Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:03.161464Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-10T10:59:03.161590Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-10T10:59:03.161754Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:232:2225]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0}
2025-04-10T10:59:03.161802Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult
2025-04-10T10:59:03.161857Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0
2025-04-10T10:59:03.161908Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:03.162065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 232 RawX2: 12884904113 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3
2025-04-10T10:59:03.162339Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:232:2225]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 }
2025-04-10T10:59:03.162384Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult
2025-04-10T10:59:03.162432Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0
2025-04-10T10:59:03.162526Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:03.162599Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:59:03.162644Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-10T10:59:03.162686Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-10T10:59:03.162731Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-10T10:59:03.162773Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-10T10:59:03.162824Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T10:59:03.162951Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:282:2266] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-04-10T10:59:03.162989Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-04-10T10:59:03.163088Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:123:2149], Recipient [3:232:2225]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1
2025-04-10T10:59:03.163126Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult
2025-04-10T10:59:03.163171Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready
2025-04-10T10:59:03.163262Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184
2025-04-10T10:59:03.184075Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:278:2262] ServerId: [3:282:2266] }
2025-04-10T10:59:03.184165Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-10T10:59:03.240657Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 }
2025-04-10T10:59:03.240762Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState
2025-04-10T10:59:03.241065Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:288:2270], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:03.241106Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:03.241159Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2269], serverId# [3:288:2270], sessionId# [0:0:0]
2025-04-10T10:59:03.241398Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0
2025-04-10T10:59:03.241451Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:59:03.241565Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-10T10:59:03.242956Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx
2025-04-10T10:59:03.243110Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-10T10:59:03.243166Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx
2025-04-10T10:59:03.243213Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-10T10:59:03.243255Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies
2025-04-10T10:59:03.243303Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-04-10T10:59:03.243391Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184
2025-04-10T10:59:03.243449Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-10T10:59:03.243475Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies
2025-04-10T10:59:03.243502Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx
2025-04-10T10:59:03.243540Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-04-10T10:59:03.244043Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE
2025-04-10T10:59:03.244122Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-10T10:59:03.244192Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-10T10:59:03.244221Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx
2025-04-10T10:59:03.244249Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose
2025-04-10T10:59:03.244277Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose
2025-04-10T10:59:03.244326Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE
2025-04-10T10:59:03.244413Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete
2025-04-10T10:59:03.244453Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose
2025-04-10T10:59:03.244507Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations
2025-04-10T10:59:03.244544Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations
2025-04-10T10:59:03.244598Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-04-10T10:59:03.244621Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations
2025-04-10T10:59:03.244650Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished
2025-04-10T10:59:03.244741Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-10T10:59:03.244788Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose
2025-04-10T10:59:03.244833Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
>> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD]
Test command err:
2025-04-10T10:59:03.713979Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:59:03.714066Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-04-10T10:59:03.736219Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-10T10:59:03.736319Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1
2025-04-10T10:59:03.736480Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1
2025-04-10T10:59:03.736524Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1
2025-04-10T10:59:03.736619Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD]
Test command err:
2025-04-10T10:59:03.712361Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:59:03.712468Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-04-10T10:59:03.730805Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-10T10:59:03.730902Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1
2025-04-10T10:59:03.730950Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1
2025-04-10T10:59:03.731124Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD]
Test command err:
2025-04-10T10:59:03.722864Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T10:59:03.722975Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-04-10T10:59:03.742007Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-10T10:59:03.742100Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1
2025-04-10T10:59:03.742283Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1
2025-04-10T10:59:03.742350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1
2025-04-10T10:59:03.742389Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1
2025-04-10T10:59:03.742464Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1
2025-04-10T10:59:03.742549Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2
2025-04-10T10:59:03.742611Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2
2025-04-10T10:59:03.742660Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2
>> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD]
>> TPQCachingProxyTest::TestWrongSessionOrGeneration
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD]
Test command err:
2025-04-10T10:59:01.180102Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot
2025-04-10T10:59:01.181123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:59:01.181176Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:01.194115Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored
2025-04-10T10:59:01.194645Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154]
2025-04-10T10:59:01.194948Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:59:01.247555Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T10:59:01.258688Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:59:01.259229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T10:59:01.260988Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-10T10:59:01.261072Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-10T10:59:01.261125Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-10T10:59:01.261534Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T10:59:01.261730Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T10:59:01.261792Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2
2025-04-10T10:59:01.326425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T10:59:01.362767Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-10T10:59:01.362959Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T10:59:01.363051Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211]
2025-04-10T10:59:01.363091Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-10T10:59:01.363134Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-10T10:59:01.363171Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:01.363439Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.363494Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.363723Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-10T10:59:01.363814Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-10T10:59:01.363874Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:01.363912Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:59:01.363948Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-10T10:59:01.363981Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-10T10:59:01.364038Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-10T10:59:01.364088Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-10T10:59:01.364129Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T10:59:01.364236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.364276Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.364333Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0]
2025-04-10T10:59:01.366878Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-10T10:59:01.366956Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:59:01.367063Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-10T10:59:01.367225Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-10T10:59:01.367267Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-10T10:59:01.367324Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-10T10:59:01.367379Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:01.367424Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-10T10:59:01.367458Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-10T10:59:01.367511Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:01.367833Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-10T10:59:01.367873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-10T10:59:01.367913Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-10T10:59:01.367948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:01.367993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-10T10:59:01.368031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-10T10:59:01.368077Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-10T10:59:01.368116Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:01.368141Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-10T10:59:01.382474Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-10T10:59:01.382557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:01.382600Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:01.382643Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-10T10:59:01.382728Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-10T10:59:01.383269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.383321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.383365Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0]
2025-04-10T10:59:01.383471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-10T10:59:01.383499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T10:59:01.383634Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:01.383676Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:01.383712Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-10T10:59:01.383801Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-10T10:59:01.388096Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 }
2025-04-10T10:59:01.388160Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:01.388407Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.388451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.388504Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:01.388546Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-10T10:59:01.388611Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-10T10:59:01.388656Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184
2025-04-10T10:59:01.388699Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue
2025-04-10T10:59:01.388740Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:01.388779Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue
2025-04-10T10:59:01.388816Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails
2025-04-10T10:59:01.388872Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails
2025-04-10T10:59:01.389084Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0
2025-04-10T10:59:01.389133Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-10T10:59:01.389163Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails
2025-04-10T10:59:01.389216Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-10T10:59:01.389243Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-10T10:59:01.389308Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:01.389352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-10T10:59:01.389478Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-10T10:59:01.389519Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-10T10:59:01.389577Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184
2025-04-10T10:59:01.389612Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184
2025-04-10T10:59:01.389646Z node 1 :TX_DATASHARD TR ... 603508Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-10T10:59:03.603533Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose
2025-04-10T10:59:03.603567Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:03.607159Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 }
2025-04-10T10:59:03.607206Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState
2025-04-10T10:59:03.607413Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:304:2286], Recipient [3:236:2229]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:03.607436Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:03.607491Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:303:2285], serverId# [3:304:2286], sessionId# [0:0:0]
2025-04-10T10:59:03.607680Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? \005?\034?r\000\006\000?\036\003?x\005?&\006\0
2025-04-10T10:59:03.614532Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:59:03.614614Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-10T10:59:03.615330Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx
2025-04-10T10:59:03.615410Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed
2025-04-10T10:59:03.615473Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx
2025-04-10T10:59:03.615514Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-10T10:59:03.615550Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies
2025-04-10T10:59:03.615599Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615
2025-04-10T10:59:03.615659Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184
2025-04-10T10:59:03.615693Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed
2025-04-10T10:59:03.615717Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies
2025-04-10T10:59:03.615763Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx
2025-04-10T10:59:03.615793Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx
2025-04-10T10:59:03.616342Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE
2025-04-10T10:59:03.616410Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-10T10:59:03.616462Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed
2025-04-10T10:59:03.616491Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx
2025-04-10T10:59:03.616520Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose
2025-04-10T10:59:03.616546Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose
2025-04-10T10:59:03.616585Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE
2025-04-10T10:59:03.616649Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete
2025-04-10T10:59:03.616683Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose
2025-04-10T10:59:03.616738Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations
2025-04-10T10:59:03.616773Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations
2025-04-10T10:59:03.616814Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed
2025-04-10T10:59:03.616835Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations
2025-04-10T10:59:03.616864Z node 3
:TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-04-10T10:59:03.616922Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:03.616956Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-04-10T10:59:03.616995Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:01.794623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:01.794754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:01.794850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:01.794900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:01.794952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:01.794987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:01.795065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:01.795165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:01.795605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.893833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:01.893910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:01.911620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.911967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:01.912183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:01.921664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:01.921980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:01.922804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:01.923348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-04-10T10:59:01.925905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:01.927612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:01.927690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:01.927915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:01.927995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:01.928048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:01.928241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:01.936446Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:02.077662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:02.077992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.078505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:02.078817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:02.078897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.081606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:02.081810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:02.082050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.082125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:02.082195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:02.082274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:02.085689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.085792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:02.085849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 3 -> 128 2025-04-10T10:59:02.088221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.088321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.088371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:02.088448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:02.092791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:02.095489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:02.095805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:02.097074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:02.097252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:02.097317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:02.097745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:02.097812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:02.098038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:02.098211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:02.101121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:02.101188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:02.101404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:02.101468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:02.101743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T10:59:02.101798Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:02.101939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:02.101985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:02.102053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:02.102093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:02.102138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:02.102224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:02.102284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:02.102325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:02.102432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:02.102479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:02.102526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:02.105478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:02.105747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:02.105797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
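The "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240" lines above trace a schemeshard sub-operation through its state machine. Judging by the surrounding ProgressState messages, 2, 3, 128 and 240 correspond to CreateParts, ConfigureParts, Propose and Done; this mapping is inferred from the trace, and the real enum (TTxState::ETxState in the schemeshard sources) has many more states. A sketch under that assumption:

    #include <cstdint>
    #include <iostream>

    // Assumed meaning of the numeric states printed in the trace.
    enum EState : uint8_t {
        CreateParts    = 2,   // create tablets needed by the operation
        ConfigureParts = 3,   // send configuration to the created parts
        Propose        = 128, // propose the tx to the coordinator, await plan
        Done           = 240, // publish results and finish
    };

    static EState Next(EState s) {
        switch (s) {
            case CreateParts:    return ConfigureParts;
            case ConfigureParts: return Propose;
            case Propose:        return Done;
            default:             return Done;
        }
    }

    int main() {
        EState s = CreateParts;
        while (s != Done) {
            const EState n = Next(s);
            std::cout << "Change state for txid 1:0 "
                      << int(s) << " -> " << int(n) << "\n";
            s = n;
        }
    }

Its output matches the three transitions logged above for a no-shard AlterSubDomain, where CreateParts has nothing to create and falls straight through.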
678944 is [2:124:2150] sender: [2:644:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:124:2150] sender: [2:645:2058] recipient: [2:643:2563] Leader for TabletID 72057594046678944 is [2:646:2564] sender: [2:647:2058] recipient: [2:643:2563] 2025-04-10T10:59:03.591542Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:03.591662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:03.591712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:03.591769Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:03.591825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:03.591884Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:03.591966Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:03.592054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:03.592498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:03.614966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:03.616843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:03.617067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:03.617289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:03.617357Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:03.617531Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:03.618625Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-10T10:59:03.618762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:59:03.619592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T10:59:03.619708Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.619814Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.620111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:59:03.620463Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.620579Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T10:59:03.620779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.620872Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.620989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-10T10:59:03.621044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T10:59:03.621082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:03.621099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T10:59:03.621125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T10:59:03.621242Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.621344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.621671Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-10T10:59:03.621927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T10:59:03.622366Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.622493Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.622845Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.622945Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623251Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623343Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623549Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623630Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.623869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.624073Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
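The repeated "IncrementPathDbRefCount ... was N" lines during TTxInit print the reference count a path held before each bump; every restored shard, channel binding, or publication record that mentions a path increments it. A toy illustration of that bookkeeping (illustrative only, not the schemeshard's actual data structures):

    #include <cstdint>
    #include <iostream>
    #include <map>

    int main() {
        std::map<uint64_t, int> refs; // LocalPathId -> reference count
        const auto inc = [&refs](uint64_t pathId) {
            // operator[] default-constructs the count at 0; the post-increment
            // yields the value *before* the bump, which is what the log prints.
            std::cout << "IncrementPathDbRefCount for LocalPathId " << pathId
                      << " was " << refs[pathId]++ << "\n";
        };
        inc(2); // a shard record references USER_1 -> "was 0"
        inc(2); // -> "was 1"
        inc(3); // a shard record references Topic1 -> "was 0"
        inc(3); // -> "was 1"
    }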
2025-04-10T10:59:03.624236Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.624304Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.624361Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.631688Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:03.631760Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:03.632429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:03.632497Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:03.632552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:03.639839Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:646:2564] sender: [2:706:2058] recipient: [2:15:2062] 2025-04-10T10:59:03.701403Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:59:03.701875Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 465us result status StatusSuccess 2025-04-10T10:59:03.702630Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 
GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:59.171348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:59.171467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.171534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:59.171579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:59.171650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:59.171712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:59.171782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:59.171868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:59.172265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:59.275152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:59.275235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:59.299330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:59.299783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:59.300018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:59.309543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:59.309860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:59.312441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.313104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:59.316106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.317782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.317862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.318091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:59.318189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.318258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:59.318446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.329455Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:59.508003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:59.508312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.508574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:59.508805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:59.508858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.511251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.511440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:59.511658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.511727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:59.511771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:59.511813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:59.514059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.514171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:59.514217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:59.516083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.516142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.516199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.516265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.520935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:59.523615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:59.523860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:59.525062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:59.525231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:59.525296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.525658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:59.525744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:59.525954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:59.526065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:59.536073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:59.536146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:59.536431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:59.536485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:59.536814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:59.536878Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:59.536991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.537031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.537094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:59.537131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.537172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:59.537221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:59.537260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:59.537300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:59.537398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:59.537456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:59.537493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:59.545105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.545280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:59.545341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
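The Topic1 describe results in this report show three partitions whose KeyRange bounds are the odd-looking strings "UUUUUUUUUUUUUUUT" and "\252\252...\251". Those are a 16-byte keyspace split into three equal ranges with inclusive upper bounds: 0x55 is ASCII 'U', 0x54 is 'T', and the non-printable bytes 0xAA/0xA9 render as the octal escapes \252/\251. A sketch reconstructing the bounds; it assumes a compiler with the __int128 extension (GCC or Clang), and the formula is inferred from the printed values rather than taken from the topic-splitting code:

    #include <cstdio>

    int main() {
        using u128 = unsigned __int128;
        // floor(2^128 / 3) == 0x5555...55, since (2^128 - 1) is divisible by 3.
        const u128 third = (~(u128)0) / 3;
        for (int i = 1; i <= 2; ++i) {
            const u128 bound = (u128)i * third - 1; // inclusive ToBound of range i-1
            std::printf("ToBound(%d) = \"", i);
            for (int byte = 15; byte >= 0; --byte) {
                const unsigned c = (unsigned)(bound >> (8 * byte)) & 0xFF;
                if (c >= 0x20 && c < 0x7F)
                    std::printf("%c", c);     // printable: 'U' (0x55), 'T' (0x54)
                else
                    std::printf("\\%03o", c); // non-printable: \252, \251
            }
            std::printf("\"\n");
        }
    }

Compiled with g++ or clang++, it prints "UUUUUUUUUUUUUUUT" and "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251", matching the partition boundaries in the describe output.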
AT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T10:59:03.401188Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-10T10:59:03.401437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:03.404377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.405041Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:03.405091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:59:03.405393Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:03.405471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-10T10:59:03.405914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.405975Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-10T10:59:03.406117Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:03.406187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:03.406246Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:03.406291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:03.406342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-10T10:59:03.406414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:03.406475Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T10:59:03.406516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T10:59:03.406690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:59:03.406762Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-10T10:59:03.406809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T10:59:03.407842Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:03.407963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:03.408013Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:59:03.408060Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T10:59:03.408107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:03.408225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T10:59:03.414610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T10:59:03.422076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:59:03.422174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:59:03.422694Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:03.422805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:59:03.422849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:764:2673] TestWaitNotification: OK eventTxId 105 2025-04-10T10:59:04.044076Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:04.044502Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 425us result status StatusSuccess 2025-04-10T10:59:04.045347Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" 
ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:04.141963Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:59:04.142384Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 468us result status StatusSuccess 2025-04-10T10:59:04.143187Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |69.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |69.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-04-10T10:58:59.778106Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:58:59.779251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:59.779305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:59.790632Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:58:59.791203Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T10:58:59.791555Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:59.846999Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:58:59.872641Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:59.873253Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:58:59.875157Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T10:58:59.875242Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T10:58:59.875289Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T10:58:59.875693Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:58:59.875942Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:58:59.876009Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T10:58:59.943876Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInitRestored::Complete 2025-04-10T10:58:59.985465Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T10:58:59.985656Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:58:59.985763Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T10:58:59.985790Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T10:58:59.985820Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T10:58:59.985861Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:58:59.986052Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:58:59.986088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:58:59.986323Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T10:58:59.986406Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T10:58:59.986454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:58:59.986480Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:58:59.986509Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T10:58:59.986532Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:58:59.986584Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:58:59.986631Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T10:58:59.986663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:58:59.986765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:58:59.986816Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:58:59.986872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T10:58:59.988964Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T10:58:59.989030Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:58:59.989124Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:58:59.989389Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T10:58:59.989464Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T10:58:59.989512Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 
9437184 2025-04-10T10:58:59.989559Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:58:59.989606Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T10:58:59.989638Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T10:58:59.989685Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:58:59.990053Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:58:59.990095Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T10:58:59.990134Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T10:58:59.990182Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:58:59.990229Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T10:58:59.990264Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T10:58:59.990294Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T10:58:59.990324Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T10:58:59.990364Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T10:59:00.004040Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:00.004138Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:00.004178Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:00.004231Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T10:59:00.004306Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:00.004932Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:00.005005Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:00.005049Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T10:59:00.005171Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-10T10:59:00.005204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:59:00.005346Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:00.005408Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:00.005467Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-10T10:59:00.005514Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-10T10:59:00.009359Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { 
TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-10T10:59:00.009441Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:00.009690Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:00.009729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:00.009775Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:00.009830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:00.009876Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:00.009922Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-10T10:59:00.009954Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-10T10:59:00.009989Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:00.010016Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-10T10:59:00.010042Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T10:59:00.010108Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:00.010263Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-04-10T10:59:00.010297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:00.010336Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:00.010373Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:00.010421Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:00.010724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:00.010764Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:00.010924Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-10T10:59:00.010961Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:00.011001Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T10:59:00.011046Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T10:59:00.011080Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T10:59:00.011156Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:00.011210Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... 
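Annotation (not part of the log): the TX_DATASHARD trace above, and its continuation after the elision, repeatedly walks one operation through named execution units — CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails — advancing on Executed/DelayComplete and parking on Restart or "not ready". A minimal sketch of that pipeline shape; every name below is illustrative, not YDB source.

#include <cstdio>
#include <deque>
#include <functional>
#include <string>

// Statuses mirror the ones printed in the trace.
enum class EStatus { Executed, DelayComplete, Restart, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

// Drive one operation through its remaining units, as in the
// "Trying to execute ... on unit X" / "Advance execution plan ..." lines.
void RunPlan(std::deque<TUnit>& plan) {
    while (!plan.empty()) {
        TUnit& unit = plan.front();
        std::printf("Trying to execute on unit %s\n", unit.Name.c_str());
        EStatus st = unit.Execute();
        if (st == EStatus::Restart || st == EStatus::NotReady) {
            // Parked: resumed later (WaitForPlan waits for the plan step;
            // Restart waits for resources, e.g. the memory in this test).
            return;
        }
        // Executed and DelayComplete both advance the plan; DelayComplete
        // defers its side effects to the transaction's Complete() phase.
        std::printf("Advance execution plan, executing on unit %s\n", unit.Name.c_str());
        plan.pop_front();
    }
    std::printf("Execution plan has finished\n");
}

int main() {
    std::deque<TUnit> plan{
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    };
    RunPlan(plan);  // stops at WaitForPlan, like the log before TEvPlanStep arrives
}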
0534} resubmit=1) 2025-04-10T10:59:03.747809Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) to queue queue_transaction 2025-04-10T10:59:03.747852Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) from queue queue_transaction 2025-04-10T10:59:03.747881Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) to queue queue_transaction 2025-04-10T10:59:03.747921Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 1.881864 to 18.818640 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314])) 2025-04-10T10:59:03.747999Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-10T10:59:03.748055Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-04-10T10:59:03.748082Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229]) to queue queue_transaction 2025-04-10T10:59:03.748115Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229]) from queue queue_transaction 2025-04-10T10:59:03.748145Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229]) to queue queue_transaction 2025-04-10T10:59:03.748169Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229])) 2025-04-10T10:59:03.748220Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-10T10:59:03.748271Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-10T10:59:03.748298Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-04-10T10:59:03.749470Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-04-10T10:59:04.061623Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-04-10T10:59:04.061782Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:04.061890Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-04-10T10:59:04.061950Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-04-10T10:59:04.062018Z node 3 :TX_DATASHARD TRACE: 
Add [6:5] at 9437185 to execution unit CompleteOperation 2025-04-10T10:59:04.062080Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-04-10T10:59:04.062434Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-04-10T10:59:04.062487Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-04-10T10:59:04.062545Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-04-10T10:59:04.062599Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-04-10T10:59:04.062639Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-04-10T10:59:04.062675Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-04-10T10:59:04.062726Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-04-10T10:59:04.062777Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:04.062840Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T10:59:04.062898Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T10:59:04.062948Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T10:59:04.063131Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-10T10:59:04.063258Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-10T10:59:04.063499Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:04.063547Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:04.064907Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-04-10T10:59:04.365897Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:04.365972Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:04.366038Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:04.366072Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:04.366122Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-04-10T10:59:04.366193Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-04-10T10:59:04.366447Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-04-10T10:59:04.366480Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-04-10T10:59:04.366511Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-04-10T10:59:04.366545Z node 3 :TX_DATASHARD TRACE: Trying to 
execute [6:5] at 9437184 on unit CompletedOperations 2025-04-10T10:59:04.366577Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-04-10T10:59:04.366603Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:04.366630Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-04-10T10:59:04.366667Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:04.366693Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:04.366724Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:04.366752Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:04.366893Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-10T10:59:04.366960Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-10T10:59:04.367172Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) (release resources {0, 96990534}) 2025-04-10T10:59:04.367247Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314])) 2025-04-10T10:59:04.367388Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229]) (release resources {0, 96990534}) 2025-04-10T10:59:04.367426Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:258:2229])) 2025-04-10T10:59:04.381179Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-04-10T10:59:04.381268Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:04.381312Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-04-10T10:59:04.381396Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T10:59:04.381517Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T10:59:04.381578Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:04.382515Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:236:2229], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T10:59:04.382589Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T10:59:04.382653Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 
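Annotation (not part of the log): the RESOURCE_BROKER lines above show the allocate/finish bookkeeping for this test — both datashards request the same {cpu, memory} grant {0, 96990534} from queue_transaction, the queue's planned usage climbs on each insert (1.881864 to 18.818640 to 33.873553) and drops back to 0.000000 once both tasks finish and release exactly what they took. A sketch of the in-fly accounting only; the weighted "planned resource usage" figure comes from a queue formula not reproduced here.

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

struct TResources {
    std::uint64_t Cpu = 0;
    std::uint64_t Memory = 0;
};

// Tracks what each queue currently holds; tasks release exactly what they took.
class TBroker {
    std::map<std::string, TResources> InFly;

public:
    void Allocate(const std::string& queue, const TResources& r) {
        auto& q = InFly[queue];
        q.Cpu += r.Cpu;
        q.Memory += r.Memory;
        std::printf("allocate {%llu, %llu} from %s, in-fly memory %llu\n",
                    (unsigned long long)r.Cpu, (unsigned long long)r.Memory,
                    queue.c_str(), (unsigned long long)q.Memory);
    }

    void Finish(const std::string& queue, const TResources& r) {
        auto& q = InFly[queue];
        q.Cpu -= r.Cpu;
        q.Memory -= r.Memory;
        std::printf("release {%llu, %llu} to %s, in-fly memory %llu\n",
                    (unsigned long long)r.Cpu, (unsigned long long)r.Memory,
                    queue.c_str(), (unsigned long long)q.Memory);
    }
};

int main() {
    TBroker broker;
    TResources tx{0, 96990534};                // the grant both datashards request above
    broker.Allocate("queue_transaction", tx);  // tablet 9437185
    broker.Allocate("queue_transaction", tx);  // tablet 9437184
    broker.Finish("queue_transaction", tx);
    broker.Finish("queue_transaction", tx);    // usage back to zero, as in the log
}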
2025-04-10T10:59:04.382945Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-04-10T10:59:04.382991Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T10:59:04.383023Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-04-10T10:59:04.383077Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:99:2134], exec latency: 1 ms, propose latency: 4 ms 2025-04-10T10:59:04.383127Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-10T10:59:04.383166Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-10T10:59:04.383399Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:347:2314], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-10T10:59:04.383434Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T10:59:04.383461Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 |69.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-04-10T10:59:04.953175Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T10:59:04.953293Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T10:59:04.974533Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:04.974607Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-04-10T10:59:04.974726Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-10T10:59:04.974768Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-04-10T10:59:04.974818Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-04-10T10:59:04.974860Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-04-10T10:59:04.974895Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-10T10:59:04.974965Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter |69.8%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] |69.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |69.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject |69.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |69.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |69.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:02.795940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:02.796074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:02.796144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:02.796192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:02.796247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:02.796279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:02.796351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:02.796432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:02.796819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:02.889461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:02.889554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:02.909307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:02.909646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:02.909889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:02.929057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:02.929432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:02.930294Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:02.930818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:02.933841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:02.935518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:02.935593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:02.935835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:02.935908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:02.935953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:02.936164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:02.945372Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:03.087383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:03.087725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.087993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:03.088324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:03.088398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.093586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:03.093777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:03.093990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.094057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:03.094103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:03.094166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:03.096694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.096764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:03.096808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:03.100567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.100665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.100738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:03.100805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:03.105218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:03.109841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:03.110112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:03.111227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:03.111380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:03.111429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:03.111692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:03.111737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:03.111919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:03.111998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:03.116097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:03.116164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:03.116452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
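Annotation (not part of the log): the TPQCachingProxyTest::TestWrongSessionOrGeneration output a few entries above demonstrates generation fencing — session1 is registered with generation 2, so the later register and stage attempts carrying generation 1 are logged as stale and their data ignored. A hypothetical sketch of that guard (not the dread-cache source):

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

class TDirectReadCache {
    std::map<std::string, std::uint32_t> Sessions;  // session -> current generation

public:
    bool Register(const std::string& session, std::uint32_t generation) {
        auto [it, inserted] = Sessions.try_emplace(session, generation);
        if (!inserted && it->second >= generation) {
            // "attempted to register server session ... with stale generation, ignored"
            std::printf("stale generation %u for %s (have %u), ignored\n",
                        generation, session.c_str(), it->second);
            return false;
        }
        it->second = generation;
        return true;
    }

    bool StageRead(const std::string& session, std::uint32_t generation) {
        auto it = Sessions.find(session);
        // Only the generation that currently owns the session may stage reads;
        // anything older is dropped ("Data ignored" in the log).
        return it != Sessions.end() && it->second == generation;
    }
};

int main() {
    TDirectReadCache cache;
    cache.Register("session1", 2);   // registered with generation 2
    cache.Register("session1", 1);   // stale, ignored
    cache.StageRead("session1", 1);  // stale, data ignored
}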
2025-04-10T10:59:03.116500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:03.116830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:03.116938Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:03.117053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:03.117091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:03.117153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:03.117192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:03.117236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:03.117317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:03.117360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:03.117395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:03.117526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:03.117580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:03.117622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:03.127035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:03.127236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:03.127282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-10T10:59:05.398695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-10T10:59:05.398741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:05.400741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T10:59:05.401042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T10:59:05.401099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T10:59:05.401643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:59:05.401705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T10:59:05.401756Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-10T10:59:05.438511Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:05.438681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:05.438753Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-10T10:59:05.438861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:05.497776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-10T10:59:05.497998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-10T10:59:05.498090Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-10T10:59:05.498228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:05.498284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T10:59:05.498542Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T10:59:05.498766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:59:05.498847Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:05.502081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:59:05.507047Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:05.507144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:59:05.507380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:59:05.507626Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:05.507681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-10T10:59:05.507766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T10:59:05.508222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T10:59:05.508292Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T10:59:05.508420Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:59:05.508482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:05.508530Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T10:59:05.508576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:05.508629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T10:59:05.508685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T10:59:05.508737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T10:59:05.508778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T10:59:05.508988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:59:05.509036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-10T10:59:05.509100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T10:59:05.509138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T10:59:05.511335Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:05.511472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
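Annotation (not part of the log): the KeyRange bounds in the DescribeScheme dumps earlier in this section — ToBound "UUUUUUUUUUUUUUUT" (0x55 fifteen times, then 0x54) and FromBound "\252...\251" (0xAA fifteen times, then 0xA9) — are an even three-way split of the 16-byte key space. They are consistent with floor(i * 2^128 / 3) - 1 for i = 1, 2; whether schemeshard rounds exactly this way is an assumption. A sketch that reproduces those bytes:

#include <array>
#include <cstdint>
#include <cstdio>

using TBound = std::array<std::uint8_t, 16>;

// floor(i * 2^128 / n) - 1, computed over 17 big-endian bytes (top byte holds i).
TBound SplitBound(unsigned i, unsigned n) {
    std::uint8_t num[17] = {static_cast<std::uint8_t>(i)};  // i * 2^128
    std::uint8_t quo[17] = {};
    unsigned rem = 0;
    for (int k = 0; k < 17; ++k) {                // schoolbook division by n
        unsigned cur = rem * 256 + num[k];
        quo[k] = static_cast<std::uint8_t>(cur / n);
        rem = cur % n;
    }
    TBound out{};
    for (int k = 0; k < 16; ++k) out[k] = quo[k + 1];
    for (int k = 15; k >= 0; --k) {               // subtract 1 with borrow
        if (out[k]-- != 0) break;
    }
    return out;
}

int main() {
    for (unsigned i = 1; i < 3; ++i) {
        TBound b = SplitBound(i, 3);
        std::printf("bound %u: ", i);
        for (auto byte : b) std::printf("%02X", byte);
        std::printf("\n");  // prints 55...5554 and AA...AAA9, matching the dump
    }
}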
2025-04-10T10:59:05.511524Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:59:05.511577Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T10:59:05.511629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T10:59:05.514394Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:05.514511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T10:59:05.514547Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T10:59:05.514585Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T10:59:05.514627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:05.514726Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-10T10:59:05.514780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-10T10:59:05.520367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:59:05.522810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T10:59:05.522964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T10:59:05.523018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:547:2480] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-04-10T10:59:05.539331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:05.539614Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:05.539839Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-04-10T10:59:05.544362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:05.544595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T10:59:05.544921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:59:05.544972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:59:05.545463Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:05.545575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:59:05.545615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:643:2565] TestWaitNotification: OK eventTxId 105 |69.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject >> TS3WrapperTests::CompleteUnknownUpload >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] |69.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |69.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] |69.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:03.931835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:03.931929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:03.931971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:03.931997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:03.932033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:03.932062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:03.932103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:03.932151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:03.932402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:04.015452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:04.015510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:04.042647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:04.042983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:04.043161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:04.064528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:04.064829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:04.065557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:04.065995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:04.077363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:04.078926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:04.079008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:04.079305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:04.079356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:04.079415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:04.079591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.087648Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:04.250505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:04.250805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.251044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:04.251268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:04.251332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.255400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:04.255576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:04.255776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.255835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:04.255871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:04.255921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:04.258579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.258641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:04.258680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:04.260980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.261041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.261117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:04.261165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:04.265226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:04.267557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:04.267793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:04.268797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:04.268937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:04.268986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:04.269251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:04.269302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T10:59:04.269518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:04.269615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:04.272142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:04.272194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:04.272371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:04.272437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:04.272695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:04.272751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:04.272921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:04.272955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:04.273009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:04.273043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:04.273080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:04.273124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:04.273164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:04.273195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:04.273285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:04.273322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:04.273358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:04.276024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:04.276164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:04.276204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
iber for txId 105: send EvNotifyTxCompletion 2025-04-10T10:59:06.007879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T10:59:06.008324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:06.008374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-10T10:59:06.008421Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-10T10:59:06.043633Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:06.043825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:06.043886Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-10T10:59:06.043937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T10:59:06.089474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-10T10:59:06.089623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-10T10:59:06.089683Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-10T10:59:06.089730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:06.089762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T10:59:06.089903Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-10T10:59:06.090053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:06.092772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:06.093212Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:06.093253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T10:59:06.093492Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:06.093536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 
2025-04-10T10:59:06.093593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T10:59:06.093629Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-10T10:59:06.093710Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:06.093737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:06.093768Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T10:59:06.093791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:06.093819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-10T10:59:06.093849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T10:59:06.093881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T10:59:06.093907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T10:59:06.094007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T10:59:06.094037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-10T10:59:06.094063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T10:59:06.094780Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:06.094868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T10:59:06.094896Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T10:59:06.094922Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T10:59:06.094951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T10:59:06.095016Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-10T10:59:06.095043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-10T10:59:06.101836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T10:59:06.101968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T10:59:06.102011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:678:2597] TestWaitNotification: OK eventTxId 105 2025-04-10T10:59:06.113075Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T10:59:06.113304Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 252us result status StatusSuccess 2025-04-10T10:59:06.113826Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 
OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TS3WrapperTests::PutObject [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::GetObject >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink |69.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject |69.9%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-04-10T10:59:06.976776Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F09F3A62-1109-4739-808A-DBF650CD4462, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:24784 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E85164F9-3100-4A6B-A1E2-0114856D808D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-10T10:59:06.981769Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F09F3A62-1109-4739-808A-DBF650CD4462, response# No response body. 
>> TS3WrapperTests::GetObject [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-04-10T10:59:07.131640Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 65D12AD8-DA27-4223-AC68-FBDA07D7F730, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:24695 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 52D29414-A29A-406B-8E1D-B604986BC0B7 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-10T10:59:07.148845Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 65D12AD8-DA27-4223-AC68-FBDA07D7F730, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } >> TS3WrapperTests::GetUnknownObject [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-04-10T10:59:07.142277Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A1A5058C-1DA6-47E8-B005-8476C3966686, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:23471 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8987C64F-7B55-4C2E-BE98-8DCD7DC4BF42 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-04-10T10:59:07.151900Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A1A5058C-1DA6-47E8-B005-8476C3966686, response# >> TS3WrapperTests::AbortUnknownUpload ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-04-10T10:58:11.248559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637112870426809:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:11.248723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3e/r3tmp/tmpCF5WKq/pdisk_1.dat 2025-04-10T10:58:12.293203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:12.293331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:12.370037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:12.388396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:12.444537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21953, node 1 2025-04-10T10:58:12.607366Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root 
Strong=0 2025-04-10T10:58:12.638314Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:12.714108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:12.714134Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:12.714141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:12.714283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:13.778729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:14.028652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8313 2025-04-10T10:58:14.479252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:15.103070Z node 1 :PERSQUEUE ERROR: [PQ: 72075186224037888, Partition: 0, State: StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-04-10T10:58:16.254284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637112870426809:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:16.254363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:19.673677Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637151376667999:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:19.673727Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3e/r3tmp/tmp85wBea/pdisk_1.dat 2025-04-10T10:58:20.006077Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:20.095474Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:20.095561Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:20.103773Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8304, node 4 2025-04-10T10:58:20.308282Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:20.308305Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:20.308312Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:20.308442Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:20.747854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:58:20.940158Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21626 2025-04-10T10:58:21.338437Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:21.366742Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-10T10:58:24.674263Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637151376667999:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:24.674376Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:34.977680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:58:34.977713Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:52.832344Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637290332924032:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:53.032150Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3e/r3tmp/tmpQJ0nf2/pdisk_1.dat 2025-04-10T10:58:53.391754Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:53.392021Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:53.397990Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:53.403683Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27594, node 7 2025-04-10T10:58:53.775074Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:53.775100Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:53.775108Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:53.779553Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13267 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:54.248413Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:54.367746Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13267 2025-04-10T10:58:54.695608Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:00.164767Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637323319733548:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:00.164830Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e3e/r3tmp/tmpnGxA0n/pdisk_1.dat 2025-04-10T10:59:00.742619Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:00.848317Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:00.848450Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:00.855195Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14324, node 10 2025-04-10T10:59:01.004079Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:01.004113Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:01.004127Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:01.004318Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9730 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:01.440839Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:01.524080Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:9730 2025-04-10T10:59:01.843732Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-04-10T10:59:07.633947Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BEAD8A0A-67E5-40BE-8C5E-7602A5A6C44F, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:9827 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3ADA7CFB-6E76-46D1-9663-085E2D3DF93F amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-10T10:59:07.671418Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BEAD8A0A-67E5-40BE-8C5E-7602A5A6C44F, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-10T10:59:07.672111Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 63A85C10-094D-48BC-97C5-61520AC5D944, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:9827 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BFE51A2B-81A3-4BD0-879B-9890FA8A34DE amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-10T10:59:07.677444Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 63A85C10-094D-48BC-97C5-61520AC5D944, response# GetObjectResult { } |69.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-04-10T10:59:07.909273Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C221166D-F294-42E7-A77D-BD85D9C1DD9F, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:21047 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A8429149-E71B-4D51-B3A0-43128F324551 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-10T10:59:07.918771Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C221166D-F294-42E7-A77D-BD85D9C1DD9F, response# No response body. |69.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TS3WrapperTests::HeadObject >> TS3WrapperTests::AbortMultipartUpload >> TS3WrapperTests::MultipartUpload ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-04-10T10:59:08.646305Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6EDD4237-197A-42A1-AF78-13522E4D089B, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:5126 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A425FD47-E4EF-456B-9D6C-21DA95399C63 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-04-10T10:59:08.656807Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6EDD4237-197A-42A1-AF78-13522E4D089B, response# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-04-10T10:59:00.980134Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:59:00.981802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:00.981863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:00.994775Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:59:00.995322Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T10:59:00.995611Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.050684Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:59:01.069185Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.070106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:59:01.072063Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T10:59:01.072150Z 
node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T10:59:01.072210Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T10:59:01.072571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:59:01.072798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:59:01.072880Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T10:59:01.149905Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:59:01.196866Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T10:59:01.197203Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:59:01.197311Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T10:59:01.197348Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T10:59:01.197401Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T10:59:01.197460Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.197735Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.197781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.198098Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T10:59:01.198245Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T10:59:01.198332Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.198385Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:01.198450Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T10:59:01.198496Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:01.198536Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:01.198581Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T10:59:01.198724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:01.198837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.198898Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.198988Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T10:59:01.201699Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 
SchemeShardId: 4200 ProcessingParams { } 2025-04-10T10:59:01.201754Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:01.201857Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:01.202046Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T10:59:01.202101Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T10:59:01.202344Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T10:59:01.202400Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.202443Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T10:59:01.202494Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T10:59:01.202530Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.202853Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:01.202905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T10:59:01.202947Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:01.202993Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.203055Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T10:59:01.203101Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:01.203144Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T10:59:01.203181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.203208Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T10:59:01.227223Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:01.227347Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.227461Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.227528Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T10:59:01.227624Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:01.228391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.228457Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.228502Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T10:59:01.228689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T10:59:01.228722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTxProcessing::TEvPlanStep 2025-04-10T10:59:01.228898Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.228941Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.228993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T10:59:01.229031Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T10:59:01.233139Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T10:59:01.233208Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.233464Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.233506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.233562Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.233607Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:01.233644Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:01.233687Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T10:59:01.233721Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T10:59:01.233768Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.233806Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T10:59:01.233882Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T10:59:01.233930Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:01.234132Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T10:59:01.237284Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.237326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T10:59:01.237356Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T10:59:01.237388Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T10:59:01.237498Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.237531Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T10:59:01.237566Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:01.237597Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:01.237657Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T10:59:01.237808Z node 1 :TX_DATASHARD TRACE: Operation 
[1000001:1] is the new logically incomplete end at 9437184 2025-04-10T10:59:01.237847Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:06.372288Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-10T10:59:06.372326Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-10T10:59:06.372351Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T10:59:06.372374Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-10T10:59:06.372396Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:06.372438Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:06.372488Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-04-10T10:59:06.372529Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-10T10:59:06.372673Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:06.372717Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:06.372766Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:06.406161Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:06.406273Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:06.406359Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:06.406409Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:06.406454Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:06.406499Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-10T10:59:06.406588Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:06.406636Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:06.406686Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-10T10:59:06.406728Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-10T10:59:06.406784Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-10T10:59:06.406819Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:06.406865Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-10T10:59:06.461585Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:06.461685Z node 3 
:TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-10T10:59:06.461762Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-04-10T10:59:06.461897Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:07.535972Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-10T10:59:07.536053Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-10T10:59:07.536364Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:297:2278], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:07.536400Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:07.536448Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:296:2277], serverId# [3:297:2278], sessionId# [0:0:0] 2025-04-10T10:59:07.754981Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-04-10T10:59:07.757666Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:07.757843Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:07.817881Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-04-10T10:59:07.817997Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-10T10:59:07.818063Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-04-10T10:59:07.818119Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:07.818171Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:07.818232Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:07.818315Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-04-10T10:59:07.818354Z node 3 :TX_DATASHARD TRACE: 
Execution status for [0:3] at 9437184 is Executed 2025-04-10T10:59:07.818384Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T10:59:07.818411Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-04-10T10:59:07.818439Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:07.818501Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:07.818559Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-04-10T10:59:07.818602Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-10T10:59:07.818768Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:07.818820Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:07.818886Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:07.848758Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-04-10T10:59:07.857440Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-04-10T10:59:07.857544Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-10T10:59:07.857885Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:07.857931Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:07.947015Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-04-10T10:59:07.947151Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:08.066526Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:08.066635Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:08.066719Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:08.066761Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:08.066811Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:08.066851Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-04-10T10:59:08.066896Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-04-10T10:59:08.066924Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:08.066963Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 
9437184 to execution unit CompletedOperations 2025-04-10T10:59:08.066998Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 2025-04-10T10:59:08.067065Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-10T10:59:08.067093Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:08.067135Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-04-10T10:59:08.180994Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:08.181080Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-04-10T10:59:08.181136Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-10T10:59:08.181238Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:08.288090Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T10:59:08.288162Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-10T10:59:08.304795Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:230:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied |69.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TS3WrapperTests::UploadUnknownPart >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TS3WrapperTests::HeadObject [GOOD] >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TS3WrapperTests::MultipartUpload [GOOD] >> TTxLocatorTest::TestWithReboot >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::CopyPartUpload >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::Boot >> TS3WrapperTests::CopyPartUpload [GOOD] >> TTxLocatorTest::TestImposibleSize [GOOD] |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-04-10T10:59:06.050507Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c80/r3tmp/tmpnhaNCG//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-10T10:59:06.099203Z :BS_LOCALRECOVERY CRIT: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TTxLocatorTest::Boot [GOOD] >> TBackupTests::BackupUuidColumn[Raw] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2025-04-10T10:59:09.552094Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4B5392E0-A2D7-41AC-9F8A-BB05D904E24C, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:62600 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3AB3659C-49BD-4743-975F-380EBA006034 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-10T10:59:09.559726Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4B5392E0-A2D7-41AC-9F8A-BB05D904E24C, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-10T10:59:09.560278Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0FB001B5-5967-403E-A634-E5E1DEAD7006, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62600 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 888D7FD5-51C2-43D5-A255-3B7455D2F311 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-04-10T10:59:09.578799Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0FB001B5-5967-403E-A634-E5E1DEAD7006, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-10T10:59:09.582415Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A06674CE-9687-4811-9586-347540D602E8, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62600 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CB6F28A9-4FB5-47D1-8F28-EB50FE1D2F8D amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-04-10T10:59:09.587126Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A06674CE-9687-4811-9586-347540D602E8, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-10T10:59:09.587508Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 95E859C0-CAF9-4F25-9DE6-F36C787AF747, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:62600 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C5B321A3-E24C-4422-8C05-E6DFB33A1744 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: 
/TEST/key / 4 2025-04-10T10:59:09.591015Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 95E859C0-CAF9-4F25-9DE6-F36C787AF747, response# GetObjectResult { } >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TTxLocatorTest::TestWithReboot [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest |69.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-04-10T10:59:09.621031Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F9870AE7-0E0C-47A9-B9B8-4037689A7C77, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:13575 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD27DF64-61BB-4DCC-83F7-D10E03B63503 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-04-10T10:59:09.628480Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F9870AE7-0E0C-47A9-B9B8-4037689A7C77, response# >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-04-10T10:59:09.379865Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4FE98FDA-B76F-4C87-9030-F585CB78052B, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:26129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 13BA337C-F01A-4685-90C5-2AFD1687F3BE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-10T10:59:09.387649Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4FE98FDA-B76F-4C87-9030-F585CB78052B, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-10T10:59:09.388277Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B7EE05E1-F488-4B22-A19E-D745230D8BC9, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:26129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 96CC657C-26AB-47AD-85D6-F21ADFB844E9 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-10T10:59:09.391732Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B7EE05E1-F488-4B22-A19E-D745230D8BC9, response# 
HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-04-10T10:59:10.359256Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:10.360140Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:10.360973Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:10.363014Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.363582Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:10.375258Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.375416Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.375534Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.375626Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:10.375798Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.375900Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:10.376050Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |70.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |70.0%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> BasicUsage::RecreateObserver [GOOD] >> PrivateApi::Nodes [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-04-10T10:59:09.436092Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D109C688-ED84-43C1-A070-0CE6B41AB8A4, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:8671 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 60ADE6ED-76CD-4FEB-A9C6-6F8EE9C579ED amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-10T10:59:09.445112Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D109C688-ED84-43C1-A070-0CE6B41AB8A4, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-10T10:59:09.446727Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 49258545-4154-495C-BB68-C9B1311A189C, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8671 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4A086545-99AB-4B41-951D-3B05ECA8584F amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-04-10T10:59:09.457493Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 49258545-4154-495C-BB68-C9B1311A189C, response# AbortMultipartUploadResult { } 2025-04-10T10:59:09.458117Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B6470A75-4116-4653-83FF-CD39DE68AD17, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:8671 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4C9106CF-C6D8-420B-A204-8D71E809C7AA amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-10T10:59:09.462744Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B6470A75-4116-4653-83FF-CD39DE68AD17, response# No response body. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-04-10T10:59:10.019081Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:10.019892Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:10.020708Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:10.022739Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.023270Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:10.035412Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.035571Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.035690Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.035792Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:10.035960Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.036061Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:10.036195Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:10.036870Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710656 2025-04-10T10:59:10.037057Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-04-10T10:59:10.040755Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-10T10:59:10.041175Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#123456 2025-04-10T10:59:10.041710Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.041752Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.041810Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-04-10T10:59:10.041836Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-04-10T10:59:10.042106Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2112] requested range size#281474976587200 2025-04-10T10:59:10.042413Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-04-10T10:59:10.042478Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:78:2112] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-10T10:59:10.043009Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#246912 2025-04-10T10:59:10.043452Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.043514Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.043648Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-04-10T10:59:10.043686Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-04-10T10:59:10.044110Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#281474976340288 2025-04-10T10:59:10.044208Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-04-10T10:59:10.044240Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::TestAllocateAll >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink |70.0%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2025-04-10T10:59:10.027343Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:10.028244Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:10.029123Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:10.031187Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.031791Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:10.046487Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.046632Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.046745Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.046838Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:10.047011Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.047118Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:10.047249Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:10.048808Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-10T10:59:10.049616Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-10T10:59:10.050040Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-10T10:59:10.050385Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-10T10:59:10.050763Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-04-10T10:59:10.051290Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.051401Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-10T10:59:10.051625Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.051842Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.051960Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-10T10:59:10.052117Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.052279Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.052412Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.052526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-10T10:59:10.052700Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.052759Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-10T10:59:10.052960Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.053181Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.053297Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-10T10:59:10.053499Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-10T10:59:10.053551Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-10T10:59:10.053751Z node 1 
:TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.053901Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-10T10:59:10.053934Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-04-10T10:59:10.054047Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054112Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-10T10:59:10.054136Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-04-10T10:59:10.054259Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054333Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054434Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054502Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-10T10:59:10.054539Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-04-10T10:59:10.054692Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054753Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-04-10T10:59:10.054781Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-04-10T10:59:10.054842Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.054893Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-10T10:59:10.054926Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2025-04-10T10:59:10.055059Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.055100Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-10T10:59:10.055126Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-04-10T10:59:10.055319Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.055392Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-10T10:59:10.055416Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-04-10T10:59:10.055508Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.055545Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-10T10:59:10.055571Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-04-10T10:59:10.055726Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.055815Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-10T10:59:10.055842Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-10T10:59:10.060894Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-04-10T10:59:10.062498Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-04-10T10:59:10.063284Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-04-10T10:59:10.063386Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-04-10T10:59:10.063594Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-04-10T10:59:10.063657Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-04-10T10:59:10.063702Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-04-10T10:59:10.063760Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 
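[Note at the log's truncation point] The TTxLocatorTest dumps in this run exercise a simple range-reservation contract: a client sends TEvAllocate with a range size, the tablet runs TTxReserve, and on success replies TEvAllocateResult with a half-open range from#..to#; requests that would reach the 281474976710656 (2^48) ceiling come back as IMPOSIBLE. Below is a minimal sketch of that contract, assuming the >=-style boundary check implied by the failing requests for exactly the remaining capacity; all names are hypothetical, and the real tablet persists its cursor transactionally rather than in a plain field.

    #include <cstdint>
    #include <optional>
    #include <utility>

    // Hypothetical model of the reservation behaviour seen in the log;
    // not YDB's actual tx_allocator implementation.
    class TTxIdAllocator {
    public:
        // Matches the 281474976710656 ceiling visible in the IMPOSIBLE replies.
        static constexpr uint64_t Capacity = 1ull << 48;

        // Returns [from, to) on success, std::nullopt for IMPOSIBLE.
        std::optional<std::pair<uint64_t, uint64_t>> Reserve(uint64_t size) {
            // A request for exactly the remaining capacity fails in the log
            // (123456 reserved + 281474976587200 requested == 2^48), so the
            // boundary check is assumed to be >=, not >.
            if (Reserved + size >= Capacity) {
                return std::nullopt;         // TEvAllocateResult status# IMPOSIBLE
            }
            const uint64_t from = Reserved;  // Reserved from#
            Reserved += size;                // Reserved to#, persisted by TTxReserve
            return std::make_pair(from, Reserved);
        }

    private:
        uint64_t Reserved = 0;
    };

Checked against the trace: Reserve(281474976710656) fails, Reserve(123456) yields [0, 123456), Reserve(246912) yields [123456, 370368), and Reserve(281474976340288) fails again, matching the Successed#/from#/to# lines in the dump.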
72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.474355Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-04-10T10:59:10.474388Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9000000 to# 9100000 2025-04-10T10:59:10.474513Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.474559Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.474644Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-10T10:59:10.474668Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9100000 to# 9200000 2025-04-10T10:59:10.474822Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-10T10:59:10.474849Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9200000 to# 9300000 2025-04-10T10:59:10.474959Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475061Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475136Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-10T10:59:10.475165Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9300000 to# 9400000 2025-04-10T10:59:10.475293Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475400Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-10T10:59:10.475427Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9400000 to# 9500000 2025-04-10T10:59:10.475517Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-10T10:59:10.475538Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9500000 to# 9600000 2025-04-10T10:59:10.475628Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475684Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-10T10:59:10.475706Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9600000 to# 9700000 2025-04-10T10:59:10.475811Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# 
{ Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475896Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.475953Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-10T10:59:10.475979Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9700000 to# 9800000 2025-04-10T10:59:10.476093Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-10T10:59:10.476116Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9800000 to# 9900000 2025-04-10T10:59:10.476203Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.476346Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-10T10:59:10.476373Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:633:2564] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-10T10:59:10.480791Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-04-10T10:59:10.482268Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-04-10T10:59:10.483049Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-04-10T10:59:10.483119Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-04-10T10:59:10.483281Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-10T10:59:10.483346Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-10T10:59:10.483414Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.483456Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-10T10:59:10.483511Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 
2025-04-10T10:59:10.483554Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.483635Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.483677Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-04-10T10:59:10.483709Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.483797Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.483923Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-04-10T10:59:10.483955Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-10T10:59:10.483986Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-10T10:59:10.484008Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.484032Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-10T10:59:10.484074Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.484103Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-04-10T10:59:10.484135Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-04-10T10:59:10.484168Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-04-10T10:59:10.484195Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-04-10T10:59:10.484232Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-04-10T10:59:10.484263Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-04-10T10:59:10.484551Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:10.485854Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.489557Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:10.489824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:10.490635Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.490744Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:10.490813Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:10.490940Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 >> TTxLocatorTest::TestAllocateAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:08.262909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:08.263085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:08.263146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:08.263185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:08.263225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:08.263254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:08.263305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:08.264583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:08.264957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:08.380167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-04-10T10:59:08.380242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:08.398605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:08.399004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:08.399192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:08.410141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:08.410442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:08.411238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:08.411700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:08.414735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.416284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:08.416360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.416565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:08.416612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:08.416654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:08.416871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.435632Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:08.654884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:08.655250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.655531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:08.655908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:08.655994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.659385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:08.659573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T10:59:08.659780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.659858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:08.659908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:08.659952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:08.662354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.662425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:08.662535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:08.664819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.664875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.664938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.664990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.668714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:08.671095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:08.671330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:08.672508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:08.672733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:08.672814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.673162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:08.673230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.673405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:08.673546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:08.676687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:08.676780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:08.676978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.677035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:08.677292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.677345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:08.677478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:08.677516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.677560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:08.677618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.677656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:08.677694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.677731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:08.677758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:08.677832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:08.677874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:08.677934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:08.680751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:08.680901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:08.680947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
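Before a planned suboperation can execute, the coordinator assigns it a plan step: the FAKE_COORDINATOR lines show txId 1 planned at step 5000001 with the front at 0, and txId 102 below planned at step 5000003 with the front already advanced to 5000002. A toy allocator capturing that monotone-front contract — the FirstStep constant and all names here are assumptions for illustration, not the real coordinator's logic:

```cpp
// Toy plan-step allocator mirroring the FAKE_COORDINATOR records:
// each transaction is planned strictly above the current front, and the
// front advances as planned steps execute. Hypothetical names throughout.
#include <cstdint>
#include <cstdio>

class TToyCoordinator {
    uint64_t FrontStep = 0;                       // "State->FrontStep: 0"
    static constexpr uint64_t FirstStep = 5000001; // assumed base, matches the log
public:
    uint64_t Plan(uint64_t txId) {
        uint64_t step = FrontStep ? FrontStep + 1 : FirstStep;
        std::printf("Add transaction: %llu at step: %llu (FrontStep was %llu)\n",
                    (unsigned long long)txId, (unsigned long long)step,
                    (unsigned long long)FrontStep);
        FrontStep = step;                         // front moves once the step runs
        return step;
    }
};

int main() {
    TToyCoordinator coordinator;
    coordinator.Plan(1);    // step 5000001, as in the records above
    coordinator.Plan(101);  // step 5000002, the front seen before txId 102
    coordinator.Plan(102);  // step 5000003, as in the backup records below
}
```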
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:09.067510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:59:09.067683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:09.069616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:59:09.069741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:59:09.071063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:09.071217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:09.071277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:59:09.071408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:59:09.071544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:09.082993Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2384], attempt# 0 2025-04-10T10:59:09.174061Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2384], sender# [1:413:2382] 2025-04-10T10:59:09.178279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:09.178394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:59:09.178675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:09.178715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:59:09.179155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:09.179208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:09.180161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:09.180298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:09.180387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:59:09.180443Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:59:09.180536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:59:09.180653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:59:09.183445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:25531 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9CEB567A-CE7E-4E9A-A48B-48E5EDC48366 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T10:59:09.184575Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:25531 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DA0D0C7-5825-4417-ADD4-7E4852F4F296 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-10T10:59:09.191423Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-10T10:59:09.191574Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-10T10:59:09.191692Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2384], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:25531 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BE079D6A-0488-4AF0-B6FC-4485BE866665 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-10T10:59:09.197486Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-10T10:59:09.197561Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2384], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:59:09.197719Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:59:09.207020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:59:09.207089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:59:09.207246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:59:09.207339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T10:59:09.207399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:09.207456Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:09.207505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:59:09.207593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:59:09.207772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:09.211142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:09.211587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:09.211647Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:59:09.211749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:59:09.211794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:09.211836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
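Each PUT in the export above carries a base64 content-md5 header, and the mock's PutObjectResult echoes the same digest back as a lowercase-hex ETag — e.g. 5ZuHSMjV1bVKZhThhMGD5g== for /metadata.json decodes byte-for-byte to the returned ETag e59b8748c8d5d5b54a6614e184c183e6. A small standalone sketch of deriving both encodings from one MD5 digest; it assumes OpenSSL's legacy MD5() is available (deprecated since OpenSSL 3.0) and hand-rolls the base64 step:

```cpp
// Sketch: one MD5 digest rendered two ways, as in the S3 PUTs above --
// base64 for the Content-MD5 request header, lowercase hex for the ETag
// the mock returns. Assumes OpenSSL's legacy MD5() (deprecated since 3.0).
// Build: g++ md5demo.cpp -lcrypto
#include <openssl/md5.h>
#include <cstdio>
#include <cstring>
#include <string>

static std::string Base64(const unsigned char* p, size_t n) {
    static const char* tbl =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    std::string out;
    for (size_t i = 0; i < n; i += 3) {
        unsigned v = p[i] << 16;                  // pack up to 3 bytes into 24 bits
        if (i + 1 < n) v |= p[i + 1] << 8;
        if (i + 2 < n) v |= p[i + 2];
        out += tbl[(v >> 18) & 63];
        out += tbl[(v >> 12) & 63];
        out += (i + 1 < n) ? tbl[(v >> 6) & 63] : '=';
        out += (i + 2 < n) ? tbl[v & 63] : '=';
    }
    return out;
}

int main() {
    // Any payload demonstrates the idea (the real test uploads 61 bytes of
    // metadata.json, 357 of scheme.pb, 11 of data_00.csv).
    const char* payload = "example payload";
    unsigned char digest[MD5_DIGEST_LENGTH];
    MD5(reinterpret_cast<const unsigned char*>(payload), std::strlen(payload), digest);

    std::string contentMd5 = Base64(digest, sizeof(digest)); // request header
    char etag[2 * MD5_DIGEST_LENGTH + 1];
    for (int i = 0; i < MD5_DIGEST_LENGTH; ++i)
        std::snprintf(etag + 2 * i, 3, "%02x", digest[i]);   // response ETag

    std::printf("content-md5: %s\nETag: %s\n", contentMd5.c_str(), etag);
}
```

This is why the HandleMetadata/HandleScheme/HandleData records can match each PutObjectResult ETag against the checksum the exporter just sent.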
2025-04-10T10:59:09.211868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:09.211928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:59:09.212008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:59:09.212080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:09.212138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:59:09.212174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:59:09.212319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:09.214850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:59:09.214919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:398:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-04-10T10:59:10.029849Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.030035Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.030112Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.075870Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.077063Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.077137Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T10:59:10.077188Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020ab/r3tmp/tmpFjDKWx/pdisk_1.dat 2025-04-10T10:59:10.759220Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T10:59:10.759455Z node 1 :BS_PROXY_PUT 
DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759513Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759547Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759579Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759610Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759641Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T10:59:10.759692Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T10:59:10.759780Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-04-10T10:59:10.759845Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-04-10T10:59:10.759905Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-04-10T10:59:10.759936Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-04-10T10:59:10.759973Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-04-10T10:59:10.760001Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-04-10T10:59:10.760233Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T10:59:10.760321Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T10:59:10.760381Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T10:59:10.763282Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T10:59:10.763583Z node 1 :BS_PROXY_PUT DEBUG: 
[084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T10:59:10.763689Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T10:59:10.763786Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T10:59:10.763863Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T10:59:10.764106Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.425 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.426 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.427 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.397 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.633 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.736 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-10T10:59:10.788160Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-04-10T10:59:10.790616Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-04-10T10:59:10.790961Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-04-10T10:59:10.791126Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-04-10T10:58:33.259689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637207984674282:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.259738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 10:58:34.097266830 1163999 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:34.097469417 1163999 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:34.308953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.230942Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10306: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10306 2025-04-10T10:58:35.232331Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10306: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10306 } ] 2025-04-10T10:58:35.334124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.334176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.769289Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10306: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10306 } ] 2025-04-10T10:58:37.334655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:38.266293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637207984674282:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:38.266380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:38.338607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0410 10:58:39.101613140 1164406 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:39.103636492 1164406 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:39.164692Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10306: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10306 } ] 2025-04-10T10:58:39.270003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:39.343212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:39.355674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:39.363039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637233754478473:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a92/r3tmp/tmpQFRsnD/pdisk_1.dat 2025-04-10T10:58:39.441121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637233754478473:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:39.632062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637233754478473:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 10306, node 1 2025-04-10T10:58:39.783028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:39.807654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:39.807699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:39.807711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:39.807905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:40.185371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:42.124246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:42.124339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:42.127452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:43.284980Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-10T10:58:43.285035Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:43.285047Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:43.294331Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T10:58:43.294369Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:43.294377Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:43.295638Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T10:58:43.295652Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:43.295660Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:43.303775Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". 
Create session OK 2025-04-10T10:58:43.303805Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:43.303811Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:43.313699Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T10:58:43.313724Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:43.313732Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:43.316830Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T10:58:43.316868Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:43.316875Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:43.318082Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T10:58:43.318104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:43.318111Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:43.319416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:58:43.320686Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T10:58:43.320714Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:43.320720Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:43.321797Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T10:58:43.321828Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:43.321835Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:43.322531Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T10:58:43.322561Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:43.322567Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:43.323502Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T10:58:43.323527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:43.323535Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:43.326422Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T10:58:43.326437Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:43.326443Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call cre ... 
hed with error: 2025-04-10T10:59:06.936208Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ... (TRUNCATED: the identical FQ_QUOTA_SERVICE "SyncQuota finished with error:" record repeated for every pending quota sync between 10:59:06.936208Z and 10:59:06.946496Z) ... 2025-04-10T10:59:06.946763Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped 2025-04-10T10:59:06.946959Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ... (TRUNCATED: further identical records through 10:59:06.949358Z) ... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-04-10T10:58:32.838602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637204274941106:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:32.842079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 10:58:33.329218585 
1163774 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:33.329386741 1163774 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:33.844592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:34.574082Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32348: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32348 } ] 2025-04-10T10:58:34.627947Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32348: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:32348 2025-04-10T10:58:34.847955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.850058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.250362Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32348: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32348 } ] 2025-04-10T10:58:36.850467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac9/r3tmp/tmpBpU8gf/pdisk_1.dat 2025-04-10T10:58:37.438142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:37.451480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637225749778164:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:37.606365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637225749778164:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 32348, node 1 2025-04-10T10:58:37.742012Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:37.742201Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:37.839116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637204274941106:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:37.839218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:30037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:38.307823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 E0410 10:58:38.332373844 1164009 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:38.332541497 1164009 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// waiting... 2025-04-10T10:58:38.698324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:38.698367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:38.698393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:38.698551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:58:38.698672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:39.006777Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T10:58:39.006857Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:39.006869Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:39.036315Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T10:58:39.036348Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:39.036356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:39.046877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-10T10:58:39.046898Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:39.046907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:39.052160Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T10:58:39.052192Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:39.052201Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:39.054239Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T10:58:39.054259Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:39.054267Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:39.079751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:58:39.092485Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T10:58:39.092520Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T10:58:39.092528Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T10:58:39.092552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T10:58:39.092552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:39.092560Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:39.092565Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:39.092572Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:39.093882Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T10:58:39.093907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:39.093912Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:39.111721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.113104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T10:58:39.113119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:39.113125Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:39.114142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.114299Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T10:58:39.114313Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:39.114318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:39.115201Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-10T10:58:39.115231Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:39.115236Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:39.115276Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T10:58:39.115292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:39.115298Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:39.115959Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T10:58:39.115972Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:39.115978Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:39.120282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.122546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.135554Z node 1 :FLAT_TX_SCHEMESHARD WA ... 91637374910576592 RawX2: 4503616807242469 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491637374910576592 RawX2: 4503616807242469 } } DstEndpoint { ActorId { RawX1: 7491637374910576582 RawX2: 4503616807242260 } } InMemory: true } 2025-04-10T10:59:12.033572Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710717, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T10:59:12.033581Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710717, task: 2. Tasks execution finished 2025-04-10T10:59:12.033601Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T10:59:12.033601Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576589:2787], TxId: 281474976710717, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn4mse6h01zgyf3wvk916. SessionId : ydb://session/3?node_id=4&id=NWRiZjNlYmEtNjM0NDRlMzEtZDJjYTM0M2YtYzZjOWJmZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T10:59:12.033627Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-10T10:59:12.033648Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 2. Finish input channelId: 1, from: [4:7491637374910576590:2788] 2025-04-10T10:59:12.033678Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710717, task: 2. 
pass away 2025-04-10T10:59:12.033681Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T10:59:12.033731Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710717;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:59:12.033737Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T10:59:12.033893Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576590:2788], TxId: 281474976710718, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. TraceId : 01jrfnn50j98ta9qjt959nyp5c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-10T10:59:12.033917Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576590:2788], TxId: 281474976710718, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. TraceId : 01jrfnn50j98ta9qjt959nyp5c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T10:59:12.033950Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 1. Tasks execution finished 2025-04-10T10:59:12.033961Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576590:2788], TxId: 281474976710718, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. TraceId : 01jrfnn50j98ta9qjt959nyp5c. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T10:59:12.034051Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 1. pass away 2025-04-10T10:59:12.034136Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710718;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:59:12.034143Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Start prepare for distributed commit 2025-04-10T10:59:12.034177Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]SetPrepare; txId=281474976710717 2025-04-10T10:59:12.034204Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Flush data 2025-04-10T10:59:12.034368Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]Send EvWrite to ShardID=72075186224037890, isPrepare=1, isImmediateCommit=0, TxId=281474976710717, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710711 DataShard: 72075186224037890 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 3, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1 2025-04-10T10:59:12.034408Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T10:59:12.034433Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T10:59:12.034439Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 2. Tasks execution finished 2025-04-10T10:59:12.034445Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576592:2789], TxId: 281474976710718, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn50j98ta9qjt959nyp5c. SessionId : ydb://session/3?node_id=4&id=MTBhYTNlNTctMmNlYzI3NTEtYzc0MDJlOTctZjNhYzVjOWI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T10:59:12.034481Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710718, task: 2. pass away 2025-04-10T10:59:12.034485Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Send EvWrite (external) to ShardID=72075186224037894, isPrepare=1, isImmediateCommit=0, TxId=281474976710717, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710711 DataShard: 72075186224037894 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 9, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2025-04-10T10:59:12.034515Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710718;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:59:12.036099Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Recv EvWriteResult (external) from ShardID=72075186224037894, Status=STATUS_PREPARED, TxId=281474976710717, Locks= , Cookie=0 2025-04-10T10:59:12.036127Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Got prepared result TxId=281474976710717, TabletId=72075186224037894, Cookie=0 2025-04-10T10:59:12.036152Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Flush data 2025-04-10T10:59:12.036181Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_PREPARED, TxId=281474976710717, Locks= , Cookie=1 2025-04-10T10:59:12.072654Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Start distributed commit with TxId=281474976710717 2025-04-10T10:59:12.072693Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]SetDistributedCommit; txId=281474976710717 2025-04-10T10:59:12.072728Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2025-04-10T10:59:12.073923Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Got transaction status, status: 16 2025-04-10T10:59:12.074018Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Got transaction status, status: 17 2025-04-10T10:59:12.078205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:59:12.078238Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:12.083384Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Recv EvWriteResult (external) from ShardID=72075186224037894, Status=STATUS_COMPLETED, TxId=281474976710717, Locks= , Cookie=0 2025-04-10T10:59:12.083421Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Got completed result TxId=281474976710717, TabletId=72075186224037894, Cookie=0, Locks= 2025-04-10T10:59:12.083832Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=281474976710717, Locks= , Cookie=0 2025-04-10T10:59:12.083858Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576599:2387], Table: `Root/yq/connections` ([72057594046644480:3:1]), SessionActorId: [4:7491637314781030956:2387]Got completed result TxId=281474976710717, TabletId=72075186224037890, Cookie=0, Mode=2, Locks= 2025-04-10T10:59:12.083874Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491637374910576577:2387], SessionActorId: [4:7491637314781030956:2387], Committed TxId=281474976710717 E0410 10:59:12.994355628 1180256 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:59:12.994524107 1180256 channel.cc:120] channel stack 
builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:59:13.018350Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:3087: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:3087 2025-04-10T10:59:13.132580Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:3087: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:3087 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-04-10T10:59:10.056678Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D150B1B7-0246-407D-B198-53A4BE4FC49E, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:19723 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B3FABD0E-318C-469E-9767-2A1B32980BC2 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-10T10:59:10.062836Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D150B1B7-0246-407D-B198-53A4BE4FC49E, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-10T10:59:10.063214Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F3348D48-E1CB-490C-A045-7C9571668BC7, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:19723 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B6FDA036-60C7-442E-A51F-A17AD5BF412C amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-04-10T10:59:10.066304Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F3348D48-E1CB-490C-A045-7C9571668BC7, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-04-10T10:59:10.069319Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1A9CAE11-B239-4709-B7EF-20D4A4AC9FE1, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19723 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3BE9CEB1-597D-4F3E-9D45-A505301E0425 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-04-10T10:59:10.072931Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1A9CAE11-B239-4709-B7EF-20D4A4AC9FE1, response# UploadPartCopyResult { } 2025-04-10T10:59:10.073235Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 14E7766B-D156-489F-B33A-DA284048D194, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19723 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5E299203-3FF8-4CA6-9936-BFEDD8413FB5 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-04-10T10:59:10.077004Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 
14E7766B-D156-489F-B33A-DA284048D194, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-04-10T10:59:10.077282Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1BAF1E79-856E-448C-BF54-780F7209F5DD, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:19723 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4479A672-29AB-4389-BE98-98F6EB8039A3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-04-10T10:59:10.081480Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1BAF1E79-856E-448C-BF54-780F7209F5DD, response# GetObjectResult { } >> TTxLocatorTest::TestZeroRange [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:11.085458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:11.085583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:11.085628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:11.085668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:11.085719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:11.085748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:11.085833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:11.085955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:11.086404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:11.175015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:11.175076Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T10:59:11.204220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:11.204520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:11.204676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:11.213804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:11.214081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:11.215036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.215522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:11.217467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.218720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:11.218769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.218958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:11.219033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:11.219070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:11.219211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.227990Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:11.405874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:11.406131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.406363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:11.406686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:11.406765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.411512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.411668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:11.411847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.411917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:11.411966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:11.412009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:11.415162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.415240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:11.415274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:11.419276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.419354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.419434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.419499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.423487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:11.429245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:11.429592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:11.430806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.430997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:11.431062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.431342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:11.431396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.431561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:11.431672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:11.436504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:11.436564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:11.436762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.436848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:11.437126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.437181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:11.437300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:11.437339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.437379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:11.437428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.437473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:11.437523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.437567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:11.437602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:11.437680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:11.437729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:11.437788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:11.441788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:11.441952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:11.441995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:11.843781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:59:11.843977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:11.846027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:59:11.846190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:59:11.847543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.847714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:11.847787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:59:11.847919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:59:11.848064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:11.864469Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2384], attempt# 0 2025-04-10T10:59:11.899481Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2384], sender# [1:413:2382] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:25534 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A52EA811-4A1B-4219-869E-964CA2BBFCB6 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T10:59:11.909681Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:25534 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DABC176B-ED67-4D13-93D7-206D8CA2468F amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-10T10:59:11.918864Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:59:11.919043Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-04-10T10:59:11.919157Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2384], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-10T10:59:11.919517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:11.919596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:59:11.919880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.919929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:59:11.920035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.920108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T10:59:11.921666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:11.921782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:11.921829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:59:11.921890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:59:11.921931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:59:11.922040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:25534 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B4590BF0-20C5-4180-85A9-9A744A86069D amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-04-10T10:59:11.927983Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2384], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 
2025-04-10T10:59:11.928063Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2384], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:59:11.928333Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:59:11.936508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:59:11.950170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:59:11.950266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:59:11.950478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:59:11.950608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-10T10:59:11.950682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.950753Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.950817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:59:11.950890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:59:11.951103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:11.954176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.954692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.954761Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:59:11.954908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:59:11.954955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:11.955004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-10T10:59:11.955044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:11.955088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:59:11.955198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:59:11.955282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:11.955351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:59:11.955416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:59:11.955555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:11.959743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:59:11.959834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:398:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-04-10T10:57:17.352014Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1744282637351952 2025-04-10T10:57:17.888228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636883714501535:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.888334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:17.974763Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636883093540938:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:17.974854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:18.255382Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014b5/r3tmp/tmpjbu8T4/pdisk_1.dat 2025-04-10T10:57:18.315312Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:18.705528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.705661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.706682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:18.706761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:18.714229Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:18.714468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:18.715725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-04-10T10:57:18.774813Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4654, node 1 2025-04-10T10:57:18.805658Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:18.805679Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:57:18.922633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0014b5/r3tmp/yandex7jOm6E.tmp 2025-04-10T10:57:18.922659Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0014b5/r3tmp/yandex7jOm6E.tmp 2025-04-10T10:57:18.922821Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0014b5/r3tmp/yandex7jOm6E.tmp 2025-04-10T10:57:18.922958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:19.001218Z INFO: TTestServer started on Port 16303 GrpcPort 4654 TClient is connected to server localhost:16303 PQClient connected to localhost:4654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:19.402558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:57:22.878360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636883714501535:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.878444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:22.975845Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636883093540938:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:22.975922Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:23.526005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636908863345017:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.526093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636908863344999:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.526359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:23.542836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T10:57:23.588960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636908863345036:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T10:57:23.684712Z node 2 :TX_PROXY ERROR: Actor# [2:7491636908863345064:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:24.338010Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636908863345071:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.341652Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjY3MTg0MGEtMzhiZmNiZDUtNTJlMTgwZTctZThjZWU5ODA=, ActorId: [2:7491636908863344997:2310], ActorState: ExecuteState, TraceId: 01jrfnhv1x77nvm3fgwwjhs7ye, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.345255Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.350680Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636909484306433:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.351056Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmRlYjI2NmQtNTYwZGU1MWQtZTE2ZDhmZjEtNWY1NjVjZGI=, ActorId: [1:7491636909484306392:2337], ActorState: ExecuteState, TraceId: 01jrfnhv8sd8bs2b09ghvdt7zm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.351913Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.355701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.569360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.740039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4654", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:25.127367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfnhwds7hzdjyh3frw40ct1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzYwZmMzZTctMmI0NmEwYTQtYmFiZjRjMzEtOGIyZjMzZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636918074241452:2999] === CheckClustersList. Ok 2025-04-10T10:57:30.845412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:4654 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4654 2025-04-10T10:57:31.254431Z node 1 :PERSQUEUE INFO: proxy a ... 
ffset: 0 SizeLag: 0 WriteTimestampEstimateMS: 1744282752501 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T10:59:12.531633Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 INIT DONE TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-10T10:59:12.531740Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc1 Database path: /Root Database id: account-dc1 CommittedOffset: 0 EndOffset: 0 } 2025-04-10T10:59:12.534226Z :INFO: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.531019Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:59:12.531101Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-10T10:59:12.534286Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-10T10:59:12.534341Z :INFO: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 67 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.534455Z :NOTICE: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-04-10T10:59:12.534503Z :DEBUG: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] [] Abort session to cluster 2025-04-10T10:59:12.534935Z :INFO: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.534977Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:59:12.535012Z :INFO: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 64 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.535069Z :NOTICE: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-04-10T10:59:12.535097Z :DEBUG: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] [] Abort session to cluster 2025-04-10T10:59:12.535317Z :INFO: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.535353Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:59:12.535386Z :INFO: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Counters: { Errors: 0 CurrentSessionLifetimeMs: 63 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.535436Z :NOTICE: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-04-10T10:59:12.535463Z :DEBUG: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] [] Abort session to cluster 2025-04-10T10:59:12.535694Z :INFO: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.535738Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:59:12.535784Z :INFO: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Counters: { Errors: 0 CurrentSessionLifetimeMs: 63 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.535877Z :NOTICE: [/Root] [/Root] [a2b8444-6a9307b8-bd7d13ec-b96db342] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-10T10:59:12.536006Z :INFO: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.536034Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T10:59:12.536063Z :INFO: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 65 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.536107Z :NOTICE: [/Root] [/Root] [36ab6e0e-dc5438cb-ebc49967-99f866b4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-10T10:59:12.536150Z :INFO: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:12.536183Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-10T10:59:12.536213Z :INFO: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 69 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:12.536260Z :NOTICE: [/Root] [/Root] [9bf09786-6dd06143-a5d61a76-f1f6e7d6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-04-10T10:59:12.538666Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_13285814238749014763_v1 grpc read done: success# 0, data# { } 2025-04-10T10:59:12.538721Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_13285814238749014763_v1 grpc read failed 2025-04-10T10:59:12.541540Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_8342222032539336704_v1 2025-04-10T10:59:12.538755Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_13285814238749014763_v1 grpc closed 2025-04-10T10:59:12.541613Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637376533766491:2557] destroyed 2025-04-10T10:59:12.541691Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_8342222032539336704_v1 2025-04-10T10:59:12.538784Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_13285814238749014763_v1 is DEAD 2025-04-10T10:59:12.539684Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_245455109081702504_v1 grpc read done: success# 0, data# { } 2025-04-10T10:59:12.539698Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_245455109081702504_v1 grpc read failed 2025-04-10T10:59:12.539715Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_245455109081702504_v1 grpc closed 2025-04-10T10:59:12.539733Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_245455109081702504_v1 is DEAD 2025-04-10T10:59:12.540282Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766481:2548] disconnected; active server actors: 1 2025-04-10T10:59:12.540306Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766481:2548] client user disconnected session shared/user_3_3_13285814238749014763_v1 2025-04-10T10:59:12.540367Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-10T10:59:12.540426Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-04-10T10:59:12.540456Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2025-04-10T10:59:12.540485Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000037s 2025-04-10T10:59:12.540531Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 grpc read done: success# 0, data# { } 2025-04-10T10:59:12.540543Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 grpc read failed 2025-04-10T10:59:12.540561Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 grpc closed 2025-04-10T10:59:12.540592Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_8342222032539336704_v1 is DEAD 2025-04-10T10:59:12.541295Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766483:2547] disconnected; active server actors: 1 2025-04-10T10:59:12.541314Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766483:2547] client user disconnected session shared/user_3_2_245455109081702504_v1 2025-04-10T10:59:12.541345Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-10T10:59:12.541374Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766488:2546] disconnected; active server actors: 1 2025-04-10T10:59:12.541392Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7491637376533766488:2546] client user disconnected session shared/user_3_1_8342222032539336704_v1 2025-04-10T10:59:13.501706Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491637380828733836:2560] TxId: 281474976720690. Ctx: { TraceId: 01jrfnn5y1aee4ahvte2yhkjdx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjZkNWM0NTYtYTlmZTdlMTEtNDBiNmE0NzQtMzk3NzVlYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-10T10:59:13.502965Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637380828733846:2568], TxId: 281474976720690, task: 2. Ctx: { TraceId : 01jrfnn5y1aee4ahvte2yhkjdx. SessionId : ydb://session/3?node_id=3&id=NjZkNWM0NTYtYTlmZTdlMTEtNDBiNmE0NzQtMzk3NzVlYmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491637380828733836:2560], status: UNAVAILABLE, reason: {
<main>: Error: Terminate execution } 2025-04-10T10:59:13.503474Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491637380828733847:2569], TxId: 281474976720690, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jrfnn5y1aee4ahvte2yhkjdx. SessionId : ydb://session/3?node_id=3&id=NjZkNWM0NTYtYTlmZTdlMTEtNDBiNmE0NzQtMzk3NzVlYmI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491637380828733836:2560], status: UNAVAILABLE, reason: {
<main>: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-04-10T10:58:33.094299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637207697919672:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.094478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 10:58:33.635657898 1163979 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:33.635820604 1163979 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:34.098414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:34.722702Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24138: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:24138 } ] 2025-04-10T10:58:34.728128Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24138: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:24138 2025-04-10T10:58:35.094771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.096208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.191747Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24138: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:24138 } ] 2025-04-10T10:58:37.099487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a87/r3tmp/tmpInkuG5/pdisk_1.dat 2025-04-10T10:58:37.489990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:37.490176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:37.514275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:37.516396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:37.525538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637224877789603:2316], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 24138, node 1 2025-04-10T10:58:37.607040Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:37.607083Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:37.780606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:37.780638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:37.780646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:37.780818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:58:37.783398Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:38.026746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:38.095053Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637207697919672:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:38.095154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 10:58:38.643051233 1164217 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:38.643208435 1164217 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:39.281861Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T10:58:39.281911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:39.281921Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:39.282479Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-10T10:58:39.282502Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:39.282509Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:39.300231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.300713Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T10:58:39.300730Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:39.300736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:39.326933Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T10:58:39.326960Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:39.326966Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:39.336559Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T10:58:39.336593Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:39.336598Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:39.341119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T10:58:39.341148Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:39.341154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:39.349100Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-10T10:58:39.349144Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:39.349153Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:39.350627Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T10:58:39.350663Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:39.350670Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:39.360492Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T10:58:39.360516Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:39.360524Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:39.370323Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T10:58:39.370387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:39.370397Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:39.371826Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". 
Create session OK 2025-04-10T10:58:39.371841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:39.371848Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:39.373370Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T10:58:39.373397Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:39.373405Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:39.400244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.402088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.403661Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T10:58:39.403684Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:39.403692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:39.406104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T10:58:39.406163Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T10:58:39.406226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:39.413691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, b ... ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-04-10T10:59:09.459658Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-10T10:59:09.459679Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: 2025-04-10T10:59:09.459694Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 1. Tasks execution finished 2025-04-10T10:59:09.459704Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-04-10T10:59:09.459734Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-10T10:59:09.459751Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 1. Tasks execution finished 2025-04-10T10:59:09.459760Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-04-10T10:59:09.459814Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854212:2561], SessionActorId: [7:7491637361926886396:2561], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:5:1]). lockId=281474976715678 [7:7491637366221854225:2561] 2025-04-10T10:59:09.459843Z node 7 :KQP_COMPUTE DEBUG: Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Open: token=0 2025-04-10T10:59:09.459863Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854212:2561], SessionActorId: [7:7491637361926886396:2561], ProcessRequestQueue [72057594046644480:5:1] NOT READY queue=1 2025-04-10T10:59:09.459904Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Write: token=0 2025-04-10T10:59:09.460012Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Close: token=0 2025-04-10T10:59:09.460099Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854224:2561], TxId: 281474976715681, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7491637366221854212:2561] 2025-04-10T10:59:09.460110Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854224:2561], TxId: 281474976715681, task: 1. Finished 2025-04-10T10:59:09.460128Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-10T10:59:09.460145Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 1. Tasks execution finished 2025-04-10T10:59:09.460161Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854222:2561], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=Zjg0NDZiYTUtNGI0MmJhNmYtNjczNzQ0MjgtMmQ5MDFkNDQ=. CustomerSuppliedId : . TraceId : 01jrfnn2a73gvcxmc68cc0659z. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-10T10:59:09.460239Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 1. pass away 2025-04-10T10:59:09.460311Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715681;task_id=1;success=1;message={
<main>: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T10:59:09.460600Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854212:2561], SessionActorId: [7:7491637361926886396:2561], Start immediate commit 2025-04-10T10:59:09.460609Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]SetImmediateCommit 2025-04-10T10:59:09.460622Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854212:2561], SessionActorId: [7:7491637361926886396:2561], Flush data 2025-04-10T10:59:09.460751Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Send EvWrite to ShardID=72075186224037890, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715678 DataShard: 72075186224037890 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 5, Size=228, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-04-10T10:59:09.469430Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854221:2386], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637344747015460:2386]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=4, Locks= , Cookie=1 2025-04-10T10:59:09.469493Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854221:2386], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637344747015460:2386]Got completed result TxId=4, TabletId=72075186224037890, Cookie=1, Mode=3, Locks= 2025-04-10T10:59:09.469570Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854210:2386], SessionActorId: [7:7491637344747015460:2386], Committed TxId=0 2025-04-10T10:59:09.471033Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=5, Locks= , Cookie=1 2025-04-10T10:59:09.471071Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854225:2561], Table: `Root/yq/nodes` ([72057594046644480:5:1]), SessionActorId: [7:7491637361926886396:2561]Got completed result TxId=5, TabletId=72075186224037890, Cookie=1, Mode=3, Locks= 2025-04-10T10:59:09.471112Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7491637366221854212:2561], SessionActorId: [7:7491637361926886396:2561], Committed TxId=0 2025-04-10T10:59:09.515343Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
<main>: Error: GRpc error: (1): Cancelled on the server side } {
<main>: Error: Grpc error response on endpoint localhost:4701 } ], Query: --!syntax_v1 -- Query name: GetTask(read stale ro) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $from as Timestamp; DECLARE $tasks_limit as Uint64; SELECT `scope`, `query_id`, `owner`, `last_seen_at`, `retry_counter`, `retry_counter_updated_at`, `retry_rate`, `query_type` FROM `pending_small` WHERE `tenant` = $tenant AND `assigned_until` < $from ORDER BY `query_id` DESC LIMIT $tasks_limit; 2025-04-10T10:59:09.517495Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
<main>: Error: GRpc error: (1): Cancelled on the server side
<main>: Error: Grpc error response on endpoint [::]:4701 2025-04-10T10:59:09.517981Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: GetTaskRequest - GetTaskResult: {tenant: "TestTenant" owner_id: "ddf5ca06-11f73241-3447eac-c3f1778c1" host: "ghrun-x5vqckr7qy" } ERROR: [ {
<main>: Error: GRpc error: (1): Cancelled on the server side } {
<main>: Error: Grpc error response on endpoint localhost:4701 } ] 2025-04-10T10:59:09.518331Z node 7 :YQL_PRIVATE_PROXY ERROR: PrivateGetTask - Owner: ddf5ca06-11f73241-3447eac-c3f1778c1, Host: ghrun-x5vqckr7qy, Tenant: TestTenant, Failed with code: GENERIC_ERROR Details:
<main>: Error: GRpc error: (1): Cancelled on the server side
<main>: Error: Grpc error response on endpoint localhost:4701
<main>: Error: ControlPlane::GetTaskError 2025-04-10T10:59:09.530852Z node 7 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: CLIENT_CANCELLED
<main>: Error: GRpc error: (1): Cancelled on the server side
<main>: Error: Grpc error response on endpoint [::]:4701 2025-04-10T10:59:10.291355Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:4701: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint [::]:4701 2025-04-10T10:59:11.290431Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:4701: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint [::]:4701 2025-04-10T10:59:12.206226Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: TRANSPORT_UNAVAILABLE, Issues: [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4701: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:4701 } ], Query: --!syntax_v1 -- Query name: NodesHealthCheck(read) PRAGMA TablePathPrefix("Root/yq"); DECLARE $now as Timestamp; DECLARE $tenant as String; SELECT `node_id`, `instance_id`, `hostname`, `active_workers`, `memory_limit`, `memory_allocated`, `interconnect_port`, `node_address`, `data_center` FROM `nodes` WHERE `tenant` = $tenant AND `expire_at` >= $now; 2025-04-10T10:59:12.206872Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: NodesHealthCheckRequest - NodesHealthCheckResult: {tenant: "TestTenant" node { node_id: 7 instance_id: "9b764603-43f4f5bb-77c43a85-bf458095" hostname: "ghrun-x5vqckr7qy" node_address: "127.0.1.1" } } ERROR: [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4701: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:4701 } ] 2025-04-10T10:59:12.207387Z node 7 :YQL_NODES_MANAGER ERROR: Failed with code: INTERNAL_ERROR Details:
<main>: Error: Can't do NodesHealthCheck: (yexception) ydb/core/fq/libs/actors/nodes_health_check.cpp:95:
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4701: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:4701 2025-04-10T10:59:12.281691Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:4701: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint [::]:4701 2025-04-10T10:59:13.276477Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
<main>: Error: Client is stopped |70.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-04-10T10:59:17.639834Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:17.640864Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:17.641728Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:17.643884Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.644283Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:17.653322Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.653493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.653605Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.653696Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:17.653863Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.653960Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:17.654087Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:17.655563Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710655 2025-04-10T10:59:17.656223Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.656289Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.656397Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-04-10T10:59:17.656438Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-04-10T10:59:17.660438Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#1 2025-04-10T10:59:17.660612Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-10T10:59:17.660651Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-04-10T10:59:17.835137Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:17.835972Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:17.836834Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:17.838653Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.839167Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:17.850574Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.850731Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.850832Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.850907Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:17.851059Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.851157Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:17.851286Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:17.851962Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#0 2025-04-10T10:59:17.852517Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.852571Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:17.852650Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-04-10T10:59:17.852687Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 0 expected SUCCESS |70.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |70.1%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-04-10T10:59:18.366196Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:18.367055Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:18.367856Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:18.369612Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.370113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:18.383647Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.383840Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.383985Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.384065Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:18.384210Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.384344Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:18.384488Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:18.386091Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-04-10T10:59:18.386926Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-04-10T10:59:18.387284Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-04-10T10:59:18.387622Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-04-10T10:59:18.387995Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-04-10T10:59:18.388446Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.388570Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-04-10T10:59:18.388805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389015Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389129Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-04-10T10:59:18.389295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389460Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389600Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389730Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-04-10T10:59:18.389903Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.389953Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-04-10T10:59:18.390128Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.390688Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.390822Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-04-10T10:59:18.391048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-10T10:59:18.391096Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-04-10T10:59:18.391285Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.391434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-10T10:59:18.391464Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-04-10T10:59:18.391601Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.391665Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-10T10:59:18.391690Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-04-10T10:59:18.391787Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.391867Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.391950Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.392046Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-10T10:59:18.392079Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-04-10T10:59:18.392238Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.392284Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 
2025-04-10T10:59:18.392311Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-04-10T10:59:18.392374Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.392429Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-10T10:59:18.392453Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2025-04-10T10:59:18.392609Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.392647Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-10T10:59:18.392671Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-04-10T10:59:18.392835Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.392919Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-10T10:59:18.392955Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-04-10T10:59:18.393030Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.393065Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-10T10:59:18.393088Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-04-10T10:59:18.393225Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.393298Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-10T10:59:18.393330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-10T10:59:18.398346Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#100000 2025-04-10T10:59:18.398872Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#100000 2025-04-10T10:59:18.399351Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.399460Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:129:2163] requested range size#100000 2025-04-10T10:59:18.399729Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.399869Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#100000 2025-04-10T10:59:18.400272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.400395Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2025-04-10T10:59:18.400583Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.400825Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.400944Z node 1 :TABLET_MAIN DEBUG: Put Resul ... 000 2025-04-10T10:59:18.571821Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.571895Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-04-10T10:59:18.571923Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8300000 to# 8400000 2025-04-10T10:59:18.572020Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.572151Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-04-10T10:59:18.572180Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8400000 to# 8500000 2025-04-10T10:59:18.572305Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-04-10T10:59:18.572357Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8500000 to# 8600000 2025-04-10T10:59:18.572466Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.572603Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.572693Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-04-10T10:59:18.572722Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8600000 to# 8700000 2025-04-10T10:59:18.572805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.572875Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-04-10T10:59:18.572903Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8700000 to# 8800000 2025-04-10T10:59:18.573055Z node 1 :TABLET_MAIN DEBUG: Put 
Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.573113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-04-10T10:59:18.573139Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8800000 to# 8900000 2025-04-10T10:59:18.573239Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-04-10T10:59:18.573281Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-10T10:59:18.581228Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2025-04-10T10:59:18.581764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-04-10T10:59:18.582522Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-04-10T10:59:18.582935Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.583139Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2025-04-10T10:59:18.583288Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.583640Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-04-10T10:59:18.584093Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.584266Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-04-10T10:59:18.584413Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.584685Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.584896Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2025-04-10T10:59:18.585033Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.585227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.585351Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-04-10T10:59:18.585516Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.585703Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.585866Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-04-10T10:59:18.586104Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.586385Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2025-04-10T10:59:18.586546Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.586708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-04-10T10:59:18.586745Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9000000 to# 9100000 2025-04-10T10:59:18.586830Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.586885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.587027Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-10T10:59:18.587061Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9100000 to# 9200000 2025-04-10T10:59:18.587129Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.587300Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-10T10:59:18.587328Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9200000 to# 9300000 2025-04-10T10:59:18.587395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.587519Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-10T10:59:18.587561Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9300000 to# 9400000 2025-04-10T10:59:18.587619Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.587768Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-10T10:59:18.587797Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9400000 to# 9500000 2025-04-10T10:59:18.587845Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { 
Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.587971Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-10T10:59:18.588006Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9500000 to# 9600000 2025-04-10T10:59:18.588058Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.588187Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-10T10:59:18.588217Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9600000 to# 9700000 2025-04-10T10:59:18.588294Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.588415Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-10T10:59:18.588442Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9700000 to# 9800000 2025-04-10T10:59:18.588498Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:18.588613Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-10T10:59:18.588660Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9800000 to# 9900000 2025-04-10T10:59:18.588768Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-10T10:59:18.588800Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS |70.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> KqpSysColV0::SelectRowById >> KqpSysColV1::InnerJoinSelect >> YdbOlapStore::LogLast50 >> 
YdbTableBulkUpsert::Nulls >> YdbYqlClient::TestDoubleKey >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> GrpcConnectionStringParserTest::NoDatabaseFlag >> YdbTableBulkUpsert::Simple >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> TTxLocatorTest::TestAllocateAllByPieces >> YdbIndexTable::AlterIndexImplBySuperUser >> TGRpcYdbTest::RemoveNotExistedDirectory >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> YdbYqlClient::SecurityTokenAuth >> YdbYqlClient::TestDecimal1 |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |70.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> ClientStatsCollector::PrepareQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-04-10T10:59:21.665481Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-10T10:59:21.666350Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-10T10:59:21.667299Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-10T10:59:21.669443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.670044Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-10T10:59:21.695818Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.695966Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.696093Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.696162Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-10T10:59:21.696309Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.696410Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-10T10:59:21.696532Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-10T10:59:21.697225Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#8796093022207 2025-04-10T10:59:21.697779Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.697837Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.697922Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-04-10T10:59:21.697959Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-04-10T10:59:21.705015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#8796093022207 2025-04-10T10:59:21.705602Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.705678Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.705790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-04-10T10:59:21.705833Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-04-10T10:59:21.706426Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#8796093022207 2025-04-10T10:59:21.706805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.706879Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.706979Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-04-10T10:59:21.707021Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-04-10T10:59:21.707411Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#8796093022207 2025-04-10T10:59:21.707764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.707837Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.707983Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-04-10T10:59:21.708028Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
SUCCESS 2025-04-10T10:59:21.708419Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#8796093022207 2025-04-10T10:59:21.708740Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.708808Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.708914Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-04-10T10:59:21.709017Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-04-10T10:59:21.709463Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:91:2125] requested range size#8796093022207 2025-04-10T10:59:21.709783Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.709852Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.709930Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-04-10T10:59:21.709964Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:91:2125] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-04-10T10:59:21.710799Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:95:2129] requested range size#8796093022207 2025-04-10T10:59:21.711372Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.711522Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.711650Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-04-10T10:59:21.711722Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:95:2129] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-04-10T10:59:21.712273Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:99:2133] requested range size#8796093022207 2025-04-10T10:59:21.712577Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.712639Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.712713Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-04-10T10:59:21.712749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:99:2133] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
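Every grant in this run follows the same round trip: the client actor sends TEvAllocate with a requested range size, the allocator runs TTxReserve to advance its persistent high-water mark (the paired TABLET_MAIN Put results are that transaction committing to the tablet's log), and only then answers with TEvAllocateResult carrying the half-open window [from, to). A minimal sketch of that contract, assuming a simplified single-threaded model rather than the real actor-based implementation:

#include <cstdint>
#include <optional>

// Simplified model of the allocator contract visible in this log: every
// grant advances a durable high-water mark, so granted ranges never
// overlap, even across tablet restarts.
struct TRangeAllocator {
    uint64_t Capacity;  // highest mark that may ever be handed out
    uint64_t Reserved;  // persisted high-water mark ("Reserved to#")

    struct TRange { uint64_t From; uint64_t To; };  // half-open [From, To)

    std::optional<TRange> Allocate(uint64_t size) {
        if (size == 0 || Capacity - Reserved < size) {
            return std::nullopt;  // corresponds to TEvAllocateResult status# IMPOSIBLE
        }
        TRange r{Reserved, Reserved + size};
        Reserved = r.To;  // must be durably written (the Put above) before replying
        return r;
    }
};

Replaying the requests from this log against such a model, with Capacity set to 281474976710655, reproduces every from#/to# pair shown here, including the final rejection.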
2025-04-10T10:59:21.713240Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:103:2137] requested range size#8796093022207 2025-04-10T10:59:21.713653Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.713727Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.713803Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-04-10T10:59:21.713844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:103:2137] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-04-10T10:59:21.714321Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:107:2141] requested range size#8796093022207 2025-04-10T10:59:21.714666Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.714743Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.714817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-04-10T10:59:21.714861Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:107:2141] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-04-10T10:59:21.715329Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#8796093022207 2025-04-10T10:59:21.715650Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.715700Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.715816Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-04-10T10:59:21.715851Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:111:2145] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-04-10T10:59:21.716283Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#8796093022207 2025-04-10T10:59:21.716654Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.716707Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
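The trailing ellipsis here is the test harness trimming a long, repetitive stretch of the log. The paired TEvPutResult lines on either side of it are easier to follow once the bracketed blob ids are decoded; they appear to follow the layout [TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId], so each TTxReserve commit shows up as one write to the log channel (channel 1) and one small companion write to channel 0. A rough decoding helper, assuming that field order (an assumption inferred from this log, not taken from the YDB sources):

#include <cstdio>

// Hypothetical decoder for ids like [72057594046447617:2:11:1:24576:76:0],
// assuming the order [TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId].
struct TBlobIdFields {
    unsigned long long TabletId;
    unsigned Generation, Step, Channel, Cookie, BlobSize, PartId;
};

bool ParseBlobId(const char* s, TBlobIdFields& out) {
    return std::sscanf(s, "[%llu:%u:%u:%u:%u:%u:%u]",
                       &out.TabletId, &out.Generation, &out.Step,
                       &out.Channel, &out.Cookie, &out.BlobSize, &out.PartId) == 7;
}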
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:151:2185] requested range size#8796093022207 2025-04-10T10:59:21.777400Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.777511Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.777616Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-04-10T10:59:21.777668Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:151:2185] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-04-10T10:59:21.778576Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:155:2189] requested range size#8796093022207 2025-04-10T10:59:21.779027Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.779100Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.779193Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-04-10T10:59:21.779231Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:155:2189] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-04-10T10:59:21.779986Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:159:2193] requested range size#8796093022207 2025-04-10T10:59:21.780468Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.780563Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.780655Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-04-10T10:59:21.780707Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:159:2193] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-04-10T10:59:21.781459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:163:2197] requested range size#8796093022207 2025-04-10T10:59:21.781792Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.781896Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.782011Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-04-10T10:59:21.782052Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:163:2197] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
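The request size in this test is not arbitrary: 8796093022207 is 2^43 - 1, and the allocator's id space tops out at 281474976710655 = 2^48 - 1. At this point in the log 24 of those pieces have been granted (the mark stands at 211106232532968 = 24 * 8796093022207); as the remainder of the log shows below, 32 pieces land exactly on mark 281474976710624, a final size#31 request drains the leftover ids, and the size#1 request after that is answered with status# IMPOSIBLE. A quick check of that arithmetic, assuming only those two constants:

#include <cassert>
#include <cstdint>

int main() {
    const uint64_t piece = (1ULL << 43) - 1;   // 8796093022207, size of each request
    const uint64_t cap   = (1ULL << 48) - 1;   // 281474976710655, end of the id space
    assert(24 * piece == 211106232532968ULL);  // mark reached at this point in the log
    assert(32 * piece == 281474976710624ULL);  // mark after all 32 full pieces
    assert(cap - 32 * piece == 31);            // exactly one size#31 grant remains
    // after that grant the space is exhausted, so a size#1 request cannot fit
    return 0;
}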
2025-04-10T10:59:21.782844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:167:2201] requested range size#8796093022207 2025-04-10T10:59:21.783176Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.783249Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.783338Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-04-10T10:59:21.783383Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:167:2201] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-04-10T10:59:21.784094Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:171:2205] requested range size#8796093022207 2025-04-10T10:59:21.784443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.784509Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.784605Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-04-10T10:59:21.784644Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:171:2205] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-04-10T10:59:21.785363Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:175:2209] requested range size#8796093022207 2025-04-10T10:59:21.785728Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.785783Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.785890Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-04-10T10:59:21.785939Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:175:2209] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-04-10T10:59:21.786693Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:179:2213] requested range size#8796093022207 2025-04-10T10:59:21.787043Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.787148Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.787252Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-04-10T10:59:21.787290Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:179:2213] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2025-04-10T10:59:21.788032Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:183:2217] requested range size#8796093022207 2025-04-10T10:59:21.788386Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.788448Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.788526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-04-10T10:59:21.788563Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:183:2217] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-04-10T10:59:21.789325Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:187:2221] requested range size#8796093022207 2025-04-10T10:59:21.789765Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.789859Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.789968Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-04-10T10:59:21.790020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:187:2221] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-04-10T10:59:21.790876Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:191:2225] requested range size#8796093022207 2025-04-10T10:59:21.791269Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.791341Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.791447Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-04-10T10:59:21.791533Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:191:2225] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-04-10T10:59:21.792420Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:195:2229] requested range size#8796093022207 2025-04-10T10:59:21.792784Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.792884Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T10:59:21.792977Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-04-10T10:59:21.793019Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:195:2229] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS
2025-04-10T10:59:21.793966Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:199:2233] requested range size#31
2025-04-10T10:59:21.797787Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-10T10:59:21.797883Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-04-10T10:59:21.798035Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655
2025-04-10T10:59:21.798100Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:199:2233] TEvAllocateResult from# 281474976710624 to# 281474976710655
expected SUCCESS
2025-04-10T10:59:21.799138Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:203:2237] requested range size#1
2025-04-10T10:59:21.799291Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0
2025-04-10T10:59:21.799335Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:203:2237] TEvAllocateResult status# IMPOSIBLE
expected IMPOSIBLE
>> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD]
>> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit
|70.2%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD]
>> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase
>> YdbYqlClient::TestYqlWrongTable
>> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD]
>> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy
>> Yq_1::ModifyQuery [GOOD]
>> TColumnShardTestReadWrite::ReadGroupBy [GOOD]
|70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut
|70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut
|70.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|70.2%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut
>> YdbOlapStore::ManyTables
>> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD]
>> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD]
Test command err:
2025-04-10T10:58:32.814772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637203800182229:2205];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:58:32.815037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0410 10:58:33.744222176 1163725 dns_resolver.cc:162] no server name supplied in dns URI
E0410 10:58:33.744390468 1163725 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-04-10T10:58:33.817318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T10:58:34.865368Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:29692: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29692 } ] 2025-04-10T10:58:34.877166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:34.914129Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:29692: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:29692 2025-04-10T10:58:35.878577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:36.442086Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:29692: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29692 } ] 2025-04-10T10:58:36.882961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:37.815775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637203800182229:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:37.815831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:37.890542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aa0/r3tmp/tmpa2TKaf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29692, node 1 E0410 10:58:38.745564146 1163994 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:38.745736649 1163994 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:38.787450Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T10:58:38.787483Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:38.787491Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:38.790612Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T10:58:38.793182Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:38.793205Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:38.802227Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T10:58:38.802254Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:38.802440Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:38.807610Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T10:58:38.807637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:38.807644Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:38.811822Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:38.823507Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T10:58:38.825889Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T10:58:38.837958Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T10:58:38.838010Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". 
Create session OK 2025-04-10T10:58:38.838022Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:38.838034Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:38.838062Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:38.838075Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:38.840843Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T10:58:38.840869Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:38.840875Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:38.841039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T10:58:38.841059Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:38.841074Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T10:58:38.846316Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-10T10:58:38.846342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:38.846349Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T10:58:38.850441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T10:58:38.850471Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:38.850477Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:38.856368Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T10:58:38.856396Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:38.856403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:38.856907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T10:58:38.856925Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:38.856934Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T10:58:38.861024Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-10T10:58:38.861050Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:38.861056Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T10:58:38.879408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637229569986811:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.879408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637229569986826:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.879512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637229569986818:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.879562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:38.884264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 TClient is connected to server localhost:25432 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-04-10T10:58:38.922778Z node 1 :TX_PROXY ERROR: Actor# [1:7491637229569986832:2518] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976710659 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "yq" PathId: 2 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:38.985142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637229569986831:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:58:38.985614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637229569986830:2392], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:58:38.989134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... 025-04-10T10:59:21.550334Z node 4 :YQL_PROXY DEBUG: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.550 DEBUG ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [perf] yql_expr_constraint.cpp:3228: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud/#199848/#199858} Execution of [ConstraintTransformer::DoTransform] took 19us 2025-04-10T10:59:21.550388Z node 4 :YQL_PROXY DEBUG: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.550 DEBUG ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [perf] yql_expr_csee.cpp:620: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud/#199848/#199858} Execution of [UpdateCompletness] took 13us 2025-04-10T10:59:21.550494Z node 4 :YQL_PROXY DEBUG: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.550 DEBUG ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [perf] yql_expr_csee.cpp:633: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud/#199848/#199858} Execution of [EliminateCommonSubExpressionsForSubGraph] took 63us 2025-04-10T10:59:21.582689Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.566 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:466: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Register async execution for node #199848 2025-04-10T10:59:21.582890Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.582 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:87: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Finish, output #199849, status: Async 2025-04-10T10:59:21.583167Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:133: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Completed async execution for node #199848 2025-04-10T10:59:21.583243Z node 4 :YQL_PROXY DEBUG: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 DEBUG ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [DQ] yql_dq_exectransformer.cpp:1048: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud WrapFutureCallback 2025-04-10T10:59:21.583291Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [DQ] yql_dq_exectransformer.cpp:1051: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Execution Result complete, duration: 0.037272s 2025-04-10T10:59:21.583577Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO 
ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:153: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} State is ExecutionComplete after apply async changes for node #199848 2025-04-10T10:59:21.583631Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:59: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Begin, root #199849 2025-04-10T10:59:21.583666Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:72: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Collect unused nodes for root #199849, status: Ok 2025-04-10T10:59:21.583757Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:577: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Node #199849 finished execution 2025-04-10T10:59:21.583824Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:594: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Node #199849 created 0 trackable nodes: 2025-04-10T10:59:21.583860Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:87: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Finish, output #199849, status: Ok 2025-04-10T10:59:21.583897Z node 4 :YQL_PROXY INFO: SessionId: utquedim37m0vbg2r37b 2025-04-10 10:59:21.583 INFO ydb-services-fq-ut_integration(pid=1163326, tid=0x00007F84C578B640) [core exec] yql_execution.cpp:93: {utquedim37m0vbg2r37b#utruedim2pqbi51n7d8u#yandexcloud://Execute_folder_id#c07acfff-bee234d2-6060fe39-a40f2e8417#mock_cloud} Creating finalizing transformer, output #199849 2025-04-10T10:59:21.622662Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: TRANSPORT_UNAVAILABLE, Issues: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17472 } ], Query: --!syntax_v1
-- Query name: GetTask(read stale ro)
PRAGMA TablePathPrefix("Root/yq");
DECLARE $tenant as String;
DECLARE $from as Timestamp;
DECLARE $tasks_limit as Uint64;
SELECT `scope`, `query_id`, `owner`, `last_seen_at`, `retry_counter`, `retry_counter_updated_at`, `retry_rate`, `query_type`
FROM `pending_small`
WHERE `tenant` = $tenant AND `assigned_until` < $from
ORDER BY `query_id` DESC
LIMIT $tasks_limit;
2025-04-10T10:59:21.634534Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: TRANSPORT_UNAVAILABLE, Issues: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17472 } ], Query: --!syntax_v1
-- Query name: HardPingTask(read)
PRAGMA TablePathPrefix("Root/yq");
DECLARE $tenant as String;
DECLARE $scope as String;
DECLARE $query_id as String;
$last_job_id = SELECT `last_job_id` FROM `queries` WHERE `scope` = $scope AND `query_id` = $query_id;
SELECT `query`, `internal` FROM `queries` WHERE `scope` = $scope AND `query_id` = $query_id;
SELECT `job_id`, `job` FROM `jobs` WHERE `scope` = $scope AND `query_id` = $query_id AND `job_id` = $last_job_id;
SELECT `owner`, `retry_counter`, `retry_counter_updated_at`, `retry_rate`, `assigned_until` FROM `pending_small` WHERE `tenant` = $tenant AND `scope` = $scope AND `query_id` = $query_id;
2025-04-10T10:59:21.663069Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: GetTaskRequest - GetTaskResult: {tenant: "TestTenant" owner_id: "c07acfff-bee234d2-6060fe39-a40f2e8418" host: "ghrun-x5vqckr7qy" } ERROR: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17472 } ] 2025-04-10T10:59:21.663507Z node 4 :FQ_PINGER WARN: QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:17472 } ]. Retry after: 0.085324s 2025-04-10T10:59:21.663633Z node 4 :YQL_PRIVATE_PROXY ERROR: PrivateGetTask - Owner: c07acfff-bee234d2-6060fe39-a40f2e8418, Host: ghrun-x5vqckr7qy, Tenant: TestTenant, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17472
: Error: ControlPlane::GetTaskError 2025-04-10T10:59:21.701629Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: PingTaskRequest - PingTaskResult: {owner_id: "c07acfff-bee234d2-6060fe39-a40f2e8417" query_id { value: "utquedim37m0vbg2r37b" } scope: "yandexcloud://Execute_folder_id" started_at { seconds: 1744282761 nanos: 483000000 } deadline { seconds: 1744369161 nanos: 324389000 } tenant: "TestTenant" } ERROR: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17472 } ] 2025-04-10T10:59:21.702032Z node 4 :YQL_PRIVATE_PROXY ERROR: PrivatePingTask - QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17472: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17472
: Error: ControlPlane PingTaskError 2025-04-10T10:59:21.761385Z node 4 :FQ_PINGER WARN: QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:17472 } ]. Retry after: 0.117894s 2025-04-10T10:59:21.886407Z node 4 :FQ_PINGER WARN: QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:17472 } ]. Retry after: 0.258133s 2025-04-10T10:59:22.153850Z node 4 :FQ_PINGER WARN: QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:17472: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:17472 } ]. Retry after: 0.530885s 2025-04-10T10:59:22.182574Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:17472: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:17472 2025-04-10T10:59:22.689022Z node 4 :FQ_PINGER WARN: QueryId: utquedim37m0vbg2r37b, Owner: c07acfff-bee234d2-6060fe39-a40f2e8417 Ping response error: [ {
: Error: GRpc error: (1): CANCELLED } {
: Error: Grpc error response on endpoint [::]:17472 } ]. Retry after: 0.807035s >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::SdkUuid ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:11.197023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637371507881011:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:11.197118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:11.242169Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637373370195185:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:11.253897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017a9/r3tmp/tmpgxfJCy/pdisk_1.dat 2025-04-10T10:59:11.460946Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:11.466285Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:11.771372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:11.771599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:11.773098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:11.773165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:11.775755Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:11.775873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:11.776838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8730, node 1 2025-04-10T10:59:11.856967Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:11.856987Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:11.872370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:11.986669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0017a9/r3tmp/yandexQDQokE.tmp 2025-04-10T10:59:11.986705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0017a9/r3tmp/yandexQDQokE.tmp 2025-04-10T10:59:11.986953Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0017a9/r3tmp/yandexQDQokE.tmp 2025-04-10T10:59:11.987131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-04-10T10:59:12.127733Z INFO: TTestServer started on Port 8564 GrpcPort 8730 TClient is connected to server localhost:8564 PQClient connected to localhost:8730 === TenantModeEnabled() = 1 === Init PQ - start server on port 8730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:12.713798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:12.713996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.714338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:12.714554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:12.714597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.722815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:12.723060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:12.723318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.723406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:12.723426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:12.723453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-10T10:59:12.730525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.730601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:12.730628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T10:59:12.736995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:12.737032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T10:59:12.737053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:12.737860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.737900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.737940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:12.737984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:12.762635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:12.771210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T10:59:12.771424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:12.775417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282752821, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:12.776255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282752821 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:12.776305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:12.776621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T10:59:12.776663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:12.776813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:12.776870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:12.791209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:12.791243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:12.791448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:12.791467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637371507881726:2439], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T10:59:12.791517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:12.791548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T10:59:12.791655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:12.791667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:12.791701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:12.791712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:12.791727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T10:59:12.791752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:12.791779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-10T10:59:12.791790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 2 ... 
ed cookie: 1 sessionId: 12345678|fff2e3f-7c7c6ef2-d2db38b7-670bd16a_0 2025-04-10T10:59:25.435926Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|fff2e3f-7c7c6ef2-d2db38b7-670bd16a_0 is DEAD 2025-04-10T10:59:25.436182Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-10T10:59:25.469469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54966" , at schemeshard: 72057594046644480 2025-04-10T10:59:25.470300Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:25.470500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-10T10:59:25.470517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:25.470728Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:25.470756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:25.470839Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T10:59:25.470881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:25.470910Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T10:59:25.470921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:25.470963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-10T10:59:25.471038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-10T10:59:25.471065Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-10T10:59:25.471081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:25.471094Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-10T10:59:25.471112Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-10T10:59:25.471125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-10T10:59:25.473858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:25.474217Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-04-10T10:59:25.474404Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:25.474422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:25.474655Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:25.474671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7491637408360986197:2380], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-04-10T10:59:25.475789Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:25.475868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:25.475891Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-10T10:59:25.475912Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-10T10:59:25.475932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-10T10:59:25.476037Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-04-10T10:59:25.483220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:25.483631Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T10:59:25.483655Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-10T10:59:25.484129Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-10T10:59:25.484214Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:54948 2025-04-10T10:59:25.484230Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= 
userAgent="pqv1 server" ip=ipv6:[::1]:54948 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-10T10:59:25.484238Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:59:25.485449Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-10T10:59:25.485644Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T10:59:25.485654Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:59:25.485676Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:25.485732Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637434130791073:2396] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:25.485753Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:25.486602Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-10T10:59:25.486844Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-10T10:59:25.487278Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 2025-04-10T10:59:25.489059Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-10T10:59:25.489357Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-10T10:59:25.489397Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 2025-04-10T10:59:25.489651Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|7eb5df6d-9ceae6ee-a7417426-f40ef229_0 is DEAD 2025-04-10T10:59:25.489930Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:25.789429Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637434130791087:2402], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:25.792724Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2M5YzA5Yy1hMjc0ZTlkYi02Y2VjODk1Zi1jZDhhNzY2MQ==, ActorId: [3:7491637434130791081:2398], ActorState: ExecuteState, TraceId: 01jrfnnjda8cemnx0y0286z0n8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:25.793258Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:11.476853Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637371523481445:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:11.477363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001781/r3tmp/tmpYoJ5gs/pdisk_1.dat 2025-04-10T10:59:11.796515Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:11.796916Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:11.917156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:12.131067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:12.131181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:12.170427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:12.170512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:12.171061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:12.177761Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:12.177916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:12.180990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31005, node 1 2025-04-10T10:59:12.524143Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001781/r3tmp/yandexZcZdZh.tmp 2025-04-10T10:59:12.524178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001781/r3tmp/yandexZcZdZh.tmp 2025-04-10T10:59:12.524352Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001781/r3tmp/yandexZcZdZh.tmp 2025-04-10T10:59:12.524460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:12.730836Z INFO: TTestServer started on Port 19768 GrpcPort 31005 TClient is connected to server localhost:19768 PQClient connected to localhost:31005 === TenantModeEnabled() = 1 === Init PQ - start server on port 31005 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:13.360547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:13.360792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.361044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:13.361304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:13.361386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.366293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:13.366512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:13.366703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.366784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046644480 2025-04-10T10:59:13.366799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:13.366821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T10:59:13.373446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.373504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:13.373531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2025-04-10T10:59:13.380052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.380089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.380124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.380145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.391006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:13.391438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:13.391476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T10:59:13.391523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:13.397121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T10:59:13.397316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:13.401632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282753444, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:13.401824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282753444 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:13.401869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.402130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T10:59:13.402189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.403391Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:13.403460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:13.410685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:13.410716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:13.410910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:13.410931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637375818449402:2399], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T10:59:13.410971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.410991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T10:59:13.411075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:13.411084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.411100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:13.411113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.411125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T10:59:13.411159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.411190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-10T10:59:13.411228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-10T10:59:13.411310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T10:59:13.411345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-10T10:59:13.4113 ... 
uest 2025-04-10T10:59:26.572698Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:26.572737Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-10T10:59:26.572828Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-10T10:59:26.572913Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T10:59:26.573273Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:26.573296Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-10T10:59:26.574191Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-04-10T10:59:26.578463Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-04-10T10:59:26.579478Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-04-10T10:59:26.579512Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-04-10T10:59:26.579874Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-04-10T10:59:26.579934Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2025-04-10T10:59:26.580075Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-04-10T10:59:26.580122Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-04-10T10:59:26.580145Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-04-10T10:59:26.653518Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637435947994356:2635], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:26.664576Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzI4M2ZlYTMtMWIzMzM2Mi02YjhmY2Y2Zi1iOWJlNTAxYg==, ActorId: [1:7491637435947994354:2634], ActorState: ExecuteState, TraceId: 01jrfnnk6wcgd6833fd0z8p4wz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:26.664982Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:26.684893Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-04-10T10:59:26.686055Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1744282766680 2025-04-10T10:59:26.695649Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:59:26.695687Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:59:26.695704Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:59:26.695721Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:59:26.695740Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] m0000000000p1236 2025-04-10T10:59:26.695753Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] d0000000000_00000000000000000006_00000_0000000001_00002| 2025-04-10T10:59:26.695766Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:59:26.695784Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:59:26.695799Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:59:26.695883Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:59:26.695948Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-04-10T10:59:26.731333Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. 
Partition 0 offset 6 count 1 size 1200275 actorID [1:7491637423063092008:2565] 2025-04-10T10:59:26.731450Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:59:26.731488Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:59:26.731534Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-04-10T10:59:26.731574Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:59:26.731601Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-04-10T10:59:26.731663Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:59:26.731690Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-04-10T10:59:26.732183Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-04-10T10:59:26.767499Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 153 } 2025-04-10T10:59:26.767557Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session: acknoledged message 1 2025-04-10T10:59:26.765251Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:59:26.765503Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T10:59:26.858483Z :INFO: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session will now close 2025-04-10T10:59:26.858551Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session: aborting 2025-04-10T10:59:26.859032Z :INFO: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:59:26.859068Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0] Write session: destroy DURATION 2.873494s 2025-04-10T10:59:26.863107Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0 grpc read done: success: 0 data: 2025-04-10T10:59:26.863143Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0 grpc read failed 2025-04-10T10:59:26.863187Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0 grpc closed 2025-04-10T10:59:26.863200Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 
1236|4e2156ac-64a2eba2-fc730ca4-ef7f2a13_0 is DEAD 2025-04-10T10:59:26.863701Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:26.864716Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7491637435947994352:2632] destroyed 2025-04-10T10:59:26.864774Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:59:27.200717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:59:27.200751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:27.690730Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637440242961690:2643], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:27.693199Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRhNjAzMTgtZGFjODI5YTAtMzM5NTY3NWUtOWM0MGE1Y2M=, ActorId: [1:7491637440242961688:2642], ActorState: ExecuteState, TraceId: 01jrfnnm9mft1raw94177t2t3q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:27.693580Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> KqpSysColV1::InnerJoinSelect [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcNewClient::SimpleYqlQuery >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_NotSplitedTopic [GOOD] Test command err: 2025-04-10T10:53:01.879347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491635783087067214:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:01.879383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:53:02.103081Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0019aa/r3tmp/tmp0B50TB/pdisk_1.dat 2025-04-10T10:53:02.265128Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:53:02.309945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:53:02.310077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:53:02.312022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6273, node 1 2025-04-10T10:53:02.374893Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0019aa/r3tmp/yandexLczrxn.tmp 2025-04-10T10:53:02.374918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0019aa/r3tmp/yandexLczrxn.tmp 2025-04-10T10:53:02.393205Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/2gt4/0019aa/r3tmp/yandexLczrxn.tmp 2025-04-10T10:53:02.393639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:53:02.429715Z INFO: TTestServer started on Port 29854 GrpcPort 6273 TClient is connected to server localhost:29854 PQClient connected to localhost:6273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:53:02.698852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.733121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:53:02.871064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:53:04.930437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795971969923:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.930543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.931050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491635795971969937:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:53:04.939865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:53:04.953220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491635795971969939:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:53:05.225855Z node 1 :TX_PROXY ERROR: Actor# [1:7491635800266937299:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:53:05.275730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.356619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:53:05.369641Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491635800266937307:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:53:05.371871Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODJkZWY1N2UtYjM4ODczNWUtYzUyNjRmYTctNTA2MGIxM2Q=, ActorId: [1:7491635795971969913:2336], ActorState: ExecuteState, TraceId: 01jrfn9yfybbzz2sx3myh45n18, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:53:05.374315Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:53:05.475923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491635800266937608:2630] 2025-04-10T10:53:06.879833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491635783087067214:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:53:06.879921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T10:53:11.982379Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T10:53:12.017973Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T10:53:12.019239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491635830331708995:2809], Recipient [1:7491635787382034948:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.019268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:53:12.019297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T10:53:12.019341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491635830331708991:2806], Recipient [1:7491635787382034948:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T10:53:12.019363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T10:53:12.111125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:53:12.111669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T10:53:12.111991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T10:53:12.112036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T10:53:12.112076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T10:53:12.112129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T10:53:12.112185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] w ... on: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:58:04.434836Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:58:04.434873Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:58:04.434916Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:58:04.534795Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636942958639906:2476], Partition 0, Sender [0:0:0], Recipient [6:7491636942958639970:2480], Cookie: 0 2025-04-10T10:58:04.534924Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636942958639970:2480]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:58:04.534979Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:58:04.535057Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:58:04.535177Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:58:04.535217Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:58:04.535263Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:58:04.563655Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7491636874239162016:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:58:04.563722Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T10:58:04.563798Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7491636874239162016:2155], Recipient [6:7491636874239162016:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:58:04.563829Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T10:58:04.637646Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636942958639906:2476], Partition 0, Sender [0:0:0], Recipient [6:7491636942958639970:2480], Cookie: 0 2025-04-10T10:58:04.637770Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636942958639970:2480]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:58:04.637813Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:58:04.637910Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:58:04.638028Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:58:04.638069Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:58:04.638120Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:58:04.655133Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [6:7491636912893868444:2385], Recipient [6:7491636874239162016:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037891 TableLocalId: 12 Generation: 1 Round: 3 TableStats { DataSize: 656 RowCount: 2 IndexSize: 0 InMemSize: 656 LastAccessTime: 1744282684240 LastUpdateTime: 1744282644528 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 20 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 679 Memory: 137784 } ShardState: 2 UserTablePartOwners: 72075186224037891 NodeId: 6 StartTime: 1744282644076 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T10:58:04.655202Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:58:04.655253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 656 rowCount 2 cpuUsage 0.0679 2025-04-10T10:58:04.655421Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] raw table stats: DataSize: 656 RowCount: 2 IndexSize: 0 InMemSize: 656 LastAccessTime: 1744282684240 LastUpdateTime: 1744282644528 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 20 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:58:04.655459Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 2025-04-10T10:58:04.655711Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [6:7491636955843542309:2594], Recipient [6:7491636874239162016:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 16 Generation: 1 Round: 2 TableStats { DataSize: 968 RowCount: 1 IndexSize: 0 InMemSize: 968 LastAccessTime: 1744282680247 LastUpdateTime: 1744282680247 ImmediateTxCompleted: 4 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 4 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 4 LocksWholeShard: 0 LocksBroken: 4 } TabletMetrics { CPU: 635 Memory: 123960 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 6 StartTime: 1744282654318 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T10:58:04.655744Z node 6 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:58:04.655771Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 16] state 'Ready' dataSize 968 rowCount 1 cpuUsage 0.0635 2025-04-10T10:58:04.655908Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 16] raw table stats: DataSize: 968 RowCount: 1 IndexSize: 0 InMemSize: 968 LastAccessTime: 1744282680247 LastUpdateTime: 1744282680247 ImmediateTxCompleted: 4 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 4 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 4 LocksWholeShard: 0 LocksBroken: 4 2025-04-10T10:58:04.656158Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [6:7491636908598900993:2362], Recipient [6:7491636874239162016:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037889 TableLocalId: 10 Generation: 1 Round: 3 TableStats { DataSize: 824 RowCount: 2 IndexSize: 0 InMemSize: 824 LastAccessTime: 1744282684240 LastUpdateTime: 1744282644528 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 20 PartCount: 0 RangeReadRows: 40 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 818 Memory: 137784 } ShardState: 2 UserTablePartOwners: 72075186224037889 NodeId: 6 StartTime: 1744282643808 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T10:58:04.656177Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T10:58:04.656204Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] state 'Ready' dataSize 824 rowCount 2 cpuUsage 0.0818 2025-04-10T10:58:04.656316Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] raw table stats: DataSize: 824 RowCount: 2 IndexSize: 0 InMemSize: 824 LastAccessTime: 1744282684240 LastUpdateTime: 1744282644528 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 20 PartCount: 0 RangeReadRows: 40 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T10:58:04.656493Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [6:7491636908598901079:2373], Recipient [6:7491636874239162016:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 11 Generation: 1 Round: 3 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 419 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 6 StartTime: 1744282643901 TableOwnerId: 72057594046644480 FollowerId: 0
2025-04-10T10:58:04.656512Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats
2025-04-10T10:58:04.656531Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0419
2025-04-10T10:58:04.656631Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0
>> KqpSysColV0::SelectRowById [GOOD]
>> YdbIndexTable::AlterIndexImplBySuperUser [GOOD]
>> YdbIndexTable::CreateTableAddIndex
>> YdbYqlClient::SecurityTokenAuth [GOOD]
>> YdbYqlClient::RetryOperationTemplate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19;
-- group by key: 0
2025-04-10T10:56:32.049977Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T10:56:32.131722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T10:56:32.152063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T10:56:32.152375Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T10:56:32.161373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T10:56:32.161557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T10:56:32.161742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T10:56:32.161835Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:32.161934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:32.162025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:32.162092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:32.162183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:32.162283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:32.162369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.162442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:32.162522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:32.196408Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:56:32.196709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:32.196766Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:32.197017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:32.197229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:32.197318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:32.197367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:32.197511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-10T10:56:32.197596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:32.197646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:32.197679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:32.197857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:32.197924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:56:32.197986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:56:32.198027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:56:32.198165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:56:32.198231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:56:32.198293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:56:32.198334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:56:32.198423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:56:32.198470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:56:32.198502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:56:32.198558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:56:32.198604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:56:32.198636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:56:32.199070Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-10T10:56:32.199171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T10:56:32.199270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-04-10T10:56:32.199370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T10:56:32.199567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:56:32.199639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:56:32.199680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:56:32.200049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:56:32.200113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.200161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T10:56:32.200326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:56:32.200375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:56:32.200406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T10:56:32.200612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:56:32.200664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:56:32.200699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T10:56:32.200826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:56:32.200871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:56:32.200973Z node 1 :TX_COLUMNS ... 
:24.550899Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T10:59:24.550963Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-04-10T10:59:24.551043Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-04-10T10:59:24.551113Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T10:59:24.551325Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.551386Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-10T10:59:24.551453Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T10:59:24.552266Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T10:59:24.552492Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.552555Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T10:59:24.552713Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1; 2025-04-10T10:59:24.552815Z node 54 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-10T10:59:24.552967Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[54:432:2450];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-10T10:59:24.553131Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.553294Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.553478Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.553895Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T10:59:24.554058Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.558745Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.558874Z node 54 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [54:433:2451] finished for tablet 9437184 2025-04-10T10:59:24.559667Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[54:432:2450];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.054},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.061}],"full":{"a":1744282764497248,"name":"_full_task","f":1744282764497248,"d_finished":0,"c":0,"l":1744282764558996,"d":61748},"events":[{"name":"bootstrap","f":1744282764497628,"d_finished":3994,"c":1,"l":1744282764501622,"d":3994},{"a":1744282764553865,"name":"ack","f":1744282764552230,"d_finished":1288,"c":1,"l":1744282764553518,"d":6419},{"a":1744282764553847,"name":"processing","f":1744282764501804,"d_finished":29939,"c":10,"l":1744282764553521,"d":35088},{"name":"ProduceResults","f":1744282764499911,"d_finished":17368,"c":13,"l":1744282764558832,"d":17368},{"a":1744282764558847,"name":"Finish","f":1744282764558847,"d_finished":0,"c":0,"l":1744282764558996,"d":149},{"name":"task_result","f":1744282764501829,"d_finished":28348,"c":9,"l":1744282764551557,"d":28348}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.559785Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[54:432:2450];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T10:59:24.560474Z node 54 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[54:432:2450];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.054},{"events":["l_ProduceResults","f_Finish"],"t":0.061},{"events":["l_ack","l_processing","l_Finish"],"t":0.062}],"full":{"a":1744282764497248,"name":"_full_task","f":1744282764497248,"d_finished":0,"c":0,"l":1744282764559845,"d":62597},"events":[{"name":"bootstrap","f":1744282764497628,"d_finished":3994,"c":1,"l":1744282764501622,"d":3994},{"a":1744282764553865,"name":"ack","f":1744282764552230,"d_finished":1288,"c":1,"l":1744282764553518,"d":7268},{"a":1744282764553847,"name":"processing","f":1744282764501804,"d_finished":29939,"c":10,"l":1744282764553521,"d":35937},{"name":"ProduceResults","f":1744282764499911,"d_finished":17368,"c":13,"l":1744282764558832,"d":17368},{"a":1744282764558847,"name":"Finish","f":1744282764558847,"d_finished":0,"c":0,"l":1744282764559845,"d":998},{"name":"task_result","f":1744282764501829,"d_finished":28348,"c":9,"l":1744282764551557,"d":28348}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-10T10:59:24.560596Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T10:59:24.496345Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-10T10:59:24.560661Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T10:59:24.561169Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> YdbYqlClient::ConnectDbAclIsStrictlyChecked |70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |70.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |70.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
>> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD]
>> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10
>> YdbYqlClient::TestDecimal1 [GOOD]
>> YdbYqlClient::TestDecimal35
>> YdbTableBulkUpsert::Simple [GOOD]
>> YdbTableBulkUpsert::SyncIndexShouldSucceed
>> ClientStatsCollector::PrepareQuery [GOOD]
>> ClientStatsCollector::CounterCacheMiss
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD]
Test command err:
Trying to start YDB, gRPC: 8382, MsgBus: 28503
2025-04-10T10:59:21.018832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637412145585715:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:21.019324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b14/r3tmp/tmpAwnZA0/pdisk_1.dat
2025-04-10T10:59:21.827940Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:21.850524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:59:21.850619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:59:21.855854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8382, node 1
2025-04-10T10:59:22.129696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:59:22.129726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:59:22.129737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:59:22.129852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28503
TClient is connected to server localhost:28503
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T10:59:23.391135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T10:59:23.444064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T10:59:23.806961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:59:24.117207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:59:24.246097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T10:59:26.013909Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637412145585715:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:26.014007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:59:26.898720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637437915391146:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:26.898839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:27.687198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:59:27.756031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:59:27.820511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:59:27.898690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:59:27.972066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:59:28.080979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:59:28.294865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637446505326274:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:28.294973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:28.295224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637446505326279:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:28.299648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:59:28.328039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T10:59:28.328691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637446505326281:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T10:59:28.431799Z node 1 :TX_PROXY ERROR: Actor# [1:7491637446505326339:3472] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD]
Test command err:
=== Server->StartServer(false);
2025-04-10T10:59:11.223010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637371252185371:2073];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:11.223114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:59:11.339784Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637372825345175:2152];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:11.640187Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-10T10:59:11.644434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T10:59:11.644772Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001801/r3tmp/tmpCeeCvf/pdisk_1.dat
2025-04-10T10:59:12.144523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:59:12.144654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:59:12.145757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:59:12.145835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:59:12.169946Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-10T10:59:12.170339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:59:12.171408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T10:59:12.283269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 3203, node 1
2025-04-10T10:59:12.340608Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T10:59:12.340645Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T10:59:12.418034Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:12.620618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001801/r3tmp/yandex3AtKch.tmp
2025-04-10T10:59:12.620649Z node 1 :NET_CLASSIFIER
WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001801/r3tmp/yandex3AtKch.tmp 2025-04-10T10:59:12.620811Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001801/r3tmp/yandex3AtKch.tmp 2025-04-10T10:59:12.620947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:12.774474Z INFO: TTestServer started on Port 6120 GrpcPort 3203 TClient is connected to server localhost:6120 PQClient connected to localhost:3203 === TenantModeEnabled() = 1 === Init PQ - start server on port 3203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:13.389672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:13.389893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.390138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:13.390484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:13.390576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.395492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:13.395670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:13.395870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.395919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:13.395936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 
2025-04-10T10:59:13.395962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-10T10:59:13.404024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:13.404057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T10:59:13.404103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:13.404232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.404271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:13.404288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T10:59:13.406695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.406730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.406765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.406796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.423513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:13.426478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T10:59:13.426675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:13.430070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282753472, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:13.430298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282753472 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:13.430358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.430735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T10:59:13.430799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:13.430974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:13.431029Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:13.433159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:13.433187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:13.433404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:13.433442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637375547153429:2449], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T10:59:13.433489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:13.433512Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T10:59:13.433606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:13.433622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.433648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:13.433657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:13.433672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T10:59:13.433697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready par ... Query: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:59:28.420901Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:28.420954Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637444264657270:2401] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:28.420983Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:28.425958Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-10T10:59:28.426289Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-10T10:59:28.426776Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 ===Assert streaming op1 ===Assert streaming op2 2025-04-10T10:59:28.434417Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T10:59:28.435071Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T10:59:28.435501Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-04-10T10:59:28.467291Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T10:59:28.490617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:43604" , at schemeshard: 72057594046644480 2025-04-10T10:59:28.490826Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:59:28.490971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-10T10:59:28.490984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:28.491213Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:28.491242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:59:28.491332Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-10T10:59:28.491345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-10T10:59:28.491370Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-10T10:59:28.491383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-10T10:59:28.491429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-10T10:59:28.491483Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, 
ready parts: 1/1, is published: false 2025-04-10T10:59:28.491506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-10T10:59:28.491519Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-10T10:59:28.491536Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-04-10T10:59:28.491550Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-04-10T10:59:28.491565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-04-10T10:59:28.503180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:28.503453Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-04-10T10:59:28.503719Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:28.503742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:28.503935Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:28.503955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7491637414199885061:2362], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2025-04-10T10:59:28.505357Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-10T10:59:28.505475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-10T10:59:28.505493Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-04-10T10:59:28.505514Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-04-10T10:59:28.505536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-10T10:59:28.505653Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die 2025-04-10T10:59:28.512768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2025-04-10T10:59:28.715669Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637444264657301:2410], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:28.715927Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzlhMTI0YmQtNDI1YWY2NTEtM2YwYTQxNGMtODBkNTAxMjY=, ActorId: [3:7491637444264657294:2406], ActorState: ExecuteState, TraceId: 01jrfnnn9f26k8c8h03fy9npcf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:28.716430Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:29.427652Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:59:29.429014Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 describe result for acl check 2025-04-10T10:59:29.429178Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 2025-04-10T10:59:29.429564Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|984a6f85-c5fb0be7-9a42107a-6b23f6b8_0 is DEAD 2025-04-10T10:59:29.429962Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-04-10T10:59:29.765021Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637448559624622:2418], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:29.765499Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTNhNjMzMDAtYTkxZTA0NjctNjA5NGY3NjUtNDI2ZWRkYzI=, ActorId: [3:7491637448559624620:2417], ActorState: ExecuteState, TraceId: 01jrfnnp9r903cjak9a7kv10br, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:29.765953Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 14002, MsgBus: 5403 2025-04-10T10:59:21.139277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637417410433433:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:21.139985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b0c/r3tmp/tmpoC1n5e/pdisk_1.dat 2025-04-10T10:59:21.887389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:21.887491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:21.890377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:21.916694Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14002, node 1 2025-04-10T10:59:22.018891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:22.018914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:22.018922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:22.019038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5403 TClient is connected to server localhost:5403 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:22.837378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:22.876681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:22.888921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:23.076775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:23.330204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:23.479534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:26.122396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637417410433433:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:26.122465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:26.500888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637438885271547:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:26.501032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:27.052285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.156263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.235983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.595375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.687045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.773508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:59:27.847407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637443180239373:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:27.847508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:27.847822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637443180239378:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:27.852490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:59:27.871948Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T10:59:27.872301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637443180239380:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:59:27.980313Z node 1 :TX_PROXY ERROR: Actor# [1:7491637443180239435:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Yq_1::CreateQuery_Without_Connection [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> TGRpcNewCoordinationClient::SessionMethods >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:12.318981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637376792539514:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:12.319063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:12.398039Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637375674824586:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:12.398085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:12.748868Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001753/r3tmp/tmpYHWRbH/pdisk_1.dat 2025-04-10T10:59:12.770797Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:13.294426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:13.294532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:13.296068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:13.296125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:13.303630Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:13.303851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:13.306556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:13.332050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:13.364976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 4492, node 1 2025-04-10T10:59:13.448145Z 
node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:13.471267Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:13.471287Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:13.634224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001753/r3tmp/yandexGJdM0W.tmp 2025-04-10T10:59:13.634251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001753/r3tmp/yandexGJdM0W.tmp 2025-04-10T10:59:13.634451Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001753/r3tmp/yandexGJdM0W.tmp 2025-04-10T10:59:13.634603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:13.707223Z INFO: TTestServer started on Port 19687 GrpcPort 4492 TClient is connected to server localhost:19687 PQClient connected to localhost:4492 === TenantModeEnabled() = 1 === Init PQ - start server on port 4492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:59:14.250640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:14.250848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.251117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:14.251363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:14.251403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.256990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:14.257123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:14.257319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.257381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:14.257408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:14.257437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2025-04-10T10:59:14.259372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.259416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:14.259435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-04-10T10:59:14.262008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.262028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.262067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.262123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:14.266574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:14.267619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-10T10:59:14.267669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-04-10T10:59:14.267689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-10T10:59:14.273882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-04-10T10:59:14.274051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:14.278441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282754319, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:14.278582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282754319 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:14.278625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.279226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-04-10T10:59:14.279262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.279445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:14.279516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:14.283326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:14.283361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:14.283560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:14.283608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637381087507547:2437], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-04-10T10:59:14.283659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.283702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-04-10T10:59:14.283785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-10T10:59:14.283798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:14.283817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-10T10:59:14.283825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:14.283839Z node 1 :FLAT_TX_SCH ... path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-10T10:59:30.143475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:30.143595Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:30.143615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:30.143715Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T10:59:30.143737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:30.143766Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T10:59:30.143775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:30.143819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-10T10:59:30.143861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-10T10:59:30.143885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-10T10:59:30.143896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T10:59:30.143908Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976715665:0 2025-04-10T10:59:30.143919Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-10T10:59:30.143930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-10T10:59:30.147526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:30.147755Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-04-10T10:59:30.147942Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:30.147957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-10T10:59:30.148195Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:30.148212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7491637421304623837:2367], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-04-10T10:59:30.149360Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:30.149460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:30.149475Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-10T10:59:30.149491Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-10T10:59:30.149512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-10T10:59:30.149599Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-04-10T10:59:30.156616Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T10:59:30.156647Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-10T10:59:30.157742Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-04-10T10:59:30.157842Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:49188 2025-04-10T10:59:30.157857Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49188 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-10T10:59:30.157867Z node 3 :PQ_WRITE_PROXY INFO: init check schema 
2025-04-10T10:59:30.158916Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-10T10:59:30.159125Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T10:59:30.159137Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:59:30.159147Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:30.159183Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637455664363341:2404] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:30.159203Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:30.159311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-10T10:59:30.159847Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-10T10:59:30.159996Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-04-10T10:59:30.160614Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 2025-04-10T10:59:30.162287Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-10T10:59:30.162655Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-04-10T10:59:30.162688Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:59:30.163583Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 describe result for acl check 2025-04-10T10:59:30.163683Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 2025-04-10T10:59:30.163915Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|cf7496c3-f9690bb4-c9a83e75-19fc8c53_0 is DEAD 2025-04-10T10:59:30.164203Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 
72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:30.572836Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637455664363362:2414], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:30.574712Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjYzYzliZmYtNGY2NTkzMjktMTEwYWRiNjQtYTVkNjVhNDI=, ActorId: [3:7491637455664363355:2410], ActorState: ExecuteState, TraceId: 01jrfnnq3h33jnf4wc1mj34rtj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:30.575209Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:31.649928Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637459959330683:2421], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:31.650883Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWZhM2E5ZGYtZmM1NDQ5MWQtMTEyOWY0YjYtZmM1OGUwNDY=, ActorId: [3:7491637459959330681:2420], ActorState: ExecuteState, TraceId: 01jrfnnr3x5kvzkwqzmbynnn7z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:31.651397Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:13.540341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637379962347181:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.540449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.595092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637380048134697:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.859073Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:13.860443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.861018Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001739/r3tmp/tmpY3YFVH/pdisk_1.dat 2025-04-10T10:59:14.448384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:14.502732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.502892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.504000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.504056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.515035Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:14.515559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:14.515948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5540, 
node 1 2025-04-10T10:59:14.724917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001739/r3tmp/yandexRxC5aV.tmp 2025-04-10T10:59:14.724974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001739/r3tmp/yandexRxC5aV.tmp 2025-04-10T10:59:14.725182Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001739/r3tmp/yandexRxC5aV.tmp 2025-04-10T10:59:14.725367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:14.793886Z INFO: TTestServer started on Port 21071 GrpcPort 5540 TClient is connected to server localhost:21071 PQClient connected to localhost:5540 === TenantModeEnabled() = 1 === Init PQ - start server on port 5540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:59:15.233811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:15.234053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.234368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:15.234682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:15.234736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.243536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:15.243721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:15.243935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.244006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:15.244041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:15.244061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2025-04-10T10:59:15.250285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.250367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:15.250387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-04-10T10:59:15.253053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-10T10:59:15.253077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-04-10T10:59:15.253099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-04-10T10:59:15.259477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.259547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.259615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.259657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:15.269347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:15.274915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-04-10T10:59:15.275089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:15.288407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282755320, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:15.288611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282755320 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:15.288650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.288964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-04-10T10:59:15.289001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.289154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:15.289209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:15.299391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:15.299502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:15.299710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:15.299733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637384257314976:2382], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-04-10T10:59:15.299777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.299802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-04-10T10:59:15.299926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-10T10:59:15.299941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:15.299959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-04-10T10:59:15.299969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:15.299990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-04-10T10:59:15.300040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-04-10T10:59:15.300061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-04-10T10:59:15.300072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-04-10T10:59:15.300148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404664 ... ceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:32.241792Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637464086000819:2444] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:32.241813Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:32.246570Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7491637464086000822:2444], now have 1 active actors on pipe 2025-04-10T10:59:32.246651Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-04-10T10:59:32.246824Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:32.246858Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-10T10:59:32.246951Z node 3 :PERSQUEUE INFO: new Cookie 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-04-10T10:59:32.247066Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T10:59:32.247120Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:59:32.247459Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:32.247492Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-10T10:59:32.247561Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T10:59:32.247639Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 2025-04-10T10:59:32.250371Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282772250 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:32.250536Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0" topic: "PQ/account/topic" 2025-04-10T10:59:32.251216Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write 1 messages with Id from 1 to 1 2025-04-10T10:59:32.251342Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: try to update token 2025-04-10T10:59:32.251389Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Send 1 message(s) (0 left), first sequence number is 3 2025-04-10T10:59:32.251655Z :INFO: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: close. 
Timeout = 10000 ms 2025-04-10T10:59:32.252070Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T10:59:32.252427Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T10:59:32.254411Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:32.254465Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-10T10:59:32.254599Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-10T10:59:32.254686Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T10:59:32.255016Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T10:59:32.255050Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-10T10:59:32.255111Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-04-10T10:59:32.255198Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-04-10T10:59:32.255262Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2025-04-10T10:59:32.255417Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-04-10T10:59:32.256416Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-04-10T10:59:32.257110Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1744282772254 2025-04-10T10:59:32.257248Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:59:32.257270Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:59:32.257295Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:59:32.257319Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:59:32.257341Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] m0000000000p123 2025-04-10T10:59:32.257355Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-10T10:59:32.257368Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:59:32.257388Z node 3 :PERSQUEUE DEBUG: [PQ: 
72075186224037893, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:59:32.257406Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:59:32.257473Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:59:32.257545Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-04-10T10:59:32.262286Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [3:7491637459791033209:2413] 2025-04-10T10:59:32.262443Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:59:32.262500Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:59:32.262553Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-10T10:59:32.262768Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:59:32.262845Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T10:59:32.263914Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 7 } 2025-04-10T10:59:32.263965Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: acknoledged message 1 2025-04-10T10:59:32.264550Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
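As an aside, the keys in the DumpKeyValueRequest section above encode blob coordinates directly in the key name: d0000000000_00000000000000000002_00000_0000000001_00000 lines up with "Partition 0 offset 2 partno 0 count 1 parts 0". A small standalone decoder, with the fixed field widths inferred purely from these log entries (an assumption, not a documented contract), might look like:

#include <cstdint>
#include <cstdio>
#include <string>

// Assumed layout (inferred from the log above):
//   d<partition:10>_<offset:20>_<partNo:5>_<count:10>_<internalParts:5>
struct TBlobKey {
    uint32_t Partition;
    uint64_t Offset;
    uint32_t PartNo;
    uint32_t Count;
    uint32_t InternalParts;
};

bool ParseBlobKey(const std::string& key, TBlobKey* out) {
    // Validate the separators of the fixed-width format before slicing.
    if (key.size() < 55 || key[0] != 'd' || key[11] != '_' ||
        key[32] != '_' || key[38] != '_' || key[49] != '_')
        return false;
    out->Partition     = std::stoul(key.substr(1, 10));
    out->Offset        = std::stoull(key.substr(12, 20));
    out->PartNo        = std::stoul(key.substr(33, 5));
    out->Count         = std::stoul(key.substr(39, 10));
    out->InternalParts = std::stoul(key.substr(50, 5));
    return true;
}

int main() {
    TBlobKey k{};
    if (ParseBlobKey("d0000000000_00000000000000000002_00000_0000000001_00000", &k))
        std::printf("partition=%u offset=%llu partNo=%u count=%u parts=%u\n",
                    k.Partition, (unsigned long long)k.Offset,
                    k.PartNo, k.Count, k.InternalParts);
    return 0;
}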
Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-04-10T10:59:32.356400Z :INFO: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session will now close 2025-04-10T10:59:32.356492Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: aborting 2025-04-10T10:59:32.357114Z :INFO: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: gracefully shut down, all writes complete 2025-04-10T10:59:32.357170Z :DEBUG: [] MessageGroupId [123] SessionId [123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0] Write session: destroy 2025-04-10T10:59:32.358247Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 grpc read done: success: 0 data: 2025-04-10T10:59:32.358275Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 grpc read failed 2025-04-10T10:59:32.361165Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 grpc closed 2025-04-10T10:59:32.361223Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|9a8f62b3-96b5edba-59d742b8-e9af68d4_0 is DEAD 2025-04-10T10:59:32.362063Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:32.362408Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7491637464086000822:2444] destroyed 2025-04-10T10:59:32.362464Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:59:32.491780Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637464086000830:2448], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:32.494466Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzliYWZkNjAtZDY2YmRkZGItNTljNTM5OTQtNDhjN2IxNDU=, ActorId: [3:7491637464086000828:2447], ActorState: ExecuteState, TraceId: 01jrfnnrz61608bmgbt5fk0hnc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:32.494968Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TKeyValueTracingTest::ReadHuge ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-04-10T10:58:33.663705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637211604896446:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:33.663754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 10:58:34.712713093 1164275 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:34.712907074 1164275 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:34.801952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.803937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:35.949832Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26147: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26147 2025-04-10T10:58:35.955649Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26147: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26147 } ] 2025-04-10T10:58:36.817566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:37.815393Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26147: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26147 } ] 2025-04-10T10:58:37.825360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:38.664217Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637211604896446:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:38.664278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:38.826796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:39.666430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0410 10:58:39.695727987 1164547 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:39.695941982 1164547 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:39.830656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:40.200737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637241669667933:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:40.200976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:40.206393Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26147: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26147 2025-04-10T10:58:40.218448Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26147: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26147 } ] 2025-04-10T10:58:40.246603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637241669667933:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9e/r3tmp/tmpzQmgli/pdisk_1.dat 2025-04-10T10:58:40.401719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491637241669667933:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 26147, node 1 2025-04-10T10:58:40.479919Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:58:40.479917Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:26285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:40.863412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:41.386142Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:41.386189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:41.386198Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:41.386441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:41.389647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:58:42.906110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:42.906205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:42.909750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected E0410 10:58:44.699290666 1164547 dns_resolver.cc:162] no server name supplied in dns URI E0410 10:58:44.699529309 1164547 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T10:58:44.739808Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T10:58:44.739858Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:44.739871Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T10:58:44.741754Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T10:58:44.741805Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:44.741812Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T10:58:44.742853Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-04-10T10:58:44.742871Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:44.742878Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T10:58:44.746708Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T10:58:44.746740Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:44.746748Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T10:58:44.747820Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T10:58:44.747837Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:44.747844Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T10:58:44.749878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T10:58:44.756273Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T10:58:44.756324Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:44.756334Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T10:58:44.761137Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T10:58:44.761175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:44.761199Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T10:58:44.766500Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T10:58:44.766545Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T10:58:44.767022Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T10:58:44.767057Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:44.767066Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T10:58:44.778464Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T10:58:44.778502Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:44.778523Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T10:58:44.779 ... 
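The transient failures earlier in this test (repeated TRANSPORT_UNAVAILABLE and "Scheduled retry for error" entries until the server socket comes up) all resolve through plain wait-and-retry. A generic sketch of that pattern, not the actual retry policy of the test framework, is:

#include <chrono>
#include <functional>
#include <thread>

// Re-run `op` until it succeeds or attempts are exhausted, doubling the
// delay each time. Illustrative only; parameters are arbitrary defaults.
bool RetryWithBackoff(const std::function<bool()>& op,
                      int attempts = 5,
                      std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    for (int i = 0; i < attempts; ++i) {
        if (op())
            return true;
        std::this_thread::sleep_for(delay);
        delay *= 2;  // exponential backoff before the next attempt
    }
    return false;
}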
.537176Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ...
2025-04-10T10:59:29.545236Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545297Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545326Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545451Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545484Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545507Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545579Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545608Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545630Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545718Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545768Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545830Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T10:59:29.545864Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:12.091745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637375600783248:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:12.098118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:12.214450Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637378591287282:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001772/r3tmp/tmplpgKpB/pdisk_1.dat 2025-04-10T10:59:12.537024Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:12.553458Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:12.640941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.063714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:13.172057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:13.172307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:13.173961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:13.179159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:13.179269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:13.190532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:13.195002Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:13.201063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63816, node 1 2025-04-10T10:59:13.505779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001772/r3tmp/yandexTzfLEu.tmp 2025-04-10T10:59:13.505810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001772/r3tmp/yandexTzfLEu.tmp 2025-04-10T10:59:13.506493Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001772/r3tmp/yandexTzfLEu.tmp 2025-04-10T10:59:13.506711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:13.576503Z INFO: TTestServer started on Port 9406 GrpcPort 63816 TClient is connected to server localhost:9406 PQClient connected to localhost:63816 === TenantModeEnabled() = 1 === Init PQ - start server on port 63816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:14.087620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:14.087829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.088058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:14.089006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:14.089083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:14.095173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:14.095339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:14.095560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.095660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:14.095678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:14.095703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-10T10:59:14.103019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.103084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:14.103131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-10T10:59:14.104689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:59:14.104709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-10T10:59:14.104731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:59:14.122492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.122542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.122585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.122627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-10T10:59:14.161088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:14.171033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-10T10:59:14.171211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:14.174512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282754221, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:14.174674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282754221 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:14.174716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.175011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-10T10:59:14.175040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T10:59:14.175239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:14.175306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:14.177340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:14.177362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:14.177581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:14.177602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637379895751053:2390], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-10T10:59:14.177673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:14.177697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-10T10:59:14.177819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T10:59:14.177830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T10:59:14.177856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T10:59:14.177865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T10:59:14.177885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-10T10:59:14.180346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T10:59:14.180372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-10T10:59:14.180388Z n ... t] [/Root] [9f108bd1-ebaf6e9b-84670da6-db854501] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:59:32.739080Z :INFO: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Starting read session 2025-04-10T10:59:32.739131Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Starting session to cluster null (localhost:18329) 2025-04-10T10:59:32.739402Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:59:32.739440Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:59:32.739495Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] Reconnecting session to cluster null in 0.000000s 2025-04-10T10:59:32.744377Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] Successfully connected. Initializing session 2025-04-10T10:59:32.745869Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-10T10:59:32.745903Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-04-10T10:59:32.748212Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-10T10:59:32.748384Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 read init: from# ipv6:[::1]:57876, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-10T10:59:32.748551Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 auth for : consumer_aba 2025-04-10T10:59:32.749475Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 Handle describe topics response 2025-04-10T10:59:32.749595Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 auth is DEAD 2025-04-10T10:59:32.749679Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 auth ok: topics# 1, initDone# 0 2025-04-10T10:59:32.750943Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 register session: topic# /Root/account1/write_topic 2025-04-10T10:59:32.751814Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7491637461426932707:2434] connected; active server actors: 1 2025-04-10T10:59:32.752508Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7491637461426932707:2434] session consumer_aba_3_2_2872924548860059564_v1 2025-04-10T10:59:32.752572Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-04-10T10:59:32.752635Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-04-10T10:59:32.752679Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_2872924548860059564_v1" (Sender=[3:7491637461426932704:2434], 
Pipe=[3:7491637461426932707:2434], Partitions=[], ActiveFamilyCount=0) 2025-04-10T10:59:32.752708Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-04-10T10:59:32.752779Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-10T10:59:32.752842Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_2872924548860059564_v1" (Sender=[3:7491637461426932704:2434], Pipe=[3:7491637461426932707:2434], Partitions=[], ActiveFamilyCount=0) 2025-04-10T10:59:32.752922Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_2872924548860059564_v1" sender [3:7491637461426932704:2434] lock partition 0 for ReadingSession "consumer_aba_3_2_2872924548860059564_v1" (Sender=[3:7491637461426932704:2434], Pipe=[3:7491637461426932707:2434], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-10T10:59:32.753021Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-10T10:59:32.753072Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000251s 2025-04-10T10:59:32.754283Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_2872924548860059564_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7491637461426932707 RawX2: 4503612512274818 } Path: "/Root/account1/write_topic" } 2025-04-10T10:59:32.754413Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-04-10T10:59:32.754823Z :INFO: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] Server session id: consumer_aba_3_2_2872924548860059564_v1 2025-04-10T10:59:32.754933Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7491637461426932709:2437] 2025-04-10T10:59:32.755046Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:59:32.755153Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_2872924548860059564_v1:1 with generation 1 2025-04-10T10:59:32.755463Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 grpc read done: success# 1, data# { read { } } 2025-04-10T10:59:32.755539Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 got read request: guid# c9290319-25df8abb-6b1edc47-d0575c87 2025-04-10T10:59:32.778091Z node 3 :PQ_READ_PROXY DEBUG: session 
cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1744282772626 CreateTimestampMS: 1744282772611 SizeLag: 165 WriteTimestampEstimateMS: 1744282772626 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T10:59:32.784897Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-04-10T10:59:32.785014Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-04-10T10:59:32.790366Z :INFO: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Closing read session. Close timeout: 0.000000s 2025-04-10T10:59:32.790430Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-04-10T10:59:32.790472Z :INFO: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Counters: { Errors: 0 CurrentSessionLifetimeMs: 51 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:32.790578Z :NOTICE: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T10:59:32.790620Z :DEBUG: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] [null] Abort session to cluster 2025-04-10T10:59:32.791700Z :NOTICE: [/Root] [/Root] [d2d1d045-e5ec93b7-86b57296-35d72c09] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T10:59:32.797817Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 grpc read done: success# 0, data# { } 2025-04-10T10:59:32.797846Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 grpc read failed 2025-04-10T10:59:32.797875Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 grpc closed 2025-04-10T10:59:32.797913Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_2872924548860059564_v1 is DEAD 2025-04-10T10:59:32.802442Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_2872924548860059564_v1 2025-04-10T10:59:32.813591Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7491637461426932707:2434] disconnected; active server actors: 1 2025-04-10T10:59:32.813629Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7491637461426932707:2434] client consumer_aba disconnected session consumer_aba_3_2_2872924548860059564_v1 2025-04-10T10:59:33.402360Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637465721900026:2441], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:33.404196Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2RhZWZlYmYtNzFhNjI3OTktZTZhYTI4NGYtZjE0NDIzZjU=, ActorId: [3:7491637465721900024:2440], ActorState: ExecuteState, TraceId: 01jrfnnsvnfk15rd5r1ge391z4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:33.412810Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |70.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants+UseSink |70.3%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T10:59:14.396092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637384107456521:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:14.396136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:14.489243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637384568813867:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:14.489442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:14.751393Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:14.769533Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001735/r3tmp/tmph9AOJ8/pdisk_1.dat 2025-04-10T10:59:15.089109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:15.103297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:15.103400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:15.106965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-10T10:59:15.107039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:15.113686Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:15.113877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:15.115075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30760, node 1 2025-04-10T10:59:15.261770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001735/r3tmp/yandexh29nCK.tmp 2025-04-10T10:59:15.261798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001735/r3tmp/yandexh29nCK.tmp 2025-04-10T10:59:15.263526Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001735/r3tmp/yandexh29nCK.tmp 2025-04-10T10:59:15.263766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:15.322632Z INFO: TTestServer started on Port 31283 GrpcPort 30760 TClient is connected to server localhost:31283 PQClient connected to localhost:30760 === TenantModeEnabled() = 1 === Init PQ - start server on port 30760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
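The KQP_COMPILE_ACTOR SCHEME_ERROR and the PERSQUEUE_CLUSTER_TRACKER "failed to list clusters" entries above look like the legacy cluster-discovery path probing the V2 config table, which these new-schemecache fixtures never create; note the suites are still reported [GOOD] despite the ERROR lines. A quick way to confirm the path is simply absent (rather than an ACL problem) is to describe it with the ydb CLI against the test server; the endpoint and database below are taken from the GrpcPort/Root values in this log, and the probe itself is an illustrative assumption, not something the test runs:

ydb -e grpc://localhost:30760 -d /Root scheme describe PQ/Config/V2/Cluster
# expected while the V2 config tables are absent: SCHEME_ERROR, path not found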
2025-04-10T10:59:15.764057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:15.764301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.764539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T10:59:15.764811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:15.764866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.767930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:15.768136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T10:59:15.768339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.768382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T10:59:15.768417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T10:59:15.768434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T10:59:15.770705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.770763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T10:59:15.770783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
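For readers tracing the ESchemeOpAlterSubDomain transaction above, the numeric state codes in these schemeshard lines (here and in the lines that follow) map onto the sub-operation pipeline the log itself names:

  2 -> 3     TCreateParts ("no shards to create, do next state")
  3 -> 128   NSubDomainState::TConfigureParts
  128 -> 240 NSubDomainState::TPropose, after TEvOperationPlan arrives from coordinator 72057594046316545
  240        TDone: progress 1/1, then TTxPublishToSchemeBoard announces the path and the tx state is removed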
2025-04-10T10:59:15.772731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.772758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.772793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.772852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:15.777304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:15.777698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:15.777720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T10:59:15.777745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:15.779074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T10:59:15.779338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T10:59:15.782275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282755824, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:15.782485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282755824 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:15.782524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.782834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T10:59:15.782875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T10:59:15.783044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:15.783121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T10:59:15.785351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:15.785399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:15.785581Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:15.785600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637388402424226:2399], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T10:59:15.785631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:15.785655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T10:59:15.785738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:15.785763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:15.785786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T10:59:15.785795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:15.785807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T10:59:15.785862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T10:59:15.785886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-10T10:59:15.785896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-10T10:59:15.785935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-10T10:59:33.268726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-10T10:59:33.268740Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-10T10:59:33.268756Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-10T10:59:33.268774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-04-10T10:59:33.268961Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-10T10:59:33.269020Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-04-10T10:59:33.269030Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-04-10T10:59:33.269042Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-04-10T10:59:33.269053Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-04-10T10:59:33.269095Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2025-04-10T10:59:33.269119Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7491637466479034716:2378] 2025-04-10T10:59:33.274250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2025-04-10T10:59:33.274293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-04-10T10:59:33.390328Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T10:59:33.390367Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-10T10:59:33.391932Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-04-10T10:59:33.392033Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:32860 2025-04-10T10:59:33.392048Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:32860 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-04-10T10:59:33.392058Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:59:33.396228Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-10T10:59:33.396405Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T10:59:33.396416Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:59:33.396423Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:33.396460Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637466479034954:2389] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:33.396480Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:33.397378Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-10T10:59:33.397698Z node 3 :PERSQUEUE INFO: new Cookie 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-10T10:59:33.402380Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 2025-04-10T10:59:33.406794Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 grpc read done: success: 0 data: 2025-04-10T10:59:33.406820Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 grpc read failed 2025-04-10T10:59:33.407070Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 2025-04-10T10:59:33.407087Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c72e3b17-863e2b8b-e3b0f070-60ca81f_0 is DEAD 2025-04-10T10:59:33.407353Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2025-04-10T10:59:33.435324Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T10:59:33.435353Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-10T10:59:33.435906Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-04-10T10:59:33.435999Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:32860 2025-04-10T10:59:33.436015Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:32860 proto=v1 topic=topic1 durationSec=0 2025-04-10T10:59:33.436037Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T10:59:33.442757Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-10T10:59:33.442907Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T10:59:33.442915Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T10:59:33.442936Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND 
ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T10:59:33.442975Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637466479034974:2398] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T10:59:33.442996Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T10:59:33.443773Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-10T10:59:33.443990Z node 3 :PERSQUEUE INFO: new Cookie 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-10T10:59:33.444391Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 2025-04-10T10:59:33.446777Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 grpc read done: success: 0 data: 2025-04-10T10:59:33.446793Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 grpc read failed 2025-04-10T10:59:33.446823Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 grpc closed 2025-04-10T10:59:33.446835Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|fa8fc19a-bb00147e-2452a0b9-ce97dd38_0 is DEAD 2025-04-10T10:59:33.447531Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:33.998421Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491637466479034991:2408], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:34.000338Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzVmZjg2MTUtMzE4OThhYzctNjU1YjNhOTgtNzhiNWIzZDc=, ActorId: [3:7491637466479034984:2404], ActorState: ExecuteState, TraceId: 01jrfnntcw3yxbrwfmnrffd1v4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:34.006470Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TColumnShardTestSchema::RebootHotTiersWithStat >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut >> YdbIndexTable::CreateTableAddIndex [GOOD] |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut >> YdbIndexTable::AlterTableAddIndex >> BasicStatistics::ServerlessGlobalIndex >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams >> YdbYqlClient::TestDecimal35 [GOOD] >> ClientStatsCollector::CounterCacheMiss [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbYqlClient::TestDecimalFullStack >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> ClientStatsCollector::CounterRetryOperation >> YdbTableBulkUpsert::Timeout >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> TKeyValueTracingTest::ReadHuge [FAIL] |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::CopyTables >> YdbYqlClient::RetryOperationTemplate [GOOD] >> TSchemeShardTest::AlterTable [GOOD] >> YdbYqlClient::RetryOperationSync >> TSchemeShardTest::AlterTableDropColumnReCreateSplit |70.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.3%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TGRpcStreamingTest::ClientNeverWrites >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |70.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1020541C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFE5449E) std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24) NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6) NUnitTest::RunMain(int, char**)+5213 (0x106E978D) ??+0 (0x7F5EC4D15D90) __libc_start_main+128 (0x7F5EC4D15E40) _start+41 (0xD7D1029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-04-10T10:59:01.458467Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:59:01.459614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:01.459670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:01.476979Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:59:01.477587Z 
node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T10:59:01.477899Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.529332Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:59:01.558141Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.559015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:59:01.561487Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T10:59:01.561578Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T10:59:01.561645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T10:59:01.562179Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:59:01.562449Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:59:01.562530Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T10:59:01.651133Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:59:01.697500Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T10:59:01.697710Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:59:01.697805Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T10:59:01.697838Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T10:59:01.697871Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T10:59:01.697910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.698162Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.698202Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.698412Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T10:59:01.698485Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T10:59:01.698538Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.698610Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:01.698661Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T10:59:01.698696Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:01.698745Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:01.698784Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T10:59:01.698824Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:01.698924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.698972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 
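The TTxDataShardMiniKQL::TableStatsHistograms trace this block belongs to is long; to reproduce just this case locally, the usual approach is to build and filter the one suite, roughly as follows (the -F test-name filter is assumed from ya's standard test options, not taken from this log; the suite path is the one printed in the [TM] header above):

./ya make -t ydb/core/tx/datashard/ut_minikql -F 'TTxDataShardMiniKQL::TableStatsHistograms'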
2025-04-10T10:59:01.699014Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T10:59:01.701113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T10:59:01.701162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:01.701263Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:01.701449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T10:59:01.701495Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T10:59:01.701543Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T10:59:01.701601Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.701647Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T10:59:01.701681Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T10:59:01.701723Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.702014Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:01.702045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T10:59:01.702073Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:01.702102Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.702815Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T10:59:01.702868Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:01.702910Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T10:59:01.702955Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.702991Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T10:59:01.717587Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:01.717677Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.717724Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.717792Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T10:59:01.717876Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:01.718479Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient 
[1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.718556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.718628Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T10:59:01.718781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T10:59:01.718815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:59:01.718971Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.719030Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.719074Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T10:59:01.719130Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T10:59:01.723281Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T10:59:01.723370Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.723658Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.723705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.723769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.723815Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:01.723858Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:01.723902Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T10:59:01.723944Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T10:59:01.723999Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.724050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T10:59:01.724099Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T10:59:01.724160Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:01.724354Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T10:59:01.724407Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.724434Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T10:59:01.724461Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T10:59:01.724486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T10:59:01.724563Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 
9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.724600Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T10:59:01.724644Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:01.724703Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:01.724763Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T10:59:01.724808Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T10:59:01.724847Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T10:59:01.724923Z node 1 :TX_DATA ... 4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-04-10T10:59:43.258907Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:43.260636Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:43.261375Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-04-10T10:59:43.261469Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-10T10:59:43.261506Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-04-10T10:59:43.261543Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:43.261573Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:43.261612Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:43.261678Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-04-10T10:59:43.261715Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-10T10:59:43.261737Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T10:59:43.261769Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-04-10T10:59:43.261793Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:43.261837Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:43.262190Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:43.262286Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for 
[0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:43.262341Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:43.262369Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:43.262407Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:43.262440Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-04-10T10:59:43.262481Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 2025-04-10T10:59:43.262508Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:43.262541Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-04-10T10:59:43.262573Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-04-10T10:59:43.262610Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-10T10:59:43.262631Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:43.262656Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-04-10T10:59:43.292298Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:43.292367Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-04-10T10:59:43.292414Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-10T10:59:43.292506Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-04-10T10:59:43.298670Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-10T10:59:43.298740Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-10T10:59:43.307950Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4548:6467], Recipient [3:236:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:43.308020Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:43.308061Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4547:6466], serverId# [3:4548:6467], sessionId# [0:0:0] 2025-04-10T10:59:43.308517Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-04-10T10:59:43.308556Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:43.308663Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:43.309311Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-04-10T10:59:43.309380Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-10T10:59:43.309433Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2025-04-10T10:59:43.309472Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:43.309503Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:43.309548Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:43.309610Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-04-10T10:59:43.309646Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-10T10:59:43.309668Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T10:59:43.309690Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-04-10T10:59:43.309712Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:43.309754Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T10:59:43.314222Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:43.314327Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:43.314416Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:43.314457Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:43.314492Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:43.314531Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-04-10T10:59:43.314583Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-04-10T10:59:43.314612Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:43.314643Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 
to execution unit CompletedOperations 2025-04-10T10:59:43.314675Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-04-10T10:59:43.314727Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-10T10:59:43.314754Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:43.314787Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-04-10T10:59:43.341531Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:43.341614Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-04-10T10:59:43.341662Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-10T10:59:43.341759Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:43.439606Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T10:59:43.439684Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-10T10:59:43.456694Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:233:2228], Recipient [3:236:2229]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-04-10T10:59:43.460836Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4562:6480], Recipient [3:236:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:43.460929Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:43.460994Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4561:6479], serverId# [3:4562:6480], sessionId# [0:0:0] 2025-04-10T10:59:43.461674Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4560:6478], Recipient [3:236:2229]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 } >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> YdbIndexTable::AlterTableAddIndex [GOOD] >> YdbLogStore::AlterLogStore >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TGRpcYdbTest::ReadTablePg >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-04-10T10:59:41.096698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637503307146136:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:41.096743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/0015a4/r3tmp/tmpgcfiGK/pdisk_1.dat 2025-04-10T10:59:42.214408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:42.340181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:42.340283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:42.347697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:42.400491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:42.611744Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:59346 2025-04-10T10:59:42.611977Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:59346 2025-04-10T10:59:42.612122Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7491637507602113974:2263] peer# ipv6:[::1]:59346 2025-04-10T10:59:42.618333Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-10T10:59:42.631287Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-04-10T10:59:42.631398Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# unknown (finish done) >> TGRpcStreamingTest::ClientNeverWrites [GOOD] |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-04-10T10:59:44.246585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637513048178509:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:44.247174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001595/r3tmp/tmpOUMtCt/pdisk_1.dat 2025-04-10T10:59:44.889873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:44.890034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:44.892196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:44.945661Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:45.138601Z node 1 :GRPC_SERVER DEBUG: 
[0x51f000029280] stream accepted Name# Session ok# true peer# ipv6:[::1]:38286 2025-04-10T10:59:45.141649Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade attach Name# Session actor# [1:7491637517343146201:2259] peer# ipv6:[::1]:38286 2025-04-10T10:59:45.141702Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade read Name# Session peer# ipv6:[::1]:38286 2025-04-10T10:59:45.141778Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade write Name# Session data# peer# ipv6:[::1]:38286 2025-04-10T10:59:45.142069Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade finish Name# Session peer# ipv6:[::1]:38286 grpc status# (0) message# 2025-04-10T10:59:45.142792Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] write finished Name# Session ok# true peer# ipv6:[::1]:38286 2025-04-10T10:59:45.143085Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] read finished Name# Session ok# false data# peer# ipv6:[::1]:38286 2025-04-10T10:59:45.143119Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream done notification Name# Session ok# true peer# ipv6:[::1]:38286 2025-04-10T10:59:45.143145Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream finished Name# Session ok# true peer# ipv6:[::1]:38286 grpc status# (0) message# 2025-04-10T10:59:45.143191Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] deregistering request Name# Session peer# ipv6:[::1]:38286 (finish done) 2025-04-10T10:59:45.143463Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-10T10:59:45.143479Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-10T10:59:45.143491Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterTableFollowers >> ObjectStorageListingTest::FilterListing |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-04-10T10:59:11.534004Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637373318023225:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:11.534122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:11.636006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637373595611280:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:11.639337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:11.851865Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:11.851817Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001793/r3tmp/tmp19ovqZ/pdisk_1.dat 2025-04-10T10:59:12.227556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:12.233967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:12.234517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:12.236351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:12.236417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:12.239553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:12.246258Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:12.248329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26356, node 1 2025-04-10T10:59:12.475816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001793/r3tmp/yandexUD9fdY.tmp 2025-04-10T10:59:12.475859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001793/r3tmp/yandexUD9fdY.tmp 2025-04-10T10:59:12.476113Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001793/r3tmp/yandexUD9fdY.tmp 2025-04-10T10:59:12.476290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:12.642838Z INFO: TTestServer started on Port 16058 GrpcPort 26356 TClient is connected to server localhost:16058 PQClient connected to localhost:26356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:13.160831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:59:13.238934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:59:15.872741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637390497893536:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:15.873241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637390497893514:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:15.873341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:15.879192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:59:15.883650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637390497893571:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:15.883755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:15.953690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637390497893539:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:59:16.158469Z node 1 :TX_PROXY ERROR: Actor# [1:7491637394792860912:2770] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:16.187675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:59:16.308339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:16.329353Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637394792860931:2350], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:16.329641Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzY5MTIxMjgtMjU3N2U0ZDAtMTZlZDk5ZDUtNzdkNTFkNzA=, ActorId: [1:7491637390497893507:2338], ActorState: ExecuteState, TraceId: 01jrfnn8r2ft0gbf536dj3pazb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:16.332082Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:16.489647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:59:16.534220Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637373318023225:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:16.534283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:16.623034Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637373595611280:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:16.623102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T10:59:16.817148Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfnn9ht9kjjvqee953g98wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI0NDdiOWItNTkxNjIzMmItMTI3MWEyNjEtMmY4M2FhOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491637394792861401:3120] === CheckClustersList. Ok WaitRootIsUp 'Root'...
TClient::Ls request: Root 2025-04-10T10:59:22.603004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637373318023511:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:22.603297Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637373318023511:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-10T10:59:22.603406Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637373318023511:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637377612991276:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282753234 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T10:59:22.603518Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637373318023511:2147], cacheItem# { Subscriber: { Subscriber: [1:7491637377612991276:2457] DomainOwnerId: 72057594046644480 Type: ... self# [3:7491637525338176187:4957], recipient# [3:7491637525338176185:2618], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:59:48.001031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637525338176188:4958], recipient# [3:7491637525338176186:2619], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: 
Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:59:48.002187Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637448028761066:2167], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.002294Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637448028761066:2167], cacheItem# { Subscriber: { Subscriber: [3:7491637482388500479:2858] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282778595 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:59:48.002374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637448028761066:2167], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.002423Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637448028761066:2167], cacheItem# { Subscriber: { Subscriber: [3:7491637482388500344:2777] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282777951 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:59:48.002551Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637529633143487:4959], recipient# [3:7491637448028760963:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:59:48.002652Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637529633143488:4960], recipient# [3:7491637448028760963:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:59:48.003300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637448028761066:2167], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.003362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637448028761066:2167], cacheItem# { Subscriber: { Subscriber: [3:7491637452323728761:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282770846 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:59:48.003471Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637529633143489:4961], recipient# [3:7491637448028760963:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T10:59:48.338912Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637448028761066:2167], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.339119Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637448028761066:2167], cacheItem# { Subscriber: { Subscriber: [3:7491637452323728769:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:59:48.339225Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637529633143493:4964], recipient# [3:7491637529633143492:2622], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.588967Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637448028761066:2167], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:48.589132Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637448028761066:2167], cacheItem# { Subscriber: { Subscriber: [3:7491637482388500218:2717] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T10:59:48.589247Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637529633143502:4968], recipient# [3:7491637529633143501:2624], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |70.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart |70.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |70.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TSchemeShardTest::ConsistentCopyTableToDeletedPath 
[GOOD] >> TSchemeShardTest::CopyIndexedTable |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterPersQueueGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-04-10T10:57:18.659417Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1744282638659381 2025-04-10T10:57:19.155849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636890327604177:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:19.155933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:19.261474Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636889785481595:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:19.265297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:57:19.596337Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:57:19.598567Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00149e/r3tmp/tmpULvJb2/pdisk_1.dat 2025-04-10T10:57:19.952727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:19.952857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:19.959358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:19.960560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:19.960659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:19.970283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:57:19.970410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:57:19.970620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19818, node 1 2025-04-10T10:57:20.178887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00149e/r3tmp/yandex8plIVM.tmp 2025-04-10T10:57:20.178914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00149e/r3tmp/yandex8plIVM.tmp 2025-04-10T10:57:20.179056Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00149e/r3tmp/yandex8plIVM.tmp 2025-04-10T10:57:20.179162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:20.280192Z INFO: TTestServer started on Port 10965 GrpcPort 19818 TClient is connected to server localhost:10965 PQClient connected to localhost:19818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:20.871660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:20.896955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:57:24.151167Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636890327604177:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:24.151267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:24.244740Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636889785481595:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:24.244819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:24.341158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636911802441669:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.341266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.344767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636911802441695:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.350689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:57:24.354486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636911802441730:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.354567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:24.412214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636911802441698:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:57:24.682313Z node 1 :TX_PROXY ERROR: Actor# [1:7491636911802441770:2680] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:24.720187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:24.741877Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636911260318409:2316], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.743105Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636911802441789:2350], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:57:24.746195Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWI4YjlmNzEtMWY2MWVkYTYtZmJkOWY0OWYtYzdlZWFlZWM=, ActorId: [1:7491636911802441661:2338], ActorState: ExecuteState, TraceId: 01jrfnhvtmc1dagx4ybgcf4szt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.745624Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzcwNzY5OC1kZGJhNDMyNS1lNGU1ZjdmNi05MjgyZTI3ZQ==, ActorId: [2:7491636911260318367:2308], ActorState: ExecuteState, TraceId: 01jrfnhw1me61gxj4d5947t114, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:57:24.748708Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.749215Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:57:24.987856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:25.186354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19818", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-10T10:57:25.547751Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnhwvga9vxwgm0bq9z1pk4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ5ZTBkMDEtZGM3ZGRiNjUtNDY4YzU1NGEtZmI2MjA1MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491636916097409503:2990] === CheckClustersList. Ok 2025-04-10T10:57:31.082956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is un ...
025-04-10T10:59:29.305159Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 2025-04-10T10:59:29.306382Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-10T10:59:29.306448Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744282769306 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T10:59:29.306583Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. Init response: last_seq_no: 2 session_id: "src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-04-10T10:59:30.304237Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637448362517527:3362] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-04-10T10:59:30.378278Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637448362517527:3362] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-04-10T10:59:30.378324Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7491637448362517527:3362] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-10T10:59:47.474393Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-10T10:59:47.474447Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-04-10T10:59:47.475240Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-10T10:59:47.476142Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-04-10T10:59:47.476265Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 >>> Ready to answer: ok 2025-04-10T10:59:49.298100Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2025-04-10T10:59:49.302962Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: try to update token 2025-04-10T10:59:49.303024Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-04-10T10:59:49.306749Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T10:59:49.307090Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T10:59:49.413885Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:59:49.413933Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:59:49.415243Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-10T10:59:49.418378Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-10T10:59:49.419958Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T10:59:49.420007Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T10:59:49.420069Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-10T10:59:49.420307Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-04-10T10:59:49.457051Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-04-10T10:59:49.457682Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1744282789457 2025-04-10T10:59:49.457820Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T10:59:49.457839Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T10:59:49.457861Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T10:59:49.457881Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T10:59:49.457901Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-04-10T10:59:49.457919Z node 4 :PERSQUEUE DEBUG: [PQ: 
72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-10T10:59:49.457935Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T10:59:49.457954Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T10:59:49.457971Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T10:59:49.464583Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T10:59:49.464724Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-04-10T10:59:49.469010Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7491637012059994435:2409] 2025-04-10T10:59:49.469137Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T10:59:49.469191Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T10:59:49.469239Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-10T10:59:49.469458Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2025-04-10T10:59:49.469492Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T10:59:49.469695Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 160 2025-04-10T10:59:49.472841Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-10T10:59:49.474866Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-10T10:59:49.475076Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 13000000 } min_queue_wait_time { nanos: 33000000 } max_queue_wait_time { nanos: 33000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-10T10:59:49.475115Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2025-04-10T10:59:49.475145Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-04-10T10:59:49.483343Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 grpc read done: success: 0 data: 2025-04-10T10:59:49.483381Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 grpc read failed 2025-04-10T10:59:49.483577Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 2025-04-10T10:59:49.483606Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0 is DEAD 2025-04-10T10:59:49.483961Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T10:59:49.484769Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491637448362517562:3362] destroyed 2025-04-10T10:59:49.484836Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T10:59:49.488872Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-04-10T10:59:49.489008Z :ERROR: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-04-10T10:59:49.489090Z :ERROR: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-04-10T10:59:49.489134Z :INFO: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session will now close 2025-04-10T10:59:49.489217Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: aborting 2025-04-10T10:59:49.532168Z :DEBUG: [/Root] TraceId [] SessionId [src_id|879513e7-74f330f9-8a576403-eb9c7dc5_0] MessageGroupId [src_id] Write session: destroy 2025-04-10T10:59:50.427577Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715880, task: 1, CA Id [3:7491637538556831850:3533]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T10:59:50.462978Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715880, task: 1, CA Id [3:7491637538556831850:3533]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:59:50.521016Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715880, task: 1, CA Id [3:7491637538556831850:3533]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T10:59:50.595297Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715880, task: 1, CA Id [3:7491637538556831850:3533]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> IcbAsActorTests::TestHttpPostReaction |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogLast50ByResource |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TGRpcYdbTest::ReadTablePg [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable |70.5%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> ObjectStorageListingTest::FilterListing [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:58:54.076874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:58:54.077004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.077057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:58:54.077116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:58:54.077181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:58:54.077217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:58:54.077287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:58:54.077451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:58:54.077864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:58:54.313067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:58:54.313307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:54.349902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:58:54.350549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:58:54.350874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:58:54.361469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:58:54.361811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:58:54.362929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.363529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:58:54.367538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.369726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T10:58:54.369822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.370130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:58:54.370304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.370374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:58:54.370641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.380063Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:58:54.527738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:58:54.528097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.528409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:58:54.528696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:58:54.528803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.534645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.534922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:58:54.535133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.535230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:58:54.535285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:58:54.535333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:58:54.538023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.538098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:58:54.538141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:58:54.541109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.541160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.541241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.541300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.545499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:58:54.551109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:58:54.551398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:58:54.552641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:58:54.552889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:58:54.552997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.553354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:58:54.553434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:58:54.553648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:58:54.553775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:58:54.557838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:58:54.557888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:58:54.558101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:58:54.558182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:58:54.558466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:58:54.558547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:58:54.558663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.558708Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.558751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:58:54.558785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.558823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:58:54.558872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:58:54.558916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:58:54.558986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:58:54.559077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:58:54.559122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:58:54.559178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:58:54.561982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.562120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:58:54.562185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
02:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:55.981834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T10:59:55.982054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:55.986400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T10:59:55.986592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T10:59:55.987497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:55.987658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:55.987729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T10:59:55.987888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T10:59:55.988051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:56.046584Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3458:5421], attempt# 0 2025-04-10T10:59:56.086082Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3458:5421], sender# [1:3457:5420] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:6594 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 114922B7-2774-49BC-9903-0AC9E3287A02 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T10:59:56.098605Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5421], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:6594 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 82137DB6-5EEC-4859-A902-7EFB8CBAA6D4 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-10T10:59:56.104373Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5421], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T10:59:56.104858Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5420] 2025-04-10T10:59:56.105330Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5421], sender# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-10T10:59:56.105895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:56.105952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T10:59:56.106253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:56.106310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T10:59:56.107177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:56.107267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:56.108732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:56.108846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T10:59:56.108886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T10:59:56.108931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T10:59:56.108974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T10:59:56.109072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:6594 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AF7DA3F1-DC15-4780-A9E3-B1556FAB4403 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-04-10T10:59:56.115535Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5421], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-04-10T10:59:56.115609Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5421], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T10:59:56.116464Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T10:59:56.129971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T10:59:56.160253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T10:59:56.160334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T10:59:56.160527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T10:59:56.160647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T10:59:56.160727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:56.160769Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:56.160811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T10:59:56.160853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T10:59:56.161024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:56.165668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:56.166465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T10:59:56.166541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T10:59:56.166677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T10:59:56.166736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:56.166786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-04-10T10:59:56.166823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:56.166866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T10:59:56.166942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T10:59:56.166996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T10:59:56.167042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T10:59:56.167079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T10:59:56.167225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T10:59:56.171186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T10:59:56.171245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5407] TestWaitNotification: OK eventTxId 102 |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] >> TGRpcNewClient::InMemoryTables |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-04-10T10:59:56.046468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:59:56.046716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:56.046878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001531/r3tmp/tmpSgupKj/pdisk_1.dat 2025-04-10T10:59:56.433869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.504784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:56.564061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:56.564215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:56.579562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:56.681557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.753942Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T10:59:56.754278Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:56.803867Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:56.804015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:59:56.805805Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:59:56.805899Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:59:56.805958Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:59:56.806516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:59:56.806672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:59:56.806766Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T10:59:56.818889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:59:56.848172Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:59:56.848436Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:59:56.848576Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T10:59:56.848616Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:59:56.848662Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:59:56.848701Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:59:56.849198Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:59:56.849333Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:59:56.849430Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:59:56.849484Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:56.849530Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:59:56.849585Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:59:56.849685Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T10:59:56.850874Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:59:56.851169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:59:56.851257Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:59:56.853105Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:59:56.866791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T10:59:56.866925Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:57.039327Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T10:59:57.044290Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T10:59:57.044387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:59:57.045373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:59:57.045441Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:57.045489Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T10:59:57.045772Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T10:59:57.045963Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T10:59:57.048325Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:59:57.048418Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T10:59:57.050809Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T10:59:57.052983Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T10:59:57.054754Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T10:59:57.054820Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:59:57.055364Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T10:59:57.055452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:59:57.056433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:59:57.056474Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:59:57.056538Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T10:59:57.056633Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T10:59:57.056694Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T10:59:57.056809Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:59:57.062351Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:59:57.065092Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T10:59:57.065174Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T10:59:57.065782Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T10:59:57.078680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.078819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.078909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.086780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:57.094251Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:59:57.266992Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:59:57.270772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:59:57.348976Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:57.738235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfnph0h0kvgxete96bcc7k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhZjRhYjItYmU3ZDQzNTItYjNmN2Q0YzYtYzU5OTMxZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:57.745108Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T10:59:57.745669Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T10:59:57.745913Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T10:59:57.759260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:59:57.765314Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-10T10:59:57.765620Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:59:57.765850Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-04-10T10:59:57.766066Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-10T10:59:57.768666Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0] 2025-04-10T10:59:57.768865Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T10:59:57.769105Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-04-10T10:59:57.769337Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0] >> YdbYqlClient::TestDescribeDirectory [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 |70.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:54.076261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:54.076369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:54.076408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:54.076450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:54.076498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:54.076569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:54.076635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:54.076736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:54.077121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:54.198726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:54.198798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:54.231779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:54.232127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:54.232329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:54.244404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:54.244734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:54.245457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:54.245911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:54.248142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:54.249645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:54.249702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:54.249905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:54.249952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:54.249995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:54.250197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.263867Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:54.390626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:54.390902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.391163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:54.391427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:54.391484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.394301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:54.394468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:54.394680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.394736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:54.394792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:54.394826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:54.397209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.397284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:54.397327Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:54.399272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.399373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.399429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:54.399478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:54.410227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:54.414357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:54.414597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:54.415709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:54.415872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:54.415933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:54.416359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:54.416426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:54.416583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:54.416683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:54.429352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:54.429479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:54.429730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:54.429776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:54.430182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:54.430238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:54.430367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:54.430401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:54.430440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:54.430492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:54.430530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:54.430580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:54.430629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:54.430669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:54.430754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:54.430794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:54.430824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:54.433551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:54.433702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:54.433759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
0s, InflightLimit# 10 2025-04-10T10:59:59.536177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:59.536215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:59.536251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:59.536286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:59.536364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:59.536446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:59.536780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:59.557879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:59.560992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:59.561232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:59.561394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:59.561455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:59.561772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:59.562521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-10T10:59:59.562685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T10:59:59.562788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.562851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.563258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:59.563446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T10:59:59.563537Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T10:59:59.563805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-10T10:59:59.563946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.564039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:59.564078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T10:59:59.564234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 
1, at schemeshard: 72057594046678944 2025-04-10T10:59:59.564419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:59.564813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-10T10:59:59.565161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.565282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.566369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.566512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.566783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.566905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.567971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.568025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.568076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T10:59:59.594131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:59.594286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:59.595108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:59.595185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:59.595238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:59.599658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:761:2715] sender: [1:815:2058] recipient: [1:15:2062] 2025-04-10T10:59:59.654775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:59:59.655080Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 351us result status StatusSuccess 2025-04-10T10:59:59.655552Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:59.657884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T10:59:59.658095Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 252us result status StatusSuccess 2025-04-10T10:59:59.670814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> KqpIndexes::CheckUpsertNonEquatableType+NotNull |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] Leader for TabletID 9437184 is [1:131:2154] sender: [1:132:2057] recipient: [1:108:2140] 2025-04-10T10:59:01.034779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:01.034864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:01.035606Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:59:01.061502Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:59:01.062117Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T10:59:01.062470Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.081163Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:59:01.152889Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.153557Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:59:01.159699Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T10:59:01.159825Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T10:59:01.159881Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 
9437184 2025-04-10T10:59:01.160307Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:59:01.160631Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:59:01.160718Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 Leader for TabletID 9437184 is [1:131:2154] sender: [1:208:2057] recipient: [1:14:2061] 2025-04-10T10:59:01.233519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:59:01.273505Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T10:59:01.273709Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:59:01.273832Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T10:59:01.273871Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T10:59:01.273906Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T10:59:01.273940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.274210Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.274263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.274590Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T10:59:01.274720Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T10:59:01.274777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.274818Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:01.274881Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T10:59:01.274917Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:01.274958Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:01.275003Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T10:59:01.275046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:01.275150Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.275203Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.275256Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T10:59:01.283387Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T10:59:01.283471Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:01.283607Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:01.283814Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T10:59:01.283868Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T10:59:01.283934Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T10:59:01.283995Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.284035Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T10:59:01.284092Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T10:59:01.284138Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.284486Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:01.284534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T10:59:01.284573Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:01.284620Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.284688Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T10:59:01.284722Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:01.284765Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T10:59:01.284830Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.284857Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T10:59:01.297814Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:01.297898Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.297957Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.298005Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T10:59:01.298084Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:01.298784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.298844Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.298894Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T10:59:01.299008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-10T10:59:01.299041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:59:01.299192Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit 
WaitForPlan 2025-04-10T10:59:01.299251Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:01.299288Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-10T10:59:01.299322Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-10T10:59:01.303452Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-10T10:59:01.303537Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.303781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.303819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.303881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.303919Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:01.303952Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:01.303992Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-10T10:59:01.304031Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-10T10:59:01.304078Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:01.304129Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-10T10:59:01.304183Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T10:59:01.304232Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:01.304416Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-10T10:59:01.304453Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T10:59:01.304480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T10:59:01.304501Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T10:59:01.304522Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T10:59:01.304596Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.304619Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T10:59:01.304649Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:01.304677Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:01.304740Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-10T10:59:01.304770Z node 1 :TX_DATASHARD TRAC ... 
9.230285Z node 24 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:59.230468Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [24:281:2265], Recipient [24:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [24:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T10:59:59.230527Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T10:59:59.230664Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [24:122:2148], Recipient [24:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-10T10:59:59.230714Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T10:59:59.230785Z node 24 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-10T10:59:59.230885Z node 24 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-10T10:59:59.254898Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [24:281:2265], Recipient [24:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [24:281:2265] ServerId: [24:285:2269] } 2025-04-10T10:59:59.255006Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T10:59:59.290513Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [24:292:2274], Recipient [24:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.290628Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.290713Z node 24 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [24:290:2273], serverId# [24:292:2274], sessionId# [0:0:0] 2025-04-10T10:59:59.290945Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [24:289:2272], Recipient [24:235:2228]: NKikimrTabletBase.TEvGetCounters 2025-04-10T10:59:59.322492Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [24:99:2134], Recipient [24:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 103079217238 } 2025-04-10T10:59:59.322608Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-10T10:59:59.323193Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [24:294:2276], Recipient [24:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.323246Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.323323Z node 24 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [24:293:2275], serverId# [24:294:2276], sessionId# [0:0:0] 2025-04-10T10:59:59.323519Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [24:99:2134], Recipient [24:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 103079217238 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-10T10:59:59.323555Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:59.323685Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:59.324694Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-10T10:59:59.324779Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-10T10:59:59.324831Z node 24 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-10T10:59:59.324875Z node 24 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:59.324924Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:59.324962Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.325029Z node 24 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-10T10:59:59.325074Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-10T10:59:59.325101Z node 24 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T10:59:59.325127Z node 24 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-10T10:59:59.325152Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:59.325205Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.325255Z node 24 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-04-10T10:59:59.325302Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-10T10:59:59.325702Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:59.325771Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:59.325847Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.327043Z node 24 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-04-10T10:59:59.327259Z node 24 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-10T10:59:59.327327Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-10T10:59:59.327628Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:59.327672Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit 
ExecuteDataTx 2025-04-10T10:59:59.328689Z node 24 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-10T10:59:59.328762Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.329335Z node 24 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-04-10T10:59:59.329482Z node 24 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-10T10:59:59.329531Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-10T10:59:59.329741Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:59.329787Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:59.330523Z node 24 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-10T10:59:59.330581Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.331103Z node 24 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-04-10T10:59:59.331210Z node 24 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-10T10:59:59.331250Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-10T10:59:59.331434Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:59.331477Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-10T10:59:59.332156Z node 24 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-10T10:59:59.332213Z node 24 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T10:59:59.709795Z node 24 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-10T10:59:59.709952Z node 24 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T10:59:59.710064Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:59.710120Z node 24 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-10T10:59:59.710208Z node 24 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:59.710309Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-10T10:59:59.710452Z node 24 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:59.710496Z node 24 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:59.710560Z node 24 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-10T10:59:59.710619Z node 24 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-10T10:59:59.710682Z node 24 :TX_DATASHARD TRACE: 
Execution status for [0:2] at 9437184 is Executed 2025-04-10T10:59:59.710726Z node 24 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-10T10:59:59.710784Z node 24 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-10T10:59:59.733057Z node 24 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:59.733183Z node 24 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-10T10:59:59.733278Z node 24 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-10T10:59:59.733456Z node 24 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:59.735173Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [24:299:2281], Recipient [24:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.735279Z node 24 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:59.735362Z node 24 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [24:298:2280], serverId# [24:299:2281], sessionId# [0:0:0] 2025-04-10T10:59:59.735626Z node 24 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [24:297:2279], Recipient [24:235:2228]: NKikimrTabletBase.TEvGetCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-04-10T10:59:23.059350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637422072020369:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:23.059644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3e/r3tmp/tmpFDr5Ib/pdisk_1.dat 2025-04-10T10:59:24.003946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:24.110099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:24.129892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:24.129989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:24.139424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27229, node 1 2025-04-10T10:59:24.492155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:24.492171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:24.492242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:24.492346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62187 WaitRootIsUp 'Root'... 
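The TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx output above shows the datashard's memory-retry loop: the transaction first requests 132374 bytes, exceeds the granted limit of 132502, and each restart requests eight times the exceeded limit on top ("requests 1060016 more", then 9540144, then 85861296) until execution reaches COMPLETE. A minimal sketch of that growth policy, with the factor 8 and the starting limit read off the log lines rather than taken from the implementation:

```cpp
// Sketch only (not the datashard code): model the restart-with-bigger-limit
// pattern from the TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx log.
#include <cstdint>
#include <iostream>

int main() {
    const std::uint64_t required = 12'000'000; // hypothetical tx footprint
    std::uint64_t limit = 132'502;             // first limit seen in the log
    int attempt = 1;
    while (limit < required) {
        std::uint64_t request = limit * 8;     // "requests ... more for the next try"
        std::cout << "attempt " << attempt++
                  << ": exceeded memory limit " << limit
                  << " and requests " << request << " more\n";
        limit += request;                      // restart with the larger grant
    }
    std::cout << "tx fits in " << limit << " bytes, executes to COMPLETE\n";
}
```

Between restarts the log's "tx 2 released its data" / "tx 2 at 9437184 restored its data" lines show the transaction dropping its working set while it waits for the larger grant, so the retries cost re-materialization rather than held memory.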
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:25.271597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:28.038906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637422072020369:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:28.038981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:30.367877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637452136792448:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:30.367995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:30.368432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637452136792460:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:30.372929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:30.420177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637452136792462:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:59:30.491391Z node 1 :TX_PROXY ERROR: Actor# [1:7491637452136792551:2712] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:33.431698Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637466212457112:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:33.431740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3e/r3tmp/tmpywEpcv/pdisk_1.dat 2025-04-10T10:59:33.774049Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:33.843265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:33.843376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:33.861145Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21752, node 4 2025-04-10T10:59:34.042976Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:34.043000Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:34.043009Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:34.043142Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:34.526121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:38.256542Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637487687294662:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.256724Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.258622Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637487687294674:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.263463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:38.334475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637487687294676:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:59:38.415390Z node 4 :TX_PROXY ERROR: Actor# [4:7491637487687294744:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:38.434238Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637466212457112:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:38.434317Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:40.743675Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637498508527296:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:40.743820Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3e/r3tmp/tmp2eSLZo/pdisk_1.dat 2025-04-10T10:59:41.391976Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:41.446688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:41.446789Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:41.459803Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63232, node 7 2025-04-10T10:59:41.810794Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:41.810820Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:41.810827Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:41.810964Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:42.467610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
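The warning cascade repeated in each test above — KQP_WORKLOAD_SERVICE failing to fetch the `default` pool with NOT_FOUND, TPoolCreatorActor scheduling a retry with "Transaction ... completed, doublechecking", and TX_PROXY reporting "path exist, request accepts it" — is first-use bootstrap of `/Root/.metadata/workload_manager/pools/default`: every fresh database lazily creates the pool, and concurrent creators race benignly. A minimal sketch of such an idempotent ensure-exists step, with hypothetical names (this is not the YDB actor code):

```cpp
// Minimal sketch: idempotent "ensure exists" bootstrap tolerating a lost race,
// mirroring the default-resource-pool messages in the log above.
#include <iostream>
#include <string>
#include <unordered_set>

enum class EStatus { Success, AlreadyExists };

// Hypothetical in-memory "scheme" standing in for schemeshard paths.
std::unordered_set<std::string> scheme;

EStatus CreatePath(const std::string& path) {
    // A duplicate create is reported but still acceptable, like the
    // TX_PROXY message "path exist, request accepts it".
    return scheme.insert(path).second ? EStatus::Success
                                      : EStatus::AlreadyExists;
}

EStatus EnsureDefaultPool(const std::string& path) {
    if (scheme.count(path)) return EStatus::Success;  // fetch succeeded
    // Fetch failed with NOT_FOUND -> create, then doublecheck the outcome.
    EStatus st = CreatePath(path);
    if (st == EStatus::AlreadyExists) {
        std::cerr << "path exist, request accepts it: " << path << "\n";
        return EStatus::Success;
    }
    return st;
}

int main() {
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    EnsureDefaultPool(pool);   // two "sessions" racing:
    EnsureDefaultPool(pool);   // both converge on the pool existing
    std::cout << "pool exists: " << scheme.count(pool) << "\n";
}
```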
2025-04-10T10:59:42.496619Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:45.750328Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637498508527296:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:45.750451Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:46.316955Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.641557Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637524278332159:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.641684Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.642315Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637524278332171:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.648201Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:59:46.725456Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491637524278332173:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:59:46.829640Z node 7 :TX_PROXY ERROR: Actor# [7:7491637524278332255:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:47.117991Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnp6tf4mww1ap7d24517r5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:47.287973Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfnp7ay2qxp8h9p3bvqh71v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:47.560416Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfnp7fxaz3mxtchqngrd1nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:47.901426Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfnp7rq5mt7vxh0a8jsb5kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:48.100151Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfnp83f0999qkvpykp4v5xc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:49.458681Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnp893bk4z6azfnaj5b3nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:49.469695Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfnp893bk4z6azfnaj5b3nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTkwMWI2YjAtMTcwYzg5YjctY2U3MDliZjktZTQ2ZjQ2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T10:59:51.926298Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637543860838724:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:51.926388Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3e/r3tmp/tmpRjgTku/pdisk_1.dat 2025-04-10T10:59:52.394635Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:52.435172Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:52.435330Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:52.448742Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32320, node 10 2025-04-10T10:59:52.902730Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:52.902755Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:52.902766Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:52.902938Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:53.878659Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:56.928975Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637543860838724:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:56.929068Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:57.846468Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637569630643625:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.846566Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.937004Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] Test command err: 2025-04-10T10:59:24.819187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637428351418679:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:24.819232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d31/r3tmp/tmpkLHuFY/pdisk_1.dat 2025-04-10T10:59:25.815557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:25.815673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:25.831087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:25.834512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:25.881578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7083, node 1 2025-04-10T10:59:26.459060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:26.464984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:26.464994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:26.465122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:27.666561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:29.828461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637428351418679:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:29.828523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:32.515767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637462711158142:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:32.515879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:33.054893Z node 1 :TX_PROXY ERROR: Actor# [1:7491637467006125472:2663] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-04-10T10:59:33.098367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637467006125482:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:33.098452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:33.132852Z node 1 :TX_PROXY ERROR: Actor# [1:7491637467006125489:2673] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d31/r3tmp/tmpikFz6d/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64226, node 4 TClient is connected to server localhost:27860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d31/r3tmp/tmpc40GUv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23698, node 7 TClient is connected to server localhost:17092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
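The two TX_PROXY failures above (txid# 281474976710658 and 281474976710659) show the schema validator rejecting document types as key columns: "Column Key has wrong key type Json" and "Column Key has wrong key type Yson". Json and Yson values have no total order, so they cannot serve as primary-key components. An illustrative check, with an assumed (not exhaustive) list of comparable key types:

```cpp
// Illustrative only: the allowed-type list and function are assumptions for
// this sketch, not the schemeshard's actual validator.
#include <iostream>
#include <set>
#include <string>

bool IsAllowedKeyType(const std::string& type) {
    // Comparable scalar types may key a row; document blobs may not.
    static const std::set<std::string> allowed = {
        "Bool", "Int32", "Uint32", "Int64", "Uint64",
        "String", "Utf8", "Date", "Datetime", "Timestamp"};
    return allowed.count(type) != 0;
}

int main() {
    for (const std::string t : {"Uint32", "Json", "Yson"}) {
        if (!IsAllowedKeyType(t))
            std::cout << "Column Key has wrong key type " << t << "\n";
        else
            std::cout << t << " is a valid key column type\n";
    }
}
```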
2025-04-10T10:59:53.546390Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637554432889708:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:53.550237Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d31/r3tmp/tmp4EkBHZ/pdisk_1.dat 2025-04-10T10:59:53.890764Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:53.934825Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:53.934917Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:53.944330Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26393, node 10 2025-04-10T10:59:54.185169Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:54.185197Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:54.185205Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:54.185335Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:54.540798Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:58.034358Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637575907727165:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:58.034457Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:58.038268Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637575907727177:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:58.042822Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:58.103276Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637575907727179:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:59:58.176150Z node 10 :TX_PROXY ERROR: Actor# [10:7491637575907727254:2688] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:58.546905Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637554432889708:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:58.546982Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::NullInIndexTableNoDataRead >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> KqpIndexes::SecondaryIndexOrderBy >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> KqpIndexes::MultipleModifications >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> KqpIndexes::UpsertMultipleUniqIndexes >> YdbLogStore::AlterLogTable [FAIL] >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-04-10T10:59:22.623528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637419300809991:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:22.623580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d4f/r3tmp/tmphHiKcx/pdisk_1.dat 2025-04-10T10:59:23.624846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.624966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.735890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:23.743903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:23.784683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24670, node 1 2025-04-10T10:59:23.814843Z node 1 :GRPC_SERVER WARN: 
SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:23.814865Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:24.143331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:24.143382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:24.143397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:24.143547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.903144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:25.083110Z node 1 :TX_PROXY ERROR: Actor# [1:7491637432185712706:2615] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T10:59:30.970166Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637454235714854:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:30.971192Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d4f/r3tmp/tmpEICkvX/pdisk_1.dat 2025-04-10T10:59:31.394905Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:31.419693Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:31.419775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:31.422761Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15486, node 4 2025-04-10T10:59:31.745515Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:31.745539Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:31.745547Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:31.745699Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10010 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:32.354034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:35.944792Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637454235714854:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:35.944864Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:37.335550Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637484300486885:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:37.335658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:37.336070Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637484300486897:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:37.346862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:37.402397Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637484300486899:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:59:37.487706Z node 4 :TX_PROXY ERROR: Actor# [4:7491637484300486974:2705] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:40.690444Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637497659317507:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:40.696132Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d4f/r3tmp/tmps8UVZf/pdisk_1.dat 2025-04-10T10:59:41.252622Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:41.304330Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:41.304473Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:41.314610Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2149, node 7 2025-04-10T10:59:41.563188Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:41.563218Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:41.563228Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:41.563435Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:42.056925Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:45.661271Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637497659317507:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:45.661347Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:46.837521Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637523429122385:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { ... : SchemeBoardUpdate /Root 2025-04-10T10:59:54.361058Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T10:59:54.361148Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T10:59:54.376653Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T10:59:54.376665Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T10:59:54.376768Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T10:59:54.376768Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T10:59:54.386304Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637555521678626:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T10:59:54.479650Z node 10 :TX_PROXY ERROR: Actor# [10:7491637555521678699:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:54.694244Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637534046840931:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:54.694321Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:54.732435Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnpeba8a0aer3xa8r4qrvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MWIyODU0ZDYtYzRhMWI4OTQtN2FkNjQzNjktYjZhMWZmYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:54.751028Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfnpeqy928g47yyehysvh40, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51280, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T10:59:54.751495Z node 10 :READ_TABLE_API NOTICE: [10:7491637555521678763:2353] Finish grpc stream, status: 400010 2025-04-10T10:59:54.764729Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfnperce95wm4kt3f3k5ftq, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51280, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T10:59:54.784394Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678764:2354] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-04-10T10:59:54.784463Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678764:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-04-10T10:59:54.786545Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678764:2354] got stream part, size: 246, RU required: 128 rate limiter absent 2025-04-10T10:59:54.786969Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678764:2354] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T10:59:54.841930Z node 10 :READ_TABLE_API NOTICE: [10:7491637555521678764:2354] Finish grpc stream, status: 400000 2025-04-10T10:59:54.849579Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfnpev186gjqaafshb654xn, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51280, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T10:59:54.880096Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678788:2356] Adding quota request to queue ShardId: 0, TxId: 281474976715664 2025-04-10T10:59:54.880142Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678788:2356] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2025-04-10T10:59:54.881233Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678788:2356] got stream part, size: 84, RU required: 128 rate limiter absent 2025-04-10T10:59:54.881513Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678788:2356] Starting inactivity timer for 600.000000s with tag 3 
2025-04-10T10:59:54.933037Z node 10 :READ_TABLE_API NOTICE: [10:7491637555521678788:2356] Finish grpc stream, status: 400000 2025-04-10T10:59:54.936422Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfnpexrbt9znyjnedmc62ns, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51280, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T10:59:54.958984Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678810:2358] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2025-04-10T10:59:54.959022Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678810:2358] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-04-10T10:59:54.960251Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678810:2358] got stream part, size: 210, RU required: 128 rate limiter absent 2025-04-10T10:59:54.960561Z node 10 :READ_TABLE_API DEBUG: [10:7491637555521678810:2358] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T10:59:54.963604Z node 10 :READ_TABLE_API NOTICE: [10:7491637555521678810:2358] Finish grpc stream, status: 400000 2025-04-10T10:59:54.970935Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b8e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.971204Z node 10 :GRPC_SERVER DEBUG: [0x51a0000aa480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.971397Z node 10 :GRPC_SERVER DEBUG: [0x51a000006080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.971601Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ab080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.971786Z node 10 :GRPC_SERVER DEBUG: [0x51a000005a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.971977Z node 10 :GRPC_SERVER DEBUG: [0x51a000081c80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.972175Z node 10 :GRPC_SERVER DEBUG: [0x51a0000abc80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.972368Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ac880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.972547Z node 10 :GRPC_SERVER DEBUG: [0x51a000000080] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.972734Z node 10 :GRPC_SERVER DEBUG: [0x51a00000a280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.972921Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a8c80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.973098Z node 10 :GRPC_SERVER DEBUG: [0x51a000081680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.973289Z node 10 :GRPC_SERVER DEBUG: [0x51a00000f680] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.973504Z node 10 :GRPC_SERVER DEBUG: [0x51a00000fc80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.973703Z node 10 :GRPC_SERVER DEBUG: [0x51a000081080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.973903Z node 10 :GRPC_SERVER DEBUG: [0x51a000082e80] received request Name# 
InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T10:59:54.974113Z node 10 :GRPC_SERVER DEBUG: [0x51a000065a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-10T10:59:57.345201Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637571780768577:2099];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:57.345368Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d4f/r3tmp/tmpqqMrFq/pdisk_1.dat 2025-04-10T10:59:57.674993Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:57.708869Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:57.708992Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:57.711675Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17933, node 13 2025-04-10T10:59:57.819619Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:57.819652Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:57.819661Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:57.819865Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:58.208107Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:58.404176Z node 13 :TICKET_PARSER DEBUG: Ticket 3AD8D16FE4ED9CBDE02B28917C53524038B3F5536BDDE52CBF0F16BE839DF04D (ipv6:[::1]:42330) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T10:59:58.667632Z node 13 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-04-10T10:59:58.759194Z node 13 :TICKET_PARSER DEBUG: Ticket F6594335197EBA5D10A2AE710F2EAE22B2D4892180B4CA1095C5441E19B15B78 (ipv6:[::1]:42398) has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2025-04-10T10:59:58.759892Z node 13 :TICKET_PARSER ERROR: Ticket F6594335197EBA5D10A2AE710F2EAE22B2D4892180B4CA1095C5441E19B15B78: Cannot create token from certificate. Client certificate failed verification ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-04-10T10:59:13.139255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637379154516797:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.139316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.286691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637379958429156:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.286753Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.669915Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:13.701595Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174a/r3tmp/tmpwLWNFn/pdisk_1.dat 2025-04-10T10:59:14.250313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:14.304968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:14.353565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.353682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.354550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.354610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.362013Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:14.362197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:14.364889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:14.390527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6539, node 1 2025-04-10T10:59:14.540064Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:14.558295Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:14.629589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00174a/r3tmp/yandexB0v9oc.tmp 2025-04-10T10:59:14.629624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/2gt4/00174a/r3tmp/yandexB0v9oc.tmp 2025-04-10T10:59:14.629795Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00174a/r3tmp/yandexB0v9oc.tmp 2025-04-10T10:59:14.629937Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:14.711606Z INFO: TTestServer started on Port 5447 GrpcPort 6539 TClient is connected to server localhost:5447 PQClient connected to localhost:6539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:15.035660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T10:59:15.104493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:59:18.139776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637379154516797:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:18.150234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:18.287186Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637379958429156:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:18.287249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:18.498982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637400629354404:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.499258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.499598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637400629354416:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.503941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-04-10T10:59:18.570404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637400629354418:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-04-10T10:59:19.142343Z node 1 :TX_PROXY ERROR: Actor# [1:7491637400629354509:2784] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:19.247822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-10T10:59:19.458133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:19.612985Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637404924321823:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:19.614698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM1NDhiODgtNmE2M2QxM2MtNTI3YjdjYmItMTNmNDdiNzQ=, ActorId: [1:7491637400629354401:2339], ActorState: ExecuteState, TraceId: 01jrfnnbabasga77gqjgmy8j5k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:19.621623Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:19.712135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T10:59:20.076064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jrfnncp3b8x0eeksdn18ccq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxZThiODktNTgzM2ZhNGYtZjc4ZWIxOGUtNGU3MmY5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491637409219289591:3132] === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-04-10T10:59:25.937035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637379154517045:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:25.937299Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637379154517045:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-10T10:59:25.937391Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637379154517045:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637383449484607:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282755103 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T10:59:25.937499Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637379154517045:2127], cacheItem# { Subscriber: { Subscribe ... 
057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:01.766127Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491637589322962057:2932], recipient# [4:7491637589322962056:2459], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:01.931986Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637468728680553:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:01.932155Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637473023648282:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282776110 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:01.932420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637588987769986:5574], recipient# [3:7491637468728680250:2075], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:02.154451Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7491637468728680553:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown 
Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:00:02.154619Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637503088420045:2907] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 30 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282781808 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:02.154720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637503088419827:2752] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 30 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282781304 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:02.155086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637593282737291:5578], recipient# [3:7491637593282737290:2931], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:00:02.174347Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637468728680553:2149], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:02.174529Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637473023648282:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 
Created: 1 CreateStep: 1744282776110 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:02.174649Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637468728680553:2149], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:02.174740Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637473023648290:2447] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:02.174798Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637468728680553:2149], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:02.174871Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637468728680553:2149], cacheItem# { Subscriber: { Subscriber: [3:7491637503088419724:2705] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:02.175066Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637593282737296:5579], recipient# [3:7491637593282737293:2954], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:02.175147Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637593282737297:5580], recipient# [3:7491637593282737294:2955], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:02.175211Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637593282737298:5581], recipient# [3:7491637593282737295:2956], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-04-10T10:59:22.366350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637418396932546:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:22.375007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d55/r3tmp/tmp2yMf7Y/pdisk_1.dat 2025-04-10T10:59:23.443371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:23.453652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.454505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.465191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27721, node 1 2025-04-10T10:59:23.605777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:23.607267Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:23.608513Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:23.842829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.842851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.842859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.842982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61539 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.773772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:27.438256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637418396932546:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:27.451179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:32.075306Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637463259161754:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d55/r3tmp/tmp7mZouz/pdisk_1.dat 2025-04-10T10:59:32.290064Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:32.477463Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:32.534973Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:32.535167Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:32.541238Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7002, node 4 2025-04-10T10:59:32.831308Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:32.831337Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:32.831345Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:32.831512Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20049 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:33.207835Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:38.962752Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637489829178508:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:38.962821Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d55/r3tmp/tmpGSuCXZ/pdisk_1.dat 2025-04-10T10:59:39.223783Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:39.270087Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:39.270219Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:39.279660Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18213, node 7 2025-04-10T10:59:39.531186Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:39.531222Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:39.531231Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:39.531435Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:39.872531Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21176 2025-04-10T10:59:40.317278Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:40.414472Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:40.930256Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491637496927342300:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:40.930640Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:41.030195Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:41.030328Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:41.045191Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-10T10:59:41.045394Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:41.215480Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:41.321351Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:42.160543Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:42.160665Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:42.171382Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-10T10:59:42.174650Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:43.964150Z node 7 :METADATA_PROVIDER ERROR: fline=table_ ... hildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:53.415126Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2734 2025-04-10T10:59:53.913923Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:53.956706Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:54.464342Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491637558435295088:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:54.464402Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:54.572309Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:54.572399Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:54.582219Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-10T10:59:54.598353Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:54.616334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:54.686125Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:55.196893Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491637561039815330:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:55.463364Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:55.463454Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:55.467550Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-04-10T10:59:55.534749Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:55.695152Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:57.362276Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637548579831572:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:57.362372Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:58.553875Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:59:58.735921Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:59:58.899423Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637574349637328:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:58.899564Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:58.900114Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637574349637343:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:58.905956Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480
2025-04-10T10:59:58.966656Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637574349637345:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking }
2025-04-10T10:59:59.032136Z node 10 :TX_PROXY ERROR: Actor# [10:7491637578644604723:3392] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T10:59:59.136594Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfnpjse6qqraqnrxk16j56a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:59:59.349908Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfnpk2d660j7rk3wt62tj7p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T10:59:59.466359Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491637558435295088:2073];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:59.466458Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:59:59.733728Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfnpkbfdpvezhv4ahaqws1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T11:00:00.200518Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491637561039815330:2210];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:00:00.200617Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:00:01.080104Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfnpkbfdpvezhv4ahaqws1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T11:00:01.090599Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7491637587234539528:2362] TxId: 281474976715669. Ctx: { TraceId: 01jrfnpkbfdpvezhv4ahaqws1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator.
Tx canceled, actorId: [10:7491637587234539528:2362], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890
2025-04-10T11:00:01.090879Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, ActorId: [10:7491637574349637157:2362], ActorState: ExecuteState, TraceId: 01jrfnpkbfdpvezhv4ahaqws1t, Create QueryResponse for error on request, msg:
2025-04-10T11:00:01.092595Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfnpkbfdpvezhv4ahaqws1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjNmYjM1NzctNDQxNmNhMWUtYjBmNzgxZmQtNjQ3YWY2ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T11:00:01.111908Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11
2025-04-10T11:00:01.112509Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:00:01.112656Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12
2025-04-10T11:00:01.113004Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:00:02.242051Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-10T11:00:02.244426Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637591104586740:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:00:02.313230Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637591104586740:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:00:02.470649Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637591104586740:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:00:02.682981Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637591104586740:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:00:03.202593Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637591104586740:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:00:03.255777Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
>> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD]
>> TSchemeShardTest::AlterBlockStoreVolume
>> TGRpcNewClient::InMemoryTables [GOOD]
>> TTableProfileTests::ExplicitPartitionsComplex [GOOD]
>> TTableProfileTests::DescribeTableOptions
>> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink
>> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD]
>> TSchemeShardTest::CopyTableAndConcurrentSplitMerge
>> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD]
>> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1
>> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD]
Test command err:
2025-04-10T10:59:22.731890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637421291969852:2214];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:59:22.744546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5d/r3tmp/tmp20slUb/pdisk_1.dat
2025-04-10T10:59:23.514686Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:23.522853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T10:59:23.522970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T10:59:23.545159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6318, node 1
2025-04-10T10:59:23.894851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T10:59:23.894878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T10:59:23.894886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T10:59:23.895066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9445
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.414359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:31.825316Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637458894325363:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:31.825460Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5d/r3tmp/tmpMglZsa/pdisk_1.dat 2025-04-10T10:59:32.445696Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:32.540975Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:32.541043Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:32.546924Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21253, node 4 2025-04-10T10:59:32.814624Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:32.814650Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:32.814657Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:32.814814Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12706 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:33.311671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:36.806559Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637458894325363:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:36.806644Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:38.578220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:38.909668Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637488959097595:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.909785Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.910238Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637488959097607:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.915309Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T10:59:38.961663Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637488959097609:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:59:39.068883Z node 4 :TX_PROXY ERROR: Actor# [4:7491637493254064976:2808] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:42.775882Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637507180877356:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:42.817156Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5d/r3tmp/tmpcSxzvb/pdisk_1.dat 2025-04-10T10:59:43.134481Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:43.235820Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:43.235926Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:43.251633Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31289, node 7 2025-04-10T10:59:43.555293Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:43.555318Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:43.555326Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:43.555537Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:44.039999Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:47.738402Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637507180877356:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:47.738482Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:51.532336Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637545724464901:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:51.532432Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5d/r3tmp/tmpPW3Nao/pdisk_1.dat 2025-04-10T10:59:52.322058Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:52.447382Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:52.457095Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:52.468440Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24165, node 10 2025-04-10T10:59:52.815491Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:52.815524Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:52.815538Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:52.815770Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:53.300597Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:56.532472Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637545724464901:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:56.532557Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:56.580889Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.739285Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.810623Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637567199302750:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:56.810704Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637567199302761:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:56.810751Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:56.815046Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480
2025-04-10T10:59:56.841057Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637567199302764:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-10T10:59:56.928123Z node 10 :TX_PROXY ERROR: Actor# [10:7491637567199302839:2896] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:57.071335Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnpgr84anys9031wbee4zp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODZmODI3ZC1jZTBmYjRmMi02MDZjZmMxMS1mZGYzNmEzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:57.207836Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-10T10:59:59.420016Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637577620345939:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:59.420087Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5d/r3tmp/tmp5Vdqfs/pdisk_1.dat 2025-04-10T10:59:59.834047Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:59.891238Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:59.891490Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:59.895941Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12461, node 13 2025-04-10T11:00:00.046272Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:00.046296Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:00.046307Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:00.046485Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:00:00.703897Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.406552Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491637577620345939:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.406654Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:04.483350Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:04.666558Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:04.781482Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::InsertNullInPk >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> YdbTableBulkUpsert::RetryOperation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-04-10T10:59:23.683442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637424786781412:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:23.683579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d39/r3tmp/tmpoEZWj3/pdisk_1.dat 2025-04-10T10:59:24.693212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:24.709944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:24.710047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:24.728258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:24.738573Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17454, node 1 2025-04-10T10:59:25.119573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:25.119592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:25.119600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:25.166108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28986 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:25.919697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:28.686364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637424786781412:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:28.686435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:30.480201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637454851553507:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:30.480344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:30.481098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637454851553519:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:30.486059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T10:59:30.548104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637454851553521:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:59:30.659209Z node 1 :TX_PROXY ERROR: Actor# [1:7491637454851553597:2699] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:31.306830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NWZjNjIzNmMtYTgxNGJjZmMtMmZmMmE5OGUtN2UzN2QzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-10T10:59:33.348318Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637468455287104:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:33.348410Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d39/r3tmp/tmpVaObb9/pdisk_1.dat 2025-04-10T10:59:33.574271Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12892, node 4 2025-04-10T10:59:33.743565Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:33.743837Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:33.813365Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:33.894912Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:33.894939Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:33.894947Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:33.895113Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:34.299948Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:38.350039Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637468455287104:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:38.350168Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:38.828128Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637489930124663:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.828204Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637489930124674:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.828266Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:38.833333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T10:59:38.865373Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491637489930124677:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:59:38.939459Z node 4 :TX_PROXY ERROR: Actor# [4:7491637489930124748:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:41.426564Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637503438234525:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:41.426658Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d39/r3tmp/tmpWHe0je/pdisk_1.dat 2025-04-10T10:59:41.809219Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:41.864023Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:41.864125Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:41.871665Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25078, node 7 2025-04-10T10:59:42.049230Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:42.049257Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:42.049266Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:42.049472Z node 7 ... d=7&id=YWU3Y2EwNTYtYjA3ZDY2MWYtNzZiMWRiMC1kNDliMDU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:50.609609Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=24; 2025-04-10T10:59:50.609954Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491637542092942008:2368], Table: `Root/names` ([72057594046644480:2:1]), SessionActorId: [7:7491637533503007007:2368]Got LOCKS BROKEN for table `Root/names`. ShardID=72075186224037888, Sink=[7:7491637542092942008:2368].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-04-10T10:59:50.610073Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491637542092941968:2368], SessionActorId: [7:7491637533503007007:2368], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7491637533503007007:2368]. isRollback=0
2025-04-10T10:59:50.610324Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YWU3Y2EwNTYtYjA3ZDY2MWYtNzZiMWRiMC1kNDliMDU4YQ==, ActorId: [7:7491637533503007007:2368], ActorState: ExecuteState, TraceId: 01jrfnpapd0vt8xtyn6s3cjhqm, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7491637542092942002:2368] from: [7:7491637542092941968:2368]
2025-04-10T10:59:50.610404Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7491637542092942002:2368] TxId: 281474976710678. Ctx: { TraceId: 01jrfnpapd0vt8xtyn6s3cjhqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWU3Y2EwNTYtYjA3ZDY2MWYtNzZiMWRiMC1kNDliMDU4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T10:59:50.610564Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YWU3Y2EwNTYtYjA3ZDY2MWYtNzZiMWRiMC1kNDliMDU4YQ==, ActorId: [7:7491637533503007007:2368], ActorState: ExecuteState, TraceId: 01jrfnpapd0vt8xtyn6s3cjhqm, Create QueryResponse for error on request, msg: 2025-04-10T10:59:53.558237Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637552193691141:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:53.558311Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d39/r3tmp/tmpb11Pjy/pdisk_1.dat 2025-04-10T10:59:53.953048Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:53.998857Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:53.998973Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:54.012874Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17448, node 10 2025-04-10T10:59:54.151018Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:54.151044Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:54.151053Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:54.151237Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:54.537166Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:57.587904Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637569373561399:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:57.587977Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637569373561410:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:57.588096Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:59:57.594451Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T10:59:57.622547Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637569373561413:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T10:59:57.711701Z node 10 :TX_PROXY ERROR: Actor# [10:7491637569373561482:2677] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:00.199361Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637581526190827:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:00.199430Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d39/r3tmp/tmpctqGSS/pdisk_1.dat 2025-04-10T11:00:00.487893Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:00.549274Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:00.549384Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:00.564335Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16382, node 13 2025-04-10T11:00:00.771178Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:00.771212Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:00.771223Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:00.771401Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:01.398800Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:05.199568Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491637581526190827:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:05.199645Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:05.526675Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491637603001028402:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:05.526788Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:05.527269Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491637603001028414:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:05.533751Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:00:05.577242Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491637603001028416:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:00:05.693088Z node 13 :TX_PROXY ERROR: Actor# [13:7491637603001028489:2681] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> KqpIndexMetadata::HandleNotReadyIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-04-10T10:59:22.260590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637418081010478:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:22.260643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d52/r3tmp/tmpB19ACI/pdisk_1.dat 2025-04-10T10:59:23.311858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:23.384083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:23.399236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.399335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.415625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18717, node 1 2025-04-10T10:59:23.754669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.754693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.754700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.754817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.507478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:27.266071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637418081010478:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:27.266123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:28.092968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.062674s 2025-04-10T10:59:29.352928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637448145784597:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.353070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637448145784605:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.353151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.358396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:59:29.419367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637448145784611:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:59:29.511510Z node 1 :TX_PROXY ERROR: Actor# [1:7491637448145784737:4267] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:30.584596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnnny56vs3t4xtynmzg94s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk1NmMxZWUtNTA5MGVlNzYtY2NmOTM3NTAtNjRhYTc2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-04-10T10:59:33.399925Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637466449252784:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:33.400600Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d52/r3tmp/tmpHU10ZW/pdisk_1.dat 2025-04-10T10:59:33.675677Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7655, node 4 2025-04-10T10:59:33.747021Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:33.747108Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:33.764992Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:33.834891Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:33.834916Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:33.834927Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:33.835113Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:34.127776Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:38.402311Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637466449252784:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:38.402401Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:38.927127Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-04-10T10:59:41.426863Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637501070033723:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:41.426966Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d52/r3tmp/tmpV4xdjF/pdisk_1.dat 2025-04-10T10:59:41.887135Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:41.904374Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:41.904483Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:41.909142Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11563, node 7 2025-04-10T10:59:42.134591Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:42.134623Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:42.134635Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:42.134830Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467 ... Tx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
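[editor's note] The two errors above document a BulkUpsert restriction: tables with synchronous secondary indexes are rejected ("Only async-indexed tables are supported by BulkUpsert"), and the hidden index implementation table cannot be written directly ("unknown table"). A rough illustration of the call that trips this check, using the YDB Python SDK's bulk_upsert helper — an assumption on my part, since this test drives the C++ API, and the endpoint/database values are placeholders:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

rows = [{"Key": 1, "Value": "a"}, {"Key": 2, "Value": "b"}]
column_types = (
    ydb.BulkUpsertColumns()
    .add_column("Key", ydb.OptionalType(ydb.PrimitiveType.Uint64))
    .add_column("Value", ydb.OptionalType(ydb.PrimitiveType.Utf8))
)

# Raises with the first error above if '/Root/ui8' carries a GLOBAL SYNC
# index; goes through once the index is declared GLOBAL ASYNC.
driver.table_client.bulk_upsert("/Root/ui8", rows, column_types)

# Writing to the index implementation table directly is never allowed:
# '/Root/ui8/Value_index/indexImplTable' is reported as an unknown table.
```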
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 8192 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 16384 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 32768 usec 2025-04-10T10:59:51.349920Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637545143801391:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:51.350008Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d52/r3tmp/tmpDKtOu4/pdisk_1.dat 2025-04-10T10:59:51.815374Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:51.930669Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:51.930778Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:51.934420Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30898, node 10 2025-04-10T10:59:52.135260Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:52.135290Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:52.135301Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:52.135499Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:52.621280Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
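[editor's note] The timeout series above is worth reading closely: the injected long-tx deadlines double on every retry (4096 → 8192 → 16384 → 32768 usec), the classic exponential-backoff shape. A sketch of that progression, assuming only what the log shows — a 4096 usec base and a doubling factor:

```python
def injected_deadlines_usec(base_usec=4096, attempts=4):
    """Deadline for each retry attempt: base * 2**attempt."""
    return [base_usec << i for i in range(attempts)]

print(injected_deadlines_usec())  # [4096, 8192, 16384, 32768]
```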
2025-04-10T10:59:56.308676Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.354248Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637545143801391:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:56.354339Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-04-10T10:59:59.925595Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637580791737978:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:59.928909Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d52/r3tmp/tmpkMolEo/pdisk_1.dat 2025-04-10T11:00:00.478962Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:00.559988Z 
node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:00.560101Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:00.567348Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6992, node 13 2025-04-10T11:00:00.849099Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:00.849122Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:00.849133Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:00.849283Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:01.552120Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
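[editor's note] The "Injecting <STATUS> N times Result:" runs above (node 10) and below (node 13) pin down the retry budget: five injected transient failures still end in SUCCESS while six or ten do not, so the operation gets six attempts in total, and every listed status (ABORTED, OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY, NOT_FOUND, UNDETERMINED, TRANSPORT_UNAVAILABLE) is treated as retryable here. A simulation under that assumption — max_attempts=6 is inferred from the log, not read from the test source:

```python
RETRYABLE = [
    "ABORTED", "OVERLOADED", "CLIENT_RESOURCE_EXHAUSTED", "UNAVAILABLE",
    "BAD_SESSION", "SESSION_BUSY", "NOT_FOUND", "UNDETERMINED",
    "TRANSPORT_UNAVAILABLE",
]

def retry_with_injection(status, inject_times, max_attempts=6):
    """Fail the first inject_times attempts with status, then succeed."""
    for attempt in range(max_attempts):
        if attempt >= inject_times:
            return "SUCCESS"  # the call finally goes through
    return status             # budget exhausted: surface the injected status

# Reproduces the table printed by the test.
for status in RETRYABLE:
    for n in (10, 6, 5, 3, 0):
        print(f"Injecting {status} {n} times Result: "
              f"{retry_with_injection(status, n)}")
```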
2025-04-10T11:00:04.906115Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491637580791737978:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.906208Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:05.632660Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableForBackup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] Test command err: 2025-04-10T10:59:34.986737Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637470279676749:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:34.986804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1a/r3tmp/tmp8CfDI1/pdisk_1.dat 2025-04-10T10:59:35.994350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:36.285307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:36.297970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:36.299710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:36.310015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23735, node 1 2025-04-10T10:59:36.674785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:36.674815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:36.674824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:36.674960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:37.350880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:37.647750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:43.039252Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637512212064277:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:43.039306Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1a/r3tmp/tmpwUbAEx/pdisk_1.dat 2025-04-10T10:59:43.312339Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:43.350213Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:43.350299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:43.358240Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62306, node 4 2025-04-10T10:59:43.588933Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:43.588955Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:43.588962Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:43.589108Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:43.831580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:43.981163Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:50.643204Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637541307343016:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:50.643524Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1a/r3tmp/tmp2FBsjf/pdisk_1.dat 2025-04-10T10:59:50.820463Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:50.843430Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:50.843520Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:50.847355Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9337, node 7 2025-04-10T10:59:50.973249Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:50.973272Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:50.973279Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:50.973450Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:51.254930Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
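[editor's note] Each node registration in these logs walks HIVE's VolatileState through the same path: Unknown -> Disconnected -> Connecting -> Connected. A toy transition table capturing just the sequence the log shows; the real HIVE state machine has more states and edges, so this is only an illustration:

```python
# Only the transitions observed in this log.
VOLATILE_TRANSITIONS = {
    "Unknown": "Disconnected",
    "Disconnected": "Connecting",
    "Connecting": "Connected",
}

state = "Unknown"
while state != "Connected":
    nxt = VOLATILE_TRANSITIONS[state]
    print(f"VolatileState: {state} -> {nxt}")
    state = nxt
```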
2025-04-10T10:59:51.428047Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:56.930376Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637567989794253:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:56.950250Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1a/r3tmp/tmp8GxXQk/pdisk_1.dat 2025-04-10T10:59:57.153768Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:57.210598Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:57.210698Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:57.213482Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9959, node 10 2025-04-10T10:59:57.403851Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:57.403874Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:57.403883Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:57.404023Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:57.668225Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:57.805350Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:03.681303Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637595677560003:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:03.681360Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1a/r3tmp/tmpLjyrXQ/pdisk_1.dat 2025-04-10T11:00:04.182214Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:04.203777Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:04.203878Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:04.208031Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5952, node 13 2025-04-10T11:00:04.377439Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:04.377472Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:04.377482Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:04.377666Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:04.743129Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:04.844264Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-04-10T10:59:35.108932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637477142395962:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:35.110193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1e/r3tmp/tmprpVQfP/pdisk_1.dat 2025-04-10T10:59:36.145541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:36.610366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:36.672605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:36.672713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:36.687679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12088, node 1 2025-04-10T10:59:37.079046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:37.079066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:37.079078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:37.079185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:37.664679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:37.976702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1e/r3tmp/tmpddTvAn/pdisk_1.dat 2025-04-10T10:59:44.013324Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:44.233859Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:44.286419Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:44.286520Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:44.292610Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29063, node 4 2025-04-10T10:59:44.632555Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:44.632578Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:44.632585Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:44.643370Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14664 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:45.148124Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:45.519799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:45.684561Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:45.860704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:59:45.999108Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.165461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.704856Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.759424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.836189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:51.927155Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637545807260743:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:51.928060Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1e/r3tmp/tmpxJTVco/pdisk_1.dat 2025-04-10T10:59:52.202357Z node 7 :IMPORT WARN: Table profiles were not 
loaded 2025-04-10T10:59:52.269690Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:52.269807Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:52.275141Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7004, node 7 2025-04-10T10:59:52.383009Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:52.383043Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:52.383054Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:52.383273Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:52.792075Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:52.927080Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1e/r3tmp/tmpsJHl1S/pdisk_1.dat 2025-04-10T10:59:58.324774Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:58.433928Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:58.490625Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:58.490741Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:58.520752Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27464, node 10 2025-04-10T10:59:58.646974Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:58.647002Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:58.647012Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:58.647213Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:58.959360Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:59.054623Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:59.264389Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:59.973466Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-10T11:00:04.940664Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637598709022509:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.940730Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1e/r3tmp/tmpUEsD20/pdisk_1.dat 2025-04-10T11:00:05.221002Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:05.263864Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:05.263974Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:05.270063Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20236, node 13 2025-04-10T11:00:05.406740Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:05.406763Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:05.406784Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:05.406991Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:05.781227Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.392932Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491637620183860058:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.393087Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.393395Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491637620183860070:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.398517Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:00:09.462372Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491637620183860072:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:00:09.568286Z node 13 :TX_PROXY ERROR: Actor# [13:7491637620183860161:2674] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:09.570056Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZjNDRiOTUtYTc2YWVmM2UtOGNlMGU5YTYtMmUyZDYyMWE=, ActorId: [13:7491637620183860040:2333], ActorState: ExecuteState, TraceId: 01jrfnpx1e580jdbgrk3qv5fap, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-10T11:00:09.585147Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZjNDRiOTUtYTc2YWVmM2UtOGNlMGU5YTYtMmUyZDYyMWE=, ActorId: [13:7491637620183860040:2333], ActorState: ExecuteState, TraceId: 01jrfnpx7aekearnb332tdjw4n, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-10T11:00:09.591457Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZjNDRiOTUtYTc2YWVmM2UtOGNlMGU5YTYtMmUyZDYyMWE=, ActorId: [13:7491637620183860040:2333], ActorState: ExecuteState, TraceId: 01jrfnpx7pfw2tvere4ddwf072, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpUniqueIndex::InsertFkPartialColumnSet >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> KqpUniqueIndex::ReplaceFkPartialColumnSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 62538, MsgBus: 11900 2025-04-10T10:58:10.510797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637112189921265:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:10.511448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d48/r3tmp/tmpWOiD85/pdisk_1.dat 2025-04-10T10:58:11.345729Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:11.378713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:11.378848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:11.382508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62538, node 1 2025-04-10T10:58:11.517732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:11.517759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:11.517765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:11.517878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11900 TClient is connected to server localhost:11900 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:12.393927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.430682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:12.448249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.681159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:12.945176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:13.063616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:15.296353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637133664759520:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:15.296456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:15.511636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637112189921265:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:15.511706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:15.590229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.634308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.708768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.764397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.815284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.894128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.996290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637133664760039:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:15.996349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637133664760044:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:15.996389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:16.001058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:16.016509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637133664760046:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:16.078658Z node 1 :TX_PROXY ERROR: Actor# [1:7491637137959727396:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:18.462031Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282698473, txId: 281474976710672] shutting down 2025-04-10T10:58:18.865484Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282698858, txId: 281474976710675] shutting down 2025-04-10T10:58:19.235000Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282699257, txId: 281474976710678] shutting down 2025-04-10T10:58:19.649308Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282699670, txId: 281474976710681] shutting down 2025-04-10T10:58:20.019328Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282700034, txId: 281474976710684] shutting down 2025-04-10T10:58:20.397003Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282700412, txId: 281474976710687] shutting down 2025-04-10T10:58:20.724337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282700727, txId: 281474976710690] shutting down 2025-04-10T10:58:21.052729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282701084, txId: 281474976710693] shutting down 2025-04-10T10:58:21.624221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282701588, txId: 281474976710696] shutting down 2025-04-10T10:58:21.954822Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282701973, txId: 281474976710699] shutting down 2025-04-10T10:58:22.302868Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282702316, txId: 281474976710702] shutting down 2025-04-10T10:58:22.832452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282702848, txId: 281474976710705] shutting down 2025-04-10T10:58:23.199109Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282703226, txId: 281474976710708] shutting down 2025-04-10T10:58:23.522349Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282703548, txId: 281474976710711] shutting down 2025-04-10T10:58:23.829875Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282703849, txId: 281474976710714] shutting down 2025-04-10T10:58:24.104325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282704136, txId: 281474976710717] shutting down 2025-04-10T10:58:24.431650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282704430, txId: 281474976710720] shutting down 2025-04-10T10:58:24.853454Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1744282704885, txId: 281474976710723] shutting down 2025-04-10T10:58:25.195700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282705228, txId: 281474976710726] shutting down 2025-04-10T10:58:25.584930Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282705606, txId: 281474976710729] shutting down 2025-04-10T10:58:25.998248Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282706005, txId: 281474976710732] shutting down 2025-04-10T10:58:26.318314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:58:26.318357Z node 1 :IMP ... E_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282790194, txId: 281474976711209] shutting down 2025-04-10T10:59:50.713433Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282790691, txId: 281474976711212] shutting down 2025-04-10T10:59:51.157942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282791167, txId: 281474976711215] shutting down 2025-04-10T10:59:51.979119Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282791972, txId: 281474976711218] shutting down 2025-04-10T10:59:52.599061Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282792602, txId: 281474976711221] shutting down 2025-04-10T10:59:53.500875Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282793498, txId: 281474976711224] shutting down 2025-04-10T10:59:54.002247Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282794002, txId: 281474976711227] shutting down 2025-04-10T10:59:54.377983Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282794401, txId: 281474976711230] shutting down 2025-04-10T10:59:54.983197Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282794982, txId: 281474976711233] shutting down 2025-04-10T10:59:55.513303Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282795514, txId: 281474976711236] shutting down 2025-04-10T10:59:55.957773Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282795976, txId: 281474976711239] shutting down 2025-04-10T10:59:56.356006Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282796361, txId: 281474976711242] shutting down 2025-04-10T10:59:56.938896Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282796900, txId: 281474976711245] shutting down 2025-04-10T10:59:57.511216Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282797516, txId: 281474976711248] shutting down 2025-04-10T10:59:58.071127Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282798076, txId: 281474976711251] shutting down 2025-04-10T10:59:58.605687Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282798615, txId: 281474976711254] shutting down 2025-04-10T10:59:59.049918Z node 1 
:KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282799070, txId: 281474976711257] shutting down 2025-04-10T10:59:59.672081Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282799651, txId: 281474976711260] shutting down 2025-04-10T11:00:00.155991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282800176, txId: 281474976711263] shutting down 2025-04-10T11:00:00.823690Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282800820, txId: 281474976711266] shutting down 2025-04-10T11:00:01.203439Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282801219, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 2538, MsgBus: 17838 2025-04-10T11:00:02.787843Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637591701314149:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:02.823746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d48/r3tmp/tmpSzEk5r/pdisk_1.dat 2025-04-10T11:00:03.028683Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:03.091735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:03.091867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:03.097189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2538, node 2 2025-04-10T11:00:03.254987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:03.255015Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:03.255025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:03.255209Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17838 TClient is connected to server localhost:17838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:00:04.172405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.202761Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:04.226024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.344706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.723303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.847989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:07.463821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637613176152258:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.464109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.483792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.533973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.578410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.626269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.704265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.765576Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637591701314149:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:07.765687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:07.787988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.916118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637613176152784:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.916228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.916513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637613176152789:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.920740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:07.934065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637613176152791:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:07.994603Z node 2 :TX_PROXY ERROR: Actor# [2:7491637613176152845:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:11.537028Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282811544, txId: 281474976710671] shutting down >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:08.317123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:08.317259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:08.317308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:08.317353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:08.317402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:08.317453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:08.317515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:08.317648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:08.318009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:08.424477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:08.424659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:08.456576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:08.457077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:08.457344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:08.475893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:08.476277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:08.477350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-10T10:59:08.477979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:08.481789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.484204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:08.484302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.485492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:08.485653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:08.485759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:08.486032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.519520Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:08.681321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:08.681695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.681968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:08.682383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:08.682486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.686973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:08.687186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:08.687392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.687482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:08.687536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:08.687585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:08.690424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.690528Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:08.690575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:08.692997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.693054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.693123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.693185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.697348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:08.702356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:08.702686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:08.704053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:08.704320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:08.704397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.705240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:08.705339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:08.705610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:08.705744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:08.711066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:08.711157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:08.711418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:08.711482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:08.711789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:08.711859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:08.711970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:08.712009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.712055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:08.712091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.712138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:08.712190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:08.712228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:08.712266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:08.712370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:08.712422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:08.712475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:08.715536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:08.715695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:08.715739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6AD106F6-7EE9-44A4-8C4E-F824F9E60647 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-04-10T11:00:11.898000Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-04-10T11:00:11.898329Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5420] 2025-04-10T11:00:11.898542Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5421], sender# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E77943E4-7CCF-4140-9D91-8775DA79785A amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-04-10T11:00:11.902536Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-04-10T11:00:11.902829Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5420] 2025-04-10T11:00:11.902963Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5421], sender# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F354354C-79DC-438A-82D8-8E6FD68AD6DC amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-04-10T11:00:11.907483Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-10T11:00:11.907553Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5421], success# 1, error# , multipart# 1, uploadId# 1 2025-04-10T11:00:11.915912Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5421], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
AA783916-F732-4056-8D40-8A1BF8436D7B amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-10T11:00:11.930014Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5421], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-04-10T11:00:11.930438Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T11:00:11.961182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:11.961267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:00:11.961481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:11.961594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:11.961669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:11.961716Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:11.961779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:00:11.961832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:00:11.962057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:11.968136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:11.968861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:11.968932Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:00:11.969061Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:11.969107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:11.969170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:11.969214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:11.969258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:00:11.969353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T11:00:11.969430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:11.969475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:00:11.969511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:00:11.969670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:00:11.992659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:00:11.992742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5407] TestWaitNotification: OK eventTxId 102 >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpIndexes::SelectConcurentTX2 >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-04-10T10:59:32.966424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637461223500653:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:32.966492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d24/r3tmp/tmpnaaXKv/pdisk_1.dat 2025-04-10T10:59:33.780844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:33.814088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:33.814225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:33.843805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2692, node 1 2025-04-10T10:59:34.214840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:34.214859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:34.214867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:34.214986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:35.005169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:35.184652Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:38768 Call 2025-04-10T10:59:35.242460Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:38774 2025-04-10T10:59:37.954283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637461223500653:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:37.954361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:39.398859Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:38790 Call Call 2025-04-10T10:59:39.460197Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:38820 2025-04-10T10:59:39.468779Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:38824 2025-04-10T10:59:39.470203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:41.694795Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637499564984464:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:41.694885Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d24/r3tmp/tmpDPqhFT/pdisk_1.dat 2025-04-10T10:59:42.313006Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:42.357859Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:42.358086Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:42.378203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27862, node 4 2025-04-10T10:59:42.786710Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:42.786736Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:42.786742Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:42.786860Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65222 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:43.279571Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:46.682322Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637499564984464:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:46.682394Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:47.059491Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:47.060602Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:47.060623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:47.067081Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-04-10T10:59:47.223408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282787261, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:47.323482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-10T10:59:47.347622Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:47.348210Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:47.350496Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2025-04-10T10:59:47.429170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282787471, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:47.581259Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715659, done: 0, blocked: 1 2025-04-10T10:59:47.587669Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715659:0 2025-04-10T10:59:47.608455Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T10:59:47.608935Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:47.608951Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-10T10:59:47.609182Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:47.619148Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4 2025-04-10T10:59:47.732447Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282787779, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:47.791041Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2 2025-04-10T10:59:47.795225Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-10T10:59:47.795346Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-10T10:59:47.829325Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T10:59:47.829866Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:47.829885Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-04-10T10:59:47.830112Z node 4 :F ... PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:54.516650Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
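Operation 281474976715660 above copies into /Root/Table-3 and /Root/Table-4 as two sub-operations of a single transaction, and the line "All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2" shows both parts parking at a synchronization barrier before either is completed. A toy barrier with the same done/blocked bookkeeping (an illustrative sketch, not schemeshard's actual implementation) might be:

```cpp
#include <cstdio>
#include <vector>

// Toy model of sub-operation parts parking at a barrier until all arrive.
class TTxBarrier {
public:
    explicit TTxBarrier(unsigned parts) : Total(parts) {}

    // A part arrives at the barrier; returns true when this arrival
    // releases everyone, mirroring "done: 0, blocked: 2" in the log.
    bool Reach(unsigned partId) {
        Blocked.push_back(partId);
        std::printf("parts at barrier: done: %u, blocked: %zu\n",
                    Done, Blocked.size());
        if (Done + Blocked.size() < Total)
            return false;                 // still waiting for other parts
        for (unsigned id : Blocked) {     // release every parked part
            std::printf("Operation part %u is done\n", id);
            ++Done;
        }
        Blocked.clear();
        return true;
    }

private:
    unsigned Total;
    unsigned Done = 0;
    std::vector<unsigned> Blocked;
};

int main() {
    TTxBarrier barrier(/*parts=*/2); // the Table-3 and Table-4 copies
    barrier.Reach(0);                // blocked: 1, barrier still closed
    barrier.Reach(1);                // blocked: 2, barrier opens
}
```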
TClient is connected to server localhost:8848 2025-04-10T10:59:58.360657Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637553007368970:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:58.360778Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:58.476707Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8848 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744282798615 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-04-10T10:59:58.924430Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8848 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744282798615 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-04-10T11:00:01.666467Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637588584461365:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:01.667531Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d24/r3tmp/tmpqiWR1f/pdisk_1.dat 2025-04-10T11:00:01.939863Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:02.001573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:02.001693Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22500, node 10 2025-04-10T11:00:02.026357Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:02.186850Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:02.186873Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:02.186880Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:02.187048Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:02.628391Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
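The METADATA_PROVIDER errors repeated through this log come in two shapes from the same TTableExistsActor probe: event=undelivered (surfacing as error=scheme_cache_undelivered_message while the scheme cache is not reachable yet, as for node 10 just above) and event=timeout once the deadline at table_exists.cpp:59 fires. A stripped-down model of such a probe is sketched below; the real actor is event-driven rather than polling, and all names here are assumptions.

```cpp
#include <chrono>
#include <functional>
#include <iostream>

// The two failure shapes seen in the log, plus success.
enum class EProbeError { None, SchemeCacheUndelivered, Timeout };

// Simplified existence probe: 'deliver' stands in for handing the request
// to the scheme cache (false models the undelivered event), 'replied'
// for a path-status answer arriving before the deadline.
EProbeError ProbePathExistence(const std::function<bool()>& deliver,
                               std::chrono::steady_clock::duration deadline,
                               const std::function<bool()>& replied) {
    if (!deliver())
        return EProbeError::SchemeCacheUndelivered;    // event=undelivered
    const auto start = std::chrono::steady_clock::now();
    while (std::chrono::steady_clock::now() - start < deadline) {
        if (replied())
            return EProbeError::None;                  // path status resolved
    }
    return EProbeError::Timeout;                       // event=timeout
}

int main() {
    auto err = ProbePathExistence([] { return false; }, // cache not up yet
                                  std::chrono::milliseconds(50),
                                  [] { return false; });
    const char* msg =
        err == EProbeError::None ? "none"
        : err == EProbeError::SchemeCacheUndelivered ? "scheme_cache_undelivered_message"
                                                     : "timeout";
    std::cout << "cannot detect path existence; error=" << msg << "\n";
}
```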
2025-04-10T11:00:06.181531Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.345646Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.462433Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.638215Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637588584461365:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:06.638346Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:08.386636Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637618287600397:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.386715Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d24/r3tmp/tmpSABgR7/pdisk_1.dat 2025-04-10T11:00:08.684591Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:08.725314Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:08.725444Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:08.732058Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24780, node 13 2025-04-10T11:00:08.931117Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:08.931146Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:08.931156Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:08.931372Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:09.452864Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.233788Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.387607Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491637618287600397:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.387703Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:13.420761Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> TTableProfileTests::DescribeTableOptions [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:11.287061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:11.287166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:11.287207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:11.287242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:11.287287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:11.287319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:11.287382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:11.287484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:11.287820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:11.364544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:11.364596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:11.380929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:11.381162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:11.381299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:11.387479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:11.387701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:11.388246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.388562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:11.390111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.391182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:11.391256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.391436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:11.391479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:11.391522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:11.391736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.398834Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:11.549108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:11.549356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.549597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:11.549842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:11.549905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.554228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.554403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:11.554600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.554682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:11.554737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:11.554779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:11.558412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.558488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:11.558536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:11.561304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.561361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.561407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.561469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.576872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:11.579147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:11.579376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:11.580396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:11.580553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:11.580612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.580904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:11.580976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:11.581167Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:11.581254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:11.583626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:11.583677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:11.583896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:11.583962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:11.584227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:11.584276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:11.584376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:11.584412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.584448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:11.584479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.584517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:11.584556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:11.584597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:11.584626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:11.584686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:11.584733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:11.584795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:11.593593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:11.593757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:11.593799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ksum: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9865 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B9409503-0904-4E8A-8503-809CBDE0691F amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-04-10T11:00:14.385882Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-04-10T11:00:14.386126Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5420] 2025-04-10T11:00:14.386790Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5421], sender# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9865 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1FE11873-D0E8-4E5D-831C-1E4FB3A2C339 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-04-10T11:00:14.390820Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-04-10T11:00:14.391077Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5420] 2025-04-10T11:00:14.391195Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5421], sender# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9865 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD99D6F8-17F4-436E-9D64-28FF2AA3C9B7 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-04-10T11:00:14.395127Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5421], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-10T11:00:14.395209Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5421], success# 1, error# , multipart# 1, uploadId# 1 2025-04-10T11:00:14.400725Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5421], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9865 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
505563F3-9EF3-41A5-B76B-CCD652574E49 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-04-10T11:00:14.413643Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5421], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-04-10T11:00:14.414107Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5420], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T11:00:14.457097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:14.457173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:00:14.457352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:14.457487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-10T11:00:14.457566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:14.457609Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:14.457656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:00:14.457703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:00:14.457900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:14.463266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:14.464000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:14.464074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:00:14.464215Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:14.464253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:14.464297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:14.464335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:14.464374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:00:14.464454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T11:00:14.464502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:14.464547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:00:14.464579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:00:14.464713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:00:14.469850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:00:14.469928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5407] TestWaitNotification: OK eventTxId 102 >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-04-10T10:59:33.992733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637467876276219:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:33.992779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1f/r3tmp/tmp7zXqom/pdisk_1.dat 2025-04-10T10:59:34.863244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:34.916279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:34.916389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:34.920330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10181, node 1 2025-04-10T10:59:35.279810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:35.279830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:35.279838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:35.281853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23346 WaitRootIsUp 'Root'... 
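The backup export in the TBackupTests log above follows the standard S3 multipart protocol: each "PUT /data_00.csv.zst?partNumber=N&uploadId=1" returns an ETag, and the final "POST /data_00.csv.zst?uploadId=1" submits the ordered ETag list as an XML body, yielding the object's combined ETag. The requests were issued by aws-sdk-cpp/1.11.37 (per the logged user-agent); a rough equivalent using that SDK is sketched below, with placeholder bucket/part contents, client configuration omitted, and error handling trimmed (GetResult() is only valid on a successful outcome).

```cpp
#include <aws/core/Aws.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/CompleteMultipartUploadRequest.h>
#include <aws/s3/model/CompletedMultipartUpload.h>
#include <aws/s3/model/CompletedPart.h>
#include <aws/s3/model/CreateMultipartUploadRequest.h>
#include <aws/s3/model/UploadPartRequest.h>
#include <vector>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        // Placeholder client: the test points this at its S3 mock endpoint.
        Aws::S3::S3Client s3;

        // 1. Open the multipart upload; S3 hands back an uploadId
        //    (the "uploadId=1" in every request the mock logs).
        Aws::S3::Model::CreateMultipartUploadRequest create;
        create.WithBucket("backup-bucket").WithKey("data_00.csv.zst");
        auto uploadId = s3.CreateMultipartUpload(create).GetResult().GetUploadId();

        // 2. PUT each part; every response carries an ETag that must be
        //    kept for the final assembly step.
        Aws::S3::Model::CompletedMultipartUpload parts;
        std::vector<Aws::String> chunks = {"zstd-frame-1", "zstd-frame-2"};
        int partNumber = 1;
        for (const auto& chunk : chunks) {
            Aws::S3::Model::UploadPartRequest put;
            put.WithBucket("backup-bucket").WithKey("data_00.csv.zst")
               .WithUploadId(uploadId).WithPartNumber(partNumber);
            put.SetBody(Aws::MakeShared<Aws::StringStream>("upload", chunk));
            auto etag = s3.UploadPart(put).GetResult().GetETag();
            parts.AddParts(Aws::S3::Model::CompletedPart()
                               .WithPartNumber(partNumber).WithETag(etag));
            ++partNumber;
        }

        // 3. Finish with one request carrying the ordered ETag list; this
        //    is the XML body of the final "POST /data_00.csv.zst?uploadId=1".
        Aws::S3::Model::CompleteMultipartUploadRequest finish;
        finish.WithBucket("backup-bucket").WithKey("data_00.csv.zst")
              .WithUploadId(uploadId);
        finish.SetMultipartUpload(parts);
        s3.CompleteMultipartUpload(finish);
    }
    Aws::ShutdownAPI(options);
}
```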
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:35.601934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744289974656409 Nodes { NodeId: 1024 Host: "localhost" Port: 28624 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744289974656409 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1f/r3tmp/tmpieSC2F/pdisk_1.dat 2025-04-10T10:59:40.540778Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:40.789803Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:40.820203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:40.820293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:40.827439Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15932, node 4 2025-04-10T10:59:41.074742Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:41.074765Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:41.074782Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:41.074909Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4259 WaitRootIsUp 'Root'... 
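Each "Register node result" above returns the dynamic node's id (1024), its lease expiry, and the locations (DataCenter/Rack/Unit) of all known nodes. A minimal registry entry mirroring those response fields, as an assumption-level sketch rather than the actual NodeBroker schema, could be:

```cpp
#include <cstdint>
#include <iterator>
#include <map>
#include <string>
#include <utility>

// Hypothetical mirror of the fields in the registration response.
struct TNodeLocation {
    std::string DataCenter, Module, Rack, Unit;
};

struct TNodeEntry {
    std::string Host, ResolveHost, Address;
    uint16_t Port = 0;
    TNodeLocation Location;
    uint64_t ExpireUs = 0; // lease expiry, e.g. 1744289974656409 in the log
};

class TNodeRegistry {
public:
    // Dynamic nodes get ids from 1024 upward, matching "NodeId: 1024".
    uint32_t Register(TNodeEntry entry) {
        const uint32_t id = NextDynamicId++;
        Nodes[id] = std::move(entry);
        return id;
    }

    // Drop entries whose lease has expired.
    void Sweep(uint64_t nowUs) {
        for (auto it = Nodes.begin(); it != Nodes.end();)
            it = (it->second.ExpireUs < nowUs) ? Nodes.erase(it) : std::next(it);
    }

private:
    uint32_t NextDynamicId = 1024;
    std::map<uint32_t, TNodeEntry> Nodes;
};

int main() {
    TNodeRegistry registry;
    registry.Register({"localhost", "localhost", "localhost", 28624,
                       {"DataCenter", "", "Rack", "Body"}, 1744289974656409ULL});
    registry.Sweep(1744289974656410ULL); // past the lease: entry is dropped
}
```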
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:41.349823Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744289980758977 Nodes { NodeId: 1024 Host: "localhost" Port: 9019 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744289980758977 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-10T10:59:47.075436Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637525930621203:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:47.075496Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1f/r3tmp/tmpoONU4l/pdisk_1.dat 2025-04-10T10:59:47.749046Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:47.806947Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:47.807049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:47.824529Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5020, node 7 2025-04-10T10:59:48.190949Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:48.190973Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:48.190985Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:48.191134Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:11895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:48.558226Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11895 2025-04-10T10:59:49.016171Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:49.079665Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:49.588542Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491637535915910124:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:49.588614Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:49.703885Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:49.703990Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:49.735771Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-10T10:59:49.793334Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11895 2025-04-10T10:59:52.082322Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637525930621203:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:52.082425Z node 7 :METADATA_PROVIDER ERROR: f ... path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:59:56.334698Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:7491637565980682385:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:59:57.746514Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637571831836093:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:57.746580Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1f/r3tmp/tmpAoZCDD/pdisk_1.dat 2025-04-10T10:59:57.984896Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9879, node 10 2025-04-10T10:59:58.090617Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:58.090723Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:58.099516Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:58.110938Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:58.110965Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:58.110976Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:58.111128Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:58.554824Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17321 2025-04-10T10:59:59.064469Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:59.134821Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
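The KQP_WORKLOAD_SERVICE line just above treats LookupError as transient: instead of failing, the cleanup actor schedules itself for another attempt. A generic retry-with-backoff loop in the same spirit is sketched here; it is illustrative only, since the real actor schedules a wakeup event on itself rather than sleeping on a thread.

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

enum class EStatus { Ok, LookupError }; // LookupError is the retriable case here

// Retry a transient operation with exponential backoff, in the spirit of
// the "Scheduled retry for error" log line.
EStatus RunWithRetry(const std::function<EStatus()>& op,
                     int maxAttempts,
                     std::chrono::milliseconds initialDelay) {
    auto delay = initialDelay;
    for (int attempt = 1;; ++attempt) {
        const EStatus st = op();
        if (st != EStatus::LookupError || attempt == maxAttempts)
            return st;
        std::cout << "Scheduled retry for error: LookupError (attempt "
                  << attempt << ")\n";
        std::this_thread::sleep_for(delay);
        delay *= 2; // back off between attempts
    }
}

int main() {
    int calls = 0;
    const EStatus st = RunWithRetry(
        [&] { return ++calls < 3 ? EStatus::LookupError : EStatus::Ok; },
        /*maxAttempts=*/5, std::chrono::milliseconds(10));
    std::cout << (st == EStatus::Ok ? "resolved\n" : "gave up\n");
}
```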
2025-04-10T10:59:59.644163Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491637578475427302:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:59.644233Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:59.874230Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:59.874326Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:59.879575Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-10T10:59:59.883308Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17321 2025-04-10T11:00:00.311032Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17321 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744282800550 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... 
(TRUNCATED) 2025-04-10T11:00:01.355367Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-10T11:00:01.356241Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:00:04.646520Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491637578475427302:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.646604Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:08.062210Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637615610441508:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.062954Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1f/r3tmp/tmphUL7cp/pdisk_1.dat 2025-04-10T11:00:08.458468Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:08.507565Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:08.507694Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:08.520623Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26460, node 13 2025-04-10T11:00:08.791427Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:08.791460Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:08.791473Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:08.791657Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:09.260138Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
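The HIVE warnings above trace each node through a fixed ladder of volatile states: Unknown -> Disconnected -> Connecting -> Connected, and back to Disconnected when the tablet kills the node ("THive::TTxStatus ... killing node"). A minimal sketch of that ladder, assuming hypothetical names throughout — this is illustrative only, not YDB's actual THive code:

#include <cassert>
#include <initializer_list>
#include <iostream>

// Hypothetical reconstruction of the node-state ladder the HIVE log lines
// print ("VolatileState: Unknown -> Disconnected -> Connecting -> Connected").
// Names are invented for illustration.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* ToString(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Only the transitions observed in the log are allowed here.
bool IsValidTransition(EVolatileState from, EVolatileState to) {
    return (from == EVolatileState::Unknown      && to == EVolatileState::Disconnected)
        || (from == EVolatileState::Disconnected && to == EVolatileState::Connecting)
        || (from == EVolatileState::Connecting   && to == EVolatileState::Connected)
        || (from == EVolatileState::Connected    && to == EVolatileState::Disconnected); // node killed
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    for (EVolatileState next : {EVolatileState::Disconnected,
                                EVolatileState::Connecting,
                                EVolatileState::Connected,
                                EVolatileState::Disconnected}) {
        assert(IsValidTransition(s, next));
        std::cout << "VolatileState: " << ToString(s) << " -> " << ToString(next) << "\n";
        s = next;
    }
}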
TClient is connected to server localhost:2761 2025-04-10T11:00:09.775109Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.841774Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.367137Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7491637627680644337:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:10.367945Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:10.443741Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:10.443884Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:10.503454Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-10T11:00:10.506204Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2761 2025-04-10T11:00:11.253923Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-10T11:00:11.254518Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:00:15.371335Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7491637627680644337:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:15.371443Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::ForbidViewModification |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |70.6%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> YdbTableBulkUpsert::Nulls [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> KqpIndexes::WriteWithParamsFieldOrder >> YdbTableBulkUpsert::NotNulls >> KqpIndexes::NullInIndexTable [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin |70.6%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD] >> KqpUniqueIndex::UpdateFkAlreadyExist >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-04-10T10:59:13.115045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637381232371776:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.115094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.321784Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637379280434651:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:13.321989Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:13.624549Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:59:13.651058Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001752/r3tmp/tmpVAZoDh/pdisk_1.dat 2025-04-10T10:59:14.257467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:14.347968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:14.358992Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:14.383142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.383247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.387335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:14.387436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:14.394190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:14.395865Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:14.397481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14637, node 1 2025-04-10T10:59:14.543995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001752/r3tmp/yandexfymFii.tmp 2025-04-10T10:59:14.544020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001752/r3tmp/yandexfymFii.tmp 2025-04-10T10:59:14.544189Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001752/r3tmp/yandexfymFii.tmp 2025-04-10T10:59:14.544324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:14.619014Z INFO: TTestServer started on Port 1290 GrpcPort 14637 TClient is connected to server localhost:1290 PQClient connected to localhost:14637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:14.934475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T10:59:15.047698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T10:59:18.089680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637402707209348:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.089835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.090759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637402707209375:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:18.096380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T10:59:18.119601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637381232371776:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:18.119673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:18.128798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637402707209377:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T10:59:18.322593Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637379280434651:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:18.322673Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:18.450246Z node 1 :TX_PROXY ERROR: Actor# [1:7491637402707209470:2768] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:18.484922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:59:18.653862Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637402707209493:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:18.657609Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M5M2JkNDctODBhNDY0ZjgtNzBjZTg0Mi1hOTM3NjBkZg==, ActorId: [1:7491637402707209345:2338], ActorState: ExecuteState, TraceId: 01jrfnnaxcdnhcb3ngz1m4p4zf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:18.662929Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:59:18.673615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:59:18.851997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T10:59:19.288370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfnnbvgf5m4av12yfqj8eyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE5NDg0OWQtOTlhNTQ1Y2UtNmEzZjAxNDYtNGY0NzlkYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491637407002177253:3119] === CheckClustersList. 
Ok 2025-04-10T10:59:24.719006Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637381232372036:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T10:59:24.719270Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637381232372036:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-10T10:59:24.719368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637381232372036:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637385527339628:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282755019 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T10:59:24.719464Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637381232372036:2130], cacheItem# { Subscriber: { Subscriber: [1:7491637385527339628:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282755019 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: ... FO: [/Root] TraceId [] SessionId [123|3e386ac5-eb050cff-205be53a-4817b7cc_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2025-04-10T11:00:18.518877Z :DEBUG: [/Root] TraceId [] SessionId [123|3e386ac5-eb050cff-205be53a-4817b7cc_0] MessageGroupId [123] Write session: destroy 2025-04-10T11:00:18.550595Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7491637658347339318:2516] destroyed 2025-04-10T11:00:18.550664Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
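The write-session teardown above reports "gracefully shut down, all writes complete" before "Write session: destroy", i.e. the session drains its in-flight queue before it is torn down; the server side then logs the pipe being destroyed and the partition dropping its owner. A minimal sketch of that drain-then-close contract, with invented class and member names — not the actual persqueue SDK classes:

#include <condition_variable>
#include <deque>
#include <mutex>
#include <string>

// Simplified stand-in for a write session whose Close() blocks until every
// queued message has been acknowledged, matching the "gracefully shut down,
// all writes complete" log line.
class TWriteSessionSketch {
public:
    void Write(std::string msg) {
        std::lock_guard lock(Mutex);
        InFlight.push_back(std::move(msg));
    }

    // Called by the delivery thread when the server acknowledges a message.
    void OnAck() {
        std::lock_guard lock(Mutex);
        if (!InFlight.empty()) {
            InFlight.pop_front();
        }
        AllDone.notify_all();
    }

    // Graceful close: wait until the in-flight queue drains before returning.
    void Close() {
        std::unique_lock lock(Mutex);
        AllDone.wait(lock, [this] { return InFlight.empty(); });
    }

private:
    std::mutex Mutex;
    std::condition_variable AllDone;
    std::deque<std::string> InFlight;
};

int main() {
    TWriteSessionSketch session;
    session.Write("payload");
    session.OnAck();  // delivery confirmed by the server
    session.Close();  // returns immediately once the queue is drained
}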
2025-04-10T11:00:18.548570Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|3e386ac5-eb050cff-205be53a-4817b7cc_0 grpc read done: success: 0 data: 2025-04-10T11:00:18.548604Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|3e386ac5-eb050cff-205be53a-4817b7cc_0 grpc read failed 2025-04-10T11:00:18.548642Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|3e386ac5-eb050cff-205be53a-4817b7cc_0 grpc closed 2025-04-10T11:00:18.548658Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|3e386ac5-eb050cff-205be53a-4817b7cc_0 is DEAD 2025-04-10T11:00:18.549467Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:00:18.818724Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491637598217794538:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.818923Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637602512762328:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.819059Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491637658347339368:4014], recipient# [7:7491637658347339367:2543], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.831936Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491637598217794538:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.832128Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637602512762328:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.832237Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491637658347339370:4015], recipient# [7:7491637658347339369:2544], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.861195Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7491637598217794538:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:00:18.861342Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637628282566780:2909] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282811565 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.861429Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637623987599330:2792] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744282811166 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.861749Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491637658347339375:4018], recipient# [7:7491637658347339374:2506], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { 
DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:00:18.864136Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491637598217794538:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.864406Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637602512762320:2435] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282806231 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.864634Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491637658347339378:4019], recipient# [7:7491637658347339377:2545], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:18.970937Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491637598217794538:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:18.971130Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491637598217794538:2127], cacheItem# { Subscriber: { Subscriber: [7:7491637623987599258:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown 
DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:18.971264Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491637658347339387:4023], recipient# [7:7491637658347339386:2548], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-04-10T10:59:22.843991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637418498659380:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:22.844037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d44/r3tmp/tmpqKJspe/pdisk_1.dat 2025-04-10T10:59:23.589234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.589467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.597018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:23.653353Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19510, node 1 2025-04-10T10:59:23.719881Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:23.719912Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:23.982905Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.982933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.982942Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.983118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.599857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:27.846505Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637418498659380:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:27.846645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:28.796670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637444268464242:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:28.796792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.146612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:29.384064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:29.570542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637448563431758:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.570640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.570830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637448563431763:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:29.575834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T10:59:29.610879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637448563431765:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T10:59:29.724022Z node 1 :TX_PROXY ERROR: Actor# [1:7491637448563431840:2840] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:29.784542Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7491637448563431863:2852], for# test_user@builtin, access# DescribeSchema 2025-04-10T10:59:29.784575Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7491637448563431863:2852], for# test_user@builtin, access# DescribeSchema 2025-04-10T10:59:29.803730Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637448563431855:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:59:29.805734Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWUxMTYwOS1mMTZhNzE3OC1iOGUwYzZhMS05NDNjZGRlOQ==, ActorId: [1:7491637448563431754:2358], ActorState: ExecuteState, TraceId: 01jrfnnp4zepaxe07pp8m1hrsa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:59:32.733111Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637462683439512:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:32.733164Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d44/r3tmp/tmphDUIvY/pdisk_1.dat 2025-04-10T10:59:33.264741Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:33.444200Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:33.444331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:33.455526Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23930, node 4 2025-04-10T10:59:33.821848Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:33.821877Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:33.821885Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:33.822036Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:34.301172Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:37.736669Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637462683439512:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:37.736736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:38.619427Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637488453244301:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.619524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.635732Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:38.818790Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637488453244479:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:38.818867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637488453244484:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T ... 37488453244565:2822] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:39.161671Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnnz61adsncbaapcj719gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzA3YTBhMS1kOTc4NGNmMC04MTAxZjViNi1hN2FkYjkyMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:39.340530Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnnzhm4x75jaz684rg7w2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzA3YTBhMS1kOTc4NGNmMC04MTAxZjViNi1hN2FkYjkyMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:42.042316Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637506739005020:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:42.042401Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d44/r3tmp/tmpc2y09B/pdisk_1.dat 2025-04-10T10:59:42.592585Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:42.606749Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:42.606833Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:42.619786Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7689, node 7 2025-04-10T10:59:42.838708Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:42.838728Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:42.838736Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:42.838847Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T10:59:43.627649Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:47.023475Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637506739005020:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:47.023570Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-04-10T10:59:50.281699Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637541098744564:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:50.281817Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:50.282312Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637541098744576:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:50.287497Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T10:59:50.338132Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491637541098744578:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T10:59:50.407237Z node 7 :TX_PROXY ERROR: Actor# [7:7491637541098744645:2715] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-10T10:59:56.514656Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637564638547391:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:56.514718Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d44/r3tmp/tmpczFWwP/pdisk_1.dat 2025-04-10T10:59:56.882880Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:56.955644Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:56.955770Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:56.968161Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29878, node 10 2025-04-10T10:59:57.095706Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:57.095735Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:57.095746Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:57.095929Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24873 WaitRootIsUp 'Root'... 
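The "Sending retry attempt N of M" lines above come from the SDK-side retry wrapper exercised by YdbYqlClient::RetryOperationLimitedDuration: every retryable status shown (OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY, ...) triggers another attempt up to the configured budget. A minimal sketch of such a bounded retry loop with exponential backoff, under assumed names — the real test uses the YDB C++ SDK's retry helpers, not this code:

#include <chrono>
#include <functional>
#include <iostream>
#include <string>
#include <thread>

// Assumed status type and retryability rule, for illustration only.
enum class EStatus { Success, Overloaded, Unavailable, BadSession, SessionBusy };

bool IsRetryable(EStatus s) {
    return s != EStatus::Success; // the log retries every non-success status shown
}

EStatus RunWithRetries(const std::function<EStatus()>& op,
                       unsigned maxRetries,
                       std::chrono::milliseconds baseDelay) {
    EStatus status = op();
    for (unsigned attempt = 1; attempt <= maxRetries && IsRetryable(status); ++attempt) {
        std::cout << "Previous query attempt was finished with unsuccessful status: "
                  << "Sending retry attempt " << attempt << " of " << maxRetries << "\n";
        std::this_thread::sleep_for(baseDelay * (1u << (attempt - 1))); // exponential backoff
        status = op();
    }
    return status;
}

int main() {
    unsigned calls = 0;
    auto flaky = [&calls]() {
        return ++calls < 4 ? EStatus::Overloaded : EStatus::Success;
    };
    RunWithRetries(flaky, /*maxRetries=*/5, std::chrono::milliseconds(10));
}

In the duration-limited variant the later log lines show the attempt counter restarting ("retry attempt 1 of 3" appearing repeatedly over several seconds), which suggests retries are bounded by elapsed time as well as by attempt count.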
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:57.482324Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 2025-04-10T11:00:01.518601Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637564638547391:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:01.518707Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2025-04-10T11:00:11.851332Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:11.851362Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 19674, MsgBus: 13276 2025-04-10T11:00:02.512379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637590809875184:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:02.512572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001e7c/r3tmp/tmpkK0NbI/pdisk_1.dat 2025-04-10T11:00:03.146611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:03.146729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:03.155222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:03.159442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19674, node 1 2025-04-10T11:00:03.491407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:03.491432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:03.491440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:03.491548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13276 TClient is connected to server localhost:13276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:04.539315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.563183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.731750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:04.931659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.051792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:07.259203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637612284713282:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.259350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.498365Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637590809875184:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:07.507295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:07.727822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.793122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.865228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.908377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.957254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.042426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.124313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637616579681102:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.124400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.124764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637616579681107:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.129283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:08.145261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637616579681109:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:08.214871Z node 1 :TX_PROXY ERROR: Actor# [1:7491637616579681164:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:09.477187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:11.581850Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnpyaeaa0ben1kfrb6wzvw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRjNjI2NTItYTEyODQ3Mi0yZTIzNmExZC1jNGRmYzAyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:11.593266Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRjNjI2NTItYTEyODQ3Mi0yZTIzNmExZC1jNGRmYzAyNw==, ActorId: [1:7491637620874649529:2560], ActorState: ExecuteState, TraceId: 01jrfnpyaeaa0ben1kfrb6wzvw, Create QueryResponse for error on request, msg: 2025-04-10T11:00:12.432522Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnpz70fr0wjd2mefn186yf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRjNjI2NTItYTEyODQ3Mi0yZTIzNmExZC1jNGRmYzAyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:12.432854Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRjNjI2NTItYTEyODQ3Mi0yZTIzNmExZC1jNGRmYzAyNw==, ActorId: [1:7491637620874649529:2560], ActorState: ExecuteState, TraceId: 01jrfnpz70fr0wjd2mefn186yf, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14207, MsgBus: 6528 2025-04-10T11:00:13.295407Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637637636578372:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.300203Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e7c/r3tmp/tmpXWuPcx/pdisk_1.dat 2025-04-10T11:00:13.475201Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:13.484791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:13.484888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:13.493294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14207, node 2 2025-04-10T11:00:13.603819Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:13.603839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:13.603847Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:13.603971Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6528 TClient is 
connected to server localhost:6528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:00:14.171545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.196278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.327349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.565180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.642103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.871240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637650521482006:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.871340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.916510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.987433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.059781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.115632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.158687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.208368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.290856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637654816449817:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.290973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.291226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637654816449822:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.299659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:17.312582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637654816449824:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:17.373171Z node 2 :TX_PROXY ERROR: Actor# [2:7491637654816449881:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:18.297148Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637637636578372:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:18.297263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:18.378736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.106405Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnq7c83w4hvxzcdpj0kfah, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjQzOWIxMjUtYjQ0NDZmMTQtYjk3YWMyYWItMzM3NTgyYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:21.106673Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjQzOWIxMjUtYjQ0NDZmMTQtYjk3YWMyYWItMzM3NTgyYzI=, ActorId: [2:7491637659111418275:2555], ActorState: ExecuteState, TraceId: 01jrfnq7c83w4hvxzcdpj0kfah, Create QueryResponse for error on request, msg: 2025-04-10T11:00:22.525525Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnq8ms4afr8a8aes18bz64, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjQzOWIxMjUtYjQ0NDZmMTQtYjk3YWMyYWItMzM3NTgyYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
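The "Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt N of 3" lines earlier in this dump come from a bounded retry policy: the client re-sends the query a fixed number of times when the server sheds load, then gives up. A minimal C++ sketch of such a loop follows, assuming a hypothetical ExecuteQuery callable and EStatus enum; the actual SDK types behind RetryPolicy::RetryWithBatching are not shown in this log.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    enum class EStatus { Success, Overloaded };  // hypothetical; real SDK statuses differ

    // Retry `execute` up to maxRetries extra times while it reports OVERLOADED,
    // logging in the same shape as the test output above.
    EStatus RunWithRetries(const std::function<EStatus()>& execute, int maxRetries = 3) {
        for (int attempt = 0; ; ++attempt) {
            const EStatus status = execute();
            if (status != EStatus::Overloaded || attempt == maxRetries) {
                return status;  // success, a non-retryable error, or retries exhausted
            }
            std::cerr << "Previous query attempt was finished with unsuccessful status OVERLOADED: "
                         "Sending retry attempt " << (attempt + 1) << " of " << maxRetries << "\n";
            // Illustrative exponential backoff; the real policy's delays are not in the log.
            std::this_thread::sleep_for(std::chrono::milliseconds(100 << attempt));
        }
    }

    int main() {
        int calls = 0;
        // Simulated query that is shed twice, then succeeds on the third call.
        const EStatus st = RunWithRetries([&calls] {
            return ++calls < 3 ? EStatus::Overloaded : EStatus::Success;
        });
        return st == EStatus::Success ? 0 : 1;
    }

The cap of three retries matches the "attempt N of 3" messages above; the backoff schedule is not visible in the log, so the exponential sleep here is only illustrative.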
2025-04-10T11:00:22.525815Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjQzOWIxMjUtYjQ0NDZmMTQtYjk3YWMyYWItMzM3NTgyYzI=, ActorId: [2:7491637659111418275:2555], ActorState: ExecuteState, TraceId: 01jrfnq8ms4afr8a8aes18bz64, Create QueryResponse for error on request, msg: >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::IndexOr ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-04-10T10:59:21.856643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637413888863238:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:21.862768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d67/r3tmp/tmpPYzFJv/pdisk_1.dat 2025-04-10T10:59:22.532573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:22.532796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:22.542686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:22.550757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2604, node 1 2025-04-10T10:59:22.799991Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:22.800023Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T10:59:22.943199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:22.943222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:22.943232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:22.943368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27406 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:23.445904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:26.665039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637435363700755:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:26.665170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:26.986296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637413888863238:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:26.986391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:27.720752Z node 1 :TX_PROXY ERROR: Actor# [1:7491637439658668101:2662] txid# 281474976710658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-04-10T10:59:29.978328Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637452103710517:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:29.982507Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d67/r3tmp/tmpZ8hbyF/pdisk_1.dat 2025-04-10T10:59:30.458277Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:30.584218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:30.584339Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:30.587867Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14063, node 4 2025-04-10T10:59:31.202990Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:31.203018Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:31.203025Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:31.203192Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:31.892778Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:34.918270Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637452103710517:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:34.918353Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:36.590669Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637482168482739:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.590750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.647565Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637482168482770:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.648312Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.664663Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:36.674075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637482168482787:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.683659Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.686624Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491637482168482827:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.699496Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:36.724782Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482856:2694] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.725663Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482865:2699] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.731811Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482866:2700] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.731976Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482864:2698] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.732090Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482858:2696] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.732208Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482863:2697] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:59:36.732938Z node 4 :TX_PROXY ERROR: Actor# [4:7491637482168482914:2736] txid# 2814 ... 2NhZjItY2ExNTM0NGUtOTZhYjc3MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T10:59:40.363596Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637496832034747:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d67/r3tmp/tmp0mx9cp/pdisk_1.dat 2025-04-10T10:59:40.531067Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:59:40.688119Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:40.735043Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:40.735134Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:40.739434Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3984, node 7 2025-04-10T10:59:40.986923Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:40.986951Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:40.986961Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:40.987139Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:41.884013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:41.918584Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:45.336918Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637496832034747:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:45.337008Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:46.660344Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:46.879881Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637522601839675:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.879980Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.880364Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491637522601839687:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:46.885786Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:59:46.933037Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491637522601839689:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:59:47.012638Z node 7 :TX_PROXY ERROR: Actor# [7:7491637526896807069:2864] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:47.164990Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnp71x2xxxvaq0906z8xay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdiNWQ1OTItNjJiMTAyMDQtYjE3NDMzODMtZDgxOTM3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:59:50.343451Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637541388811267:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:50.344865Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d67/r3tmp/tmppZoykG/pdisk_1.dat 2025-04-10T10:59:50.837135Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:50.955676Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:50.955797Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:50.968455Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18126, node 10 2025-04-10T10:59:51.276993Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:51.277019Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:51.277028Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:51.277195Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:52.182871Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:55.321042Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491637541388811267:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:55.321132Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:56.558256Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:05.775813Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:05.775859Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:22.822606Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637678827767116:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.822734Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.823424Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491637678827767128:2549], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.836383Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:00:22.895085Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491637678827767130:2550], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:00:22.988260Z node 10 :TX_PROXY ERROR: Actor# [10:7491637678827767208:3228] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:23.108651Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfnqa545af2byeh76fp8fw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTc0ZjZhNzQtN2E5NzQ1ODAtNDQ5YzdlZjgtMWE2NGI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:00:23.677266Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfnqaf54x4gcxnpdftf48x2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTc0ZjZhNzQtN2E5NzQ1ODAtNDQ5YzdlZjgtMWE2NGI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |70.6%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 9092, MsgBus: 25136 2025-04-10T11:00:01.537348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637588489369334:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:01.537425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea4/r3tmp/tmpiivrGZ/pdisk_1.dat 2025-04-10T11:00:02.159875Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:02.162127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:02.162413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:02.169377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9092, node 1 2025-04-10T11:00:02.286319Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:02.286342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:02.286356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:02.286512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25136 TClient is connected to server localhost:25136 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:02.946115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:02.990322Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:03.008768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.232594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.459651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.612974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.916680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637605669240133:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:05.916791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.378077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.422802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.491182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.528528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.537636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637588489369334:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:06.537714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:06.586757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.623385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.682280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637609964207945:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.682375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.682398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637609964207950:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.686961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:06.704247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637609964207952:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:06.774864Z node 1 :TX_PROXY ERROR: Actor# [1:7491637609964208006:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:07.763427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7335, MsgBus: 15242 2025-04-10T11:00:09.753502Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637621363423179:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:09.753609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea4/r3tmp/tmpltcw2I/pdisk_1.dat 2025-04-10T11:00:09.910933Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:09.937360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:09.937481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:09.939933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7335, node 2 2025-04-10T11:00:10.024455Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:10.024478Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:10.024486Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:10.024620Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15242 TClient is connected to server localhost:15242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:10.471742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:10.486958Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:10.500849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.606875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T ... 44480 2025-04-10T11:00:13.972801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.078174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.165589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.265881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637642838261976:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:14.265969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:14.266696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637642838261981:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:14.278520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:14.303664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637642838261983:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:14.408949Z node 2 :TX_PROXY ERROR: Actor# [2:7491637642838262040:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:14.768047Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637621363423179:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:14.768155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:15.587136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.096830Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:16.110036Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 4248, MsgBus: 27257 2025-04-10T11:00:16.924032Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637653812489050:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:16.925509Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea4/r3tmp/tmpM30xIG/pdisk_1.dat 2025-04-10T11:00:17.132202Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:17.148994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:17.149112Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:17.151601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4248, node 3 2025-04-10T11:00:17.209457Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.209478Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.209495Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.209608Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27257 TClient is connected to server localhost:27257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:17.909198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:17.952488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.053636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.301375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.396559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.261351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637675287327306:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.261467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.322116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.397310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.485302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.539705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.591737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.674206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.789171Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637675287327836:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.789269Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.789561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637675287327841:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.796183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.815680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637675287327843:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:21.917925Z node 3 :TX_PROXY ERROR: Actor# [3:7491637675287327899:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:21.930284Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637653812489050:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.939327Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:23.118599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.236455Z node 3 :TX_PROXY ERROR: Actor# [3:7491637683877263087:3831] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:24.510721Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:24.534277Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogGrepNonExisting >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-10T10:54:00.972682Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.972725Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.972761Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T10:54:00.973133Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T10:54:00.973173Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.973191Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.974221Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007126s 2025-04-10T10:54:00.974653Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T10:54:00.974675Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.974689Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.974732Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006105s 2025-04-10T10:54:00.975133Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-10T10:54:00.975161Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.975202Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T10:54:00.975254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007164s 2025-04-10T10:54:01.014095Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744282441014060 2025-04-10T10:54:01.439222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636041007611981:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.440984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.471839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636043220011785:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:54:01.471883Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T10:54:01.670905Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T10:54:01.684344Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001525/r3tmp/tmpWtcv1c/pdisk_1.dat 2025-04-10T10:54:01.898766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:54:01.927447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.927564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.930139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:54:01.930224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:54:01.933217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:54:01.935516Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:54:01.937075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23050, node 1 2025-04-10T10:54:02.031973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001525/r3tmp/yandexcoHLwD.tmp 2025-04-10T10:54:02.032003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001525/r3tmp/yandexcoHLwD.tmp 2025-04-10T10:54:02.032180Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001525/r3tmp/yandexcoHLwD.tmp 2025-04-10T10:54:02.032334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:54:02.084516Z INFO: TTestServer started on Port 21183 GrpcPort 23050 TClient is connected to server localhost:21183 PQClient connected to localhost:23050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:54:02.362109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T10:54:04.479646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636053892514845:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.479775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.479577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636056104913965:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.479576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636056104913997:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.479773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.480192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636053892514857:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:54:04.484847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T10:54:04.488637Z node 2 :TX_PROXY ERROR: Actor# [2:7491636056104914005:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T10:54:04.513898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636053892514859:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:04.514016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636056104914002:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T10:54:04.576192Z node 2 :TX_PROXY ERROR: Actor# [2:7491636056104914032:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.612304Z node 1 :TX_PROXY ERROR: Actor# [1:7491636053892514961:2694] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:54:04.837901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:54:04.855473Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491636053892514971:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:04.855811Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM0NGZmOGUtOWRjM2ZlOWMtNGFiN2I0NGMtODhlZTBjNjU=, ActorId: [1:7491636053892514842:2336], ActorState: ExecuteState, TraceId: 01jrfnbrn75a4jqp1t6tzxkbye, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T10:54:04.858340Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T10:54:04.858472Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491636056104914039:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T10:54:0 ... ] --- write ----------------- 2025-04-10T11:00:20.830914Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-04-10T11:00:20.830935Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-04-10T11:00:20.830951Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:00:20.830979Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:00:20.831010Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:00:20.831088Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:00:20.831241Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-10T11:00:20.838949Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7491637660475094042:2648] 2025-04-10T11:00:20.839171Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:00:20.839266Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839361Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T11:00:20.839371Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-10T11:00:20.839423Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839479Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-10T11:00:20.839509Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839554Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-10T11:00:20.839590Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839630Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-10T11:00:20.839658Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:00:20.839691Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-10T11:00:20.839717Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839747Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-10T11:00:20.839771Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839794Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-10T11:00:20.839818Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839850Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-10T11:00:20.839880Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:00:20.839926Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-10T11:00:20.839964Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:00:20.840011Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-10T11:00:20.840253Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:00:20.840306Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:00:20.840496Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T11:00:20.840643Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:00:20.841264Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-10T11:00:20.841323Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-10T11:00:20.841611Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-10T11:00:20.841641Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:00:20.841740Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744282820826 queuesize 0 startOffset 0 2025-04-10T11:00:20.842941Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 12 } 2025-04-10T11:00:20.843050Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 1 2025-04-10T11:00:20.843129Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 2 2025-04-10T11:00:20.843181Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 3 2025-04-10T11:00:20.843277Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 4 2025-04-10T11:00:20.843318Z :DEBUG: [/Root] MessageGroupId 
[test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 5 2025-04-10T11:00:20.843359Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 6 2025-04-10T11:00:20.843404Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 7 2025-04-10T11:00:20.843444Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 8 2025-04-10T11:00:20.843502Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 9 2025-04-10T11:00:20.843542Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: acknoledged message 10 2025-04-10T11:00:20.846307Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: close. Timeout = 0 ms 2025-04-10T11:00:20.846385Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session will now close 2025-04-10T11:00:20.846484Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: aborting 2025-04-10T11:00:20.847185Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:00:20.847251Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0] Write session: destroy 2025-04-10T11:00:20.850343Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0 grpc read done: success: 0 data: 2025-04-10T11:00:20.850380Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0 grpc read failed 2025-04-10T11:00:20.850427Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0 grpc closed 2025-04-10T11:00:20.850458Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|ca406376-79cdc04e-264ecb26-765b7f87_0 is DEAD 2025-04-10T11:00:20.851668Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:00:20.851853Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7491637669065028897:2673] destroyed 2025-04-10T11:00:20.851962Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> KqpIndexes::UpsertNoIndexColumns [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 27002, MsgBus: 7173 2025-04-10T11:00:08.483317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637615698717677:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.495134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e68/r3tmp/tmpzrkEuP/pdisk_1.dat 2025-04-10T11:00:08.939817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:08.955599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:08.955693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:08.959628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27002, node 1 2025-04-10T11:00:09.082770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:09.082791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:09.082798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:09.082901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7173 TClient is connected to server localhost:7173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:09.834593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:09.876031Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:09.890201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.136215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.350275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.435271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.368336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637632878588611:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.368515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.705363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.747451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.819706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.883038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.916691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.980341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.055734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637637173556425:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.055827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.056040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637637173556430:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.060382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:13.098527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637637173556432:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:13.177935Z node 1 :TX_PROXY ERROR: Actor# [1:7491637637173556488:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:13.486278Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637615698717677:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.486358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:14.195687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.381774Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637650058459977:2621], TxId: 281474976710679, task: 1. Ctx: { TraceId : 01jrfnq3f1f243sx760rsd2apt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2FhYjk2OTUtNTgyZDE4OTItN2RjZDc5Y2YtZjk4ZDQzZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:00:16.382189Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637650058459979:2622], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jrfnq3f1f243sx760rsd2apt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2FhYjk2OTUtNTgyZDE4OTItN2RjZDc5Y2YtZjk4ZDQzZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491637650058459974:2556], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:00:16.382649Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2FhYjk2OTUtNTgyZDE4OTItN2RjZDc5Y2YtZjk4ZDQzZTE=, ActorId: [1:7491637641468524865:2556], ActorState: ExecuteState, TraceId: 01jrfnq3f1f243sx760rsd2apt, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15930, MsgBus: 18527 2025-04-10T11:00:17.240367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637658202295598:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.242439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e68/r3tmp/tmp7kI2ku/pdisk_1.dat 2025-04-10T11:00:17.402917Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:17.416532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:17.416620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:17.418397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15930, node 2 2025-04-10T11:00:17.480800Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.480822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.480830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.480958Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18527 TClient is connected to server localhost:18527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:18.032218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.040560Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:18.058896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.219584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.414836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.494742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.088916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637675382166542:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.089032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.141933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.186430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.221997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.305989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.380196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.460522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.541372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637675382167073:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.541483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.541559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637675382167079:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.546114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.560242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637675382167081:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:21.629888Z node 2 :TX_PROXY ERROR: Actor# [2:7491637675382167135:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:22.242225Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637658202295598:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:22.242305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:22.637622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexReplace+UseSink >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-10T10:59:22.503064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637421383296415:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:22.503237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5a/r3tmp/tmp4v97qU/pdisk_1.dat 2025-04-10T10:59:23.417636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.417723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.436106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:23.457218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19153, node 1 2025-04-10T10:59:23.736733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.736759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.736766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.738139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29036 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.331885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:29036 2025-04-10T10:59:27.506358Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637421383296415:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:27.506417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:28.890444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:29.258562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:29.258701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-04-10T10:59:29.261262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable 2025-04-10T10:59:29.261536Z node 1 :TX_PROXY ERROR: Actor# [1:7491637451448068815:2948] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Error 128: Administrative access denied TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282769166 ParentPathId: 3 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-04-10T10:59:29.280850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-04-10T10:59:29.281002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:29.281026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-10T10:59:29.281481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:29.281506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-10T10:59:29.283897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-04-10T10:59:29.305783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282769348, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:29.336699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-10T10:59:29.336763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282769166 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-04-10T10:59:29.350568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710661:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-04-10T10:59:29.350702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:29.350721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-04-10T10:59:29.351099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:29.351115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:29.355995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable 2025-04-10T10:59:29.393675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282769439, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T10:59:29.431688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2025-04-10T10:59:29.431749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282769166 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-04-10T10:59:29.441467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex P ... :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:41.043223Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:41.043412Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17793 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:41.769782Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:42.028349Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:42.244171Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T10:59:42.481784Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T10:59:49.283173Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491637536735159242:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:49.283300Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5a/r3tmp/tmp0N8CDn/pdisk_1.dat 2025-04-10T10:59:49.937317Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:49.995405Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:49.995493Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:50.005853Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18043, node 10 2025-04-10T10:59:50.406770Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:50.406793Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:50.406800Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:50.406937Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25556 WaitRootIsUp 'Root'... 
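The "WaitRootIsUp 'Root'..." fragments above are the harness polling pattern: send TClient::Ls for 'Root' and retry until the response carries StatusCode: SUCCESS, at which point the log prints "WaitRootIsUp 'Root' success." A minimal sketch of such a loop, where CheckRootIsUp() is a hypothetical stand-in for the Ls round-trip:

    // Sketch of the poll loop behind the "WaitRootIsUp 'Root'..." lines.
    #include <util/datetime/base.h>

    bool CheckRootIsUp(); // hypothetical: returns true once Ls("Root") is SUCCESS

    bool WaitRootIsUp(TDuration timeout = TDuration::Seconds(30)) {
        const TInstant deadline = TInstant::Now() + timeout;
        while (TInstant::Now() < deadline) {
            if (CheckRootIsUp()) {
                return true; // the harness then logs "WaitRootIsUp 'Root' success."
            }
            Sleep(TDuration::MilliSeconds(100)); // back off before retrying
        }
        return false;
    }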
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:51.078020Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:51.102640Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:59:57.779068Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491637568304138019:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:57.779126Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d5a/r3tmp/tmpbZ6cps/pdisk_1.dat 2025-04-10T10:59:58.238362Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:58.284298Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:58.284392Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:58.293198Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13618, node 13 2025-04-10T10:59:58.468059Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:58.468084Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:58.468093Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:58.468253Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23572 WaitRootIsUp 'Root'... 
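The node-13 block that follows contains the only hard failure in this fragment: YdbLogStore::AlterLogTable expects its column-store setup to succeed, the scheme shard answers StatusPreconditionFailed ("Column stores are not supported"), and the test aborts with (PRECONDITION_FAILED != SUCCESS). A self-contained sketch of the NUnitTest assertion shape that produces that output; the result type here is a hypothetical stand-in, not the actual source of ydb_logstore_ut.cpp:

    // Sketch using the library/cpp/testing/unittest macros; TResultSketch is a
    // hypothetical stand-in for the status object the real test receives,
    // chosen so the failure text has the same shape as the one below.
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>

    struct TResultSketch {
        TString GetStatus() const { return "PRECONDITION_FAILED"; }
        TString GetIssues() const { return "<main>: Error: Column stores are not supported"; }
    };

    Y_UNIT_TEST_SUITE(YdbLogStoreSketch) {
        Y_UNIT_TEST(AlterLogTable) {
            TResultSketch res;
            // Unequal values make the macro raise:
            //   (... == ...) failed: (PRECONDITION_FAILED != SUCCESS) <main>: Error: ...
            UNIT_ASSERT_VALUES_EQUAL_C(res.GetStatus(), TString("SUCCESS"), res.GetIssues());
        }
    }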
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:59.065523Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:59.188531Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:50362" , at schemeshard: 72057594046644480 2025-04-10T10:59:59.189044Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:59.189079Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-10T10:59:59.202427Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T10:59:59.202589Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-10T10:59:59.202843Z node 13 :TX_PROXY ERROR: Actor# [13:7491637576894073567:2601] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
<main>: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C8C03EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CD7D4A0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C3F27D1) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C41B668) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CDB44C6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CD84019) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C41A834) NUnitTest::TTestFactory::Execute()+2438 (0x1CD858E6) NUnitTest::RunMain(int, char**)+5213 (0x1CDAEA3D) ??+0 (0x7FE738921D90) __libc_start_main+128 (0x7FE738921E40) _start+41 (0x1924C029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28968, MsgBus: 64321 2025-04-10T11:00:07.460143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637614184303488:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:07.460470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6b/r3tmp/tmp04tyDq/pdisk_1.dat 2025-04-10T11:00:08.129382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:08.133141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:08.133257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:08.140282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28968, node 1 2025-04-10T11:00:08.452196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:08.452217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:08.452226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:08.452340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64321 TClient is connected to server localhost:64321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:09.332522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.366354Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:09.388969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.614008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.843628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:09.909995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:11.841096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637631364174257:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:11.841284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.199037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.315038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.358195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.392915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.418268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637614184303488:2239];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:12.418484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:12.435946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.527229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.639841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637635659142080:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.639918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.640305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637635659142085:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.644791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:12.664313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637635659142087:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:12.732170Z node 1 :TX_PROXY ERROR: Actor# [1:7491637635659142142:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:13.754577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.566533Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:15.670858Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:16.298988Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29551, MsgBus: 10269 2025-04-10T11:00:17.161292Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637654162413277:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.161823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6b/r3tmp/tmpqMBKDu/pdisk_1.dat 2025-04-10T11:00:17.322105Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:17.354944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:17.355042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:17.357207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29551, node 2 2025-04-10T11:00:17.433873Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.433912Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.433922Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.434070Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10269 TClient is connected to server localhost:10269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
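The sequence just above, TPoolCreatorActor logging "Scheduled retry for error: { <main>: Error: Transaction ... completed, doublechecking }" and TX_PROXY then reporting "path exist, request accepts it", is the usual benign race while auto-creating /Root/.metadata/workload_manager/pools/default: several requests compete, one wins, and the double-check treats the existing path as success. A sketch of that tolerate-already-exists retry, with hypothetical helpers:

    // Sketch of create-if-absent where "already exists" counts as success.
    // CreatePool() is a hypothetical stand-in for the scheme transaction the
    // pool creator actor issues; only the outcome handling is the point here.
    enum class ECreateOutcome { Ok, AlreadyExists, Retryable };

    ECreateOutcome CreatePool(const char* path); // hypothetical helper

    bool EnsureDefaultPool(int maxRetries = 5) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            switch (CreatePool("/Root/.metadata/workload_manager/pools/default")) {
                case ECreateOutcome::Ok:
                case ECreateOutcome::AlreadyExists: // "path exist, request accepts it"
                    return true;
                case ECreateOutcome::Retryable:     // "Scheduled retry for error: ..."
                    break;                          // loop and try again
            }
        }
        return false;
    }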
2025-04-10T11:00:18.053988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.092624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.229817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.475130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.583292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.240816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637671342284224:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.240892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.302279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.379563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.455257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.531039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.592633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.675565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.755308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637671342284752:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.755397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.755608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637671342284757:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.759310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.773388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637671342284759:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:21.896925Z node 2 :TX_PROXY ERROR: Actor# [2:7491637671342284813:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:22.161613Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637654162413277:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:22.161694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:23.262829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.038761Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:25.316084Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:25.722109Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:25.984837Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:37.832474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:37.832603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:37.832664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:37.832715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:37.832770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:37.832799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:37.832856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:37.832966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:37.833365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-04-10T10:59:37.973371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:37.973457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:38.000648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:38.001233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:38.001439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:38.010647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:38.010987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:38.011763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:38.012349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:38.014731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:38.016303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:38.016361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:38.016556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:38.016608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:38.016648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:38.016873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.024598Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:38.281835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:38.282133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.282417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:38.282674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:38.282742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.285964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:38.286159Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:38.286411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.286471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:38.286514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:38.286551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:38.289058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.289130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:38.289181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:38.291853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.291908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.291974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:38.292045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:38.296291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T10:59:38.299005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:38.299274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:38.300444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:38.300601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:38.300671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:38.300992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:38.301051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:38.301254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-10T10:59:38.301333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:38.304277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:38.304336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:38.304558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:38.304614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:38.304902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:38.304962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:38.305069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:38.305104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:38.305141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:38.305173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:38.305274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:38.305324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:38.305360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T10:59:38.305393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:38.305485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:38.305523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:38.305574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:38.308163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:38.308311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:38.308352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
11:00:27.096008Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 62500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 357 } } 2025-04-10T11:00:27.096186Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 62500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 357 } } 2025-04-10T11:00:27.096267Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-10T11:00:27.096464Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.096547Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-10T11:00:27.108504Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.108860Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.108944Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:00:27.109085Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T11:00:27.109356Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:00:27.119307Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:00:27.119561Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T11:00:27.121376Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:27.121660Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 51539609708 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:27.121765Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:00:27.124657Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:00:27.124925Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:00:27.134929Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:00:27.135033Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:00:27.135513Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:27.135596Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:00:27.137615Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.137718Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:00:27.138613Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:00:27.138807Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:00:27.138882Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:00:27.138953Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:00:27.139035Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:00:27.139168Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:00:27.140934Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2240 } } 2025-04-10T11:00:27.140996Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:00:27.141200Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2240 } } 2025-04-10T11:00:27.141365Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2240 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:00:27.144528Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 51539609849 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:00:27.144614Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:00:27.144863Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 51539609849 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:00:27.144953Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:00:27.145109Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 51539609849 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:00:27.145217Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:27.145302Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.145373Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:00:27.145450Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:00:27.148667Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:00:27.151345Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.151573Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.151974Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:00:27.152025Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:00:27.152204Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:27.152255Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:27.152326Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:00:27.152417Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:27.152477Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
102, ready parts: 1/1, is published: true 2025-04-10T11:00:27.152589Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:336:2315] message: TxId: 102 2025-04-10T11:00:27.152680Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:00:27.152749Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:00:27.152800Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:00:27.152984Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:00:27.157221Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:00:27.157324Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:396:2368] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 3990, MsgBus: 22024 2025-04-10T11:00:04.587553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637599523268362:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.587590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6d/r3tmp/tmpxI2jb9/pdisk_1.dat 2025-04-10T11:00:05.240189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:05.245268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:05.245383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:05.248190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3990, node 1 2025-04-10T11:00:05.402704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:05.402731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:05.402738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:05.402841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22024 TClient is connected to server localhost:22024 WaitRootIsUp 'Root'... 
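The txId-102 trace above walks a schemeshard suboperation through its whole lifecycle: the datashard's PREPARED reply moves it 3 -> 128, the coordinator plan step moves it 128 -> 129 (ProposedWaitParts), and the collected TEvSchemaChanged acks move it 129 -> 240, after which TDone publishes the result and notifies the waiter. A compressed sketch of that progression, using the numeric states from the "Change state for txid" lines (the enum labels are descriptive, not necessarily the real ETxState identifiers):

    // Numeric states copied from the "Change state for txid" lines above; the
    // labels follow the TCreateParts/TConfigureParts/TPropose/TDone names the
    // log prints, but are illustrative rather than the actual enum.
    enum class ESubOperationState : int {
        CreateParts       = 2,   // initial ProgressState
        ConfigureParts    = 3,   // 2 -> 3: shards report PREPARED
        Propose           = 128, // 3 -> 128: ready for the coordinator
        ProposedWaitParts = 129, // 128 -> 129: plan step executed, awaiting shards
        Done              = 240, // 129 -> 240: all TEvSchemaChanged acks collected
    };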
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:06.252957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.300587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.497977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.713684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:06.807213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.032432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637620998106620:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.032542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.539709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.587080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.591217Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637599523268362:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:09.591744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:09.634998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.685836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.728576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.822627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.932496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637620998107146:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.932596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.932934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637620998107151:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.937672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:09.961378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637620998107153:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:10.044754Z node 1 :TX_PROXY ERROR: Actor# [1:7491637625293074506:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:11.184477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:12.695623Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:12.765560Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:14.351483Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnq0bf1dwy9d65vr1pnem8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxNGUwNy1lZjc2MTI2Ni1jZWJjMjVkNC0yMWZiMTMyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:14.371828Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2UxNGUwNy1lZjc2MTI2Ni1jZWJjMjVkNC0yMWZiMTMyZQ==, ActorId: [1:7491637629588042097:2500], ActorState: ExecuteState, TraceId: 01jrfnq0bf1dwy9d65vr1pnem8, Create QueryResponse for error on request, msg: 2025-04-10T11:00:14.483406Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:15.845733Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21468, MsgBus: 3713 2025-04-10T11:00:16.844710Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637653867691284:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:16.844847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6d/r3tmp/tmpaFaqXG/pdisk_1.dat 2025-04-10T11:00:17.017852Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:17.038399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:17.038487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:17.041124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21468, node 2 2025-04-10T11:00:17.092391Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.092421Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.092431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.092562Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3713 TClient is connected to server localhost:3713 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:00:17.651454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.677310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:17.810376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.058669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.173956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.859842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637671047562138:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:20.859911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:20.918889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.999137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.046704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.093941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.144846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.226254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.307857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637675342529957:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.307980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.308537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637675342529962:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.313247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.331766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637675342529964:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:21.420017Z node 2 :TX_PROXY ERROR: Actor# [2:7491637675342530019:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:21.846261Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637653867691284:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.846326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:22.652535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.734379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.824272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexes::SelectConcurentTX >> KqpIndexes::UpsertWithNullKeysSimple >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> KqpIndexes::MultipleSecondaryIndex+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29090, MsgBus: 25386 2025-04-10T11:00:03.496534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637595941837146:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:03.496593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e7b/r3tmp/tmp7zMdQq/pdisk_1.dat 2025-04-10T11:00:04.157253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:04.161639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:04.161764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:04.176220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29090, node 1 2025-04-10T11:00:04.306723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:04.306750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:04.306756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:04.306866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25386 
TClient is connected to server localhost:25386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:05.132063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.150558Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:05.160588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.393025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.629399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.774749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:07.692453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637613121708119:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.692573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.979609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.018835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.087659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.142798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.212304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.249004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.341358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617416675939:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.341450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.341680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617416675944:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.346805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:08.358740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637617416675946:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:08.419396Z node 1 :TX_PROXY ERROR: Actor# [1:7491637617416676001:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:08.496940Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637595941837146:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.497016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:09.552283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 768 cpu_time_us: 768 } query_phases { duration_us: 10092 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 9889 affected_shards: 1 } query_phases { duration_us: 2435 cpu_time_us: 2435 } query_phases { duration_us: 8046 cpu_time_us: 9302 } query_phases { duration_us: 11057 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 5662 affected_shards: 2 } compilation { duration_us: 706415 cpu_time_us: 693301 } process_cpu_time_us: 6926 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744282810370,\"TaskId\":1,\"Host\":\"ghrun-x5vqckr7qy\",\"ComputeTimeUs\":92}],\"CpuTimeUs\":835}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1744282810369,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":835,\"Max\":835,\"Min\":835}},\"CTE Name\":\"precompute_1_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744282810369,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744282810369,\"TaskId\":2,\"Host\":\"ghrun-x5vqckr7qy\",\"ComputeTimeUs\":151}],\"CpuTimeUs\":648}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1744282810369,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":648,\"Max\":648,\"Min\":648}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1744282810353,\"Host\":\"ghrun-x5vqckr7qy\ ... 15.869061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.908671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.952640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.993930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.083618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637651376375719:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.083707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.084127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637651376375724:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.090114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:16.121158Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:00:16.121910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637651376375726:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:16.202377Z node 2 :TX_PROXY ERROR: Actor# [2:7491637651376375784:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:17.066332Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637634196504252:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.066407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:17.199436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.169921Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 25722, MsgBus: 18451 2025-04-10T11:00:19.227155Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637665541276317:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:19.227257Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e7b/r3tmp/tmpudVSdV/pdisk_1.dat 2025-04-10T11:00:19.387731Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:19.388294Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:19.388367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:19.405636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25722, node 3 2025-04-10T11:00:19.483564Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:19.483589Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:19.483597Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:19.483728Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18451 TClient is connected to server localhost:18451 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:20.008200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.015439Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:20.025345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.096729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.292304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.379758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:23.909675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637682721147290:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.909765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.980104Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.022987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.114897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.193948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.227478Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637665541276317:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:24.227551Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:24.240559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.289289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.398314Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637687016115106:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.398445Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.404158Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637687016115111:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.409559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:24.438663Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637687016115113:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:24.497788Z node 3 :TX_PROXY ERROR: Actor# [3:7491637687016115169:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:25.706972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:27.410839Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup |70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |70.7%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> KqpMultishardIndex::SortedRangeReadDesc >> KqpMultishardIndex::DataColumnWriteNull >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::Uint8Index >> KqpMultishardIndex::DataColumnUpsertMixedSemantic >> TModifyUserTest::ModifyUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 7203, MsgBus: 9124 2025-04-10T11:00:13.112052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637637316972780:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.112102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5d/r3tmp/tmpatmGMF/pdisk_1.dat 2025-04-10T11:00:13.614416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:13.634819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:13.634934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:13.640658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7203, node 1 2025-04-10T11:00:13.777919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:13.777945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:13.777958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:13.778096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9124 TClient is connected to server localhost:9124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:14.570738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.597472Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:14.609842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.825766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:15.041624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:15.145643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.021868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637654496843511:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.022017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.359732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.448464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.510535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.579024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.621540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.690090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.762681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637654496844031:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.762744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.762904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637654496844036:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.767631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:17.780231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637654496844038:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:17.886983Z node 1 :TX_PROXY ERROR: Actor# [1:7491637654496844094:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:18.141133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637637316972780:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:18.141182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:18.892821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 9050, MsgBus: 25665 2025-04-10T11:00:21.557253Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637673430310660:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.557388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5d/r3tmp/tmp2kAjOB/pdisk_1.dat 2025-04-10T11:00:21.722958Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:21.754363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:21.754452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:21.756634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9050, node 2 2025-04-10T11:00:21.849943Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:21.849970Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:21.849978Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:21.850089Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25665 TClient is connected to server localhost:25665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:22.521009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.530886Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:22.551754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.644420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.897080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:23.008691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:25.752176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637690610181589:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.752291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.812800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.857048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.893340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.935661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.984052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.023273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.091750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637694905149395:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.091825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.092167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637694905149400:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.095890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:26.106407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637694905149402:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:26.200816Z node 2 :TX_PROXY ERROR: Actor# [2:7491637694905149458:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:26.557461Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637673430310660:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:26.557569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:27.289037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> TSubDomainTest::LsLs >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup |70.7%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogWithUnionAllAscending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 19492, MsgBus: 4092 2025-04-10T11:00:01.752631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637585455530106:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:01.752682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e95/r3tmp/tmpTE6rR1/pdisk_1.dat 2025-04-10T11:00:02.382399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:02.389217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:02.389319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:02.399426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19492, node 1 2025-04-10T11:00:02.578478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:02.578503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:02.578511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:02.578626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4092 TClient is connected to server localhost:4092 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:03.363029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.382791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:03.402009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:03.662318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.924026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:04.102803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.321938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637606930368294:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.322079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.653963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.690275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.733298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.753308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637585455530106:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:06.753402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:06.801537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.855593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.911266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.985670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637606930368807:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.985756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.986042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637606930368812:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.990043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:07.000929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637606930368814:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:07.091294Z node 1 :TX_PROXY ERROR: Actor# [1:7491637611225336167:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:08.294889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.407141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19361, MsgBus: 20476 2025-04-10T11:00:12.919365Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637635691037168:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:12.919434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e95/r3tmp/tmpfxDjBt/pdisk_1.dat 2025-04-10T11:00:13.048018Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:13.075998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:13.076093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:13.077739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19361, node 2 2025-04-10T11:00:13.199626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:13.199645Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:13.199653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:13.199776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20476 TClient is connected to server localhost:20476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting...
2025-04-10T11:00:13.729565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.749162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.879882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... 4976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.804001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.874933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.917601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.993753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.076483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637657165875907:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.076627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.076950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637657165875912:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.085454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:17.102989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637657165875914:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:17.173166Z node 2 :TX_PROXY ERROR: Actor# [2:7491637657165875971:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:17.921691Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637635691037168:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.921771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:18.437625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.504291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13494, MsgBus: 10501 2025-04-10T11:00:21.354569Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637673967138782:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.354614Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e95/r3tmp/tmpsR6Xo4/pdisk_1.dat 2025-04-10T11:00:21.539510Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:21.568859Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:21.568964Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:21.570536Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13494, node 3 2025-04-10T11:00:21.666626Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:21.666646Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:21.666652Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:21.666753Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26330 TClient is connected to server localhost:26330 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:22.288582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.313706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.411891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.633602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.755211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:26.000020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637691147009737:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.000152Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.090373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.179576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.221884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.272362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.315898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.357644Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637673967138782:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:26.361190Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:26.374207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.445428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637695441977550:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.445552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637695441977555:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.445611Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.451009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:26.474970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637695441977557:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:26.534630Z node 3 :TX_PROXY ERROR: Actor# [3:7491637695441977613:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:27.852841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:27.970888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits |70.7%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> KqpIndexes::IndexOr [GOOD] >> TTransferTests::Create_Disabled >> KqpIndexes::IndexFilterPushDown >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TSubDomainTest::LsLs [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] >> TSubDomainTest::LsAltered >> TTransferTests::Create_Disabled [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> TTransferTests::CreateWithoutCredentials |70.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ...
results_accumulator.log} >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::Uint8Index [GOOD] >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::SelectConcurentTX [GOOD] >> TTransferTests::CreateWithoutCredentials [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> TTransferTests::CreateWrongConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> TTransferTests::CreateWrongConfig [GOOD] >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpMultishardIndex::SortByPk >> TTransferTests::CreateWrongBatchSize |70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |70.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |70.7%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> KqpMultishardIndex::DataColumnUpsertMixedSemantic [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> TModifyUserTest::ModifyLdapUser [GOOD] >> KqpMultishardIndex::DataColumnSelect >> TTransferTests::CreateWrongBatchSize [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> TTransferTests::CreateWrongFlushIntervalIsSmall >> KqpMultishardIndex::DataColumnWriteNull [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] >> TSubDomainTest::LsAltered [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> KqpMultishardIndex::DuplicateUpsert >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] |70.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 25653, MsgBus: 11223 2025-04-10T11:00:13.356467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637637535709319:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.357082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5c/r3tmp/tmpa0e3o0/pdisk_1.dat 2025-04-10T11:00:13.896485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:13.900818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:13.900899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:13.908185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25653, node 1 2025-04-10T11:00:14.101809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:14.101839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:14.101847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:14.101990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11223 TClient is connected to server localhost:11223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:15.034236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:15.077219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:15.253309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:00:15.462537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:15.558373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:17.535635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637654715580239:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.535780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:17.904760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.971097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.035594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.123749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.186862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.234728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.310985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637659010548049:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:18.311064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:18.314238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637659010548054:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:18.319244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:18.331558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637659010548056:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:18.357979Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637637535709319:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:18.358082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:18.396820Z node 1 :TX_PROXY ERROR: Actor# [1:7491637659010548112:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:19.362061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 2911, MsgBus: 6365 2025-04-10T11:00:22.482173Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637679325116955:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:22.490180Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5c/r3tmp/tmpGCKuZj/pdisk_1.dat 2025-04-10T11:00:22.665614Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:22.681923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:22.682017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:22.683669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2911, node 2 2025-04-10T11:00:22.747099Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:22.747120Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:22.747129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:22.747267Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6365 TClient is connected to server localhost:6365 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:00:23.207407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:23.231325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.347042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:23.558857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:23.643160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:25.970300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637692210020606:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.970390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.038301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.126421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.196110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.241510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.278813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.365063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.419073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637696504988423:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.419170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.419423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637696504988428:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:26.423659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:26.437229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637696504988430:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:26.528682Z node 2 :TX_PROXY ERROR: Actor# [2:7491637696504988488:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:27.482952Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637679325116955:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:27.483036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:27.556135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:30.158040Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqg76cjp16z0t22jw662x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:30.174848Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, ActorId: [2:7491637700799956854:2555], ActorState: ExecuteState, TraceId: 01jrfnqg76cjp16z0t22jw662x, Create QueryResponse for error on request, msg: 2025-04-10T11:00:31.425894Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqhbcc4849xgc214tf0st, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:31.426227Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, ActorId: [2:7491637700799956854:2555], ActorState: ExecuteState, TraceId: 01jrfnqhbcc4849xgc214tf0st, Create QueryResponse for error on request, msg: 2025-04-10T11:00:31.457576Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqjjbbwpa43b83hntvbm8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:31.457858Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, ActorId: [2:7491637700799956854:2555], ActorState: ExecuteState, TraceId: 01jrfnqjjbbwpa43b83hntvbm8, Create QueryResponse for error on request, msg: 2025-04-10T11:00:32.635472Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqjkbfd7a6mmvsjphs33k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed.
2025-04-10T11:00:32.635753Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkxYmVkMmYtNmVlMzg2ZDQtZGMxYzdkYzEtZGIwYzY3YTU=, ActorId: [2:7491637700799956854:2555], ActorState: ExecuteState, TraceId: 01jrfnqjkbfd7a6mmvsjphs33k, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6947, MsgBus: 1837 2025-04-10T11:00:02.003544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637587367542693:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:02.003929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8a/r3tmp/tmpXoxuh6/pdisk_1.dat 2025-04-10T11:00:02.591398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:02.594387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:02.594495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:02.599157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6947, node 1 2025-04-10T11:00:02.699757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:02.699781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:02.699793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:02.700015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1837 TClient is connected to server localhost:1837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:03.438530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.478024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting...
2025-04-10T11:00:03.671554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:03.850904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.943272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.344974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637608842380816:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.345099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.734713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.775144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.854804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.904697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.988453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.012191Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637587367542693:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:07.012250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:07.057287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:07.134437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637613137348631:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.134534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.134766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637613137348636:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:07.138244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:07.153101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637613137348638:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:07.243027Z node 1 :TX_PROXY ERROR: Actor# [1:7491637613137348696:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:08.494004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.616201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.724021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24427, MsgBus: 3948 2025-04-10T11:00:12.137629Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637632616305596:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:12.137843Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8a/r3tmp/tmp1THnn6/pdisk_1.dat 2025-04-10T11:00:12.315171Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:12.330358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:12.330457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:12.331843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24427, node 2 2025-04-10T11:00:12.377863Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:12.377882Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:12.377887Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:12.377972Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3948 TClient is connected to server localhost:3948 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:12.867767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.894233Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:12.899602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at sch ... T11:00:16.433037Z node 2 :TX_PROXY ERROR: Actor# [2:7491637649796177128:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:17.137992Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637632616305596:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.138073Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:17.524252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.570907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.621864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10228, MsgBus: 26330 2025-04-10T11:00:20.739702Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637667192004985:2147];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8a/r3tmp/tmp8HNTb1/pdisk_1.dat 2025-04-10T11:00:20.798195Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:20.896164Z
node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:20.925216Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:20.925312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:20.929885Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10228, node 3 2025-04-10T11:00:20.998863Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:20.998885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:20.998892Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:20.999005Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26330 TClient is connected to server localhost:26330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:21.577832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.611902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.705528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.013670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.126874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:24.594997Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637684371875832:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.595106Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.672528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.721872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.794689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.838644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.884924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.975071Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.043984Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637688666843648:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.044083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.045483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637688666843653:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.050366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:25.064110Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637688666843656:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:25.153644Z node 3 :TX_PROXY ERROR: Actor# [3:7491637688666843711:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:25.737543Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637667192004985:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:25.737621Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:26.440031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:27.784263Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:27.824451Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:29.805965Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:29.896063Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:31.289671Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:31.322695Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:31.368692Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:32.112778Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:32.155888Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:33.907412Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:34.001035Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:34.040873Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:34.804587Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:34.850307Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:34.904401Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:35.690951Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:35.718802Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:35.737587Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:35.893397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:35.893472Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:36.405654Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:36.434745Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:37.212074Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:38.118889Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: 
Trying to start YDB, gRPC: 24555, MsgBus: 3944 2025-04-10T11:00:01.389772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637587270855132:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:01.398825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eb1/r3tmp/tmpsXTqPl/pdisk_1.dat 2025-04-10T11:00:02.162632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:02.162763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:02.167329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:02.207179Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24555, node 1 2025-04-10T11:00:02.426778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:02.426803Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:02.426813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:02.426931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3944 TClient is connected to server localhost:3944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:03.281833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.322411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:03.362383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.584614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:03.812490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:03.927807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.130427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637608745693282:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.130560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.391632Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637587270855132:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:06.391699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:06.436580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.485882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.525501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.601827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.664011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.726113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.809512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637608745693799:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.809594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.810348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637608745693804:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:06.814807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:06.836563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637608745693806:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:06.907325Z node 1 :TX_PROXY ERROR: Actor# [1:7491637608745693863:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:07.855681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.398778Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnpx4mcpf48jj37h84bzdf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2Y2U1N2MtYTliNjUyZjYtNDI4NTU3MzQtZjhlMWZlYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:10.419183Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ2Y2U1N2MtYTliNjUyZjYtNDI4NTU3MzQtZjhlMWZlYjc=, ActorId: [1:7491637617335629523:2559], ActorState: ExecuteState, TraceId: 01jrfnpx4mcpf48jj37h84bzdf, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5558, MsgBus: 62076 2025-04-10T11:00:12.698625Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637635558268289:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:12.698691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eb1/r3tmp/tmp9KFemX/pdisk_1.dat 2025-04-10T11:00:12.903309Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:12.906847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:12.906945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:12.911568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5558, node 2 2025-04-10T11:00:12.990192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:12.990218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:12.990226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:12.990368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62076 TClient is connected to server localhost:62076 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:00:13.571318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.576696Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:13.596832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.680766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.909905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.009975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.525143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637652738139260:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.525234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.582784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.625361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.671626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.744271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.838802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.888526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:16.955928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637652738139779:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.956049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.956249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637652738139784:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:16.961038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:16.979061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637652738139786:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:17.060062Z node 2 :TX_PROXY ERROR: Actor# [2:7491637657033107137:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:17.699388Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637635558268289:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.699488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:18.051013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:24.472466Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqataaps10p0j5f5v0mqs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:24.472767Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, ActorId: [2:7491637661328075496:2555], ActorState: ExecuteState, TraceId: 01jrfnqataaps10p0j5f5v0mqs, Create QueryResponse for error on request, msg: 2025-04-10T11:00:27.855109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:27.855153Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.594766Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:00:28.594821Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:00:28.594847Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-10T11:00:28.631435Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-10T11:00:28.631484Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-10T11:00:28.631501Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-10T11:00:28.631519Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-10T11:00:28.635422Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-10T11:00:28.635463Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-10T11:00:28.635481Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-10T11:00:28.645689Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-10T11:00:28.660198Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:00:28.660266Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-10T11:00:33.544630Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-10T11:00:33.547607Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-10T11:00:33.694963Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqkd7dxrmf54f5bckhn0k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:33.695289Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, ActorId: [2:7491637661328075496:2555], ActorState: ExecuteState, TraceId: 01jrfnqkd7dxrmf54f5bckhn0k, Create QueryResponse for error on request, msg: 2025-04-10T11:00:34.977694Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqmvrbp34jj0kzvk2aakj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:34.978060Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, ActorId: [2:7491637661328075496:2555], ActorState: ExecuteState, TraceId: 01jrfnqmvrbp34jj0kzvk2aakj, Create QueryResponse for error on request, msg: 2025-04-10T11:00:36.563500Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqqh45sxr1nwem01ax54k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed.
2025-04-10T11:00:36.568753Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM2MGU4NzktNGM5NjFjZmYtNjQ0ZDhmMDMtM2FhMDZmYWQ=, ActorId: [2:7491637661328075496:2555], ActorState: ExecuteState, TraceId: 01jrfnqqh45sxr1nwem01ax54k, Create QueryResponse for error on request, msg: >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 28160, MsgBus: 63697 2025-04-10T11:00:20.192300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637667896886264:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:20.192363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e36/r3tmp/tmpvOUndY/pdisk_1.dat 2025-04-10T11:00:20.753638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:20.760311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:20.760419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:20.763595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28160, node 1 2025-04-10T11:00:20.916574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:20.916599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:20.916607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:20.916722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63697 TClient is connected to server localhost:63697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:21.647117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:21.676659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:21.689184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.839113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.023872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.164855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:24.180847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637685076757219:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.180970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.525864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.579856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.620356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.686806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.729862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.809196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:24.900957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637685076757742:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.901050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.901428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637685076757747:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.905440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:24.933079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637685076757749:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:24.988762Z node 1 :TX_PROXY ERROR: Actor# [1:7491637685076757804:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:25.193869Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637667896886264:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:25.193944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:26.127952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:27.090670Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:27.160675Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 63839, MsgBus: 7587 2025-04-10T11:00:28.359683Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637703018288499:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:28.360812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e36/r3tmp/tmpTGqpxL/pdisk_1.dat 2025-04-10T11:00:28.544566Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.577971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:28.578083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:28.579839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63839, node 2 2025-04-10T11:00:28.638790Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:28.638820Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:28.638829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:28.638971Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7587 TClient is connected to server localhost:7587 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:29.200580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.211787Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:29.226000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.320858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:29.554726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.655246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.563287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637720198159441:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.563382Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.643384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.720653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.803580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.882169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.980486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.095799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.180131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637724493127253:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.180238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.180544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637724493127258:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.189222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:33.214386Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:00:33.214604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637724493127260:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:33.305753Z node 2 :TX_PROXY ERROR: Actor# [2:7491637724493127315:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:33.359210Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637703018288499:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:33.359307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:34.529286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD] Test command err: Trying to start YDB, gRPC: 15136, MsgBus: 27864 2025-04-10T11:00:19.091071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637665548651444:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:19.091344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e49/r3tmp/tmpxqy8d4/pdisk_1.dat 2025-04-10T11:00:19.603553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:19.604257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:19.604354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:19.608889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15136, node 1 2025-04-10T11:00:19.710025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:19.710046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:19.710055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:19.710252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27864 TClient is connected to server localhost:27864 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:20.296505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.321684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.511864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:20.690274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.771902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.835401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637678433554900:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.835557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.347660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.417139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.484785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.554210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.594764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.632283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.683664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637682728522715:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.683752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.683970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637682728522720:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.689084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:23.702350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637682728522722:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:23.789301Z node 1 :TX_PROXY ERROR: Actor# [1:7491637682728522776:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:24.090793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637665548651444:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:24.090867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:24.898212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:27.211955Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqdf461e2f0jn5qbvtmp3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:27.232324Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==, ActorId: [1:7491637687023490372:2498], ActorState: ExecuteState, TraceId: 01jrfnqdf461e2f0jn5qbvtmp3, Create QueryResponse for error on request, msg: 2025-04-10T11:00:27.734218Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637699908392897:2586], TxId: 281474976710677, task: 1. Ctx: { TraceId : 01jrfnqefp15dmk9zbtp800hae. SessionId : ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:00:27.734775Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637699908392898:2587], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==. TraceId : 01jrfnqefp15dmk9zbtp800hae. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491637699908392894:2498], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:00:27.735504Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==, ActorId: [1:7491637687023490372:2498], ActorState: ExecuteState, TraceId: 01jrfnqefp15dmk9zbtp800hae, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.162973Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqez4f3km879yjv7g7sak, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:29.163271Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg1NTg4OTctNTk2NGI0ODUtNmM0YzUyNi1jYWM2ODY5MQ==, ActorId: [1:7491637687023490372:2498], ActorState: ExecuteState, TraceId: 01jrfnqez4f3km879yjv7g7sak, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.222023Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:29.272136Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:29.329489Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:30.214410Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:30.259799Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 15949, MsgBus: 23757 2025-04-10T11:00:31.283801Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637714183793372:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.283855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e49/r3tmp/tmpXbVJWJ/pdisk_1.dat 2025-04-10T11:00:31.508335Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:31.549039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:31.549153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:31.553058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15949, node 2 2025-04-10T11:00:31.641481Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:31.641508Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:31.641517Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:31.641647Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23757 TClient is connected to server localhost:23757 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:32.239301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.255133Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:32.274971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.376574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.610523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.737197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:35.704846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637731363664319:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.704944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.764702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.807100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.854729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.916021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.982723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.083672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.191897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637735658632136:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.192023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.193713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637735658632141:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.198114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:36.213170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637735658632143:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:36.284053Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637714183793372:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:36.284140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:36.297676Z node 2 :TX_PROXY ERROR: Actor# [2:7491637735658632197:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:37.821836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:38.107124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:00:38.194300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:00:38.272731Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-04-10T11:00:33.050768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637719085074624:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:33.103978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000df6/r3tmp/tmpACx2fT/pdisk_1.dat 2025-04-10T11:00:33.908676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:33.913733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:33.913860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:33.919418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7529 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:00:34.238408Z node 1 :TX_PROXY DEBUG: actor# [1:7491637723380042139:2103] Handle TEvNavigate describe path dc-1 2025-04-10T11:00:34.238456Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009735:2264] HANDLE EvNavigateScheme dc-1 2025-04-10T11:00:34.238602Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637723380042163:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:34.238701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491637723380042163:2116], cookie# 1 2025-04-10T11:00:34.240242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009721:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009718:2258], cookie# 1 2025-04-10T11:00:34.240283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009722:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009719:2258], cookie# 1 2025-04-10T11:00:34.240298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009723:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009720:2258], cookie# 1 2025-04-10T11:00:34.240333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637719085074539:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009721:2258], cookie# 1 2025-04-10T11:00:34.240358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637719085074542:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009722:2258], cookie# 1 2025-04-10T11:00:34.240373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637719085074545:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637727675009723:2258], cookie# 1 2025-04-10T11:00:34.240396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009721:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637719085074539:2049], cookie# 1 2025-04-10T11:00:34.240423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009722:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637719085074542:2052], cookie# 1 2025-04-10T11:00:34.240438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637727675009723:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637719085074545:2055], cookie# 1 2025-04-10T11:00:34.240465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637727675009718:2258], cookie# 1 2025-04-10T11:00:34.240491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:00:34.240505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7491637727675009719:2258], cookie# 1 2025-04-10T11:00:34.240524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:00:34.240565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637727675009720:2258], cookie# 1 2025-04-10T11:00:34.240580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637727675009717:2258][/dc-1] Unexpected sync response: sender# [1:7491637727675009720:2258], cookie# 1 2025-04-10T11:00:34.240643Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637723380042163:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:00:34.255631Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637723380042163:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637727675009717:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:34.255771Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637723380042163:2116], cacheItem# { Subscriber: { Subscriber: [1:7491637727675009717:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:00:34.257915Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491637727675009736:2265], recipient# [1:7491637727675009735:2264], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:34.257978Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009735:2264] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:34.312031Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009735:2264] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:00:34.315546Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009735:2264] Handle TEvDescribeSchemeResult Forward to# [1:7491637727675009734:2263] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:00:34.358822Z node 1 :TX_PROXY DEBUG: actor# [1:7491637723380042139:2103] Handle TEvProposeTransaction 2025-04-10T11:00:34.358858Z node 1 :TX_PROXY DEBUG: actor# [1:7491637723380042139:2103] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:00:34.358982Z node 1 :TX_PROXY DEBUG: actor# [1:7491637723380042139:2103] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491637727675009742:2270] 2025-04-10T11:00:34.520507Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009742:2270] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:00:34.520591Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009742:2270] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:00:34.520716Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637727675009742:2270] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:00:34.520855Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637723380042163:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status ... okie: 2 IsSync: true Partial: 0 } 2025-04-10T11:00:39.551776Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491637749171412200:2335], recipient# [2:7491637749171412199:2334], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:39.551816Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412199:2334] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:39.551885Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412199:2334] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-10T11:00:39.552564Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412199:2334] Handle TEvDescribeSchemeResult Forward to# [2:7491637749171412198:2333] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282839054 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 
} DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-10T11:00:39.554611Z node 2 :TX_PROXY DEBUG: actor# [2:7491637744876444257:2086] Handle TEvNavigate describe path /dc-1 2025-04-10T11:00:39.554658Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412202:2337] HANDLE EvNavigateScheme /dc-1 2025-04-10T11:00:39.554743Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491637744876444506:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:39.554834Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7491637744876444506:2111], cookie# 4 2025-04-10T11:00:39.554897Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444657:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444654:2181], cookie# 4 2025-04-10T11:00:39.554922Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444658:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444655:2181], cookie# 4 2025-04-10T11:00:39.554940Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444659:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444656:2181], cookie# 4 2025-04-10T11:00:39.554970Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491637744876444200:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444657:2181], cookie# 4 2025-04-10T11:00:39.554998Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491637744876444203:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444658:2181], cookie# 4 2025-04-10T11:00:39.555017Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491637744876444206:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7491637744876444659:2181], cookie# 4 2025-04-10T11:00:39.555051Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444657:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444200:2049], cookie# 4 2025-04-10T11:00:39.555083Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444658:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444203:2052], cookie# 4 2025-04-10T11:00:39.555106Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491637744876444659:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444206:2055], cookie# 4 2025-04-10T11:00:39.555140Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444654:2181], cookie# 4 2025-04-10T11:00:39.555164Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:00:39.555186Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444655:2181], cookie# 4 2025-04-10T11:00:39.555218Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:00:39.555240Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7491637744876444656:2181], cookie# 4 2025-04-10T11:00:39.555255Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491637744876444646:2181][/dc-1] Unexpected sync response: sender# [2:7491637744876444656:2181], cookie# 4 2025-04-10T11:00:39.555319Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7491637744876444506:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:00:39.555428Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7491637744876444506:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7491637744876444646:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282839012 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:39.555501Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491637744876444506:2111], cacheItem# { Subscriber: { Subscriber: [2:7491637744876444646:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282839012 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-10T11:00:39.555660Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491637749171412203:2338], recipient# [2:7491637749171412202:2337], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:39.555693Z node 2 :TX_PROXY DEBUG: Actor# 
[2:7491637749171412202:2337] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:39.555750Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412202:2337] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:00:39.556341Z node 2 :TX_PROXY DEBUG: Actor# [2:7491637749171412202:2337] Handle TEvDescribeSchemeResult Forward to# [2:7491637749171412201:2336] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744282839012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744282839012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282839054 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31277, MsgBus: 31094 2025-04-10T11:00:03.812616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637594613256050:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:03.813101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e75/r3tmp/tmpwcQSeW/pdisk_1.dat 2025-04-10T11:00:04.551187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:04.551299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:04.560344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:04.618324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31277, node 1 2025-04-10T11:00:04.838614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:04.838633Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:04.838639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:04.838749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31094 TClient is connected to server localhost:31094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:05.639612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.709227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.991095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:06.187040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.279755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:08.448076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637616088094171:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.448225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.778323Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637594613256050:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.778389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:08.785920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.819139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.876439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.913669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.993907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.048499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.138896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637620383061988:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.138993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.139243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637620383061993:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.142727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:09.155092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637620383061995:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:09.235953Z node 1 :TX_PROXY ERROR: Actor# [1:7491637620383062052:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:10.345791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26789, MsgBus: 22377 2025-04-10T11:00:18.541785Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637659669429954:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e75/r3tmp/tmpum69mc/pdisk_1.dat 2025-04-10T11:00:18.626562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:18.722169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:18.748483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:18.748605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:18.749603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26789, node 2 2025-04-10T11:00:18.819700Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:18.819729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:18.819756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:18.819884Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22377 TClient is connected to server localhost:22377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:19.284001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:19.291359Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:19.299729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:19.373354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:19.617318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... SHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.621793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.687315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.774327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.871941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637676849301271:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.872037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.872528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637676849301276:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.876592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:22.899655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637676849301278:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:22.964081Z node 2 :TX_PROXY ERROR: Actor# [2:7491637676849301333:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:23.541143Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637659669429954:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:23.541210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:23.941315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1375, MsgBus: 6033 2025-04-10T11:00:28.677689Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637702538579154:2084];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e75/r3tmp/tmpy5RFoH/pdisk_1.dat 2025-04-10T11:00:28.765432Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:28.940670Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.976919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:28.977016Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:28.980017Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1375, node 3 2025-04-10T11:00:29.094893Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:29.094926Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:29.094935Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:29.095088Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6033 TClient is connected to server localhost:6033 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:29.852335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.863643Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:29.873081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:29.994592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:30.339021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.488213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.678118Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637702538579154:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:33.678205Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:33.818476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637724013417359:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.818615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.877875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.934757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.996455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.062240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.137671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.221061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.298741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637728308385171:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.298841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.299103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637728308385176:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.303543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:34.318100Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637728308385178:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:34.393425Z node 3 :TX_PROXY ERROR: Actor# [3:7491637728308385233:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:35.939334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.350254Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:37.379923Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:38.480108Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:38.534009Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:37.815352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:37.815499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:37.815549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:37.815586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:37.815632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:37.815667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:37.815730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:37.815848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:37.816250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:38.002741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:38.002806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:38.020217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:38.020560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:38.020732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944
2025-04-10T10:59:38.028901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T10:59:38.029151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T10:59:38.030080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T10:59:38.030529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T10:59:38.033003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:59:38.034583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:59:38.034650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:59:38.034831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T10:59:38.034882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:59:38.034958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T10:59:38.035129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.043108Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T10:59:38.221276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T10:59:38.221520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.221743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T10:59:38.222047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T10:59:38.222125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.227472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T10:59:38.227650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T10:59:38.227853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.227911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T10:59:38.227952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T10:59:38.227989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T10:59:38.235198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.235283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T10:59:38.235324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T10:59:38.237358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.237450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.237528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:59:38.237582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T10:59:38.241787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T10:59:38.244281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T10:59:38.244489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T10:59:38.245680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T10:59:38.245845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T10:59:38.245918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:59:38.246260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T10:59:38.246325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T10:59:38.246521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T10:59:38.246624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T10:59:38.249035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T10:59:38.249098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T10:59:38.249322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T10:59:38.249403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T10:59:38.249716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T10:59:38.249785Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T10:59:38.249901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:59:38.249938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:59:38.249977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T10:59:38.250011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:59:38.250049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T10:59:38.250095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T10:59:38.250136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T10:59:38.250190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T10:59:38.250267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T10:59:38.250308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T10:59:38.250379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T10:59:38.253108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:59:38.253234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T10:59:38.253270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
athId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:41.286630Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:00:41.287084Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 479us result status StatusSuccess
2025-04-10T11:00:41.287727Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:41.289315Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:00:41.289817Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 525us result status StatusSuccess
2025-04-10T11:00:41.290608Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:41.292229Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:00:41.292664Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 454us result status StatusSuccess
2025-04-10T11:00:41.293225Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:41.297951Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:00:41.298541Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 610us result status StatusSuccess
2025-04-10T11:00:41.299373Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD]
Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140]
IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140]
Leader for TabletID 9437184 is [1:131:2154] sender: [1:132:2057] recipient: [1:108:2140]
2025-04-10T10:59:01.326045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T10:59:01.326129Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T10:59:01.326851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot
2025-04-10T10:59:01.343854Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored
2025-04-10T10:59:01.344522Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154]
2025-04-10T10:59:01.344896Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T10:59:01.356423Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T10:59:01.394987Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T10:59:01.395604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T10:59:01.397620Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-10T10:59:01.397703Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-10T10:59:01.397759Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-10T10:59:01.398458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T10:59:01.398688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T10:59:01.398810Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2
Leader for TabletID 9437184 is [1:131:2154] sender: [1:208:2057] recipient: [1:14:2061]
2025-04-10T10:59:01.477458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T10:59:01.533344Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-10T10:59:01.533681Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T10:59:01.533885Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211]
2025-04-10T10:59:01.533950Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-10T10:59:01.534022Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-10T10:59:01.534102Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:01.538620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.538730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.539165Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-10T10:59:01.539313Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-10T10:59:01.539381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:01.539451Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-10T10:59:01.539530Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-10T10:59:01.539590Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-10T10:59:01.539654Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-10T10:59:01.539717Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-10T10:59:01.539781Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T10:59:01.539928Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.539989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.540075Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0]
2025-04-10T10:59:01.543202Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-10T10:59:01.543292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T10:59:01.543440Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-10T10:59:01.543666Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-10T10:59:01.543729Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-10T10:59:01.543801Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-10T10:59:01.543869Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:01.543925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-10T10:59:01.543989Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-10T10:59:01.544031Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:01.544400Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-10T10:59:01.544454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-10T10:59:01.544500Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-10T10:59:01.544545Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:01.544598Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-10T10:59:01.544647Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-10T10:59:01.544713Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-10T10:59:01.544762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:01.544788Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-10T10:59:01.559992Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-10T10:59:01.560078Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-10T10:59:01.560138Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-10T10:59:01.560240Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-10T10:59:01.560375Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-10T10:59:01.561046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.561109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T10:59:01.561179Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0]
2025-04-10T10:59:01.561285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184}
2025-04-10T10:59:01.561323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T10:59:01.561496Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan
2025-04-10T10:59:01.561563Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed
2025-04-10T10:59:01.561608Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan
2025-04-10T10:59:01.561658Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue
2025-04-10T10:59:01.566335Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 }
2025-04-10T10:59:01.566427Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T10:59:01.566723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.566793Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T10:59:01.566874Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-10T10:59:01.566925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-10T10:59:01.566968Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-10T10:59:01.567026Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184
2025-04-10T10:59:01.567072Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue
2025-04-10T10:59:01.567126Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed
2025-04-10T10:59:01.567216Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue
2025-04-10T10:59:01.567267Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails
2025-04-10T10:59:01.567309Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails
2025-04-10T10:59:01.567543Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0
2025-04-10T10:59:01.567598Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed
2025-04-10T10:59:01.567636Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails
2025-04-10T10:59:01.567671Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-10T10:59:01.567708Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-10T10:59:01.567799Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-10T10:59:01.567832Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-10T10:59:01.567893Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-10T10:59:01.567949Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-10T10:59:01.568044Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184
2025-04-10T10:59:01.568089Z node 1 :TX_DATASHARD TRAC ...
9437186 to execution unit ExecuteDataTx
2025-04-10T11:00:38.355024Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx
2025-04-10T11:00:38.355448Z node 40 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE
2025-04-10T11:00:38.355516Z node 40 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-10T11:00:38.355576Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed
2025-04-10T11:00:38.355607Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx
2025-04-10T11:00:38.355638Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation
2025-04-10T11:00:38.355670Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation
2025-04-10T11:00:38.355913Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete
2025-04-10T11:00:38.355948Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation
2025-04-10T11:00:38.355983Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations
2025-04-10T11:00:38.356016Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations
2025-04-10T11:00:38.356052Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed
2025-04-10T11:00:38.356079Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations
2025-04-10T11:00:38.356109Z node 40 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished
2025-04-10T11:00:38.356144Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:00:38.356176Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186
2025-04-10T11:00:38.356211Z node 40 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations
2025-04-10T11:00:38.356252Z node 40 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186
2025-04-10T11:00:38.356506Z node 40 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [40:345:2312], Recipient [40:345:2312]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T11:00:38.356551Z node 40 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T11:00:38.356612Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185
2025-04-10T11:00:38.356649Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1
2025-04-10T11:00:38.356681Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185
2025-04-10T11:00:38.356718Z node 40 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437185
2025-04-10T11:00:38.356751Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PlanQueue
2025-04-10T11:00:38.356788Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.356819Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PlanQueue
2025-04-10T11:00:38.356850Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit LoadTxDetails
2025-04-10T11:00:38.356885Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadTxDetails
2025-04-10T11:00:38.362853Z node 40 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 7:6 keys extracted: 1
2025-04-10T11:00:38.362937Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.362977Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadTxDetails
2025-04-10T11:00:38.363015Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit FinalizeDataTxPlan
2025-04-10T11:00:38.363051Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit FinalizeDataTxPlan
2025-04-10T11:00:38.363098Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363127Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit FinalizeDataTxPlan
2025-04-10T11:00:38.363156Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildAndWaitDependencies
2025-04-10T11:00:38.363188Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildAndWaitDependencies
2025-04-10T11:00:38.363253Z node 40 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437185
2025-04-10T11:00:38.363291Z node 40 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437185
2025-04-10T11:00:38.363325Z node 40 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437185
2025-04-10T11:00:38.363371Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363401Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildAndWaitDependencies
2025-04-10T11:00:38.363430Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildDataTxOutRS
2025-04-10T11:00:38.363462Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildDataTxOutRS
2025-04-10T11:00:38.363532Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363561Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildDataTxOutRS
2025-04-10T11:00:38.363589Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit StoreAndSendOutRS
2025-04-10T11:00:38.363620Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit StoreAndSendOutRS
2025-04-10T11:00:38.363654Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363683Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit StoreAndSendOutRS
2025-04-10T11:00:38.363714Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit PrepareDataTxInRS
2025-04-10T11:00:38.363745Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PrepareDataTxInRS
2025-04-10T11:00:38.363780Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363810Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PrepareDataTxInRS
2025-04-10T11:00:38.363841Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit LoadAndWaitInRS
2025-04-10T11:00:38.363875Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadAndWaitInRS
2025-04-10T11:00:38.363906Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.363935Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadAndWaitInRS
2025-04-10T11:00:38.363964Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit ExecuteDataTx
2025-04-10T11:00:38.363990Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit ExecuteDataTx
2025-04-10T11:00:38.364385Z node 40 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437185 with status COMPLETE
2025-04-10T11:00:38.364452Z node 40 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-04-10T11:00:38.364507Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.364538Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit ExecuteDataTx
2025-04-10T11:00:38.364566Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompleteOperation
2025-04-10T11:00:38.364598Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompleteOperation
2025-04-10T11:00:38.364841Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is DelayComplete
2025-04-10T11:00:38.364877Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompleteOperation
2025-04-10T11:00:38.364910Z node 40 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompletedOperations
2025-04-10T11:00:38.364944Z node 40 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompletedOperations
2025-04-10T11:00:38.365000Z node 40 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed
2025-04-10T11:00:38.365040Z node 40 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompletedOperations
2025-04-10T11:00:38.365069Z node 40 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437185 has finished
2025-04-10T11:00:38.365107Z node 40 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:00:38.365138Z node 40 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185
2025-04-10T11:00:38.365171Z node 40 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations
2025-04-10T11:00:38.365207Z node 40 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185
2025-04-10T11:00:38.386174Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6}
2025-04-10T11:00:38.386265Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7}
2025-04-10T11:00:38.386330Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186
2025-04-10T11:00:38.386373Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation
2025-04-10T11:00:38.386443Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 1 ms
2025-04-10T11:00:38.386494Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-10T11:00:38.386791Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6}
2025-04-10T11:00:38.386830Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7}
2025-04-10T11:00:38.386872Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185
2025-04-10T11:00:38.386908Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation
2025-04-10T11:00:38.386961Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 1 ms
2025-04-10T11:00:38.386995Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185
2025-04-10T11:00:38.387218Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6}
2025-04-10T11:00:38.387255Z node 40 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7}
2025-04-10T11:00:38.387299Z node 40 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T11:00:38.387335Z node 40 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation
2025-04-10T11:00:38.387390Z node 40 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [40:99:2134], exec latency: 0 ms, propose latency: 1 ms
2025-04-10T11:00:38.387428Z node 40 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:00:37.888089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:00:37.888199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:00:37.888266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:00:37.888302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:00:37.888359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:00:37.888392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:00:37.888448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:00:37.888532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:00:37.888939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:00:37.984919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:37.984994Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:38.005426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:00:38.005852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:00:38.006098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:00:38.028702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:00:38.029104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:00:38.030041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:00:38.030909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:00:38.034291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:00:38.036085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:00:38.036191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:00:38.036333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:00:38.036400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:00:38.036457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:00:38.036635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.050004Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:00:38.228784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:00:38.229088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.229384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:00:38.229691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:00:38.229751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.239348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:00:38.239552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:00:38.239809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.239915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:00:38.239966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:00:38.240021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:00:38.247262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.247388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:00:38.247439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:00:38.250393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.250468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.250518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:00:38.250619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:00:38.263322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:00:38.271394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:00:38.271710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:00:38.272958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:00:38.273150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:38.273216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:00:38.273643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:00:38.273733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:00:38.273977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:00:38.274078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:00:38.283701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:00:38.283784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:00:38.284039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:00:38.284090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:00:38.284474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:38.284530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:00:38.284651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:00:38.284710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:38.284781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:00:38.284822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:38.284890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:00:38.284948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:38.284985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:00:38.285027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:00:38.285117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:00:38.285158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:00:38.285203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:00:38.299795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:00:38.300021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:00:38.300085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
7594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:00:43.503349Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:00:43.503554Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:00:43.504575Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:00:43.504718Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 25769805930 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:00:43.504788Z node 6 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:00:43.505110Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:00:43.505184Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:00:43.505439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:00:43.505539Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:00:43.507972Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:00:43.508040Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:00:43.508301Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:00:43.508371Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:209:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:00:43.508795Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:00:43.508857Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:00:43.509016Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:00:43.509072Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:43.509129Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:00:43.509171Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:43.509232Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:00:43.509286Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:00:43.509341Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:00:43.509446Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:00:43.509542Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:00:43.509600Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:00:43.509652Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:00:43.510370Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:00:43.510513Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:00:43.510561Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-10T11:00:43.510623Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-10T11:00:43.510679Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:00:43.510798Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-10T11:00:43.514234Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-10T11:00:43.514897Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-04-10T11:00:43.515954Z node 6 :TX_PROXY DEBUG: actor# [6:272:2263] Bootstrap
2025-04-10T11:00:43.546997Z node 6 :TX_PROXY DEBUG: actor# [6:272:2263] Become StateWork (SchemeCache [6:277:2268])
2025-04-10T11:00:43.547682Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:276:2267], Recipient [6:133:2156]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944}
2025-04-10T11:00:43.547768Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-04-10T11:00:43.551473Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:00:43.551772Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer
2025-04-10T11:00:43.551893Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours,
at schemeshard: 72057594046678944 2025-04-10T11:00:43.552162Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:00:43.553195Z node 6 :TX_PROXY DEBUG: actor# [6:272:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:00:43.556683Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:43.556889Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-04-10T11:00:43.556974Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:00:43.558539Z node 6 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:00:43.558815Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:00:43.558889Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:00:43.559328Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:289:2280], Recipient [6:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:00:43.559401Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:00:43.559461Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:00:43.559648Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:286:2277], Recipient [6:133:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-04-10T11:00:43.559700Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:00:43.559789Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:00:43.559911Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:00:43.559966Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:287:2278] 2025-04-10T11:00:43.560225Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:289:2280], Recipient [6:133:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:00:43.560273Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:00:43.560317Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-10T11:00:43.560752Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:290:2281], Recipient [6:133:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:00:43.560819Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:00:43.560935Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:00:43.561160Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 222us result status StatusPathDoesNotExist 2025-04-10T11:00:43.561383Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 15781, MsgBus: 17642 2025-04-10T11:00:10.580577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637625143501077:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:10.593298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e62/r3tmp/tmpe9rfvR/pdisk_1.dat 2025-04-10T11:00:11.007803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:11.008433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:11.008522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:11.013781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15781, node 1 2025-04-10T11:00:11.104970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:11.104993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:11.104999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:11.105121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17642 TClient is connected to server localhost:17642 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:11.731704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:11.747169Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:11.765328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:11.957326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.151991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.255794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:14.394744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637642323371890:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:14.394856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:14.768127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.821333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.866091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.902864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.954037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.023261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.129599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637646618339707:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.129707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.130102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637646618339712:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.135603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:15.151698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637646618339714:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:15.233589Z node 1 :TX_PROXY ERROR: Actor# [1:7491637646618339769:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:15.576497Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637625143501077:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:15.576563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:16.323947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15529, MsgBus: 3489 2025-04-10T11:00:20.914722Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637669068664744:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:20.915007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e62/r3tmp/tmpYr4hp9/pdisk_1.dat 2025-04-10T11:00:21.136929Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:21.159309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:21.159427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15529, node 2 2025-04-10T11:00:21.165904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:21.234457Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:21.234478Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:21.234484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:21.234615Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3489 TClient is connected to server localhost:3489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:21.795747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.810790Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:21.822910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.891126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10 ... 
Columns":["b","id"],"Node Type":"TableLookup","PlanNodeId":11,"Path":"\/Root\/tg","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Input":"precompute_1_0","Name":"PartitionByKey"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_1_0"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":"[row.b,row.pa_id,row.system_date,row.id]"}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tg\/tg_index\/indexImplTable","E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":"[row.system_date,row.id]"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} Trying to start YDB, gRPC: 1702, MsgBus: 27344 2025-04-10T11:00:28.761565Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637705065180053:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:28.761647Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e62/r3tmp/tmp8A4O6O/pdisk_1.dat 2025-04-10T11:00:28.927278Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.932943Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:28.933032Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:28.936212Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1702, node 3 2025-04-10T11:00:29.024815Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:29.024838Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:29.024846Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:29.024963Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27344 TClient is connected to server localhost:27344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:29.762813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.773214Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:29.779910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.885308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:30.143745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:30.242512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.310197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637726540018303:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.310315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:33.456242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.566262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.656467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.773603Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637705065180053:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:33.773703Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:33.781761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.890626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.967264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.062121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637730834986121:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.062310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.062856Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637730834986127:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.067557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:34.086104Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637730834986129:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:34.153556Z node 3 :TX_PROXY ERROR: Actor# [3:7491637730834986184:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:35.316977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 23712, MsgBus: 30866 2025-04-10T11:00:15.275197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637646122078081:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:15.275642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e59/r3tmp/tmpOlh6O5/pdisk_1.dat 2025-04-10T11:00:15.746922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:15.768766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:15.768919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:15.770961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23712, node 1 2025-04-10T11:00:15.838702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:15.838722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:15.838728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:15.838851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30866 TClient is connected to server localhost:30866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:00:16.467028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.481581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:16.493702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.662041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.875536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.966622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.931709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637659006981623:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:18.931813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.287464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.321220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.356769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.404534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.437371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.475298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.561329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637663301949437:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.561424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.561474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637663301949442:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.565698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:19.578917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637663301949444:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:19.646564Z node 1 :TX_PROXY ERROR: Actor# [1:7491637663301949499:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:20.269631Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637646122078081:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:20.269722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:20.613303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.458350Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710678; 2025-04-10T11:00:22.469231Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637676186852193:2497], Table: `/Root/TestTable/Index/indexImplTable` ([72057594046644480:18:1]), SessionActorId: [1:7491637667596917088:2497]Got LOCKS BROKEN for table `/Root/TestTable/Index/indexImplTable`. ShardID=72075186224037919, Sink=[1:7491637676186852193:2497].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:00:22.469815Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637676186852167:2497], SessionActorId: [1:7491637667596917088:2497], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491637667596917088:2497]. isRollback=0 2025-04-10T11:00:22.470107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzczMWYyMGQtOTg5ZjEwNDctY2Y4ZDI0ZjQtMmM4ZWUxZTE=, ActorId: [1:7491637667596917088:2497], ActorState: ExecuteState, TraceId: 01jrfnq9c0f79t8ktp0yv5th9r, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491637676186852179:2497] from: [1:7491637676186852167:2497] 2025-04-10T11:00:22.470223Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491637676186852179:2497] TxId: 281474976710678. Ctx: { TraceId: 01jrfnq9c0f79t8ktp0yv5th9r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzczMWYyMGQtOTg5ZjEwNDctY2Y4ZDI0ZjQtMmM4ZWUxZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:00:22.470455Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzczMWYyMGQtOTg5ZjEwNDctY2Y4ZDI0ZjQtMmM4ZWUxZTE=, ActorId: [1:7491637667596917088:2497], ActorState: ExecuteState, TraceId: 01jrfnq9c0f79t8ktp0yv5th9r, Create QueryResponse for error on request, msg: 2025-04-10T11:00:22.472621Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710678; 2025-04-10T11:00:22.472753Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744282822506 : 281474976710678] from 72075186224037920 at tablet 72075186224037920, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 6032, MsgBus: 18154 2025-04-10T11:00:23.366979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637683382754476:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:23.367220Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e59/r3tmp/tmpjF1zc5/pdisk_1.dat 2025-04-10T11:00:23.511424Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6032, node 2 2025-04-10T11:00:23.546944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:23.547065Z node ... wnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:28.366244Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637683382754476:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:28.366311Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:28.685348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.041806Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491637709152561035:2497] TxId: 281474976710672. Ctx: { TraceId: 01jrfnqg7c8bf9g8mj8r9fzem7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-10T11:00:29.042026Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, ActorId: [2:7491637704857593510:2497], ActorState: ExecuteState, TraceId: 01jrfnqg7c8bf9g8mj8r9fzem7, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.062337Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491637709152561060:2497] TxId: 281474976710674. 
Ctx: { TraceId: 01jrfnqg820gayrtc91vazzbz5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-10T11:00:29.062536Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, ActorId: [2:7491637704857593510:2497], ActorState: ExecuteState, TraceId: 01jrfnqg820gayrtc91vazzbz5, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.084316Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491637709152561076:2497] TxId: 281474976710676. Ctx: { TraceId: 01jrfnqg8tc1vtx8xbzpqhdcz6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-10T11:00:29.084511Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, ActorId: [2:7491637704857593510:2497], ActorState: ExecuteState, TraceId: 01jrfnqg8tc1vtx8xbzpqhdcz6, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.104580Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491637709152561087:2497] TxId: 281474976710678. Ctx: { TraceId: 01jrfnqg9e5xzyxt85gtvbyf24, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-10T11:00:29.104791Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmExMDFlZGEtYTU1YjViNS0xMGViMzNhZC03M2MwMGE2Ng==, ActorId: [2:7491637704857593510:2497], ActorState: ExecuteState, TraceId: 01jrfnqg9e5xzyxt85gtvbyf24, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14772, MsgBus: 28790 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e59/r3tmp/tmpfp7I3C/pdisk_1.dat 2025-04-10T11:00:30.866066Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:30.888812Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:30.903393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:30.903493Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:30.905705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14772, node 3 2025-04-10T11:00:31.066683Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:31.066708Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:31.066716Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:31.066842Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28790 TClient is connected to server 
localhost:28790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:31.640416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.654122Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:31.670834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.758068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.026573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.161915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:35.758297Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637732783103414:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.758424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.834340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.875976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.963293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.010325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.094288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.147066Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.265344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637737078071236:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.265490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.265804Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637737078071241:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.271807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:36.315270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637737078071243:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:36.420958Z node 3 :TX_PROXY ERROR: Actor# [3:7491637737078071301:3461] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:38.042529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:38.150640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:38.232596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 >> TModifyUserTest::ModifyUserIsEnabled [GOOD] |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |70.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> KqpRm::SingleSnapshotByExchanger >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::Overload |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::SimpleTenantsTest >> KqpRm::NotEnoughMemory >> TGRpcCmsTest::AlterRemoveTest >> TGRpcCmsTest::DisabledTxTest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |70.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |70.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] 
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T10:59:35.858410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T10:59:35.858519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:35.858567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T10:59:35.858608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T10:59:35.858658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T10:59:35.858696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T10:59:35.858757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T10:59:35.858891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T10:59:35.859285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:36.042120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:36.050334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:36.086450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:36.086709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T10:59:36.086895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T10:59:36.118034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T10:59:36.118422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T10:59:36.119341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:36.119780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T10:59:36.122720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:36.124516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:36.124602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:36.124859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T10:59:36.124925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:36.124980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T10:59:36.125176Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.158384Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T10:59:36.380011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T10:59:36.380303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.380583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T10:59:36.380860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T10:59:36.380923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.385002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:36.385169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T10:59:36.385388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.385470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T10:59:36.385513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T10:59:36.385565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T10:59:36.393833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.393918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T10:59:36.393957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T10:59:36.396128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.396192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.396260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:36.396319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T10:59:36.400689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-04-10T10:59:36.403275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T10:59:36.403531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T10:59:36.404777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T10:59:36.404971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T10:59:36.405054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:36.405442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T10:59:36.405526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T10:59:36.405728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T10:59:36.405808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T10:59:36.408168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T10:59:36.408215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T10:59:36.408384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T10:59:36.408446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T10:59:36.408701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T10:59:36.408751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T10:59:36.408853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:36.408887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:36.408929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T10:59:36.408965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:36.408999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T10:59:36.409036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T10:59:36.409067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and 
all the parts is done, operation id: 1:0 2025-04-10T10:59:36.409093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T10:59:36.409156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T10:59:36.409193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T10:59:36.409250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T10:59:36.411749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:36.411914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T10:59:36.411961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... itioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:45.060232Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:00:45.060856Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 704us result status StatusSuccess 2025-04-10T11:00:45.069428Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 
ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:45.071948Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:00:45.072567Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 652us result status StatusSuccess 2025-04-10T11:00:45.073982Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 
SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexes::IndexTopSortPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-04-10T11:00:31.911541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637717003779301:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.911589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e41/r3tmp/tmpiAG8mu/pdisk_1.dat 2025-04-10T11:00:32.647355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:32.647473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:32.654838Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:32.737009Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20535 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:00:33.138681Z node 1 :TX_PROXY DEBUG: actor# [1:7491637717003779536:2098] Handle TEvNavigate describe path dc-1 2025-04-10T11:00:33.138752Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714445:2265] HANDLE EvNavigateScheme dc-1 2025-04-10T11:00:33.138915Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637721298746864:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:33.139108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491637721298746864:2114], cookie# 1 2025-04-10T11:00:33.141100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747134:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747131:2258], cookie# 1 2025-04-10T11:00:33.141176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747135:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747132:2258], cookie# 1 2025-04-10T11:00:33.141194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747136:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747133:2258], cookie# 1 2025-04-10T11:00:33.141246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637717003779254:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747134:2258], cookie# 1 2025-04-10T11:00:33.141277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637717003779257:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747135:2258], cookie# 1 2025-04-10T11:00:33.141296Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637717003779260:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637721298747136:2258], cookie# 1 2025-04-10T11:00:33.141341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747134:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637717003779254:2049], cookie# 1 2025-04-10T11:00:33.141357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747135:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637717003779257:2052], cookie# 1 2025-04-10T11:00:33.141370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637721298747136:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637717003779260:2055], cookie# 1 2025-04-10T11:00:33.141453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637721298747131:2258], cookie# 1 2025-04-10T11:00:33.141491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, 
half# 1, successes# 1, faulires# 0 2025-04-10T11:00:33.141516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637721298747132:2258], cookie# 1 2025-04-10T11:00:33.141548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:00:33.141582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637721298747133:2258], cookie# 1 2025-04-10T11:00:33.141598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637721298747130:2258][/dc-1] Unexpected sync response: sender# [1:7491637721298747133:2258], cookie# 1 2025-04-10T11:00:33.141695Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637721298746864:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:00:33.156294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637721298746864:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637721298747130:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:33.156507Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637721298746864:2114], cacheItem# { Subscriber: { Subscriber: [1:7491637721298747130:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:00:33.159461Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491637725593714446:2266], recipient# [1:7491637725593714445:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:33.159577Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714445:2265] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:33.225871Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714445:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 
2025-04-10T11:00:33.230062Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714445:2265] Handle TEvDescribeSchemeResult Forward to# [1:7491637725593714444:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:00:33.268554Z node 1 :TX_PROXY DEBUG: actor# [1:7491637717003779536:2098] Handle TEvProposeTransaction 2025-04-10T11:00:33.268599Z node 1 :TX_PROXY DEBUG: actor# [1:7491637717003779536:2098] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:00:33.268696Z node 1 :TX_PROXY DEBUG: actor# [1:7491637717003779536:2098] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491637725593714451:2270] 2025-04-10T11:00:33.426372Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714451:2270] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:00:33.426432Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714451:2270] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:00:33.426510Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637725593714451:2270] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:00:33.426697Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637721298746864:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... es# 0 2025-04-10T11:00:42.589031Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078548:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7491637762906078550:2260], cookie# 10 2025-04-10T11:00:42.589054Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078548:2260][/dc-1] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:00:42.589077Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078548:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7491637762906078551:2260], cookie# 10 2025-04-10T11:00:42.589095Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078548:2260][/dc-1] Unexpected sync response: sender# [3:7491637762906078551:2260], cookie# 10 2025-04-10T11:00:42.589139Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491637758611110984:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:00:42.589219Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491637758611110984:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7491637762906078548:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282842309 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:42.589300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637758611110984:2113], cacheItem# { Subscriber: { Subscriber: [3:7491637762906078548:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744282842309 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-10T11:00:42.589509Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637762906078636:2332], recipient# [3:7491637762906078635:2331], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-10T11:00:42.589555Z node 3 :TX_PROXY DEBUG: Actor# [3:7491637762906078635:2331] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:42.589600Z node 3 :TX_PROXY ERROR: Actor# [3:7491637762906078635:2331] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2025-04-10T11:00:42.589686Z node 3 :TX_PROXY ERROR: Actor# [3:7491637762906078635:2331] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-10T11:00:42.589718Z node 3 :TX_PROXY DEBUG: Actor# [3:7491637762906078635:2331] txid# 281474976710663 SEND to# [3:7491637762906078634:2330] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:00:42.658558Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491637758611110984:2113], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:42.658640Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7491637758611110984:2113], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-10T11:00:42.658846Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:00:42.659272Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110675:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7491637762906078643:2334] 2025-04-10T11:00:42.659291Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110675:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-10T11:00:42.659373Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110675:2049] Subscribe: subscriber# [3:7491637762906078643:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:00:42.659417Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110678:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7491637762906078644:2334] 2025-04-10T11:00:42.659427Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110678:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-10T11:00:42.659451Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110678:2052] Subscribe: subscriber# [3:7491637762906078644:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:00:42.659506Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110681:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7491637762906078645:2334] 2025-04-10T11:00:42.659515Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110681:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-04-10T11:00:42.659535Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491637758611110681:2055] Subscribe: subscriber# [3:7491637762906078645:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:00:42.659576Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491637762906078643:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637758611110675:2049] 2025-04-10T11:00:42.659599Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491637762906078644:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637758611110678:2052] 2025-04-10T11:00:42.659619Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491637762906078645:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637758611110681:2055] 2025-04-10T11:00:42.659658Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637762906078640:2334] 2025-04-10T11:00:42.659701Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637762906078641:2334] 2025-04-10T11:00:42.659741Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Set up state: owner# [3:7491637758611110984:2113], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:00:42.659769Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7491637762906078642:2334] 2025-04-10T11:00:42.659799Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491637762906078639:2334][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [3:7491637758611110984:2113], state# { Deleted: 1 Strong: 1 
Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:00:42.659827Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110675:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491637762906078643:2334] 2025-04-10T11:00:42.659845Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110678:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491637762906078644:2334] 2025-04-10T11:00:42.659856Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491637758611110681:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491637762906078645:2334] 2025-04-10T11:00:42.659911Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491637758611110984:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-04-10T11:00:42.660010Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491637758611110984:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491637762906078639:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:42.660117Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491637758611110984:2113], cacheItem# { Subscriber: { Subscriber: [3:7491637762906078639:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:00:42.660205Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491637762906078646:2335], recipient# [3:7491637762906078638:2297], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |70.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover 
[GOOD] Test command err: Trying to start YDB, gRPC: 21702, MsgBus: 30925 2025-04-10T11:00:16.453907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637650328410998:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:16.458703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e56/r3tmp/tmp420Me8/pdisk_1.dat 2025-04-10T11:00:16.919727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:16.919845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:16.925624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:16.968661Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21702, node 1 2025-04-10T11:00:17.077401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.077441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.077447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.077567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30925 TClient is connected to server localhost:30925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:17.858117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:17.905439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.111235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.381143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:18.529917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.487413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637667508281890:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:20.487769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:20.784963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.821452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.864843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.909088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.948971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:20.988174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.059816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637671803249698:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.059891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.060255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637671803249703:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.064540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.079774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637671803249705:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:21.178212Z node 1 :TX_PROXY ERROR: Actor# [1:7491637671803249761:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:21.458274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637650328410998:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.458355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:22.233759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.546302Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:24.626993Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:25.513857Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqbykb42cxnq3p6j7ah9g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:25.560927Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, ActorId: [1:7491637676098217354:2498], ActorState: ExecuteState, TraceId: 01jrfnqbykb42cxnq3p6j7ah9g, Create QueryResponse for error on request, msg: 2025-04-10T11:00:26.627687Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:26.692630Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:27.906070Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqdz28vzndsawj9mt113r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:27.906404Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, ActorId: [1:7491637676098217354:2498], ActorState: ExecuteState, TraceId: 01jrfnqdz28vzndsawj9mt113r, Create QueryResponse for error on request, msg: 2025-04-10T11:00:27.946254Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:28.443605Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637701868022032:2661], TxId: 281474976710701, task: 1. Ctx: { TraceId : 01jrfnqf6w22j2rcf1dcm5qsv6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:00:28.443909Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491637701868022033:2662], TxId: 281474976710701, task: 2. Ctx: { TraceId : 01jrfnqf6w22j2rcf1dcm5qsv6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491637701868022029:2498], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:00:28.444282Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, ActorId: [1:7491637676098217354:2498], ActorState: ExecuteState, TraceId: 01jrfnqf6w22j2rcf1dcm5qsv6, Create QueryResponse for error on request, msg: 2025-04-10T11:00:29.594282Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqfn7ae3s8ad6y1xagze7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:29.594569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, ActorId: [1:7491637676098217354:2498], ActorState: ExecuteState, TraceId: 01jrfnqfn7ae3s8ad6y1xagze7, Create QueryResponse for error on request, msg: 2025-04-10T11:00:31.103465Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqgskbcz3vsc1822mz53k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:31.103723Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk2YjQ1YWMtMWUzODljZmItMWMxZTI4ZTktMzRmODgyNTg=, ActorId: [1:7491637676098217354:2498], ActorState: ExecuteState, TraceId: 01jrfnqgskbcz3vsc1822mz53k, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8931, MsgBus: 3124 2025-04-10T11:00:32.583491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637720142190195:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:32.583753Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e56/r3tmp/tmpHjwX3y/pdisk_1.dat 2025-04-10T11:00:32.833953Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:32.863970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:32.864078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:32.872916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8931, node 2 2025-04-10T11:00:33.047041Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:33.047069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:33.047077Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:33.047232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3124 TClient is connected to server localhost:3124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:33.731778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.743219Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:33.757579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.893576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.104514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.222522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:37.235408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637741617028443:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.235516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.328534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.419965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.479953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.541274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.586356Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637720142190195:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:37.586425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:37.632472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.758198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.865573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637741617028967:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.865659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.866013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637741617028972:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.870943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:37.894959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637741617028974:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:37.987681Z node 2 :TX_PROXY ERROR: Actor# [2:7491637741617029032:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:39.605324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.201700Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqw074b4ptxz8863qaqy7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:42.202085Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=, ActorId: [2:7491637750206963941:2500], ActorState: ExecuteState, TraceId: 01jrfnqw074b4ptxz8863qaqy7, Create QueryResponse for error on request, msg: 2025-04-10T11:00:42.720452Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491637763091866454:2590], TxId: 281474976710677, task: 1. Ctx: { TraceId : 01jrfnqx3859ra0252vp2h7g5f. SessionId : ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:00:42.720810Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491637763091866455:2591], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=. TraceId : 01jrfnqx3859ra0252vp2h7g5f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491637763091866451:2500], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:00:42.721171Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=, ActorId: [2:7491637750206963941:2500], ActorState: ExecuteState, TraceId: 01jrfnqx3859ra0252vp2h7g5f, Create QueryResponse for error on request, msg: 2025-04-10T11:00:43.825714Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnqxkg04yhz6e61cw47hpv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:43.826010Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFmMDc5YTEtZDYyOTNhMDMtZjVkNDczYzUtZjU5YmU5OGY=, ActorId: [2:7491637750206963941:2500], ActorState: ExecuteState, TraceId: 01jrfnqxkg04yhz6e61cw47hpv, Create QueryResponse for error on request, msg: 2025-04-10T11:00:43.995319Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:45.547267Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:45.606740Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::IndexFilterPushDown [GOOD] |70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |70.9%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TJaegerTracingConfiguratorTests::DefaultConfig >> KqpRm::NotEnoughMemory [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> KqpRm::SingleSnapshotByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-04-10T11:00:48.053894Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got 
EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:48.058659Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d5d/r3tmp/tmp5P6DSL/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:48.059388Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d5d/r3tmp/tmp5P6DSL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d5d/r3tmp/tmp5P6DSL/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17911673623293536338 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:48.131986Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:00:48.132352Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:00:48.180032Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:00:48.180195Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:00:48.180365Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:00:48.180438Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:00:48.183603Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:00:48.183687Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:00:48.183722Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:00:48.183750Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:00:48.184007Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:48.220586Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744282848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:48.220967Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:48.221057Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:48.221392Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:00:48.221517Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:00:48.221558Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:48.221676Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744282848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:48.221785Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:00:48.221822Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:48.221897Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:48.222041Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:00:48.229486Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:00:48.229861Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:48.230489Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:48.230634Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:48.230738Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:48.230984Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:48.231067Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:00:48.231297Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-10T11:00:48.231444Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:00:48.231499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 24966, MsgBus: 63340 2025-04-10T11:00:04.131018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637600183708314:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.131908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e73/r3tmp/tmpjcLgYT/pdisk_1.dat 2025-04-10T11:00:04.797914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:04.801794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:04.802086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:04.809071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24966, node 1 2025-04-10T11:00:05.026698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:05.026719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:05.026728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:05.026832Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63340 TClient is connected to server localhost:63340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:05.913225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:05.941283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:05.958299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:00:06.136348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:06.333831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.465804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:08.751545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617363579261:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.751660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.063440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.103201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.132043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637600183708314:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:09.132108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:09.193296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.233142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.286825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.341144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:09.398935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637621658547079:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.399019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.399405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637621658547084:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:09.405397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:09.424194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637621658547086:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:09.524830Z node 1 :TX_PROXY ERROR: Actor# [1:7491637621658547142:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:10.802031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:10.871836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:10.923179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3975, MsgBus: 10190 2025-04-10T11:00:15.372185Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637649280860718:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:15.372619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e73/r3tmp/tmpCtVRxO/pdisk_1.dat 2025-04-10T11:00:15.497302Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:15.516811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:15.516897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:15.518385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3975, node 2 2025-04-10T11:00:15.621915Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:15.621953Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:15.621961Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:15.622076Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10190 TClient is connected to server localhost:10190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:16.098199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:16.109075Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:16.113237Z nod ... CreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.815410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.902589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:19.993794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637666460732206:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.993875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.994203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637666460732211:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:19.998081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:20.012551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637666460732213:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:20.084971Z node 2 :TX_PROXY ERROR: Actor# [2:7491637670755699564:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:20.364141Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637649280860718:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:20.364210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:21.234573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:22.274440Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:30.478750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:30.478776Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 19817, MsgBus: 28065 2025-04-10T11:00:34.799110Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637728774483528:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:34.799173Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e73/r3tmp/tmpxWbpKn/pdisk_1.dat 2025-04-10T11:00:35.090447Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19817, node 3 2025-04-10T11:00:35.193577Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:35.193752Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:35.231013Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:35.322790Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:35.322812Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:35.322823Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:35.322959Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28065 TClient is connected to server localhost:28065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:36.096931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.109853Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:36.129366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.277656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.517196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.642462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:39.808692Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637728774483528:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:39.808847Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637750249321803:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:39.808919Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:39.808971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:39.909272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:39.992886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.065214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.122179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.179425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.242178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.310613Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637754544289616:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:40.310738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:40.311037Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637754544289621:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:40.317599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:40.340833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637754544289623:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:40.424393Z node 3 :TX_PROXY ERROR: Actor# [3:7491637754544289679:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:42.017186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.109473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.164644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |70.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 29298, MsgBus: 21585 2025-04-10T11:00:18.808581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637658916520740:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:18.808635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e4b/r3tmp/tmplTUmHo/pdisk_1.dat 2025-04-10T11:00:19.263187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:19.282386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:19.282513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:19.285268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29298, node 1 2025-04-10T11:00:19.418593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:19.418624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:19.418648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:19.418761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21585 TClient is connected to server localhost:21585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:20.066183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.108730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.264526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.465278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.544678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.656550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637676096391622:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:22.656717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.035258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.112050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.154401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.238395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.310354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.390742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.481977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637680391359451:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.482052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.482293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637680391359456:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:23.487416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:23.500500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637680391359458:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:23.570307Z node 1 :TX_PROXY ERROR: Actor# [1:7491637680391359512:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:23.812420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637658916520740:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:23.812502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:24.768458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.556456Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637688981294742:2531], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-04-10T11:00:25.557927Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmVkOGU1ZDItNDZiMDhkMDAtYmVmZTNlMi04OGFjOGMwMA==, ActorId: [1:7491637684686327105:2498], ActorState: ExecuteState, TraceId: 01jrfnqct812g4vb3vewca3m3h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:00:25.581654Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637688981294746:2533], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-04-10T11:00:25.582305Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmVkOGU1ZDItNDZiMDhkMDAtYmVmZTNlMi04OGFjOGMwMA==, ActorId: [1:7491637684686327105:2498], ActorState: ExecuteState, TraceId: 01jrfnqcv04zbgxvrtaary2zb1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:00:25.599922Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637688981294750:2535], status: GENERIC_ERROR, issues:
:3:41: Error: mismatched input 'VIEW' expecting {ON, SET} 2025-04-10T11:00:25.600228Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmVkOGU1ZDItNDZiMDhkMDAtYmVmZTNlMi04OGFjOGMwMA==, ActorId: [1:7491637684686327105:2498], ActorState: ExecuteState, TraceId: 01jrfnqcvq3t5r9wjy5nqcxpzr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:00:25.615153Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637688981294756:2538], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {, ';'} 2025-04-10T11:00:25.615378Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmVkOGU1ZDItNDZiMDhkMDAtYmVmZTNlMi04OGFjOGMwMA==, ActorId: [1:7491637684686327105:2498], ActorState: ExecuteState, TraceId: 01jrfnqcw88cpppaayb7jmrysw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 30602, MsgBus: 1101 2025-04-10T11:00:26.673009Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637695751253261:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:26.676296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e4b/r3tmp/tmp8corwp/pdisk_1.dat 2025-04-10T11:00:26.861642Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:26.888753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:26.888839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:26.889655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30602, node 2 2025-04-10T11:00:27.047044Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:27.047069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:27.047079Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:27.047201Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhos ... 1474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.290822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.343875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.420395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637717226092007:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:31.420480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:31.421283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637717226092012:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:31.425640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:31.437285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637717226092014:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:31.500415Z node 2 :TX_PROXY ERROR: Actor# [2:7491637717226092066:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:31.673703Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637695751253261:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.673781Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:32.868072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.966773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.068341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24540, MsgBus: 8145 2025-04-10T11:00:37.250735Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637741112369307:2182];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:37.250838Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e4b/r3tmp/tmpPQCaV0/pdisk_1.dat 2025-04-10T11:00:37.608778Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:37.612839Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:37.613195Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:37.615382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24540, node 3 2025-04-10T11:00:37.734782Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:37.734812Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:37.734822Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:37.734950Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8145 TClient is connected to server localhost:8145 WaitRootIsUp 'Root'... 
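The "Scheduled retry for error: Transaction ... completed, doublechecking" line above, followed by the TX_PROXY "path exist, request accepts it" issue, is the default-resource-pool creation racing with another session. A minimal sketch of that retry-then-verify pattern, under assumptions: `gScheme`, `CreatePath`, and `EnsureDefaultPool` are hypothetical stand-ins, not YDB's actual schemeshard or workload-service APIs.

```cpp
// Hedged sketch: on a racy "already created" error, re-check that the
// object now exists before treating the create as successful.
#include <iostream>
#include <set>
#include <string>

std::set<std::string> gScheme;  // stand-in for the schemeshard path registry

bool CreatePath(const std::string& path) {
    // false if another session created the path first (the racy case)
    return gScheme.insert(path).second;
}

bool EnsureDefaultPool(const std::string& path) {
    if (CreatePath(path)) return true;  // clean create, no race
    std::cout << "scheduled retry: path already exists, doublechecking\n";
    return gScheme.count(path) > 0;     // verify, then accept the result
}

int main() {
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    gScheme.insert(pool);               // simulate losing the creation race
    std::cout << (EnsureDefaultPool(pool) ? "pool ready\n" : "failed\n");
}
```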
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:38.466479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:38.488219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:38.757561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:39.056664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:39.174626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:41.914953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637758292240136:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:41.915025Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:41.968222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.017276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.099958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.149805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.227544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.258750Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637741112369307:2182];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:42.258910Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:42.349526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:42.549108Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637762587207966:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:42.549240Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:42.550071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637762587207971:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:42.556428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:42.577561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637762587207973:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:42.665089Z node 3 :TX_PROXY ERROR: Actor# [3:7491637762587208030:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:43.847359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:43.952032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:44.071106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-04-10T11:00:47.292750Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:47.293459Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d67/r3tmp/tmpGfA0hP/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:47.294054Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d67/r3tmp/tmpGfA0hP/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d67/r3tmp/tmpGfA0hP/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17142742181500314028 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:47.353441Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:00:47.353801Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:00:47.406079Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:00:47.406276Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:00:47.406451Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:00:47.406528Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:00:47.406662Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:00:47.406701Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:00:47.406729Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:00:47.406749Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
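The TPDiskConfg dump above fixes the disk geometry the test harness runs with. Two numbers follow directly from it; the sketch below is only that arithmetic (the constants are copied from the dump, everything else is illustrative).

```cpp
// Arithmetic on the logged TPDiskConfg values: a 134217728-byte chunk at
// 4096-byte sectors is 32768 sectors per chunk, and MinLogChunksTotal# 4
// reserves at least 512 MiB of chunk space for the PDisk log.
#include <cstdint>
#include <iostream>

int main() {
    const uint64_t chunkSize    = 134217728;  // ChunkSize# (128 MiB)
    const uint64_t sectorSize   = 4096;       // SectorSize#
    const uint64_t minLogChunks = 4;          // MinLogChunksTotal#
    std::cout << "sectors per chunk: " << chunkSize / sectorSize << "\n"
              << "min log reservation: "
              << (chunkSize * minLogChunks) / (1024 * 1024) << " MiB\n";
}
```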
2025-04-10T11:00:47.406997Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:47.439228Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744282847 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:47.440080Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:47.440171Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282847 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:47.440443Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:00:47.440530Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:00:47.440561Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:47.440689Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744282847 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:47.440779Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:00:47.440808Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:47.440886Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282847 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:47.440988Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:00:47.441698Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:00:47.441999Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:47.449274Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:47.449457Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:47.449572Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:47.449823Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:00:47.449915Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:00:47.450130Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
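The "Send to publish resource usage for reason: ..." lines above show the resource manager re-broadcasting a per-node snapshot whenever tracked state changes, tagged with a reason ("data_center update", "tenant updated", "alloc"). A minimal sketch of that publish-on-change shape, assuming nothing about YDB's real types: `NodeResources` and `PublishResourceUsage` are illustrative names mirroring only the fields visible in the log payload.

```cpp
// Hedged sketch of the published snapshot: field names and values follow
// the logged payload; the struct and function are ours, not YDB's.
#include <cstdint>
#include <iostream>
#include <string>

struct NodeResources {
    uint32_t NodeId = 0;
    uint64_t Timestamp = 0;               // unix seconds, e.g. 1744282847
    uint32_t AvailableComputeActors = 0;
    uint64_t UsedMemory = 0;
    uint64_t TotalMemory = 0;
    uint32_t ExecutionUnits = 0;
};

void PublishResourceUsage(const NodeResources& r, const std::string& reason) {
    std::cout << "publish (" << reason << "): node " << r.NodeId
              << " compute=" << r.AvailableComputeActors
              << " mem=" << r.UsedMemory << "/" << r.TotalMemory
              << " units=" << r.ExecutionUnits << "\n";
}

int main() {
    NodeResources n{1, 1744282847, 100, 0, 1000, 100};
    PublishResourceUsage(n, "tenant updated");  // initial state, as logged
    n.AvailableComputeActors = 80;              // after two 100-byte tasks:
    n.UsedMemory = 200;                         // matches the post-alloc
    n.ExecutionUnits = 80;                      // "reason: alloc" line
    PublishResourceUsage(n, "alloc");
}
```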
2025-04-10T11:00:47.450382Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:00:47.450465Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:00:47.453649Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:00:47.453726Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:00:47.453790Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:00:47.453832Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:00:47.453881Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:458:2336])) 2025-04-10T11:00:47.454204Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:00:47.454290Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:00:47.454331Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:00:47.454377Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:00:47.454434Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:00:47.454486Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:458:2336])) 2025-04-10T11:00:47.454572Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:00:47.454843Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:47.454978Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282847 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-10T11:00:47.455242Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:00:48.717061Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:00:48.717217Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:00:48.717289Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:458:2336])) 2025-04-10T11:00:48.717331Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2025-04-10T11:00:48.717383Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
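The submit/allocate/finish cycle above (tasks kqp-1-2-1 and kqp-2-1-2, each holding resources {0, 100}) is the broker's core bookkeeping. The real TResourceBroker usage figures (0.25 → 0.50 → 0.3002 → 0.1004) are time-weighted queue estimates that this sketch does not reproduce; it only tracks raw in-fly resources per queue to show the bookkeeping shape, with `BrokerQueue` as a hypothetical stand-in.

```cpp
// Hedged sketch: per-queue allocate/release accounting for broker tasks.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct Resources { uint64_t Cpu = 0; uint64_t Memory = 0; };  // {cpu, memory}

class BrokerQueue {
    std::map<std::string, Resources> inFly_;  // task id -> held resources
    Resources held_;
public:
    void Allocate(const std::string& task, Resources r) {
        inFly_[task] = r;
        held_.Cpu += r.Cpu; held_.Memory += r.Memory;
        std::cout << "allocate " << task << " -> held {" << held_.Cpu
                  << ", " << held_.Memory << "}\n";
    }
    void Finish(const std::string& task) {
        auto it = inFly_.find(task);
        if (it == inFly_.end()) return;       // unknown task: nothing to free
        held_.Cpu -= it->second.Cpu; held_.Memory -= it->second.Memory;
        inFly_.erase(it);
        std::cout << "finish " << task << " -> held {" << held_.Cpu
                  << ", " << held_.Memory << "}\n";
    }
};

int main() {
    BrokerQueue q;                        // queue_kqp_resource_manager
    q.Allocate("kqp-1-2-1", {0, 100});    // two tasks of 100 memory each,
    q.Allocate("kqp-2-1-2", {0, 100});    // as in the log above
    q.Finish("kqp-1-2-1");                // released when the query ends
    q.Finish("kqp-2-1-2");
}
```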
2025-04-10T11:00:48.717467Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:00:48.717534Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:458:2336])) 2025-04-10T11:00:48.717583Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-10T11:00:48.717783Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:00:48.717921Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744282848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:00:48.720187Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:00:49.029893Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> TConsoleTests::TestCreateTenant >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] |70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |70.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TConsoleConfigTests::TestModifyConfigItem >> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink >> KqpMultishardIndex::SortByPk [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> KqpMultishardIndex::DuplicateUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 9013, MsgBus: 15660 2025-04-10T11:00:29.900909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637707671512391:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:29.916251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e20/r3tmp/tmpXUcgwo/pdisk_1.dat 2025-04-10T11:00:30.585736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:30.587419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:30.587520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:30.600765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9013, node 1 2025-04-10T11:00:30.850731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:30.850757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:30.850763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:30.850869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15660 TClient is connected to server localhost:15660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:31.698526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.726745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:31.815246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.089096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.344394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.435442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.620278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637729146350506:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.620385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.887746Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637707671512391:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:34.887812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:34.984457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.051212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.109837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.185108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.225034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.319824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.418242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637733441318327:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.418326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.418786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637733441318332:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.423397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:35.435797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:00:35.436098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637733441318334:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:35.516061Z node 1 :TX_PROXY ERROR: Actor# [1:7491637733441318388:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:36.762575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.985665Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "IndexName" index_columns: "IndexColumn" global_index { settings { } } } } 2025-04-10T11:00:36.986537Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:36.986613Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637737736286102:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:36.986778Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:36.986832Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637737736286102:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:36.987134Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:36.987171Z node 1 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName, IndexColumn: IndexColumn, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637737736286102:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, L ... tatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.602997Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-10T11:00:47.603189Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715677 2025-04-10T11:00:47.603225Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.603313Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715677, txId# 281474976710760 2025-04-10T11:00:47.603361Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, 
read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.603625Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715677 2025-04-10T11:00:47.603680Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.604355Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715677, cookie: 281474976715677, txId: 281474976710760, status: StatusAccepted 2025-04-10T11:00:47.604505Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:00:47.605178Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715677 2025-04-10T11:00:47.605243Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, 
LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.607377Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 281474976715677 2025-04-10T11:00:47.607474Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.607867Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715677 2025-04-10T11:00:47.607916Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.607938Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:00:47.608272Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715677 2025-04-10T11:00:47.608318Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 
281474976715677, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobal, IndexName: IndexName2, IndexColumn: IndexColumn2, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [2:7491637783495810944:2548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1744282847580, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 2, upload bytes: 85, read rows: 2, read bytes: 85 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:47.608332Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715677, subscribers count# 0 2025-04-10T11:00:47.694451Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715677 2025-04-10T11:00:47.694764Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715677 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "IndexName2" index_columns: "IndexColumn2" global_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:00:48.086291Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:49.365046Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:49.398322Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> 
TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28485, MsgBus: 15661 2025-04-10T11:00:04.062410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637600700418541:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:04.070340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e77/r3tmp/tmprAikjG/pdisk_1.dat 2025-04-10T11:00:04.728250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:04.728362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:04.737995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:04.738526Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28485, node 1 2025-04-10T11:00:04.866476Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:04.866499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:04.866505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:04.866608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15661 TClient is connected to server localhost:15661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:05.711203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
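The long TBuildInfo dumps above trace an index build through its phase transitions ("Change state from Applying to Unlocking", then "Unlocking to Done"), where each phase owns one schemeshard transaction (LockTxId, InitiateTxId, ApplyTxId, UnlockTxId) that must be allocated, accepted, and reported complete before the builder advances. A minimal sketch of that progression, under assumptions: state names follow the log, but the real builder has additional intermediate phases (e.g. data filling) that are omitted here, and the enum and functions are ours.

```cpp
// Hedged sketch of the index-build state machine visible in the log;
// intermediate filling phases between Locking and Applying are elided.
#include <iostream>

enum class EState { Locking, Applying, Unlocking, Done };

const char* Name(EState s) {
    switch (s) {
        case EState::Locking:   return "Locking";
        case EState::Applying:  return "Applying";
        case EState::Unlocking: return "Unlocking";
        case EState::Done:      return "Done";
    }
    return "?";
}

EState Next(EState s) {
    switch (s) {
        case EState::Locking:  return EState::Applying;   // filling omitted
        case EState::Applying: return EState::Unlocking;
        default:               return EState::Done;
    }
}

int main() {
    EState s = EState::Locking;
    while (s != EState::Done) {
        EState n = Next(s);  // one schemeshard tx completes per transition
        std::cout << "Change state from " << Name(s)
                  << " to " << Name(n) << "\n";
        s = n;
    }
}
```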
2025-04-10T11:00:05.755092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:05.769780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.021935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.263421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:06.389648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:08.189521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617880289363:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.189726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.567684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.624811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.687527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.751797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.822767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.896112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:08.972778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617880289879:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.972884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.973430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637617880289884:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:08.977955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:09.008103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637617880289886:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:09.036497Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637600700418541:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:09.036561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:09.101464Z node 1 :TX_PROXY ERROR: Actor# [1:7491637622175257240:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:10.313258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25855, MsgBus: 29654 2025-04-10T11:00:12.338269Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637634751227846:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:12.338326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e77/r3tmp/tmpet4qxc/pdisk_1.dat 2025-04-10T11:00:12.551702Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:12.582927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:12.583024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:12.585427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25855, node 2 2025-04-10T11:00:12.698161Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:12.698183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:12.698192Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:12.698304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29654 TClient is connected to server localhost:29654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:13.276221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.283622Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:13.294955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:13.396900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:16.521909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637651931099314:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:16.583906Z node 2 :TX_PROXY ERROR: Actor# [2:7491637651931099369:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:17.338545Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637634751227846:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:17.338613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:17.812230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-10T11:00:17.872079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.969687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.051178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.153818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.237077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9704, MsgBus: 3478 2025-04-10T11:00:27.999702Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637699903646127:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:28.000276Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e77/r3tmp/tmpDYwWfr/pdisk_1.dat 2025-04-10T11:00:28.388934Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.392897Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:28.392994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:28.403774Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9704, node 3 2025-04-10T11:00:28.554824Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:28.554848Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:28.554857Z node 3 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:28.554985Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3478 TClient is connected to server localhost:3478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:00:29.267380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.280111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.408815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.680942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.811250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.407312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637721378484379:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.407405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.459640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.536816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.625133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.700502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.759251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.823762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.919575Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637721378484897:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.919695Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.923630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637721378484902:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.929493Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:32.950875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637721378484904:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:33.001327Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637699903646127:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:33.001422Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:33.032980Z node 3 :TX_PROXY ERROR: Actor# [3:7491637725673452255:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:34.740841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-10T11:00:34.841836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.974885Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.048774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.171159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.271034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:00:43.350858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:43.350890Z node 3 :IMPORT WARN: Table profiles were not loaded |70.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] |70.9%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-04-10T11:00:52.483705Z node 1 :BS_VDISK_PATCH INFO: 
{BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:00:52.484743Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-10T11:00:52.484829Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-10T11:00:52.485092Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-10T11:00:52.485199Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:00:52.485426Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-04-10T11:00:52.760321Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:00:52.761226Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-10T11:00:52.761305Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-10T11:00:52.761642Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:00:52.761811Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} 
[0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-04-10T11:00:52.761877Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 20437, MsgBus: 6210 2025-04-10T11:00:31.087044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637716010481060:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.087095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1d/r3tmp/tmpyJQpGf/pdisk_1.dat 2025-04-10T11:00:31.854880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:31.890320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:31.890434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:31.892347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20437, node 1 2025-04-10T11:00:32.168515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:32.168534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:32.168540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:32.168653Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6210 TClient is connected to server localhost:6210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:33.441475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:33.475301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:33.493583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.769750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.008322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.152385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.087833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637716010481060:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:36.151829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:36.331395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637737485319339:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.331515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.016309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.124395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.227731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.292767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.395801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.474465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.575794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637741780287171:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.575882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.576449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637741780287176:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.581039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:37.600698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637741780287178:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:37.654206Z node 1 :TX_PROXY ERROR: Actor# [1:7491637741780287233:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:38.641644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 22863, MsgBus: 3169 2025-04-10T11:00:41.170067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637761289205075:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:41.170122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1d/r3tmp/tmpEV3adQ/pdisk_1.dat 2025-04-10T11:00:41.298271Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:41.315009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:41.315103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:41.316789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22863, node 2 2025-04-10T11:00:41.402789Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:41.402810Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:41.402825Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:41.402934Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3169 TClient is connected to server localhost:3169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:42.107018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:42.154270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.251618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.463283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.578648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:45.762455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637778469076025:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:45.762572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:45.817286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:45.865513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:45.907308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:45.956148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.014061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.116204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.174059Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637761289205075:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:46.174166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:46.230306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637782764043839:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.236750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.237733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637782764043846:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.242564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:46.260358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637782764043848:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:46.325447Z node 2 :TX_PROXY ERROR: Actor# [2:7491637782764043903:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:47.707356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] >> TGRpcCmsTest::DescribeOptionsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 16562, MsgBus: 30259 2025-04-10T11:00:31.358384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637716743555652:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.359099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1b/r3tmp/tmpkiPZBd/pdisk_1.dat 2025-04-10T11:00:31.857098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:31.857209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:31.859942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:31.897124Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16562, node 1 2025-04-10T11:00:32.081129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:32.081156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:32.081163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:32.081311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30259 TClient is connected to server localhost:30259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:33.154406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.194862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:33.201745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.493340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.735840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.867114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.170235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637738218393761:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.170378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.351737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637716743555652:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:36.351812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:36.623606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.682903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.740366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.790665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.844107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.929094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.044647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637742513361575:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.044775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.045235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637742513361580:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.050492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:37.069041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637742513361582:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:37.178462Z node 1 :TX_PROXY ERROR: Actor# [1:7491637742513361641:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:38.683936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 27298, MsgBus: 11534 2025-04-10T11:00:42.561377Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637765534646806:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:42.561480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1b/r3tmp/tmpjgEz57/pdisk_1.dat 2025-04-10T11:00:42.782914Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27298, node 2 2025-04-10T11:00:42.815727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:42.815842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:42.862814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:43.041727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:43.041755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:43.041764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:43.041929Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11534 TClient is connected to server localhost:11534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:43.796723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:43.808375Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:43.834872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:43.946894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:44.236715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:44.341093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:47.363753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637787009485067:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.363883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.458860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.515234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.553042Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637765534646806:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.553137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:47.611969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.687074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.741503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.854972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.971936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637787009485595:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.972062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.974561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637787009485600:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.987798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:48.013805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637787009485602:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:48.105996Z node 2 :TX_PROXY ERROR: Actor# [2:7491637791304452955:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:49.301596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> TGRpcCmsTest::DisabledTxTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-04-10T11:00:47.454442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637782892001342:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.454613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001efa/r3tmp/tmpI3T8Of/pdisk_1.dat 2025-04-10T11:00:48.294377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:48.294545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:48.326760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:48.397653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27519, node 1 2025-04-10T11:00:48.612362Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.612418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:48.612677Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.650891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:48.650930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:48.650948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:48.651104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:49.180529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21988 2025-04-10T11:00:49.702572Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2025-04-10T11:00:49.702599Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2025-04-10T11:00:49.709001Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2025-04-10T11:00:49.813093Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7491637791481936769:2316], Recipient [1:7491637787186969123:2197]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2025-04-10T11:00:49.818494Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-04-10T11:00:49.826872Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-04-10T11:00:47.638033Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637783946643833:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.638083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f2a/r3tmp/tmpwDXNPY/pdisk_1.dat 2025-04-10T11:00:48.525020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:48.525112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:48.598032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:48.651095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:48.677784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 17726, node 1 2025-04-10T11:00:48.802721Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.820249Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.875233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:48.875278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:48.875286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:48.875440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:49.409646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:49.571991Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7491637792536578986:2314], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-10T11:00:49.572048Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-10T11:00:49.572077Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.572091Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.572196Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-10T11:00:49.572323Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744282849570261) 2025-04-10T11:00:49.572981Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744282849570261 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-10T11:00:49.573231Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-10T11:00:49.587507Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-10T11:00:49.588917Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849570261&action=1" } } } 2025-04-10T11:00:49.589208Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.589328Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:49.589610Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:00:49.590554Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-10T11:00:49.590709Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:00:49.625651Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-10T11:00:49.625762Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.635699Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7491637792536578991:2196], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.635736Z node 1 :CMS_TENANTS TRACE: StateWork, 
processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.635780Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.635794Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.635865Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:00:49.635912Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-10T11:00:49.635992Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-10T11:00:49.655168Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:49.655200Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.655211Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.655221Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.655284Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-10T11:00:49.655320Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744282849570261 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:49.674671Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:49.694197Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.694295Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-10T11:00:49.694317Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-10T11:00:49.708204Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-10T11:00:49.708409Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637792536579001:2315], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849570261&action=1" } UserToken: "" } 2025-04-10T11:00:49.708442Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:49.708648Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849570261&action=1" } } 2025-04-10T11:00:49.713755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-10T11:00:49.721328Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-10T11:00:49.721419Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: 
NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-10T11:00:49.729590Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-10T11:00:49.748739Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-10T11:00:49.749375Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282849778 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:00:49.749390Z node 1 : ... 38859Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:49.839091Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } } 2025-04-10T11:00:49.839474Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got check response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 12 2025-04-10T11:00:49.839505Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.839592Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7491637792536579165:2196], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.839606Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.839619Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.839627Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.839670Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:00:49.839688Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=2 2025-04-10T11:00:49.840278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 
2025-04-10T11:00:49.840604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:49.854048Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:49.854102Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.860553Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-10T11:00:49.860580Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:00:49.860648Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-10T11:00:49.860721Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-10T11:00:49.860761Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-04-10T11:00:49.860830Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7491637792536579103:2196], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-10T11:00:49.860848Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-10T11:00:49.860861Z node 1 :CMS_TENANTS DEBUG: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-04-10T11:00:49.860903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-10T11:00:49.873826Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-04-10T11:00:49.899889Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637792536579259:2326], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } UserToken: "" } 2025-04-10T11:00:49.899932Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:49.900156Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } } 2025-04-10T11:00:49.910990Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-10T11:00:49.911019Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:00:49.911092Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:49.911164Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7491637792536579175:2196], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:49.911182Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:49.911201Z node 1 :CMS_TENANTS TRACE: 
TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.911216Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.911267Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-10T11:00:49.911291Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744282849814235 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:49.911364Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282849814235 issue= 2025-04-10T11:00:49.915006Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-10T11:00:49.915104Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-10T11:00:49.915122Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.915307Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491637788241611263:2201], Recipient [1:7491637788241611389:2196]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:00:49.915322Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:00:49.915338Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.915347Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.915369Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-10T11:00:49.915390Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744282849814235 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:49.921446Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:49.921516Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.921562Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:49.921725Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:00:49.922489Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-10T11:00:49.922572Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-10T11:00:49.933087Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-10T11:00:49.933257Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7491637792536579282:2196], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:49.933300Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:49.933325Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.933347Z node 1 :CMS_TENANTS TRACE: 
TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.933396Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-10T11:00:49.933441Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-10T11:00:49.971519Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637792536579296:2328], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } UserToken: "" } 2025-04-10T11:00:49.971551Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:49.971730Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } } 2025-04-10T11:00:50.003218Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:50.003277Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:50.003287Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:50.003296Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:50.003420Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744282849814235 2025-04-10T11:00:50.003435Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282849814235 issue= 2025-04-10T11:00:50.003467Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744282849814235 issue= 2025-04-10T11:00:50.003483Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-10T11:00:50.003608Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744282849814235 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:50.010635Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-10T11:00:50.010745Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:50.081287Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637796831546601:2330], Recipient [1:7491637788241611389:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" } UserToken: "" } 2025-04-10T11:00:50.081336Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:50.081524Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849814235&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 1771, MsgBus: 1197 2025-04-10T11:00:29.396683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637707573405200:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:29.397329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e25/r3tmp/tmpxr0jXr/pdisk_1.dat 2025-04-10T11:00:29.982335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:29.982474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:29.987618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:30.030676Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1771, node 1 2025-04-10T11:00:30.210761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:30.210784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:30.210793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:30.210898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1197 TClient is connected to server localhost:1197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:30.973130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.000971Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:31.011157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.229387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.453730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.561245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:34.079300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637729048243326:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.079413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.358276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637707573405200:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:34.358364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:34.420082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.497061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.563711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.605346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.660296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.710478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.806580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637729048243846:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.806671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.807024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637729048243851:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.811369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:34.835520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637729048243853:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:34.909372Z node 1 :TX_PROXY ERROR: Actor# [1:7491637729048243908:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:36.084883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 26176, MsgBus: 17747 2025-04-10T11:00:41.379876Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637759547141391:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:41.379916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e25/r3tmp/tmpONFDrx/pdisk_1.dat 2025-04-10T11:00:41.658318Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:41.676466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:41.676576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:41.680658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26176, node 2 2025-04-10T11:00:41.759588Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:41.759614Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:41.759624Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:41.759782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17747 TClient is connected to server localhost:17747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:42.321973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:42.335468Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:42.351450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.469283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.728947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.864603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:45.982043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637776727012348:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:45.986357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.053914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.111799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.168798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.229979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.323079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.380990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:46.382585Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637759547141391:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:46.382688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:46.542450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637781021980171:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.542560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.546684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637781021980176:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:46.551907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:46.575111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637781021980178:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:46.673682Z node 2 :TX_PROXY ERROR: Actor# [2:7491637781021980234:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:47.862831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:50.824717Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfnr47qe7515kjhm3jcj8kg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjY4ZTVjMDItMjgzNTRmNzgtY2E0OWRjZjEtZjhmYjA3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:00:50.837152Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjY4ZTVjMDItMjgzNTRmNzgtY2E0OWRjZjEtZjhmYjA3OA==, ActorId: [2:7491637789611915969:2558], ActorState: ExecuteState, TraceId: 01jrfnr47qe7515kjhm3jcj8kg, Create QueryResponse for error on request, msg: >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TVPatchTests::FindingPartsWithTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-04-10T11:00:47.754906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637783228996646:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.754959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ee8/r3tmp/tmpm6Faty/pdisk_1.dat 2025-04-10T11:00:48.601993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:48.602170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:48.612220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:48.658404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15369, node 1 2025-04-10T11:00:48.773950Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.780429Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.780479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:48.949527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:48.949565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:48.949575Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:48.949730Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:14352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:49.590500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:49.802866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-10T11:00:49.863208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> ReadSessionImplTest::ReconnectOnTmpError >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure |71.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |71.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain |71.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |71.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> 
TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-04-10T11:00:54.849789Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:00:54.850819Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:00:54.850902Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:00:54.851146Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-10T11:00:54.851226Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-10T11:00:54.851305Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-10T11:00:55.154403Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-04-10T11:00:55.165122Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-04-10T11:00:55.165225Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-10T11:00:55.165347Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> KqpMultishardIndex::DataColumnSelect [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain 
>> KqpProxy::PassErrroViaSessionActor ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-10T11:00:55.670206Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.670252Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.670278Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.674836Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:00:55.674916Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.674946Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.676442Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007252s 2025-04-10T11:00:55.677193Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.678414Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:00:55.678504Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.683295Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.683321Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.683347Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.683813Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:00:55.683856Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.683884Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.683965Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006762s 2025-04-10T11:00:55.694831Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.695533Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:00:55.695679Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.706282Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.706316Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.706342Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.706821Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T11:00:55.706867Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.706899Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.706981Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.179492s 2025-04-10T11:00:55.707572Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.710184Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:00:55.710279Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.711699Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.711804Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.711833Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.712808Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T11:00:55.712859Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.712893Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.712991Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.169131s 2025-04-10T11:00:55.713641Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.718366Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:00:55.718479Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.724049Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.724093Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.724118Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.724517Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.734466Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:00:55.756453Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.763576Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-10T11:00:55.763642Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.763668Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.763736Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.219228s 2025-04-10T11:00:55.763948Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T11:00:55.795916Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.795948Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.795970Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.806778Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:55.813232Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:00:55.813476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.814647Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:00:55.919272Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.919552Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T11:00:55.919618Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:00:55.919669Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T11:00:55.919747Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T11:00:56.023002Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T11:00:56.023200Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T11:00:56.024495Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:56.024546Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:56.024568Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:56.024970Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:00:56.025615Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:00:56.025779Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:56.030669Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:00:56.135286Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:56.135717Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T11:00:56.135789Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:00:56.135827Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-10T11:00:56.135924Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-10T11:00:56.136023Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T11:00:56.142476Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T11:00:56.142613Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T11:00:56.143281Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster
>> TKqpScanData::ArrowToUnboxedValueConverter
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop
>> KqpProxy::CalcPeerStats [GOOD]
>> KqpProxy::CreatesScriptExecutionsTable
>> TPartitionWriterCacheActorTests::WriteReplyOrder
>> TKqpScanData::ArrowToUnboxedValueConverter [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD]
Test command err:
2025-04-10T11:00:47.765337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637787056586633:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.765565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001edf/r3tmp/tmpRA07nR/pdisk_1.dat 2025-04-10T11:00:48.560785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:48.560876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:48.573156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:48.650352Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7286, node 1 2025-04-10T11:00:48.923482Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.923503Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:48.937742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:48.938109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:48.938119Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:48.938127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:48.938241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20906 WaitRootIsUp 'Root'...
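[Editor's note on the ReadSessionImplTest::SimpleDataHandlersWithCommit trace above.] The trace shows the receive -> take data -> commit sequence ("Take Data. Partition 1.", "Commit offsets [1, 3)", "Committed response"). A minimal sketch of the client-side loop that drives such a trace, assuming the YDB C++ topic SDK; the include path, consumer name, and topic name are illustrative, not taken from the test harness:

```cpp
#include <string>
#include <variant>
// Include path is an assumption; in-tree builds use the
// ydb/public/sdk/cpp/... headers instead.
#include <ydb-cpp-sdk/client/topic/client.h>

// Hedged sketch, not the test's actual fixture: read messages and commit the
// consumed range, which is what produces "Commit offsets [1, 3)" above.
void ReadAndCommit(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient client(driver);
    auto settings = NYdb::NTopic::TReadSessionSettings()
        .ConsumerName("example-consumer")        // assumed name
        .AppendTopics(std::string("TestTopic")); // topic name as in the log
    auto session = client.CreateReadSession(settings);
    while (auto event = session->GetEvent(/*block=*/true)) {
        using TData = NYdb::NTopic::TReadSessionEvent::TDataReceivedEvent;
        if (auto* data = std::get_if<TData>(&*event)) {
            for (const auto& message : data->GetMessages()) {
                (void)message.GetData(); // application-side processing goes here
            }
            data->Commit(); // acknowledged as "Committed response: { cookies { ... } }"
        }
    }
}
```

The INTERNAL_ERROR/TIMEOUT records with growing reconnect delays (0.007252s, 0.179492s, 0.219228s) appear to be the session's internal retry policy with backoff; the loop above never sees them, which is what lets a handler this simple survive the injected failures.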
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:49.496798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:49.636882Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7491637795646521802:2314], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-10T11:00:49.636929Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-10T11:00:49.636967Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.636983Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.637103Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-10T11:00:49.637217Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744282849637152) 2025-04-10T11:00:49.637845Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744282849637152 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-10T11:00:49.638043Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-10T11:00:49.645855Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-10T11:00:49.647000Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849637152&action=1" } } } 2025-04-10T11:00:49.647147Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.647215Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:49.647355Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: 
"/Root/users/user-1:hdd" } } } 2025-04-10T11:00:49.647874Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-10T11:00:49.648060Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:00:49.678304Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-10T11:00:49.678404Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.682612Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7491637795646521807:2193], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.682648Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.682674Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.682687Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.682733Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:00:49.682762Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-10T11:00:49.682828Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-10T11:00:49.710891Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637795646521819:2316], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849637152&action=1" } UserToken: "" } 2025-04-10T11:00:49.710912Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:49.711097Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849637152&action=1" } } 2025-04-10T11:00:49.740432Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:49.740472Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.740488Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.740497Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.740554Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-10T11:00:49.740578Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744282849637152 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:49.766555Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:49.766688Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.766729Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-10T11:00:49.766737Z node 1 
:CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-10T11:00:49.773671Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-10T11:00:49.775591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-10T11:00:49.779480Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-10T11:00:49.779544Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-10T11:00:49.784956Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-10T11:00:49.820037Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-10T11:00:49.820686Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282849827 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:00:49.820697Z node 1 :C ... 
er-1) done 2025-04-10T11:00:51.059683Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.059763Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7491637804236456989:2193], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.059807Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.059833Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.059845Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.059876Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-10T11:00:51.059896Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744282851002167 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.059945Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282851002167 issue= 2025-04-10T11:00:51.064335Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-10T11:00:51.064429Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-10T11:00:51.064448Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.064637Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491637791351554076:2198], Recipient [1:7491637791351554228:2193]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:00:51.064661Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:00:51.064698Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.064714Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.064738Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-10T11:00:51.064757Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744282851002167 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.067186Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-10T11:00:51.071901Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:51.071965Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.072010Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:51.072113Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:00:51.072599Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-10T11:00:51.072686Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: 
Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-10T11:00:51.080857Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-10T11:00:51.080931Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7491637804236457085:2193], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:51.080953Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:51.080975Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.080982Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.081021Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-10T11:00:51.081042Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-10T11:00:51.107337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-10T11:00:51.086881Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037889 because of ERROR 2025-04-10T11:00:51.086954Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037896 because of ERROR 2025-04-10T11:00:51.117596Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:51.117630Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.117637Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.117644Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.117691Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744282851002167 2025-04-10T11:00:51.117702Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282851002167 issue= 2025-04-10T11:00:51.117712Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744282851002167 issue= 2025-04-10T11:00:51.086985Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037897 because of ERROR 2025-04-10T11:00:51.126226Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-10T11:00:51.129920Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744282851002167 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.087013Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037890 because of ERROR 2025-04-10T11:00:51.155200Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-10T11:00:51.161887Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637804236457111:2390], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851002167&action=2" } UserToken: "" } 2025-04-10T11:00:51.087048Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 
72075186224037893 because of ERROR 2025-04-10T11:00:51.087084Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037895 because of ERROR 2025-04-10T11:00:51.087114Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037894 because of ERROR 2025-04-10T11:00:51.087145Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037891 because of ERROR 2025-04-10T11:00:51.087182Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037892 because of ERROR 2025-04-10T11:00:51.117562Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-10T11:00:51.117959Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-10T11:00:51.117974Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-10T11:00:51.117986Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-10T11:00:51.118008Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-10T11:00:51.118033Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-10T11:00:51.118053Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-10T11:00:51.118079Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-10T11:00:51.118097Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-10T11:00:51.161945Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:51.167373Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851002167&action=2" } } 2025-04-10T11:00:51.190056Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-10T11:00:51.190133Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.199723Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-10T11:00:51.250863Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491637804236457147:2396], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851002167&action=2" } UserToken: "" } 2025-04-10T11:00:51.250919Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:00:51.251104Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851002167&action=2" ready: true status: SUCCESS } } 2025-04-10T11:00:51.255396Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491637804236457150:2398], 
Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-10T11:00:51.255429Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:00:51.255578Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-10T11:00:51.263019Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7491637804236457153:2399], Recipient [1:7491637791351554228:2193]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-10T11:00:51.263071Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-10T11:00:51.263291Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-10T11:00:51.271301Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:00:51.271499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
>> YdbTableBulkUpsert::Overload [GOOD]
>> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription
>> TPersQueueTest::DirectReadPreCached
>> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD]
>> TJaegerTracingConfiguratorTests::ThrottlingByDb
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry
>> TComputeScheduler::TTotalLimits [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD]
Test command err:
2025-04-10T11:00:47.518591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637786943485991:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.518634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f03/r3tmp/tmpLCYxLP/pdisk_1.dat 2025-04-10T11:00:48.320230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:48.339476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:48.339580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:48.360609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30234, node 1 2025-04-10T11:00:48.754863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:48.754889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:48.754896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:48.755003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26357 WaitRootIsUp 'Root'...
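[Editor's note on the TGRpcCmsTest::SimpleTenantsTest trace above.] The TTxCreateTenant record echoes the wire request that starts the tenant lifecycle (create pools -> create subdomain -> remove). For reference, a hedged sketch of building that request with the public protos from ydb_cms.proto in this repository; field names mirror the text dump in the log, while the function name is invented for the example:

```cpp
#include <ydb/public/api/protos/ydb_cms.pb.h> // proto as shipped in the ydb repo

// Sketch only: reconstructs the request printed above as
// "path: \"/Root/users/user-1\" resources { storage_units { unit_kind: \"hdd\" count: 1 } }".
Ydb::Cms::CreateDatabaseRequest MakeCreateTenantRequest() {
    Ydb::Cms::CreateDatabaseRequest request;
    request.set_path("/Root/users/user-1");
    auto* unit = request.mutable_resources()->add_storage_units();
    unit->set_unit_kind("hdd"); // matches the "/Root/users/user-1:hdd" pool allocated above
    unit->set_count(1);
    return request;
}
```

The subsequent TPoolManip and TSubdomainManip records are the server-side counterparts of this one request: pool definition against the BSC box, then ESchemeOpCreateExtSubDomain through the scheme shard.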
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:49.546346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:49.791085Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7491637795533421370:2315], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-10T11:00:49.791148Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-10T11:00:49.791205Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.791223Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.791382Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-10T11:00:49.791593Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744282849790782) 2025-04-10T11:00:49.792258Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744282849790782 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-10T11:00:49.792512Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-10T11:00:49.810303Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-10T11:00:49.811023Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849790782&action=1" } } } 2025-04-10T11:00:49.811228Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.811324Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:49.811546Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:00:49.812163Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-10T11:00:49.812358Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:00:49.818857Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7491637795533421370:2315], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849790782&action=1" } UserToken: "" } 2025-04-10T11:00:49.818899Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-04-10T11:00:49.819228Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7491637795533421370:2315] 2025-04-10T11:00:49.819333Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282849790782&action=1" } } 2025-04-10T11:00:49.829843Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-10T11:00:49.829932Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.830018Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7491637795533421375:2199], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.830037Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:00:49.830053Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.830064Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.830117Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:00:49.830180Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-10T11:00:49.830256Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-10T11:00:49.846797Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:49.846840Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:49.846849Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.846869Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:49.846944Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-10T11:00:49.846974Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744282849790782 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:49.863232Z node 1 :CMS_TENANTS DEBUG: 
TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:49.863406Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:49.863457Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-10T11:00:49.863472Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-10T11:00:49.869421Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-10T11:00:49.879993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-10T11:00:49.893836Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-10T11:00:49.893932Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2025-04-10T11:00:49.913582Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2025-04-10T11:00:49.940229Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2025-04-10T11:00:49.941048Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744282849960 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:00:49.941078Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:00:49.941150Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user ... 
480, LocalPathId: 3] by tx: 281474976715660 2025-04-10T11:00:51.502519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:51.503210Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7491637804123356520:2377], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851490030&action=2" } UserToken: "" } 2025-04-10T11:00:51.503237Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-04-10T11:00:51.503423Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7491637804123356520:2377] 2025-04-10T11:00:51.503491Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851490030&action=2" } } 2025-04-10T11:00:51.507180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715660 2025-04-10T11:00:51.508270Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-10T11:00:51.508352Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715660 2025-04-10T11:00:51.516981Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715660 2025-04-10T11:00:51.636974Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2025-04-10T11:00:51.637016Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:00:51.637132Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.637248Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7491637804123356540:2199], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.637269Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:00:51.637293Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.637311Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.637417Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-10T11:00:51.637455Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1744282851490030 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.637561Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282851490030 issue= 2025-04-10T11:00:51.645156Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-10T11:00:51.645271Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-10T11:00:51.645288Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) 
completed tx 2025-04-10T11:00:51.645584Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491637786943486346:2204], Recipient [1:7491637791238453773:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:00:51.645602Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:00:51.645625Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.645636Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.645719Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-10T11:00:51.645743Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744282851490030 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.645631Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-10T11:00:51.655096Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:00:51.655193Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.655245Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:00:51.655420Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:00:51.656717Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-10T11:00:51.656826Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-10T11:00:51.658916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-10T11:00:51.659882Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-10T11:00:51.659947Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-10T11:00:51.659964Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-10T11:00:51.659975Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-10T11:00:51.659993Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-10T11:00:51.660007Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-10T11:00:51.660017Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-10T11:00:51.660027Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, 
TabletId: 72075186224037896 not found 2025-04-10T11:00:51.660039Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-10T11:00:51.680917Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-10T11:00:51.681112Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7491637804123356609:2199], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:51.681152Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-10T11:00:51.681180Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.681198Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.681258Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-10T11:00:51.681285Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-10T11:00:51.767214Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-04-10T11:00:51.803177Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-10T11:00:51.815389Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:00:51.815438Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:00:51.815449Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.815460Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:00:51.815546Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744282851490030 2025-04-10T11:00:51.815561Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744282851490030 issue= 2025-04-10T11:00:51.815580Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744282851490030 issue= 2025-04-10T11:00:51.815606Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-10T11:00:51.815719Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744282851490030 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:00:51.923393Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-10T11:00:51.938322Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7491637804123356520:2377]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744282851490030&action=2" ready: true status: SUCCESS } } 2025-04-10T11:00:51.938466Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:00:51.995877Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491637804123356670:2383], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-10T11:00:51.995921Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:00:51.996105Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true 
status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-10T11:00:51.998926Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7491637804123356673:2384], Recipient [1:7491637791238453773:2199]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-10T11:00:51.998963Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-10T11:00:51.999195Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-10T11:00:52.021833Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:00:52.022081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:00:54.959410Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637791538836074:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:54.959499Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect [GOOD]
Test command err:
Trying to start YDB, gRPC: 18339, MsgBus: 10546 2025-04-10T11:00:31.579325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637714463653511:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:31.579776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e18/r3tmp/tmpUO8zd7/pdisk_1.dat 2025-04-10T11:00:32.217731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:32.225594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:32.226545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:32.235038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18339, node 1 2025-04-10T11:00:32.448306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:32.450266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:32.450288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:32.450430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10546 TClient is connected to server localhost:10546 WaitRootIsUp 'Root'...
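[Editor's note on the two TGRpcCmsTest traces above.] The first (async) test polls with repeated TEvGetOperationRequest until the response carries "ready: true", while SimpleTenantsTestSyncOperation subscribes once via TEvNotifyOperationCompletionRequest and gets pushed the final status. A generic sketch of the client-side polling pattern the async trace implies; `poll` is a stand-in for the real GetOperation RPC and is not a YDB API, and the backoff constants are assumptions:

```cpp
#include <algorithm>
#include <chrono>
#include <functional>
#include <string>
#include <thread>

// Sketch of the polling loop implied by the repeated TEvGetOperationRequest
// records: re-ask for an operation id such as
// "ydb://cmsrequest/5?tenant=/Root/users/user-1&...&action=2" until ready: true.
void WaitForOperation(const std::string& operationId,
                      const std::function<bool(const std::string&)>& poll) {
    using namespace std::chrono_literals;
    std::chrono::milliseconds delay = 50ms;
    while (!poll(operationId)) {              // poll() returns the operation's `ready` flag
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, std::chrono::milliseconds(1000)); // capped backoff (assumed policy)
    }
}
```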
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:33.424871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.484191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.752970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:33.986168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.083703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:36.554301Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637714463653511:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:36.554381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:36.738737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637735938491627:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.738832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.177282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.228637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.305440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.362466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.407005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.481325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:37.581064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637740233459450:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.581168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.581899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637740233459455:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:37.587017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:37.619863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637740233459457:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:37.715681Z node 1 :TX_PROXY ERROR: Actor# [1:7491637740233459515:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:38.992430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 61459, MsgBus: 20410 2025-04-10T11:00:42.180482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637762790525166:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:42.181032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e18/r3tmp/tmpRp5UMa/pdisk_1.dat 2025-04-10T11:00:42.381332Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:42.397127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:42.397233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:42.399721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61459, node 2 2025-04-10T11:00:42.586018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:42.586045Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:42.586066Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:42.586191Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20410 TClient is connected to server localhost:20410 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:43.279553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:43.312757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:43.427592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:43.663938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:43.744540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:47.147859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637784265363345:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.147981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.181398Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637762790525166:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:47.181464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:47.215840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.274957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.350364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.408413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.462133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.525399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:47.631466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637784265363871:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.631620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.632175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637784265363876:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:47.636998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:47.654050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637784265363878:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:47.753254Z node 2 :TX_PROXY ERROR: Actor# [2:7491637784265363933:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:49.417856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:49.521060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:00:49.620495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:00:52.024527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD] Test command err: 1610 1600 1610 1600 >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] Test command err: 2025-04-10T10:59:21.974690Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637415373208960:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:21.974745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d63/r3tmp/tmpx39tR2/pdisk_1.dat 2025-04-10T10:59:23.033974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:23.038419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:23.066799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:23.066895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:23.088544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10214, node 1 2025-04-10T10:59:23.558766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.558786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.558794Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.558897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:24.117543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:59:26.978382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637415373208960:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:26.978435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:59:28.354800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:28.745642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637445437981109:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:28.745729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:28.746295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637445437981121:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:28.754539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T10:59:28.788273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637445437981123:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T10:59:28.861679Z node 1 :TX_PROXY ERROR: Actor# [1:7491637445437981198:2822] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:59:30.380781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnnnb727tbje0t3cg9nxne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVlOGRhZDEtODU5NDdmMS0yMGU0OWM0My1lMTFiNmIwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:31.522678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfnnpzw9h2hyj8jeag1e0qa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVlOGRhZDEtODU5NDdmMS0yMGU0OWM0My1lMTFiNmIwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:31.654756Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T10:59:31.701073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:32.343858Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfnnrc3bma2rxyk0k1sm9z6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcwMzRlYjgtN2JmY2RjZC1iMjQ4M2VhZC00ODU2M2VhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:32.800877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnnrw7ah5dntncm6kjnb34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcwMzRlYjgtN2JmY2RjZC1iMjQ4M2VhZC00ODU2M2VhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:32.881950Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T10:59:32.924048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:33.983525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jrfnnsjf902wmntax6rdtvn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1ZjFkY2UtMWFiMGJhZWYtYzBkOWJkMTItNzczZDM4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:35.110457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jrfnntfkcahnwqyfw4hkyxps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1ZjFkY2UtMWFiMGJhZWYtYzBkOWJkMTItNzczZDM4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:35.195346Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T10:59:35.239998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:35.987426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfnnvte45bfx7q4zczy6tys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdkYjA2MDItMmZhMDU3OTctMzZlNjA5ZTctMjlhYTQxMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:36.995892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfnnwe33qekjq9ramqyypfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdkYjA2MDItMmZhMDU3OTctMzZlNjA5ZTctMjlhYTQxMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:37.158712Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T10:59:37.185864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:38.021638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:59:38.021670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:38.044079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfnnxpw2kq3fvx7218hfyn7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRlNDA5NDktOTYzMjU4NzgtNTExYzc2ZjAtYTkzNWU4M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:38.538870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfnnyeb96cvathd89rdprwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRlNDA5NDktOTYzMjU4NzgtNTExYzc2ZjAtYTkzNWU4M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:38.621107Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-10T10:59:38.659157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T10:59:39.334845Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jrfnnz5f0xhrcas0xkty388k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhNmE5MDAtMTliMDA5ODItZWU2Y2VmNTgtZDBmYTc4N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-10T10:59:39.966853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jrfnnzpqejqwqqyrb9j4jtdv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhNmE5MDAtMTliMDA5ODItZWU2Y2VmNTgtZDBmYTc4N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 1 rows 2025-04-10T10:59:40.042813Z node 1 :H ... ctionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.676515Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.676535Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.677720Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T11:00:54.677775Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:00:54.677965Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-10T11:00:54.677984Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:00:54.678172Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-10T11:00:54.678844Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-10T11:00:54.683578Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.683605Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.684052Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.684070Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.686265Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-10T11:00:54.686314Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:00:54.693111Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-10T11:00:54.693161Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:00:54.693863Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.693885Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.694533Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.694566Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-04-10T11:00:54.702237Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.702289Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.703519Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.703550Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, 
finished edge# 0, front# 0 2025-04-10T11:00:54.712773Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.712818Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.716811Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.716862Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.721378Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-10T11:00:54.722044Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-04-10T11:00:54.722406Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-10T11:00:54.722738Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-10T11:00:54.735117Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.735168Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.735318Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-10T11:00:54.735351Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:00:54.735693Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T11:00:54.735711Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:00:54.736107Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.736125Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.736316Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-10T11:00:54.736336Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:00:54.736724Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.736740Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.736815Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-10T11:00:54.736832Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:00:54.737175Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.737189Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-04-10T11:00:54.752734Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.752781Z node 13 :TX_DATASHARD 
DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.753154Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.753166Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.759015Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.759067Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.762963Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.763003Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.764253Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-10T11:00:54.764839Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-10T11:00:54.765349Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-04-10T11:00:54.766132Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-10T11:00:54.777723Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-04-10T11:00:54.777782Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:00:54.777965Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-10T11:00:54.777982Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:00:54.778101Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T11:00:54.778118Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:00:54.778253Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-10T11:00:54.778271Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:00:54.779772Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.779825Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.780284Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.780308Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 .2025-04-10T11:00:54.782865Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.782902Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.783265Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 
1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.783283Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.806335Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.806381Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.811488Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.811530Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.812185Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.812204Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-04-10T11:00:54.816859Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:00:54.816898Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TKqpScanData::EmptyColumns >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TKqpScanData::FailOnUnsupportedPgType >> TComputeScheduler::ResourceWeight [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRestartConsoleAndPools |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::UnboxedValueSize >> TComputeScheduler::QueryLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command err: 510 500 1510 1500 990 1000 1000 1000 |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TConsoleTests::TestCreateSharedTenant >> TKqpScanData::UnboxedValueSize [GOOD] >> 
KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute |71.1%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoSplit |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> 
LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestValidation >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> LdapAuthProviderTest::LdapServerIsUnavailable >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |71.1%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |71.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-04-10T11:00:49.305930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:49.306026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:49.360079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:50.928495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:50.928575Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:51.035115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:52.698651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:52.698742Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:52.740556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:54.010115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:54.010220Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:54.107434Z node 4 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:55.651372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:55.651450Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:55.706771Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:57.209215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:57.209304Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:57.295617Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:58.897322Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:58.897431Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:58.952176Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:00.488289Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:00.488386Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:00.566108Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:01.949169Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:01.949271Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:02.007827Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:03.855999Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:03.856091Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:03.912753Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:06.102859Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 268637729, Sender [11:144:2157], Recipient [11:358:2297]: {TEvControllerProposeConfigRequest Record# } 2025-04-10T11:01:06.102973Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvBlobStorage::TEvControllerProposeConfigRequest 2025-04-10T11:01:06.125344Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [11:319:2286], Recipient [11:318:2283]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936131 Status: OK ServerId: [11:411:2343] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:01:06.125475Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:01:06.175174Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:318:2283], Recipient [11:358:2297]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-x5vqckr7qy.auto.internal" Tenant: 
"" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-10T11:01:06.175633Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:318:2283], Recipient [11:365:2309]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-x5vqckr7qy.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-10T11:01:06.175721Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-04-10T11:01:06.175866Z node 11 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [11:318:2283]:1 2025-04-10T11:01:06.175991Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [11:318:2283]:1 2025-04-10T11:01:06.176083Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [11:318:2283]:1 version= 2025-04-10T11:01:06.176215Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:318:2283]) send TEvConfigSubscriptionResponse 2025-04-10T11:01:06.176396Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:365:2309], Recipient [11:412:2309]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-10T11:01:06.176472Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-10T11:01:06.176568Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:318:2283]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true 2025-04-10T11:01:06.177760Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [11:412:2309], Recipient [11:318:2283]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-04-10T11:01:06.177825Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-04-10T11:01:06.178103Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:412:2309], Recipient [11:318:2283]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-10T11:01:06.178165Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-10T11:01:06.184473Z node 11 :CMS_CONFIGS INFO: 
TLogSettingsConfigurator: got new config: 2025-04-10T11:01:06.184639Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-04-10T11:01:06.184714Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-04-10T11:01:06.184766Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-04-10T11:01:06.184799Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-04-10T11:01:06.184832Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-04-10T11:01:06.184864Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-04-10T11:01:06.184893Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-04-10T11:01:06.184922Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-04-10T11:01:06.184953Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-04-10T11:01:06.184982Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025-04-10T11:01:06.185011Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2025-04-10T11:01:06.185040Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_STATUS has been changed from WARN to DEBUG 2025-04-10T11:01:06.185066Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_NETWORK has been changed from WARN to NOTICE 2025-04-10T11:01:06.185096Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_NETWORK has been changed from WARN to DEBUG 2025-04-10T11:01:06.185125Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SESSION has been changed from WARN to NOTICE 2025-04-10T11:01:06.185155Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SESSION has been changed from WARN to DEBUG 2025-04-10T11:01:06.185184Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component HTTP has been changed from WARN to NOTICE 2025-04-10T11:01:06.185214Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component HTTP has been changed from WARN to DEBUG 2025-04-10T11:01:06.185243Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LOGGER has been changed from WARN to NOTICE 2025-04-10T11:01:06.185270Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LOGGER has been changed from WARN to DEBUG 2025-04-10T11:01:06.185303Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOBSTORAGE has been changed from WARN to NOTI ... 
ZER has been changed from 0 to 10 2025-04-10T11:01:11.639062Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639098Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639140Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-04-10T11:01:11.639175Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639213Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639242Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-04-10T11:01:11.639276Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639312Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639342Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-04-10T11:01:11.639380Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639436Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639473Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2025-04-10T11:01:11.639508Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639543Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639573Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-04-10T11:01:11.639620Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639655Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639683Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-04-10T11:01:11.639714Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639752Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639788Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-04-10T11:01:11.639826Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component 
TX_CONVEYOR has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639862Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639893Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-04-10T11:01:11.639921Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-04-10T11:01:11.639950Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-04-10T11:01:11.639976Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-04-10T11:01:11.640015Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640053Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640083Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-04-10T11:01:11.640112Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component SSA_GRAPH_EXECUTION has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640159Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component SSA_GRAPH_EXECUTION has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640190Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component SSA_GRAPH_EXECUTION has been changed from 0 to 10 2025-04-10T11:01:11.640221Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640306Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640341Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-04-10T11:01:11.640372Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640400Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640424Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-04-10T11:01:11.640453Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640483Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640507Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-04-10T11:01:11.640535Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640562Z node 14 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640586Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-04-10T11:01:11.640614Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640641Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640680Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-04-10T11:01:11.640708Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640735Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640761Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-04-10T11:01:11.640793Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640822Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640845Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-04-10T11:01:11.640874Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640900Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-04-10T11:01:11.640925Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-04-10T11:01:11.640954Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-04-10T11:01:11.640984Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-04-10T11:01:11.641008Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-04-10T11:01:11.641039Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-04-10T11:01:11.641065Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-04-10T11:01:11.641088Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-04-10T11:01:11.641130Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-04-10T11:01:11.641158Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for 
the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2025-04-10T11:01:11.641183Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2025-04-10T11:01:11.641212Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2025-04-10T11:01:11.641239Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2025-04-10T11:01:11.641269Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-04-10T11:01:11.641297Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT 2025-04-10T11:01:11.641323Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT 2025-04-10T11:01:11.641349Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2025-04-10T11:01:11.641510Z node 14 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |71.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |71.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> 
LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> Viewer::JsonStorageListingV2PDiskIdFilter >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26203, MsgBus: 63388 2025-04-10T11:00:30.515623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637711847617383:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:30.516099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1e/r3tmp/tmpetdSXe/pdisk_1.dat 2025-04-10T11:00:31.195005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:31.195486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:31.197455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:31.292998Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26203, node 1 2025-04-10T11:00:31.470342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:31.470366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:31.470372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:31.470485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63388 TClient is connected to server localhost:63388 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:32.236983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.288171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:32.314101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.623211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:32.958058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:33.100073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:35.237246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637733322455501:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.237390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.507749Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637711847617383:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:35.507850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:35.682698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.736451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.776551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.812481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.854025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.919534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:36.018067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637737617423317:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.018233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.018545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637737617423323:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:36.024991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:36.044082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637737617423325:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:36.197889Z node 1 :TX_PROXY ERROR: Actor# [1:7491637737617423380:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:37.800786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:40.068635Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:40.118262Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:40.166276Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 5424, MsgBus: 15166 2025-04-10T11:00:41.170777Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637759379616269:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:41.170829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1e/r3tmp/tmp5pZ0Gm/pdisk_1.dat 2025-04-10T11:00:41.423517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:41.441470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:41.441558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:41.446058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5424, node 2 2025-04-10T11:00:41.606822Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:41.606845Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:41.606853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:41.606970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15166 TClient is connected to server localhost:15166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:00:42.203077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:42.216350Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:42.237113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671565 ... ode 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637780854455020:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:46.404901Z node 2 :TX_PROXY ERROR: Actor# [2:7491637780854455077:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:47.877078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:50.787517Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:50.856343Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:50.941274Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29093, MsgBus: 24472 2025-04-10T11:00:52.149233Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637804783468146:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:52.158802Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1e/r3tmp/tmpBdkCyU/pdisk_1.dat 2025-04-10T11:00:52.388825Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:52.428367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:52.428483Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:52.438992Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29093, node 3 2025-04-10T11:00:52.618758Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:52.618776Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:52.618781Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:52.618880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24472 TClient is connected to server localhost:24472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:00:53.277829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:53.320706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:53.427809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:53.662472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:53.792682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:57.066280Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637804783468146:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:57.066387Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:57.294440Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637826258306220:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:57.294573Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:57.364425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.494588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.577363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.632841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.690886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.783757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:57.903255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637826258306736:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:57.903331Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:57.905144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637826258306741:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:57.909952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:57.929856Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637826258306743:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:00:57.989043Z node 3 :TX_PROXY ERROR: Actor# [3:7491637826258306798:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:59.536217Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:01:00.951374Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:00.991437Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:03.083227Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:03.159505Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:03.215598Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:05.125774Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:05.161357Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:05.249784Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:06.320201Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:06.418306Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:07.366368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:07.366411Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:08.259851Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:08.302733Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:08.332915Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:10.332076Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:10.411766Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:10.522345Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:11.603166Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:11.671234Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.416067Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.450341Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.497150Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.952338Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.974512Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:13.829528Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:13.867377Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |71.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> KqpProxy::NodeDisconnectedTest [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |71.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29579, MsgBus: 2398 2025-04-10T11:00:29.720446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637709046520606:2159];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:29.721077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e23/r3tmp/tmp41iYtH/pdisk_1.dat 2025-04-10T11:00:30.559914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:30.574770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:30.574885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T11:00:30.577167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29579, node 1 2025-04-10T11:00:30.862831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:30.862860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:30.862868Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:30.863011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2398 TClient is connected to server localhost:2398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:31.601699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.666538Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:31.685455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.909688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.150657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:32.290053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:34.628251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637730521358755:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.628396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:34.698298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637709046520606:2159];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:34.698385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:34.988871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.038882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.126588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.184146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.245205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.320506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:35.458659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637734816326580:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.458761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.459380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637734816326585:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:35.463944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:35.482005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637734816326588:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:35.545120Z node 1 :TX_PROXY ERROR: Actor# [1:7491637734816326645:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:36.840808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15181, MsgBus: 18774 2025-04-10T11:00:40.079809Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637750512283241:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e23/r3tmp/tmppNcOeK/pdisk_1.dat 2025-04-10T11:00:40.144173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:40.264943Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:40.271031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:40.271140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:40.278263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15181, node 2 2025-04-10T11:00:40.537894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:40.537919Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:40.537929Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:40.538082Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18774 TClient is connected to server localhost:18774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:41.351689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:41.362979Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:41.379536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:41.515929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10 ... 941Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:00:45.532007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637776282089155:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:45.610886Z node 2 :TX_PROXY ERROR: Actor# [2:7491637776282089211:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:46.939535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:48.175194Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:48.213001Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:48.288917Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:50.292495Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:50.331999Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:51.341104Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:51.367869Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:52.452592Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:52.903932Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:52.935227Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:53.992995Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:54.932374Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:00:55.231163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:00:55.231200Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 12826, MsgBus: 14354 2025-04-10T11:00:56.860305Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637822993977601:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e23/r3tmp/tmp1sP3ZC/pdisk_1.dat 2025-04-10T11:00:56.932996Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:57.016618Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:57.043416Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:57.043879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:57.045626Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12826, node 3 2025-04-10T11:00:57.211034Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:57.211068Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:57.211079Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:57.211245Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:14354 TClient is connected to server localhost:14354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:58.193065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:58.207296Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:00:58.222596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:58.392392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:58.663289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:58.784990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:01.714159Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637822993977601:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:01.714263Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:01.756647Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637844468815697:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.756726Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.821172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:01.871775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:01.948915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:01:02.039664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:01:02.120282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:01:02.235183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:01:02.407172Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637848763783522:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:02.407403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:02.410585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491637848763783527:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:02.420840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:01:02.459689Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491637848763783529:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:01:02.524631Z node 3 :TX_PROXY ERROR: Actor# [3:7491637848763783583:3464] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:04.157929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:01:06.589516Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:06.707091Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:11.187376Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:11.238267Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.012888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:12.012917Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:12.903579Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:12.950273Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:14.174286Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:16.202466Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:16.887415Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:17.674421Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:01:17.721621Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |71.2%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-04-10T11:00:57.331715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637826652851449:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:57.331759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001b06/r3tmp/tmpDthnm7/pdisk_1.dat 2025-04-10T11:00:58.193592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:58.193695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:58.197793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:58.266778Z node 1 :IMPORT WARN: Table profiles were not 
loaded TClient is connected to server localhost:3877 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:00:58.684555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:58.707094Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:01.570306Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:01:01.571343Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:01:01.584324Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZTM2ZjA2NmItMzdjNGZlZTEtZDdiNzc3M2YtNDNmYjE4M2U=, workerId: [1:7491637843832721234:2309], database: , longSession: 0, local sessions count: 1 2025-04-10T11:01:01.584383Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:01:01.584620Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZTM2ZjA2NmItMzdjNGZlZTEtZDdiNzc3M2YtNDNmYjE4M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7491637843832721234:2309] 2025-04-10T11:01:01.584640Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-04-10T11:01:01.585251Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTM2ZjA2NmItMzdjNGZlZTEtZDdiNzc3M2YtNDNmYjE4M2U=, ActorId: [1:7491637843832721234:2309], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-04-10T11:01:01.585382Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:01:01.585442Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:01:01.585468Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:01:01.585495Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:01:01.585529Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:01.585562Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:01.585654Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7491637830947819323:2287], selfId: [1:7491637826652851696:2278], source: [1:7491637843832721234:2309] 2025-04-10T11:01:01.588336Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:01.588366Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:01.588388Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:01.591797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637843832721237:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.591899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.598256Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-10T11:01:01.598281Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-04-10T11:01:13.709797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:13.710209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:13.710470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:13.711110Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:13.711471Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:13.711618Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001b06/r3tmp/tmp0kPJ7T/pdisk_1.dat 2025-04-10T11:01:14.351548Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10489 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1141:2687] 2025-04-10T11:01:14.884887Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM=, workerId: [3:1142:2375], database: , longSession: 1, local sessions count: 1 2025-04-10T11:01:14.885125Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM= 2025-04-10T11:01:14.885849Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-04-10T11:01:14.885938Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:14.886382Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1142:2375] 2025-04-10T11:01:14.886427Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:15.345544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1143:2688], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:15.345791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:15.346165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1144:2376], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:15.346282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:15.373599Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-04-10T11:01:15.373706Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM= status: TIMEOUT round: 0 2025-04-10T11:01:15.373857Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-10T11:01:15.373890Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM= status: TIMEOUT round: 0 2025-04-10T11:01:15.374072Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg0YTRlZWYtOTZkMGFiZTItOWM1OTEzYTktZGI3YzQwZjM=, ActorId: [3:1142:2375], ActorState: ExecuteState, TraceId: 01jrfnrx061410mcenhn5gcg4c, Create QueryResponse for error on request, msg: 2025-04-10T11:01:15.374337Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-10T11:01:15.376932Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1142:2375] 2025-04-10T11:01:15.377173Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 2 2025-04-10T11:01:15.380178Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NmUyNTcwOGYtYzk4NTQyZGYtODg3M2M1ZWMtZmFmMWI2NmM=, workerId: [3:1165:2380], database: , longSession: 1, local sessions count: 2 2025-04-10T11:01:15.380403Z node 3 :KQP_PROXY DE ... QP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZTYyMzJlNDAtMmI4NDg4NDAtNTc1NDRhMmUtZjUwYmNhOTU=, workerId: [3:1411:2518], database: , longSession: 1, local sessions count: 56 2025-04-10T11:01:18.582929Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:01:18.583301Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1141:2687], trace_id: 2025-04-10T11:01:18.583438Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.594249Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-04-10T11:01:18.594347Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=ZTYyMzJlNDAtMmI4NDg4NDAtNTc1NDRhMmUtZjUwYmNhOTU= status: TIMEOUT round: 0 2025-04-10T11:01:18.594490Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-10T11:01:18.596610Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY=, workerId: [3:1412:2519], database: , longSession: 1, local sessions count: 57 2025-04-10T11:01:18.596791Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY= 2025-04-10T11:01:18.597332Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-04-10T11:01:18.597385Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.597795Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1412:2519] 2025-04-10T11:01:18.597855Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.673700Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1414:2520], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.673847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1413:2750], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.673944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.674048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.686946Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-04-10T11:01:18.687021Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY= status: TIMEOUT round: 0 2025-04-10T11:01:18.687161Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-04-10T11:01:18.687193Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY= status: TIMEOUT round: 0 2025-04-10T11:01:18.687318Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-10T11:01:18.687408Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjA0NTBkYjQtNDM3MjZkNDItNjM1MDU1YTktZjdjZDQwOGY=, ActorId: [3:1412:2519], ActorState: ExecuteState, TraceId: 01jrfns0m6925q5qyy14csexw7, Create QueryResponse for error on request, msg: 2025-04-10T11:01:18.689551Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1412:2519] 2025-04-10T11:01:18.689782Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-04-10T11:01:18.693255Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YWZkOTM5ZDctYTQ1ZDE1ODItYTkyYzdlNC1hOGJhZjk0Zg==, workerId: [3:1419:2523], database: , longSession: 1, local sessions count: 58 2025-04-10T11:01:18.693452Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:01:18.693875Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1141:2687], trace_id: 2025-04-10T11:01:18.693984Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.694132Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=YWZkOTM5ZDctYTQ1ZDE1ODItYTkyYzdlNC1hOGJhZjk0Zg==, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-04-10T11:01:18.694326Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1141:2687], selfId: [2:206:2171], source: [3:236:2127] 2025-04-10T11:01:18.696109Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg==, workerId: [3:1420:2524], database: , longSession: 1, local sessions count: 59 2025-04-10T11:01:18.696253Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg== 2025-04-10T11:01:18.696756Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-04-10T11:01:18.696820Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.697196Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1420:2524] 2025-04-10T11:01:18.697241Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.698488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1421:2752], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.698632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.728809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1424:2525], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.728955Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:18.742629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:18.742791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:18.764191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:18.764326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:18.784350Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:01:18.785018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:18.785419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:18.798557Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-04-10T11:01:18.798629Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg== status: TIMEOUT round: 0 2025-04-10T11:01:18.798722Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-04-10T11:01:18.798747Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-04-10T11:01:18.798796Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-04-10T11:01:18.798827Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg== status: TIMEOUT round: 0 2025-04-10T11:01:18.798963Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmMyMGFmZWUtZThmMjIzNmQtZTVhNTFlMGMtMWIyYWI0Mg==, ActorId: [3:1420:2524], ActorState: ExecuteState, TraceId: 01jrfns0q9astf8fss0y4dm3sp, Create QueryResponse for error on request, msg: 2025-04-10T11:01:18.800788Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-04-10T11:01:18.800983Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1420:2524] 2025-04-10T11:01:18.801216Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-04-10T11:01:18.803148Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YTg2MDBiMS1mMzAwNzNhNC1iMWExNmRkLWU0N2I4YTM=, workerId: [3:1442:2528], database: , longSession: 1, local sessions count: 60 2025-04-10T11:01:18.803318Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:01:18.803714Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1141:2687], trace_id: 2025-04-10T11:01:18.803832Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-04-10T11:01:18.830436Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-04-10T11:01:18.830518Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=YTg2MDBiMS1mMzAwNzNhNC1iMWExNmRkLWU0N2I4YTM= status: TIMEOUT round: 0 2025-04-10T11:01:18.830666Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1141:2687], selfId: [2:206:2171], source: 
[2:206:2171] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:21.479014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:21.479139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:21.479237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:21.479300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:21.479354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:21.479412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:21.479499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:21.479601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:21.480006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:21.671658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:21.671735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:21.692817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:21.693201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:21.693418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:21.717662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:21.717989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:21.718837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:21.719320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:21.722340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:21.723910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:21.723990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:21.724210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:21.724265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:21.724308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:21.724483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.732294Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:21.894796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:21.895085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.895365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:21.895645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:21.895708Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.900320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:21.900491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:21.900716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.900792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:21.900841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:21.900889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:21.904311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.904392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:01:21.904435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:01:21.910844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.910928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.910973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:21.911029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:01:21.915144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:21.918394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:01:21.918617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:01:21.919712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:21.919867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:21.919911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:21.920258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:01:21.920319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:21.920518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:21.920610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:01:21.931100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:21.931174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:21.931403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:21.931446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:01:21.931698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:21.931745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:01:21.931858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:21.931911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:21.931952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:21.931999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:21.932062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:01:21.932111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:21.932166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:01:21.932202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:01:21.932285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:21.932323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:01:21.932357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:01:21.943152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:21.943345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:21.943388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1/1 2025-04-10T11:01:23.435147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:23.436492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-10T11:01:23.437004Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 2025-04-10T11:01:23.437200Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2025-04-10T11:01:23.437861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-10T11:01:23.438200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-04-10T11:01:23.438562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-10T11:01:23.438815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 Forgetting tablet 72075186233409569 2025-04-10T11:01:23.439315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 Forgetting tablet 72075186233409568 2025-04-10T11:01:23.442800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-10T11:01:23.442955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-04-10T11:01:23.443529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:23.443677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:23.443750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-04-10T11:01:23.443872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-10T11:01:23.444022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-10T11:01:23.444071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:01:23.444114Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-10T11:01:23.444148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:01:23.444211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:23.444291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-10T11:01:23.444340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-10T11:01:23.444401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:01:23.444450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-10T11:01:23.444485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-10T11:01:23.444628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-10T11:01:23.444688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-10T11:01:23.444731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-04-10T11:01:23.444768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-04-10T11:01:23.445223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-10T11:01:23.445305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-10T11:01:23.445478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-10T11:01:23.445546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-10T11:01:23.447523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:01:23.447570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:01:23.447726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:23.447763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:23.448000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-04-10T11:01:23.448165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:23.448204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-10T11:01:23.448248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-04-10T11:01:23.448778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:01:23.448862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:01:23.448901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-10T11:01:23.448970Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-04-10T11:01:23.449036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-10T11:01:23.449524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:01:23.449575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-10T11:01:23.449649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:23.450071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:01:23.450173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:01:23.450207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-10T11:01:23.450241Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-04-10T11:01:23.450285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:23.450366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-10T11:01:23.450770Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-04-10T11:01:23.450942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-10T11:01:23.451164Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-04-10T11:01:23.451414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-10T11:01:23.454469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 
2025-04-10T11:01:23.454887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:01:23.456357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-10T11:01:23.456484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-10T11:01:23.456575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-04-10T11:01:23.457294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-04-10T11:01:23.457354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-04-10T11:01:23.458200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-04-10T11:01:23.458337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-04-10T11:01:23.458397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1663:3533] TestWaitNotification: OK eventTxId 129 >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> TBSV::CleanupDroppedVolumesOnRestart >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates |71.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] Test command err: 2025-04-10T11:00:52.010956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:52.011040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:52.108063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:53.975500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:53.975582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:54.063533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:55.567720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:55.567805Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:55.634639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:57.276519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:57.276601Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:57.327519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:58.969116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:58.969195Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:59.017001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2025-04-10T11:01:00.817351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:00.817447Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:00.902470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:02.710856Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:02.710943Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:02.779583Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:04.334675Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:04.334768Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:04.383695Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:06.012521Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:06.012630Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:06.074779Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:07.749210Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:07.749290Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:07.794420Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:09.935038Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:09.935134Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:10.009836Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:12.214136Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:12.218413Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:12.339839Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:14.165978Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:14.166111Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:14.216382Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:15.870793Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:15.870901Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:15.947203Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2025-04-10T11:01:18.371551Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:18.371667Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:18.435993Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:20.476036Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:20.476151Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:20.564031Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:22.597279Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:22.597381Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:22.679357Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:24.746070Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:24.746186Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:24.793301Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:26.381250Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:26.381354Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:26.459821Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:27.278037Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml#
---
metadata:
  kind: MainConfig
  cluster: ""
  version: 1
config:
  log_config:
    cluster_name: cluster1
allowed_labels:
  test:
    type: enum
    values:
      ? true
selector_config: []
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:28.296187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:28.296331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:28.296423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:28.296472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:28.296524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:28.296562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:28.296626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:28.296712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:28.297109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:28.396254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:28.396331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:28.446095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:28.446546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:28.446763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:28.469526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:28.469836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:28.470656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:28.471111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:28.479127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:28.480855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:28.480931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:28.481465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
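Before the TBSV trace continues, note that the BSC26 error above is self-diagnosing: the parser check at ydb/library/yaml_config/yaml_config_parser.cpp:1268 rejects the Console-supplied document because it carries only a log_config section and no domains_config. Below is a sketch of the same document extended so that `config.HasDomainsConfig()` would hold; the contents of the added stanza are an assumption (the log only proves the subtree must exist, not what a real deployment needs inside it):

    ---
    metadata:
      kind: MainConfig
      cluster: ""
      version: 1
    config:
      log_config:
        cluster_name: cluster1
      domains_config:      # the subtree whose absence trips the BSC26 check
        domain:
        - name: Root       # assumed minimal content, not taken from the log
    allowed_labels:
      test:
        type: enum
        values:
          ? true
    selector_config: []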
2025-04-10T11:01:28.481533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:28.481579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:28.481761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.503535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:28.705875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:28.706199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.706482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:28.706753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:28.706836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.709785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:28.709999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:28.710264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.710332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:28.710371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:28.710407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:28.713169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.713262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:01:28.713312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:01:28.716117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.716192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.716235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:01:28.716287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:01:28.720391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:28.723165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:01:28.723432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:01:28.724743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:28.724963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:28.725032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:28.725433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:01:28.725492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:28.725728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:28.725838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:01:28.728774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:28.728828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:28.729082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:28.729132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:01:28.729477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:28.729545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:01:28.729682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:28.729725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:28.729773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:28.729825Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:28.729904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:01:28.729962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:28.730013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:01:28.730053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:01:28.730173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:28.730251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:01:28.730310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:01:28.733221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:28.733364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:28.733439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... -10T11:01:29.130512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.130642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.130720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.151337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:01:29.166420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:29.167263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:29.167350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:29.167611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:01:29.167651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:01:29.167751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:29.168318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.180376Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.180495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
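A reading aid for the "Change state for txid 1:0" entries in this trace: the numeric states line up with the named sub-operation phases logged around them (TCreateParts logs just before 2 -> 3, TConfigureParts before 3 -> 128, TPropose before 128 -> 240 on the coordinator's plan step, and TDone once 240 is reached). The labels below are inferred from that pairing, not taken from the schemeshard sources:

    // Hypothetical names inferred from the trace above; the real enum in
    // ydb/core/tx/schemeshard may differ in naming and coverage.
    enum class ETxPhase : unsigned {
        CreateParts    = 2,   // shards (if any) are being created
        ConfigureParts = 3,   // parts receive their configuration
        Propose        = 128, // waiting for the coordinator's plan step
        Done           = 240, // terminal: "Operation and all the parts is done"
    };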
2025-04-10T11:01:29.180558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:29.181105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:469:2058] recipient: [1:15:2062] 2025-04-10T11:01:29.247665Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:01:29.247947Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 302us result status StatusPathDoesNotExist 2025-04-10T11:01:29.248118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:01:29.249156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:470:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:473:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:474:2058] recipient: [1:472:2432] Leader for TabletID 72057594046678944 is [1:475:2433] sender: [1:476:2058] recipient: [1:472:2432] 2025-04-10T11:01:29.346003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:29.346113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:29.346421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:29.346469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:29.346507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:29.346534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:29.346603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:29.346707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:29.347048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:29.365652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:29.367293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:29.367518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:29.367642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:29.367681Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.368092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:29.368924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:29.369040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.369137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.369603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.369692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:01:29.369949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.370983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.371103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.371517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.371611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.371822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.371942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.372984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.373073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.411306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:29.411421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:29.411856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:29.411934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:29.411985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:29.412133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:475:2433] sender: [1:536:2058] recipient: [1:15:2062] 2025-04-10T11:01:29.451417Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:01:29.451711Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 330us result status StatusPathDoesNotExist 2025-04-10T11:01:29.451898Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-04-10T11:01:05.445721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637860276081033:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:05.457559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a1/r3tmp/tmp3m9XL4/pdisk_1.dat 2025-04-10T11:01:06.461301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:06.461454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:06.461645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:06.472280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:06.488324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 11970, node 1 2025-04-10T11:01:06.716029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:06.716053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:06.716059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:06.716169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:06.916365Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:06.929937Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:06.929975Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:06.930738Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23380, port: 23380 2025-04-10T11:01:06.931461Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:06.961337Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:07.006613Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:07.010117Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:07.010215Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:07.057944Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:07.102465Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:07.104904Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****k7gA (C114546E) () has now valid token of ldapuser@ldap 2025-04-10T11:01:10.438679Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****k7gA (C114546E) 2025-04-10T11:01:10.438778Z node 1 
:LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23380, port: 23380 2025-04-10T11:01:10.438854Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:10.464999Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:10.465348Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:23380 return no entries 2025-04-10T11:01:10.465540Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637860276081033:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:10.465616Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****k7gA (C114546E) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:23380 return no entries)' 2025-04-10T11:01:10.465664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:13.451966Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****k7gA (C114546E) 2025-04-10T11:01:18.330415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637917616470377:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015a1/r3tmp/tmpl8fvzU/pdisk_1.dat 2025-04-10T11:01:18.482266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:18.628910Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:18.657841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:18.665382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:18.666975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22467, node 2 2025-04-10T11:01:18.798671Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:18.798696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:18.798717Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:18.798835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.950306Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.958409Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.958442Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.966973Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5991, port: 5991 2025-04-10T11:01:18.967035Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:18.986322Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 
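The TLdapUtilsSearchFilterCreatorTest cases reported earlier (GetDefaultFilter, GetFilterWithOneLoginPlaceholder, GetFilterWithSearchAttribute, GetFilterWithFewLoginPlaceholders, all [GOOD]) exercise exactly the step that produces the `filter: uid=ldapuser` strings in this trace: substituting the login into a filter template. The `member:1.2.840.113556.1.4.1941:=` filter that follows it uses the Active Directory in-chain matching rule OID for transitive (nested) group membership. A minimal sketch of the substitution, with the $username token and the function name assumed for illustration rather than taken from the ldap_auth_provider sources:

    #include <string>

    // Replaces every occurrence of the placeholder with the login; a
    // template with several placeholders covers the
    // GetFilterWithFewLoginPlaceholders case.
    std::string BuildSearchFilter(std::string filterTemplate, const std::string& login) {
        const std::string placeholder = "$username"; // assumed token
        for (size_t pos = filterTemplate.find(placeholder);
             pos != std::string::npos;
             pos = filterTemplate.find(placeholder, pos + login.size())) {
            filterTemplate.replace(pos, placeholder.size(), login);
        }
        return filterTemplate;
    }

    // BuildSearchFilter("uid=$username", "ldapuser") yields "uid=ldapuser",
    // matching the search filter logged above; advancing past the inserted
    // login avoids rescanning it if the login itself contains the token.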
2025-04-10T11:01:18.987024Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. Server is busy 2025-04-10T11:01:18.987280Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****X6JA (351ACF8D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. Server is busy)' 2025-04-10T11:01:18.987576Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.987592Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.994640Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5991, port: 5991 2025-04-10T11:01:18.994700Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:19.014307Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:19.014726Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. Server is busy 2025-04-10T11:01:19.014947Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****X6JA (351ACF8D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. Server is busy)' 2025-04-10T11:01:21.224560Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****X6JA (351ACF8D) 2025-04-10T11:01:21.225246Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:21.225284Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:21.226553Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5991, port: 5991 2025-04-10T11:01:21.226653Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:21.248463Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:21.248973Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. Server is busy 2025-04-10T11:01:21.249214Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****X6JA (351ACF8D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5991. 
Server is busy)' 2025-04-10T11:01:23.301605Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637917616470377:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:23.301688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:25.239082Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****X6JA (351ACF8D) 2025-04-10T11:01:25.239293Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:25.239309Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:25.239900Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5991, port: 5991 2025-04-10T11:01:25.239971Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:25.260706Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:25.306482Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:25.307187Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:25.307251Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:25.350948Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:25.403053Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:25.404418Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****X6JA (351ACF8D) () has now valid token of ldapuser@ldap |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-04-10T11:00:50.683622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:50.683709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:50.739498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:52.186227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:52.186305Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:52.236699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:53.502912Z node 3 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:53.502986Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:53.555804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:55.022696Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:55.022782Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:55.072193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:57.311317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:57.311399Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:57.399546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:14.180992Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:14.181080Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:14.232334Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:16.626608Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:16.626713Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:16.715983Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:23.465943Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:23.466047Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:23.552500Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:24.937186Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:24.937290Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:25.006514Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:26.667533Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:26.667648Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:26.756033Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:29.081450Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:29.081568Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.186889Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:29.540446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:29.540583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:29.540647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:29.540692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:29.540744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:29.540783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:29.540861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:29.540952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:29.541358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:29.652669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:29.652757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.677273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:29.677675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:29.677888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:29.691432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:29.691708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:29.692524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:29.692973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:29.695451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:29.696974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:29.697040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:29.697247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:29.697297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:29.697337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:29.697569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.706586Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:29.866396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:29.866715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.867022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:29.867342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:29.867411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.878893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:29.879217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:29.879544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.879627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:29.879684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:29.879739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:29.891495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.891631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:01:29.891682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:01:29.899402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.899493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.903169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:29.903305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:01:29.907954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:29.914631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:01:29.914917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:01:29.916372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:29.916556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:29.916619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:29.917040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:01:29.917108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:29.917338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:29.917462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:01:29.931644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:29.931733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:29.932455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:29.932577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:01:29.932950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:29.933023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:01:29.933152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:29.933208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:29.933273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:29.933327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-10T11:01:29.933377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:01:29.933448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:29.933512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:01:29.933550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:01:29.933663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:29.933720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:01:29.933765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:01:29.936760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:29.936903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:29.936947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:30.124292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:01:30.124666Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:01:30.125711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:01:30.126089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-10T11:01:30.127384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:01:30.137149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:01:30.137536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:01:30.137856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:01:30.137938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:01:30.138563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-10T11:01:30.138741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:30.138820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:01:30.138992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:01:30.139178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:01:30.139227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:01:30.139292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:01:30.139347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:01:30.139416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:30.139513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:01:30.139568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:01:30.139628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:01:30.139689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:01:30.139737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:01:30.139886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:01:30.139935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:01:30.139975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:01:30.140033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:01:30.140764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:01:30.140843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:01:30.154312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:01:30.154394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:01:30.154584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:30.154642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:30.154827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, 
LocalPathId: 2] 2025-04-10T11:01:30.154981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:30.155030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:01:30.155074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:01:30.155645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:01:30.155741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:01:30.155808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:01:30.155855Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:01:30.155906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:01:30.156405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:01:30.156461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:01:30.156545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:30.156990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:01:30.157063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:01:30.157093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:01:30.157139Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:01:30.157174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:30.157251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:01:30.158077Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-10T11:01:30.162518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:30.163076Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-10T11:01:30.163410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:01:30.173124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:01:30.173964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:01:30.174068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:01:30.179465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:01:30.179660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:01:30.180079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:01:30.180132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:01:30.180620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:01:30.180732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:01:30.180772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2372] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-10T11:01:30.181200Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T11:01:30.181311Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad |71.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |71.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TCacheTest::Attributes >> KqpScanSpilling::SelfJoinQueryService >> KikimrIcGateway::TestCreateExternalTable >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> KikimrIcGateway::TestCreateSameExternalTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> UpsertLoad::ShouldWriteDataBulkUpsert >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant >> TCacheTest::CheckAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-04-10T11:01:34.081550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:34.081623Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:01:34.278367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 
2025-04-10T11:01:34.299682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-10T11:01:34.943473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:34.943552Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:01:35.023475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:01:35.036337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-10T11:01:35.040296Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:197:2187], for# user1@builtin, access# DescribeSchema 2025-04-10T11:01:35.040741Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:201:2191], for# user1@builtin, access# DescribeSchema >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> KqpProxy::DatabasesCacheForServerless [GOOD] |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |71.3%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-04-10T11:00:59.179819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637835731035277:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:59.179895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:59.362562Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637834628247533:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:59.362748Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:59.466570Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637834897969286:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:59.466748Z node 5 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:00.237484Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637835595484725:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.238070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:00.496595Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001b02/r3tmp/tmpGqC0QA/pdisk_1.dat 2025-04-10T11:01:00.503288Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637834977422107:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.503438Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:00.511252Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:00.571865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.430035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.610325Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.636732Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.693834Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.694878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.784970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:01.962940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:01.974617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:01.998817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:01.998923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:02.000360Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:02.000422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:02.039931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:02.040018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:02.040186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:02.040239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:02.050454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:02.090992Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:01:02.091071Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-10T11:01:02.092086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:01:02.092114Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:01:02.101034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:02.155548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:02.155915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:02.156681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:04.180693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637835731035277:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:04.180775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:04.248514Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637835595484725:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:04.248604Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:04.280819Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637834977422107:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:04.280885Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:04.367504Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491637834628247533:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:04.374524Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:04.462342Z node 5 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491637834897969286:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:04.462401Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:7198 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:01:05.069193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:13.575900Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:01:13.589374Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:01:13.638558Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:01:13.638613Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:01:13.638650Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:01:13.638707Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:01:13.638886Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:13.638952Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:01:13.646971Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:01:13.646997Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:01:13.647053Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:01:13.647199Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:01:13.647208Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:01:13.647227Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:01:13.647266Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:01:13.647272Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:01:13.647299Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:01:13.667172Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is sma ... 154708Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:26.154786Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:26.154870Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:26.154946Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:26.155164Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:26.155275Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:26.165706Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:26.376328Z node 8 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2025-04-10T11:01:26.378371Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:26.610520Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:01:26.715914Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637951053217516:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:26.715973Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:26.798212Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:26.798333Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:26.826310Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-04-10T11:01:26.830482Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:27.364987Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:27.365121Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:27.369918Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370116Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370223Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370329Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370434Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370520Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370641Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-04-10T11:01:27.370726Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.370825Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:01:27.387004Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:27.872879Z node 7 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false 2025-04-10T11:01:27.890604Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:28.046371Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:28.497368Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:28.756172Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7491637959643153074:2542], Database: /Root/test-serverless, Start database fetching 2025-04-10T11:01:28.756391Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7491637959643153074:2542], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-04-10T11:01:30.926391Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7491637949847999336:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:30.926495Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:31.722462Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491637951053217516:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:31.722580Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:33.772755Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-10T11:01:33.792531Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:01:33.792756Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-04-10T11:01:33.792986Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:01:33.950944Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T11:01:33.951018Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7491637984207738537:2351], Start check tables existence, number paths: 2 2025-04-10T11:01:33.952624Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T11:01:33.952641Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T11:01:33.975186Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7491637984207738537:2351], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T11:01:33.975280Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TCleanupTablesActor] ActorId: [8:7491637984207738537:2351], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T11:01:33.975319Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7491637984207738537:2351], Successfully finished 2025-04-10T11:01:33.975367Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T11:01:33.982469Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-10T11:01:35.016979Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T11:01:35.017304Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T11:01:35.017325Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T11:01:35.017382Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Start check tables existence, number paths: 2 2025-04-10T11:01:35.030479Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-10T11:01:35.030607Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status LookupError 2025-04-10T11:01:35.030768Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:35.030807Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status LookupError 2025-04-10T11:01:35.030938Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:35.130283Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Start check tables existence, number paths: 2 2025-04-10T11:01:35.134555Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status LookupError 2025-04-10T11:01:35.134693Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:35.134731Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status LookupError 2025-04-10T11:01:35.266283Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Start check tables existence, number paths: 2 2025-04-10T11:01:35.271735Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status LookupError 2025-04-10T11:01:35.271943Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:35.271987Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status LookupError 2025-04-10T11:01:35.614366Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Start check tables existence, number paths: 2 2025-04-10T11:01:35.614924Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status LookupError 2025-04-10T11:01:35.615059Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:35.615095Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7491637989707924311:2379], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status LookupError >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-10T11:01:06.947462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637866982099066:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:06.947494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmpkGYZ37/pdisk_1.dat 2025-04-10T11:01:08.054939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:08.060463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:08.060585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:08.077698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:08.078125Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28218, node 1 2025-04-10T11:01:08.200122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:08.200147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:08.200155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:08.200298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:08.454517Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:08.456044Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:08.456087Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:08.458074Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:61794, port: 61794 2025-04-10T11:01:08.458253Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:08.542820Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:08.586702Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:08.632063Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****dG3g (5C7A0525) () has now valid token of ldapuser@ldap 2025-04-10T11:01:12.571438Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637892426827743:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:12.571580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmpdEScu5/pdisk_1.dat 2025-04-10T11:01:12.812758Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:12.828909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:12.829045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:12.830796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10135, node 2 2025-04-10T11:01:12.921110Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:12.921134Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:12.921147Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:12.921266Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:13.186190Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:13.189790Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:13.189812Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:13.190661Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: 
scheme: ldaps, uris: ldaps://localhost:27399, port: 27399 2025-04-10T11:01:13.190742Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:13.258615Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:13.306458Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:13.307247Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:13.307313Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:13.362552Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:13.410480Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:13.411825Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****nefA (29FB2465) () has now valid token of ldapuser@ldap 2025-04-10T11:01:16.938265Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637910267890969:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:16.938333Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmp2L2vJ9/pdisk_1.dat 2025-04-10T11:01:17.522811Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:17.568207Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:17.568294Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:17.575323Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31904, node 3 2025-04-10T11:01:17.769326Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:17.769349Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:17.769358Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:17.769513Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.241861Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.254063Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.254098Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.254862Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: 
ldaps://localhost:1448, port: 1448 2025-04-10T11:01:18.254921Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:18.326382Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:18.378614Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****uxZA (2740FF38) () has now valid token of ldapuser@ldap 2025-04-10T11:01:22.435322Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637936248676939:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:22.466083Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmpygDHuX/pdisk_1.dat 2025-04-10T11:01:22.648962Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:22.689961Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:22.690083Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:22.695483Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9582, node 4 2025-04-10T11:01:22.778663Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:22.778685Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:22.778692Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:22.778810Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:22.974954Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:22.975277Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:22.975297Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:22.976137Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:7995 ldaps://localhost:7995 ldaps://localhost:11111, port: 7995 2025-04-10T11:01:22.976217Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:23.034954Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:23.082508Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:23.083225Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:23.083281Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:23.131823Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:23.182486Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:23.183794Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****yW-Q (02BD3DAF) () has now valid token of ldapuser@ldap 2025-04-10T11:01:27.323718Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637958746929849:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:27.396582Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmpyyE10p/pdisk_1.dat 2025-04-10T11:01:27.635989Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:27.664211Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:27.664317Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:27.667746Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13010, node 5 2025-04-10T11:01:27.757585Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:27.757610Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:27.757617Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:27.757745Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:28.354499Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:28.358768Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:28.358817Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:28.359829Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19071, port: 19071 2025-04-10T11:01:28.359907Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:28.438751Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:28.483285Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:28.483895Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:28.483949Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.528938Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.571186Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.572547Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****MlIw (7A7AEB1D) () has now valid token of ldapuser@ldap 2025-04-10T11:01:32.703070Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491637978195198846:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:32.703147Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001584/r3tmp/tmpyEnJUR/pdisk_1.dat 2025-04-10T11:01:33.159329Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:33.204603Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:33.204697Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:33.217264Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8856, node 6 2025-04-10T11:01:33.422961Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:33.422987Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:33.422998Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:33.423161Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:33.818361Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:33.830408Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:33.830443Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:33.831276Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1371, port: 1371 2025-04-10T11:01:33.831367Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:33.921820Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-10T11:01:33.921940Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:1371. Bad search filter 2025-04-10T11:01:33.922571Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****-YFA (DEB3D612) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:1371. 
Bad search filter)' >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-10T11:01:05.287990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637863851996519:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:05.288032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmpdvzekX/pdisk_1.dat 2025-04-10T11:01:06.361344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:06.402370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:06.430691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:06.430800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:06.461761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28983, node 1 2025-04-10T11:01:06.694225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:06.694259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:06.694267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:06.694412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:06.914335Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:06.919970Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:06.920011Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:06.920795Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22957, port: 22957 2025-04-10T11:01:06.921481Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:06.954352Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:06.998672Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:07.047239Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****7A9g (3F31B1FC) () has now valid token of ldapuser@ldap 2025-04-10T11:01:11.688198Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637889165593917:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:11.780339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmpihVQ0s/pdisk_1.dat 2025-04-10T11:01:12.120658Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:12.123662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:12.134590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:12.143159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8033, node 2 2025-04-10T11:01:12.398783Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:12.398809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:12.398816Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:12.398951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:12.670297Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:12.673501Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:12.673530Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:12.674132Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19846, port: 19846 2025-04-10T11:01:12.674204Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:12.691070Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:12.738949Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:12.739540Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:12.739600Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:12.790423Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:12.835802Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:12.837281Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****FcHg (244F9E91) () has now valid token of ldapuser@ldap 2025-04-10T11:01:17.168326Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637915883025830:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmpFarPyf/pdisk_1.dat 2025-04-10T11:01:17.367724Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:17.699158Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:17.712730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:17.712844Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:17.723390Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15346, node 3 2025-04-10T11:01:17.911066Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:17.911094Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:17.911103Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:17.911243Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.276151Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.282475Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.282508Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.283144Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17476, port: 17476 2025-04-10T11:01:18.283230Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:18.309753Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:18.358677Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****NmaQ (F4418947) () has now valid token of ldapuser@ldap 2025-04-10T11:01:22.021392Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637936778258339:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:22.021451Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmpFZifSv/pdisk_1.dat 2025-04-10T11:01:22.297207Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:22.331111Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:22.331197Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:22.335586Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1772, node 4 2025-04-10T11:01:22.470814Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:22.470839Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:22.470846Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:22.470978Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:22.676138Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:22.676447Z node 4 :TICKET_PARSER TRACE: 
CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:22.676464Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:22.677166Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:6250 ldap://localhost:6250 ldap://localhost:11111, port: 6250 2025-04-10T11:01:22.677252Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:22.706438Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:22.751723Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:22.752338Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:22.752400Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:22.794545Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:22.838556Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:22.841251Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****TACg (25A45A8C) () has now valid token of ldapuser@ldap 2025-04-10T11:01:27.163484Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637958271828085:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmpOpnnDX/pdisk_1.dat 2025-04-10T11:01:27.339528Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:27.576073Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:27.628968Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:27.629075Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:27.631697Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10214, node 5 2025-04-10T11:01:27.882853Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:27.882883Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:27.882891Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:27.883042Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:27.999607Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:28.003276Z node 5 :TICKET_PARSER TRACE: 
CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:28.003318Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:28.004034Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21305, port: 21305 2025-04-10T11:01:28.004125Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:28.030869Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:28.089216Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:28.089976Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:28.090023Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.134518Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.182453Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:28.189054Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****wmmg (D07490F4) () has now valid token of ldapuser@ldap 2025-04-10T11:01:33.496919Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491637980479047750:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:33.496964Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001598/r3tmp/tmp0PcRDT/pdisk_1.dat 2025-04-10T11:01:33.861814Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:33.905881Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:33.905976Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:33.914115Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6417, node 6 2025-04-10T11:01:34.138961Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:34.138991Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:34.139000Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:34.139177Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:34.406309Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:34.409084Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , 
token db /Root, DomainLoginOnly 1 2025-04-10T11:01:34.409113Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:34.409840Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13670, port: 13670 2025-04-10T11:01:34.409905Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:34.458839Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-10T11:01:34.458935Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:13670. Bad search filter 2025-04-10T11:01:34.459212Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****kUdQ (313F7613) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:13670. Bad search filter)' >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |71.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-04-10T11:01:06.459332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637865278052586:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:06.459377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmpZsQbWG/pdisk_1.dat 2025-04-10T11:01:07.352627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:07.368325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:07.368445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:07.384907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19572, node 1 2025-04-10T11:01:07.668438Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:07.668465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:07.668472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:07.668617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:08.192406Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:08.196260Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:08.196295Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:08.197633Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20179, port: 20179 2025-04-10T11:01:08.197720Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:08.232644Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:08.275381Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:08.322617Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:08.323264Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:08.323308Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:08.366499Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:08.415782Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:08.417324Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****sq3g (30BD1735) () has now valid token of ldapuser@ldap 2025-04-10T11:01:13.779792Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637895152997088:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmpECDhrd/pdisk_1.dat 2025-04-10T11:01:13.948979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:14.030548Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:14.033309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:14.033374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:14.043618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15678, node 2 
2025-04-10T11:01:14.286869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:14.286893Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:14.286900Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:14.287020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:14.578374Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:14.582443Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:14.582477Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:14.583289Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29096, port: 29096 2025-04-10T11:01:14.583367Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:14.620646Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:14.670707Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:14.719181Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****vXMA (FFBC2DC3) () has now valid token of ldapuser@ldap 2025-04-10T11:01:19.668791Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637922229153148:2172];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmpr2ZG6a/pdisk_1.dat 2025-04-10T11:01:19.759206Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:19.873194Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:19.877957Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:19.878059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:19.879369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3828, node 3 2025-04-10T11:01:19.980179Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:19.980207Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:19.980224Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:19.980391Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:20.270328Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:20.271160Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:20.271190Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:20.271914Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:61895 ldap://localhost:61895 ldap://localhost:11111, port: 61895 2025-04-10T11:01:20.272026Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:20.298552Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 
2025-04-10T11:01:20.354033Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:20.406427Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:20.410654Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:20.410707Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:20.462532Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:20.511268Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:20.512416Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****tCfQ (48336901) () has now valid token of ldapuser@ldap 2025-04-10T11:01:24.192723Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637945372837142:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:24.320360Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmpFAq5i3/pdisk_1.dat 2025-04-10T11:01:24.459553Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:24.492167Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:24.492273Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:24.494445Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16239, node 4 2025-04-10T11:01:24.602877Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:24.602904Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:24.602914Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:24.603056Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:24.802314Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:24.803208Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:24.803237Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:24.803909Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13099, port: 13099 2025-04-10T11:01:24.803991Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:24.832059Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:24.878984Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:24.934164Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****7vrw (96F02AEB) () has now valid token of ldapuser@ldap 2025-04-10T11:01:29.201127Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637966089990626:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:29.201288Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmp2xSuNe/pdisk_1.dat 2025-04-10T11:01:29.621645Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:29.621736Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:29.623540Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:29.638435Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27156, node 5 2025-04-10T11:01:29.854884Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:29.854908Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:29.854917Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:29.855074Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:30.134349Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:30.142230Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:30.142270Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:30.143161Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21565, port: 21565 2025-04-10T11:01:30.143226Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:30.192622Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:30.246743Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:21565. Invalid credentials 2025-04-10T11:01:30.247666Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****I1Vw (71BF5E8F) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:21565. 
Invalid credentials)' 2025-04-10T11:01:35.434836Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491637991133098727:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:35.435436Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001576/r3tmp/tmpcnu9ns/pdisk_1.dat 2025-04-10T11:01:35.740583Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:35.781674Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:35.786319Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:35.788075Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7271, node 6 2025-04-10T11:01:35.956297Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:35.956318Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:35.956326Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:35.956530Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:36.272958Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:36.277884Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:36.277935Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:36.278705Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15908, port: 15908 2025-04-10T11:01:36.278795Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:36.305806Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:36.356167Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15908. Invalid credentials 2025-04-10T11:01:36.356761Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****ZPTw (8D27646E) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15908. 
Invalid credentials)' |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> KikimrIcGateway::TestLoadTableMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2025-04-10T11:00:35.246210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637733881275132:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:35.260732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dbb/r3tmp/tmp6UOvxV/pdisk_1.dat 2025-04-10T11:00:36.111696Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:36.133669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:36.133767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:36.145177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11764 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:00:36.446713Z node 1 :TX_PROXY DEBUG: actor# [1:7491637733881275258:2141] Handle TEvNavigate describe path dc-1 2025-04-10T11:00:36.446764Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243024:2456] HANDLE EvNavigateScheme dc-1 2025-04-10T11:00:36.446872Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637733881275282:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:00:36.446949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491637733881275282:2154], cookie# 1 2025-04-10T11:00:36.453304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243006:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243003:2449], cookie# 1 2025-04-10T11:00:36.453348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243007:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243004:2449], cookie# 1 2025-04-10T11:00:36.453360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243008:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243005:2449], cookie# 1 2025-04-10T11:00:36.453457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637729586307600:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243006:2449], cookie# 1 2025-04-10T11:00:36.453480Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637729586307603:2054] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243007:2449], cookie# 1 2025-04-10T11:00:36.453501Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491637729586307606:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491637738176243008:2449], cookie# 1 2025-04-10T11:00:36.453591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243006:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637729586307600:2051], cookie# 1 2025-04-10T11:00:36.453605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243007:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637729586307603:2054], cookie# 1 2025-04-10T11:00:36.453619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491637738176243008:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637729586307606:2057], cookie# 1 2025-04-10T11:00:36.453642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637738176243003:2449], cookie# 1 2025-04-10T11:00:36.453674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:00:36.453690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637738176243004:2449], cookie# 1 2025-04-10T11:00:36.453705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:00:36.453723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491637738176243005:2449], cookie# 1 2025-04-10T11:00:36.453733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491637738176243002:2449][/dc-1] Unexpected sync response: sender# [1:7491637738176243005:2449], cookie# 1 2025-04-10T11:00:36.453952Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491637733881275282:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:00:36.459227Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491637733881275282:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491637738176243002:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:00:36.459375Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491637733881275282:2154], cacheItem# { Subscriber: { Subscriber: [1:7491637738176243002:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:00:36.461181Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491637738176243027:2459], recipient# [1:7491637738176243024:2456], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:00:36.461261Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243024:2456] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:36.504037Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243024:2456] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:00:36.510997Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243024:2456] Handle TEvDescribeSchemeResult Forward to# [1:7491637738176243023:2455] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 
18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:00:36.565309Z node 1 :TX_PROXY DEBUG: actor# [1:7491637733881275258:2141] Handle TEvProposeTransaction 2025-04-10T11:00:36.565343Z node 1 :TX_PROXY DEBUG: actor# [1:7491637733881275258:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:00:36.565489Z node 1 :TX_PROXY DEBUG: actor# [1:7491637733881275258:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491637738176243032:2463] 2025-04-10T11:00:36.650542Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243032:2463] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:00:36.650598Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243032:2463] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:00:36.650692Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637738176243032:2463] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:00:36.650815Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491637733881275282:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... ePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.418710Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7491637931601952220:2110], cacheItem# { Subscriber: { Subscriber: [13:7491638000321429068:2156] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.418794Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7491637931601952220:2110], cacheItem# { Subscriber: { Subscriber: [13:7491638000321429069:2157] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.418927Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send 
result: self# [13:7491638004616396397:2166], recipient# [13:7491638000321429066:2333], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.419374Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:7491638000321429066:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:38.777915Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7491637931601952220:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.778124Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7491637931601952220:2110], cacheItem# { Subscriber: { Subscriber: [13:7491637931601952235:2115] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.778288Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7491638004616396399:2167], recipient# [13:7491638004616396398:2337], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.866663Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7491637931382957395:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.866865Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7491637931382957395:2111], cacheItem# { Subscriber: { Subscriber: [11:7491637931382957410:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.866992Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7491638004397401653:2230], recipient# [11:7491638004397401652:2333], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown 
Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.877081Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7491637931382957395:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.877230Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7491637931382957395:2111], cacheItem# { Subscriber: { Subscriber: [11:7491637995807467028:2217] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.877280Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7491637931382957395:2111], cacheItem# { Subscriber: { Subscriber: [11:7491637995807467029:2218] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:38.877415Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7491638004397401654:2231], recipient# [11:7491637995807467027:2328], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:38.877794Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7491637995807467027:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:39.078552Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7491637931382957395:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:39.078730Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7491637931382957395:2111], cacheItem# { Subscriber: { Subscriber: [11:7491637995807467020:2216] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:01:39.078831Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7491638008692368952:2232], recipient# [11:7491638008692368951:2334], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:01:39.079346Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] >> KikimrIcGateway::TestLoadExternalTable >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |71.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> 
UpsertLoad::ShouldWriteDataBulkUpsertBatch >> KikimrIcGateway::TestListPath |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |71.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> UpsertLoad::ShouldWriteKqpUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001832/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk6 Trying to start YDB, gRPC: 10515, MsgBus: 15455 2025-04-10T11:01:34.048298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637985773601667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.048388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001832/r3tmp/tmplLCb8e/pdisk_1.dat 2025-04-10T11:01:34.733182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:34.792249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:34.792361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:34.797441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10515, node 1 2025-04-10T11:01:35.156397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:35.156420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:35.156428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:35.156559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15455 TClient is connected to server localhost:15455 WaitRootIsUp 'Root'... 
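Note on the TCleanupTablesActor traces above: a navigate request for a .metadata table comes back with Status: LookupError, and the actor schedules a retry rather than failing, since LookupError is transient (the subscriber is not filled yet). The following is a minimal standalone sketch of that retry-with-backoff pattern; the names NavigatePath and ResolveWithRetry and the backoff constants are illustrative assumptions, not YDB's actual actor code.

// Sketch: retry a lookup that can transiently fail with LookupError.
// All names and constants here are illustrative; this is not YDB code.
#include <chrono>
#include <iostream>
#include <optional>
#include <string>
#include <thread>

enum class EStatus { Ok, LookupError, PathErrorUnknown };

// Stand-in for a scheme-cache navigate call; the real one is asynchronous.
// Assume the path becomes resolvable once the table is actually created.
EStatus NavigatePath(const std::string& path, int attempt) {
    return attempt < 3 ? EStatus::LookupError : EStatus::Ok;
}

std::optional<EStatus> ResolveWithRetry(
        const std::string& path,
        int maxAttempts = 5,
        std::chrono::milliseconds base = std::chrono::milliseconds(100)) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        EStatus st = NavigatePath(path, attempt);
        if (st != EStatus::LookupError) {
            return st; // Ok, or a permanent error such as PathErrorUnknown
        }
        // Transient miss: back off exponentially and retry, mirroring
        // "Scheduled retry for error: Retry LookupError for table ..." above.
        std::this_thread::sleep_for(base * (1 << attempt));
    }
    return std::nullopt; // retries exhausted
}

int main() {
    auto st = ResolveWithRetry(".metadata/workload_manager/delayed_requests");
    std::cout << (st && *st == EStatus::Ok ? "resolved" : "failed") << "\n";
}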
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:36.349158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:36.392544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:36.410565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:36.658566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:36.881393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:01:36.974242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.053752Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637985773601667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:39.053870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:39.281328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638007248439923:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:39.281536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:39.593324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.670781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.721154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.772498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.856810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.950426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:01:40.060024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638011543407748:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:40.060100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:40.060462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638011543407753:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:40.065068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:01:40.083401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638011543407755:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:01:40.150841Z node 1 :TX_PROXY ERROR: Actor# [1:7491638011543407809:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '706) '('"_id" '"28d4e37a-2bf538e0-7bc84d4b-7d03cef3")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '551) '('"_id" '"3dbec364-4e878a77-8a0ac06c-4f270860") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '621) '('"_id" '"2f3b7b8b-22a62732-466fe10d-81cf904a") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '633) '('"_id" '"5d70a6b-26c772a6-2cec7fb0-e8ed7f7b")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return (KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-04-10T11:01:09.321733Z node 1 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637878939813627:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:09.321769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmpuvDERG/pdisk_1.dat 2025-04-10T11:01:10.734403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:11.197075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:11.199935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:11.200044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:11.202513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1895, node 1 2025-04-10T11:01:11.508890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:11.508913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:11.508920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:11.509068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:11.690528Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:11.694595Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:11.694633Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:11.696268Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:25788, port: 25788 2025-04-10T11:01:11.696376Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:11.706805Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-10T11:01:11.707421Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****VP4Q (D1C98B13) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-04-10T11:01:11.707710Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:11.707728Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:11.708628Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:25788, port: 25788 2025-04-10T11:01:11.708726Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:11.716499Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-10T11:01:11.716687Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****VP4Q (D1C98B13) () has now retryable error message 'Could not login via LDAP (Could not start TLS. 
Can't contact LDAP server)' 2025-04-10T11:01:17.190598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637911932068613:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmp3vKG4a/pdisk_1.dat 2025-04-10T11:01:17.354028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:17.515290Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:17.551734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:17.551827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:17.559494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18931, node 2 2025-04-10T11:01:17.802987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:17.803012Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:17.803019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:17.803127Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.122312Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.125739Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.125776Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.126644Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Sp4A (AF429BB1) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-04-10T11:01:21.673599Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637932966461364:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmpb49XhE/pdisk_1.dat 2025-04-10T11:01:21.799391Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:21.856942Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:21.885367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:21.885504Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:21.887680Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12623, node 3 2025-04-10T11:01:22.082671Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:22.082694Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:22.082702Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:22.082859Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:22.217510Z node 3 
:TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:22.238284Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:22.238332Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:22.239206Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****J5SQ (16F226C5) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-04-10T11:01:26.948175Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637952827917076:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:27.032038Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmpfwDrJ4/pdisk_1.dat 2025-04-10T11:01:27.377934Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:27.381721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:27.383471Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:27.417018Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4982, node 4 2025-04-10T11:01:27.619025Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:27.619053Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:27.619062Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:27.619230Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:27.798328Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:27.801436Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:27.801463Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:27.802487Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****gRhA (BF7A2AA0) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2025-04-10T11:01:34.525824Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637987074626959:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.525876Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmpeEoW4U/pdisk_1.dat 2025-04-10T11:01:34.968186Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:35.007653Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:35.007761Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:35.010204Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21438, node 5 2025-04-10T11:01:35.190934Z 
node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:35.190964Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:35.190973Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:35.191141Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:35.385378Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:35.387029Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:35.387066Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:35.387918Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****iX1A (A0001CD4) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-04-10T11:01:39.601917Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638008431113897:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:39.611455Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155a/r3tmp/tmpA1kcgd/pdisk_1.dat 2025-04-10T11:01:39.800069Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:39.825489Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:39.825627Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:39.827297Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29938, node 6 2025-04-10T11:01:39.910837Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:39.910873Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:39.910881Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:39.911017Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:40.078325Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:40.079632Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:40.079676Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:40.080496Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:29070, port: 29070 2025-04-10T11:01:40.080612Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:40.174211Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:40.227835Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****zvxQ (E7828438) () has now valid token of ldapuser@ldap >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> 
TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-04-10T11:01:38.481487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:38.481754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:38.481981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001533/r3tmp/tmp46YaLV/pdisk_1.dat 2025-04-10T11:01:38.916855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:38.968906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:39.014996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:39.015136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:39.027248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:39.123646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:39.510938Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:01:39.511142Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-10T11:01:39.583277Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.071651s, errors=0 2025-04-10T11:01:39.583546Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-04-10T11:01:43.747835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:43.748091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:43.748340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001533/r3tmp/tmpSJl8m8/pdisk_1.dat 2025-04-10T11:01:44.157775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:44.189165Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:44.230363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:44.230511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:44.243918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:44.333872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:44.662036Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:01:44.663924Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-10T11:01:44.744300Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.079774s, errors=0 2025-04-10T11:01:44.744433Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144283384.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144283384.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144283384.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283384.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144283384.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144283384.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124282184.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283384.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283384.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124282184.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124282184.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124282184.000000s;Name=;Codec=}; 2025-04-10T10:59:44.772040Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T10:59:45.021055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T10:59:45.058231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T10:59:45.058635Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T10:59:45.067130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:59:45.067369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:59:45.067604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:59:45.067743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:59:45.067859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:59:45.067990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:59:45.068102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:59:45.068215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:59:45.068321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
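Note on the TX_COLUMNSHARD startup records around here: the tablet walks an ordered chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...), where each one is registered, initialized, executed, reports "0 chunks found" on already-normalized data, and then hands over to the next via normalizer_switched. A minimal sketch of such a sequential chain follows; INormalizer, TNoopNormalizer, and TNormalizerChain are invented illustration names, and YDB's real interfaces differ.

// Sketch of a sequential normalizer chain, as suggested by the
// normalizer_register / normalizer_finished / normalizer_switched records.
// Names are illustrative only, not YDB's API.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    // Returns the number of chunks that still needed fixing.
    virtual size_t Execute() = 0;
};

struct TNoopNormalizer : INormalizer {
    std::string ClassName;
    explicit TNoopNormalizer(std::string name) : ClassName(std::move(name)) {}
    std::string Name() const override { return ClassName; }
    size_t Execute() override { return 0; } // normalized data: "0 chunks found"
};

struct TNormalizerChain {
    std::vector<std::unique_ptr<INormalizer>> Items;

    void Register(std::unique_ptr<INormalizer> n) {
        std::cout << "normalizer_register;CLASS_NAME=" << n->Name() << "\n";
        Items.push_back(std::move(n));
    }

    void Run() {
        for (size_t i = 0; i < Items.size(); ++i) {
            size_t fixed = Items[i]->Execute();
            std::cout << "normalizer_finished;CLASS_NAME=" << Items[i]->Name()
                      << ";chunks_fixed=" << fixed << "\n";
            if (i + 1 < Items.size()) {
                std::cout << "normalizer_switched;CLASS_NAME="
                          << Items[i + 1]->Name() << "\n";
            }
        }
    }
};

int main() {
    TNormalizerChain chain;
    for (auto name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId"}) {
        chain.Register(std::make_unique<TNoopNormalizer>(name));
    }
    chain.Run();
}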
2025-04-10T10:59:45.068430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:59:45.068540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:59:45.068640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:59:45.105557Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:59:45.105855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:59:45.105947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:59:45.106161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:59:45.106382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:59:45.106483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:59:45.106529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:59:45.106624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:59:45.106683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:59:45.106723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:59:45.106754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:59:45.106912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:59:45.106972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:59:45.107017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:59:45.107048Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:59:45.107142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:59:45.107198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:59:45.107239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:59:45.107272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:59:45.107346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:59:45.107388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:59:45.107447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:59:45.107531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:59:45.107576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:59:45.107610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:59:45.108057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-10T10:59:45.108168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T10:59:45.108252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T10:59:45.108346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T10:59:45.108523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:59:45.108581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:59:45.108615Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:59:45.108827Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:59:45.108870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:59:45.108899Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... cpp:29;EXECUTE:finishLoadingTime=690; 2025-04-10T11:01:44.615882Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62014; 2025-04-10T11:01:44.628282Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12298; 2025-04-10T11:01:44.640862Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11506; 2025-04-10T11:01:44.641019Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12613; 2025-04-10T11:01:44.641207Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2025-04-10T11:01:44.641335Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=76; 2025-04-10T11:01:44.641500Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=112; 2025-04-10T11:01:44.641627Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=76; 2025-04-10T11:01:44.658444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16729; 2025-04-10T11:01:44.690953Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=32365; 2025-04-10T11:01:44.691109Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-10T11:01:44.691192Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-04-10T11:01:44.691243Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-10T11:01:44.691293Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 
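The PRECHARGE:*/EXECUTE:*LoadingTime counters above come from staged tablet initialization: each subsystem (insert_table, tx_controller, storages_manager, ...) is first precharged and then executed, with per-stage wall time recorded. A hedged sketch of such a scope timer, assuming the stage name and a microsecond clock; the real helper behind common_data.cpp:29 may differ:

#include <chrono>
#include <iostream>
#include <string>

// Illustrative scope timer: prints "<stage>LoadingTime=<usec>" on exit,
// similar in spirit to the counters emitted during composite_init above.
class TStageTimer {
public:
    explicit TStageTimer(std::string stage)
        : Stage_(std::move(stage))
        , Start_(std::chrono::steady_clock::now()) {}

    ~TStageTimer() {
        const auto usec = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - Start_).count();
        std::cout << Stage_ << "LoadingTime=" << usec << "\n";
    }

private:
    std::string Stage_;
    std::chrono::steady_clock::time_point Start_;
};

int main() {
    { TStageTimer t("PRECHARGE:insert_table"); /* request pages from local DB */ }
    { TStageTimer t("EXECUTE:insert_table");   /* rebuild in-memory state     */ }
}

With this structure, large EXECUTE values (like storages_managerLoadingTime=32365 above) point at the stage that dominates shard restart time.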
2025-04-10T11:01:44.691341Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:01:44.691430Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-10T11:01:44.691491Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:01:44.691591Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=59; 2025-04-10T11:01:44.691640Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-10T11:01:44.691713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-10T11:01:44.691850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=85; 2025-04-10T11:01:44.692256Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=353; 2025-04-10T11:01:44.692307Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=149564; 2025-04-10T11:01:44.692474Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:01:44.692602Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:01:44.692664Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:01:44.692744Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:01:44.718017Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:01:44.718225Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:01:44.718298Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:01:44.718389Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:01:44.718459Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:01:44.718503Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:01:44.718554Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:01:44.718596Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:01:44.718712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:01:44.719228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:01:44.719309Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2646:4520];tablet_id=9437184;parent=[1:2604:4485];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-10T11:01:44.720071Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:01:44.720666Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:01:44.720707Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
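The TEvPeriodicWakeup records around this point show the columnshard re-running EnqueueBackgroundActivities and skipping indexation, cleanup, and TTL with skip_reason=no_changes. A minimal sketch of that guard pattern, under the assumption that each activity is re-evaluated on every wakeup and only scheduled when there is work (not the actual columnshard_impl.cpp code):

#include <iostream>

// Assumed shape of the background-activity dispatch seen in the log:
// each periodic wakeup checks every activity and records a skip_reason
// when there is nothing to do, instead of scheduling a task.
struct TShardState {
    bool HasInsertedData = false;   // pending inserts to index
    bool HasDroppedPortions = false; // portions eligible for cleanup
    bool HasExpiredByTtl = false;   // rows past their TTL edge
};

void EnqueueBackgroundActivities(const TShardState& s) {
    if (!s.HasInsertedData) {
        std::cout << "background=indexation;skip_reason=no_changes\n";
    }
    if (!s.HasDroppedPortions) {
        std::cout << "background=cleanup;skip_reason=no_changes\n";
    }
    if (!s.HasExpiredByTtl) {
        std::cout << "background=ttl;skip_reason=no_changes\n";
    }
}

int main() {
    EnqueueBackgroundActivities(TShardState{});  // idle shard: everything skipped
}

An idle shard, as in the trace above and below, skips all three activities on every wakeup.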
2025-04-10T11:01:44.720735Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:01:44.720787Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:01:44.720890Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:01:44.720987Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:01:44.721071Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:01:44.721126Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:01:44.721204Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:01:44.721285Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:01:44.721430Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:01:44.723300Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:01:44.725137Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> 
KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 >> TTopicReaderTests::TestRun_ReadOneMessage >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-04-10T10:56:37.165741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:37.165939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:37.166137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f17/r3tmp/tmpqWr5JB/pdisk_1.dat 2025-04-10T10:56:37.559969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:37.615127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:37.664361Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:37.665530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:37.665643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:37.665809Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:37.679521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:37.872773Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:56:37.872841Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:37.872988Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-04-10T10:56:38.034965Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:38.035081Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:38.035731Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:38.035845Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:56:38.036221Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:38.036470Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:56:38.036573Z node 1 :TX_PROXY DEBUG: Actor# 
[1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:56:38.036880Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:56:38.038708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:38.039848Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:56:38.039925Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:56:38.109107Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:56:38.110397Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:56:38.110857Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574] 2025-04-10T10:56:38.111146Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:38.160406Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:56:38.161183Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:38.161340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:38.163876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:38.163989Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:38.164053Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:38.164457Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:38.164615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:38.164728Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1 2025-04-10T10:56:38.177247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:38.257427Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:38.257635Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:38.257762Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584] 2025-04-10T10:56:38.257802Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:38.257842Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:38.257880Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:38.258059Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
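The TX_DATASHARD TRACE lines that follow walk the propose transaction through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), where each unit returns a status that either advances the plan or parks the operation until an external event such as the plan step. A simplified sketch of that unit pipeline, with illustrative types only; the real datashard units are considerably more involved:

#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

// One stage of the (assumed) execution pipeline: Execute returns a
// status that decides whether the operation advances to the next unit.
struct TUnit {
    std::string Name;
    EStatus (*Execute)();
};

void RunPipeline(const std::vector<TUnit>& units) {
    for (const auto& unit : units) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        const EStatus st = unit.Execute();
        if (st == EStatus::NotReady) {
            std::cout << "operation is not ready to execute on unit "
                      << unit.Name << "\n";
            return;  // parked until an external event (e.g. the plan step arrives)
        }
        std::cout << "advance execution plan past unit " << unit.Name << "\n";
    }
}

int main() {
    RunPipeline({
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    });
}

This matches the trace below, where the scheme transaction stops at WaitForPlan with "is not ready to execute" until the coordinator plans it.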
2025-04-10T10:56:38.258111Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:38.258421Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:38.258518Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:38.258658Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:38.258699Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:38.258731Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:56:38.258761Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:56:38.258786Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:56:38.258810Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:38.258854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:38.258981Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:677:2578], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:38.259009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:38.259052Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:677:2578], sessionId# [0:0:0] 2025-04-10T10:56:38.259404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:677:2578] 2025-04-10T10:56:38.259440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:56:38.259571Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:38.259741Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:56:38.259778Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:38.259857Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:38.259904Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:56:38.259939Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:56:38.259979Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:56:38.260013Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:56:38.260250Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:56:38.260278Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:56:38.260308Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:56:38.260333Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T10:56:38.260364Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:56:38.260385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:56:38.260409Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:56:38.260430Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:56:38.260457Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:56:38.261525Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:56:38.261566Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56:38.272394Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Compl ... nected at leader tablet# 72075186224037888, clientId# [26:981:2787], serverId# [26:982:2788], sessionId# [0:0:0] 2025-04-10T11:01:41.060552Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [26:981:2787] 2025-04-10T11:01:41.060681Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [26:981:2787] 2025-04-10T11:01:41.060786Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:981:2787] 2025-04-10T11:01:41.060940Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [26:981:2787] 2025-04-10T11:01:41.061041Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [26:981:2787] 2025-04-10T11:01:41.061366Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [26:980:2786], Recipient [26:696:2585]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-10T11:01:41.061587Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-04-10T11:01:41.061756Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:01:41.061924Z node 26 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-04-10T11:01:41.078459Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:980:2786], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-04-10T11:01:41.078850Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:01:41.079070Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:01:41.079631Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, 
edge 9223372036854775807/0, generation 0 2025-04-10T11:01:41.079778Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:21} starting compaction 2025-04-10T11:01:41.080387Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} starting Scan{1 on 1001, Compact{72075186224037888.1.21, eph 1}} 2025-04-10T11:01:41.080609Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} started compaction 1 2025-04-10T11:01:41.080731Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-04-10T11:01:41.188290Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 21, product {tx status + 1 parts epoch 2} done 2025-04-10T11:01:41.188715Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-04-10T11:01:41.188905Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-04-10T11:01:41.189000Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-04-10T11:01:41.189612Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.511991Z 2025-04-10T11:01:41.189874Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-04-10T11:01:41.190034Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:01:41.190185Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-10T11:01:41.190323Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:980:2786]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:01:41.191252Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-04-10T11:01:41.191442Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-04-10T11:01:41.401598Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfnspph2e2wz08md9288c23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=YzE1YTk4MTEtMzM1NTAxZjQtYjFlYmZjMDUtNDA2OTExOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:01:41.403549Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:914:2733] 2025-04-10T11:01:41.403678Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:914:2733] 2025-04-10T11:01:41.404143Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:1006:2794], Recipient [26:696:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-10T11:01:41.404431Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-10T11:01:41.404594Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:01:41.404820Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:01:41.404952Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1512/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:01:41.405057Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1512/18446744073709551615 2025-04-10T11:01:41.405220Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-10T11:01:41.405455Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:01:41.405576Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:01:41.405687Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:01:41.405769Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:01:41.405839Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-10T11:01:41.405924Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:01:41.405958Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:01:41.405982Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:01:41.406008Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:01:41.406223Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-10T11:01:41.406536Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-04-10T11:01:41.406591Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:01:41.406691Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:01:41.406767Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 
72075186224037888 on unit CompletedOperations 2025-04-10T11:01:41.406822Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:01:41.406847Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:01:41.406899Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-10T11:01:41.407023Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:01:41.407219Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:01:41.407393Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:01:41.463324Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited cookie 8 for step 20 2025-04-10T11:01:41.496720Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21 2025-04-10T11:01:41.521096Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:01:41.521350Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:01:41.521790Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{12, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:01:41.521987Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:01:41.522862Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18 2025-04-10T11:01:41.523146Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:540:2479] 2025-04-10T11:01:41.523268Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:540:2479] 2025-04-10T11:01:41.551697Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:23} commited cookie 8 for step 22 2025-04-10T11:01:41.586824Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:24} commited cookie 8 for step 23 2025-04-10T11:01:41.627680Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:25} commited cookie 8 for step 24 >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestALterResourcePool [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata |71.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |71.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |71.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 23574, MsgBus: 30128 2025-04-10T11:01:34.380874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637987958359359:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.380941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b88/r3tmp/tmpRRXoCj/pdisk_1.dat 2025-04-10T11:01:34.907754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:34.923099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:34.923183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:34.928420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23574, node 1 2025-04-10T11:01:35.046805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:35.046840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:35.046850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:35.046992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30128 TClient is connected to server localhost:30128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:35.868969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:35.894961Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:35.919123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:01:35.940797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8041, MsgBus: 10304 2025-04-10T11:01:39.015073Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638004053573113:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b88/r3tmp/tmps4gaT8/pdisk_1.dat 2025-04-10T11:01:39.064299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:39.171316Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:39.202184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:39.202274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:39.205341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8041, node 2 2025-04-10T11:01:39.351679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:39.351728Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:39.351735Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:39.351893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10304 TClient is connected to server localhost:10304 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:39.937896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:39.973336Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:01:40.002471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1910, MsgBus: 25344 2025-04-10T11:01:44.253001Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638027833890522:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:44.259155Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b88/r3tmp/tmpL1GRtk/pdisk_1.dat 2025-04-10T11:01:44.414057Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:44.445423Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:44.445521Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:44.448191Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1910, node 3 2025-04-10T11:01:44.562789Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:44.562818Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:44.562825Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:44.562964Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25344 TClient is connected to server localhost:25344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:45.244524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:45.289043Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:45.328650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:01:45.364577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 14721, MsgBus: 31378 2025-04-10T11:00:08.318929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637619058625545:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:08.701292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e69/r3tmp/tmpzDc3rG/pdisk_1.dat 2025-04-10T11:00:09.121305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:09.124793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:09.124868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:09.141789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14721, node 1 2025-04-10T11:00:09.374870Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:09.374893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:09.374903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:09.375018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31378 TClient is connected to server localhost:31378 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:10.269768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.285800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:10.304522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.541609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.756932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:10.863762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.624967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637636238496320:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:12.625112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.010168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.084821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.127177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.202486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.263815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.291537Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637619058625545:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:13.291601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:13.314706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.380501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637640533464137:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.380580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.380827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637640533464142:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:13.385373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:13.401270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637640533464144:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:13.479805Z node 1 :TX_PROXY ERROR: Actor# [1:7491637640533464199:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:14.594904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:14.882349Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:14.883270Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:14.883380Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637644828432035:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:14.883514Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:14.883590Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637644828432035:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:14.883898Z node 1 :BUILD_INDEX INFO: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:14.883942Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637644828432035:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, Initiate ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-10T11:00:54.751620Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:00:54.751678Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.751787Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715673, txId# 281474976710764 2025-04-10T11:00:54.751846Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.752168Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:00:54.752247Z node 2 
:BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.753678Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715673, cookie: 281474976715673, txId: 281474976710764, status: StatusAccepted 2025-04-10T11:00:54.753812Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710764 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:00:54.757218Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:00:54.757275Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, 
UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.763014Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:00:54.763058Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:00:54.763173Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710764, buildInfoId: 281474976715673 2025-04-10T11:00:54.763261Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710764, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.763443Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:00:54.763667Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:00:54.763751Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.763786Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:00:54.764552Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 
281474976715673 2025-04-10T11:00:54.764631Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637813191559415:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 579, read rows: 30, read bytes: 651 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:54.764653Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715673, subscribers count# 1 2025-04-10T11:00:54.765826Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715673 2025-04-10T11:00:54.766056Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:01:01.498282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:01.498323Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:04.582628Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715673 >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 27388, MsgBus: 9804 2025-04-10T11:01:34.993125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637985561434228:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.993745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b89/r3tmp/tmpALR6fi/pdisk_1.dat 2025-04-10T11:01:35.873359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:35.900645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:35.900756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:35.903460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27388, node 1 2025-04-10T11:01:36.231460Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:36.231484Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:36.231492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:36.231617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9804 TClient is connected to server localhost:9804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:37.365337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:37.409293Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:37.422022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:01:37.446441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:01:37.458988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:01:37.472225Z node 1 :TX_PROXY ERROR: Actor# [1:7491637998446336682:2346] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 }
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) 2025-04-10T11:01:37.472615Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 2821, MsgBus: 28132 2025-04-10T11:01:40.141124Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638011469271162:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:40.142118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b89/r3tmp/tmpmnbV3a/pdisk_1.dat 2025-04-10T11:01:40.530767Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:40.556729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:40.556833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:40.560445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2821, node 2 2025-04-10T11:01:40.718716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:40.718737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:40.718743Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:40.718855Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28132 TClient is connected to server localhost:28132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
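For triage, note that the "TClient::Ls response" dumps repeated throughout this log are text-format protobuf, and only a handful of fields usually matter. Below is a minimal sketch, in Python with only the stdlib re module, of pulling those fields out mechanically. The field names (Name, PathId, PathType, PathState) are taken verbatim from the dumps above; the helper name, the chosen field set, and the return shape are assumptions made for this example, not part of the test suite.

    import re

    # Field names copied verbatim from the "TClient::Ls response" dumps in this log.
    LS_FIELDS = {
        "Name": r'Name: "([^"]+)"',
        "PathId": r"PathId: (\d+)",
        "PathType": r"PathType: (\w+)",
        "PathState": r"PathState: (\w+)",
    }

    def parse_ls_self(dump: str) -> dict:
        """Return the first occurrence of each field, i.e. the Self { } entry."""
        return {
            key: m.group(1)
            for key, pattern in LS_FIELDS.items()
            if (m := re.search(pattern, dump)) is not None
        }

    # On the dump above this yields something like:
    # {"Name": "Root", "PathId": "1", "PathType": "EPathTypeDir",
    #  "PathState": "EPathStateNoChanges"}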
2025-04-10T11:01:41.302176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:41.323773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:01:41.348824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22128, MsgBus: 30438 2025-04-10T11:01:44.905087Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638030705594596:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:44.905134Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b89/r3tmp/tmpxCrHdC/pdisk_1.dat 2025-04-10T11:01:45.226700Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:45.262067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:45.262329Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:45.263343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22128, node 3 2025-04-10T11:01:45.366836Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:45.366863Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:45.366870Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:45.366992Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30438 TClient is connected to server localhost:30438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:46.035835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
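Most WARN records in these runs are routine start-up noise: "Operation part proposed ok, but propose itself is undo unsafe" and "NotifyTxCompletion, unknown transaction" appear on every node bring-up. A hedged sketch of a triage filter over plain-text log lines follows; both patterns are copied verbatim from this log, but the helper itself is illustrative and not part of ya or the test framework.

    # Suppress WARN records matching known-benign patterns so real failures stand out.
    KNOWN_BENIGN_WARNINGS = (
        "Operation part proposed ok, but propose itself is undo unsafe",
        "NotifyTxCompletion, unknown transaction",
    )

    def interesting_lines(log_lines):
        for line in log_lines:
            if " WARN: " in line and any(p in line for p in KNOWN_BENIGN_WARNINGS):
                continue  # routine scheme-shard noise emitted on every node start-up
            yield line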
2025-04-10T11:01:46.046450Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:01:46.073719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-10T11:01:03.174002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637854468560448:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:03.174047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpQgZfXY/pdisk_1.dat 2025-04-10T11:01:04.156945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:04.195881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:04.196007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:04.199663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:04.211984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2441, node 1 2025-04-10T11:01:04.426846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:04.426866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:04.426873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:04.426995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:04.794314Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:04.797250Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:04.797284Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:04.798079Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4060, port: 4060 2025-04-10T11:01:04.799059Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:04.844179Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:04.887139Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****LBZQ (30084EC9) () has now valid token of ldapuser@ldap 2025-04-10T11:01:09.224013Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637877638958991:2216];send_to=[0:7307199536658146131:7762515]; 
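The successful path for node 1 above is the standard two-step LDAP flow: bind as the service account (bind: bindDn: cn=robouser,...), then a subtree search for the user that requests its group attribute. The nested-group lookups later in this log first issue the matching-rule-in-chain filter (member:1.2.840.113556.1.4.1941:=uid=ldapuser,...) and then walk the group tree explicitly, as the "Try to get nested groups - tree traversal" lines show. Below is a minimal sketch of the same two basic steps using the third-party ldap3 package; host, port, and the robouser password are placeholders (the test binds to an ephemeral local server), while the DNs, filter, and attribute names are copied from the log.

    from ldap3 import Server, Connection, SUBTREE

    # Some runs request the custom attribute "groupDN" instead of "memberOf";
    # swap the attribute name accordingly.
    server = Server("ldap://localhost:4060")
    conn = Connection(
        server,
        user="cn=robouser,dc=search,dc=yandex,dc=net",
        password="<robouser-password>",  # placeholder, not from the log
        auto_bind=True,  # performs the initial bind ("bind: bindDn: cn=robouser,...")
    )

    # "search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser"
    if conn.search(
        search_base="dc=search,dc=yandex,dc=net",
        search_filter="(uid=ldapuser)",
        search_scope=SUBTREE,
        attributes=["memberOf"],
    ):
        for entry in conn.entries:
            print(entry.entry_dn, entry.memberOf)
    else:
        # mirrors the "LDAP user ... return no entries" outcome seen below
        print("no entries for uid=ldapuser")

A failed initial bind here raises the ldap3 equivalent of the "Invalid credentials" / "Can't contact LDAP server" errors recorded for nodes 2-5 below.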
2025-04-10T11:01:09.224237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpISymUS/pdisk_1.dat 2025-04-10T11:01:09.934097Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:09.958705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:09.958793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:09.967368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28692, node 2 2025-04-10T11:01:10.424340Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:10.424369Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:10.424376Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:10.424490Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:10.958318Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:10.977906Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:10.977948Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:10.978655Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6232, port: 6232 2025-04-10T11:01:10.978742Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:10.998866Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:6232. Invalid credentials 2025-04-10T11:01:10.999197Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****GZ_g (2AF7AF91) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:6232. 
Invalid credentials)' 2025-04-10T11:01:17.161734Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637914951516069:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:17.302945Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpShBIq1/pdisk_1.dat 2025-04-10T11:01:17.647146Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:17.695081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:17.695169Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:17.699919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2994, node 3 2025-04-10T11:01:17.938743Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:17.938764Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:17.938771Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:17.938922Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.209625Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.212356Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.212386Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.213036Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10270, port: 10270 2025-04-10T11:01:18.213110Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:18.237537Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10270. Invalid credentials 2025-04-10T11:01:18.237795Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Dn_A (8762736D) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10270. 
Invalid credentials)' 2025-04-10T11:01:22.040019Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637936150974792:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:22.040065Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpwPXfnc/pdisk_1.dat 2025-04-10T11:01:22.366692Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:22.413272Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:22.413347Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:22.414689Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4024, node 4 2025-04-10T11:01:22.558825Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:22.558849Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:22.558857Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:22.558990Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:22.814325Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:22.818418Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:22.818454Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:22.819164Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2380, port: 2380 2025-04-10T11:01:22.819252Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:22.838340Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:22.841029Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:2380 return no entries 2025-04-10T11:01:22.841279Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****7knA (449D13D4) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:2380 return no entries)' 2025-04-10T11:01:26.949096Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637951506750698:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpr8md0t/pdisk_1.dat 2025-04-10T11:01:27.103832Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:27.217538Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:27.316089Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:27.316189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:27.322522Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27715, node 5 2025-04-10T11:01:27.490827Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:27.490859Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:27.490867Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:27.491014Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:27.762457Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:27.774538Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:27.774583Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:27.775368Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7848, port: 7848 2025-04-10T11:01:27.775456Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:27.810863Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:27.857234Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:27.857922Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:27.857979Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:27.904382Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:27.950436Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:27.951301Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kuuQ 
(EA7B6E9D) () has now valid token of ldapuser@ldap 2025-04-10T11:01:31.907625Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491637951506750698:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:31.909347Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:33.003211Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****kuuQ (EA7B6E9D) 2025-04-10T11:01:33.003288Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7848, port: 7848 2025-04-10T11:01:33.003353Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:33.018800Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:33.070388Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:33.071017Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:33.071063Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.118715Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.166443Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.168286Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kuuQ (EA7B6E9D) () has now valid token of ldapuser@ldap 2025-04-10T11:01:38.022386Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****kuuQ (EA7B6E9D) 2025-04-10T11:01:38.022940Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7848, port: 7848 2025-04-10T11:01:38.023028Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:38.023456Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:7848. Can't contact LDAP server 2025-04-10T11:01:38.023590Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kuuQ (EA7B6E9D) () has now retryable error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:7848. 
Can't contact LDAP server)' 2025-04-10T11:01:38.023962Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:38.023985Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:38.025121Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7848, port: 7848 2025-04-10T11:01:38.025180Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:38.025745Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:7848. Can't contact LDAP server 2025-04-10T11:01:38.025825Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kuuQ (EA7B6E9D) () has now retryable error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:7848. Can't contact LDAP server)' 2025-04-10T11:01:38.813739Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638004222841154:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:38.813782Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b5/r3tmp/tmpqpc1In/pdisk_1.dat 2025-04-10T11:01:39.002362Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:39.005336Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:39.005485Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:39.008924Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29222, node 6 2025-04-10T11:01:39.122883Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:39.122909Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:39.122916Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:39.123051Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:39.284747Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:39.288340Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:39.288373Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:39.289305Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15446, port: 15446 2025-04-10T11:01:39.289393Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:39.312639Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:39.354820Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****M-Yw (971D41F5) () has now valid token of ldapuser@ldap 2025-04-10T11:01:42.849543Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-Yw (971D41F5) 2025-04-10T11:01:42.849839Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: 
ldap://localhost:15446, port: 15446 2025-04-10T11:01:42.849934Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:42.871958Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:42.922962Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****M-Yw (971D41F5) () has now valid token of ldapuser@ldap 2025-04-10T11:01:43.818447Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638004222841154:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:43.818550Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:47.861615Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-Yw (971D41F5) 2025-04-10T11:01:47.861759Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15446, port: 15446 2025-04-10T11:01:47.861891Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:47.886880Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:47.943962Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****M-Yw (971D41F5) () has now valid token of ldapuser@ldap >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-04-10T11:01:48.333631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:48.333940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:48.334192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001517/r3tmp/tmpxhaHN6/pdisk_1.dat 2025-04-10T11:01:48.765054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.809294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:48.852778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:48.852962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:48.866031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:48.961691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.319722Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-10T11:01:49.319855Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-10T11:01:49.322982Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-10T11:01:49.323057Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:01:49.323115Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:01:49.323143Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:01:49.323177Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:01:49.323201Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:01:49.327996Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=NGQ5OGRmOGYtY2NmYzJmMDQtZDQ5MDdlZjctNzY4MjM3NzU= 2025-04-10T11:01:49.328069Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=ODkwNWE1YWEtNmZiZjczZmItMmJjNTdiZjItYzNkMjcwYzg= 2025-04-10T11:01:49.329431Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=MmY1YzVkMy05YWZkNGJmYi02ZjM4YTNjOC1iNjcwMWJiMg== 2025-04-10T11:01:49.333101Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=M2I4N2RlYzItNTkzYTZhNDEtNjc4ZDc3YjAtOGJlNDYzZjI= 2025-04-10T11:01:49.333181Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=MTJjZGM1MDQtOTJkNzkzNjEtZDU3NjFiYmEtNGJjZjZmZmQ= 2025-04-10T11:01:49.338490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.338615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.338783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.338986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.339050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.339110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.339553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.347454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:01:49.414616Z node 1 :TX_PROXY ERROR: Actor# [1:797:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:49.415472Z node 1 :TX_PROXY ERROR: Actor# [1:798:2674] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:49.416309Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:49.416862Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:49.597303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:49.597449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:49.597537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:49.597603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:49.597676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:49.638828Z node 1 :TX_PROXY ERROR: Actor# [1:898:2739] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:50.240880Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744282910.240787s, errors=0 2025-04-10T11:01:50.241323Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744282910240 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:50.258337Z node 1 :TX_PROXY ERROR: Actor# [1:971:2777] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:50.342050Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744282910.341988s, errors=0 2025-04-10T11:01:50.342491Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744282910341 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:50.364666Z node 1 :TX_PROXY ERROR: Actor# [1:1022:2799] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:50.456416Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744282910.456347s, errors=0 2025-04-10T11:01:50.456820Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744282910456 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:50.471759Z node 1 :TX_PROXY ERROR: Actor# [1:1073:2821] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:50.546891Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744282910.546843s, errors=0 2025-04-10T11:01:50.547360Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744282910546 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:50.562602Z node 1 :TX_PROXY ERROR: Actor# [1:1124:2843] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:50.639628Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744282910.639564s, errors=0 2025-04-10T11:01:50.640031Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744282910639 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:50.640097Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.317280s, oks# 20, errors# 0 2025-04-10T11:01:50.640241Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> IndexBuildTest::RejectsCreate >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> IndexBuildTest::BaseCase >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] >> VectorIndexBuildTest::BaseCase >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> IndexBuildTest::Lock >> IndexBuildTest::ShadowDataNotAllowedByDefault |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-04-10T11:01:50.265025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:50.265351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:50.265575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001514/r3tmp/tmpcgUHoG/pdisk_1.dat 2025-04-10T11:01:50.694292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.739707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:50.784879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:50.785044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:50.797233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:50.883974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:51.294908Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-04-10T11:01:51.295052Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-04-10T11:01:51.298977Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-10T11:01:51.299055Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-10T11:01:51.299133Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-10T11:01:51.299167Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-10T11:01:51.299215Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-10T11:01:51.299245Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-10T11:01:51.304513Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=NjBjZTQxYmEtODFkYTk0NzEtMzI3YzExOTMtZjIyMzZmYjM= 2025-04-10T11:01:51.304595Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=NWZjYjA3MzUtNzEyYTFiNDQtMzQ3ZjJhNjktZDUzYjY2ZWY= 2025-04-10T11:01:51.306355Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: ydb://session/3?node_id=1&id=Yzg3YjdmZmYtN2E4MjZjMDQtYjExYTE3MDAtMWI2N2Q0YTk= 2025-04-10T11:01:51.309550Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=YjNlMmYxYTMtMTJlOTIwODgtZGRjMjk2NmItYzcwMTY0MmE= 2025-04-10T11:01:51.309609Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=M2Q5ZTQzOS1iZjZmMGU3OS04ODM4ZTY0ZS04MjlkMGEyMg== 2025-04-10T11:01:51.314303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.314418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.314591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.314777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.314840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.314902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.315341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:51.322661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:01:51.402414Z node 1 :TX_PROXY ERROR: Actor# [1:797:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:51.403415Z node 1 :TX_PROXY ERROR: Actor# [1:798:2674] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:51.404425Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:51.405152Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:01:51.576247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:51.576387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:51.576458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:51.576525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:51.576576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:01:51.616991Z node 1 :TX_PROXY ERROR: Actor# [1:898:2739] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:52.295930Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744282912.295861s, errors=0 2025-04-10T11:01:52.296289Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744282912295 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:52.310451Z node 1 :TX_PROXY ERROR: Actor# [1:971:2777] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:52.379867Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744282912.379818s, errors=0 2025-04-10T11:01:52.380226Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744282912379 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:52.394375Z node 1 :TX_PROXY ERROR: Actor# [1:1022:2799] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:52.465063Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744282912.465021s, errors=0 2025-04-10T11:01:52.465452Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744282912465 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:52.479720Z node 1 :TX_PROXY ERROR: Actor# [1:1073:2821] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:52.572282Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744282912.572231s, errors=0 2025-04-10T11:01:52.572635Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744282912572 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:52.590071Z node 1 :TX_PROXY ERROR: Actor# [1:1124:2843] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:52.677141Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744282912.677084s, errors=0 2025-04-10T11:01:52.677489Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744282912677 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:01:52.677561Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.378821s, oks# 20, errors# 0 2025-04-10T11:01:52.677769Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-10T11:01:05.210706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637863736945989:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:05.210803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpGdAXXG/pdisk_1.dat 2025-04-10T11:01:06.102435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:06.107547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:06.107647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:06.114626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19979, node 1 2025-04-10T11:01:06.410777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:06.410800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:06.410813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:06.410936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:06.746008Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:06.746401Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:06.746425Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:06.748045Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25134, port: 25134 2025-04-10T11:01:06.748119Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:06.814635Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:06.867393Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jv2w (5944EBD1) () has now valid token of ldapuser@ldap 2025-04-10T11:01:12.230593Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637893498546891:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpHQt3Rk/pdisk_1.dat 2025-04-10T11:01:12.379451Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:12.643724Z node 2 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T11:01:12.719796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:12.719883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:12.727513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1410, node 2 2025-04-10T11:01:12.986863Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:12.986889Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:12.986902Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:12.987009Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:13.258368Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:13.271461Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:13.271493Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:13.272204Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:28447, port: 28447 2025-04-10T11:01:13.272263Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:13.354584Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:28447. Invalid credentials 2025-04-10T11:01:13.355093Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****shwQ (51705BBD) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:28447. 
Invalid credentials)' 2025-04-10T11:01:19.017632Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637919622492796:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpgzE1Mk/pdisk_1.dat 2025-04-10T11:01:19.216423Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:19.298129Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:19.317978Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:19.318073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:19.319837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22281, node 3 2025-04-10T11:01:19.424566Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:19.424589Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:19.424600Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:19.424733Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:19.793152Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:19.796471Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:19.796504Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:19.797259Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:3267, port: 3267 2025-04-10T11:01:19.797374Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:19.870705Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:3267. Invalid credentials 2025-04-10T11:01:19.871352Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****yZQw (44D75D35) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:3267. 
Invalid credentials)' 2025-04-10T11:01:23.581946Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637940443320258:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:23.581997Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpMfej6f/pdisk_1.dat 2025-04-10T11:01:24.103417Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:24.167091Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:24.167187Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:24.168331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25299, node 4 2025-04-10T11:01:24.366967Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:24.367009Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:24.367016Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:24.367161Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:24.496767Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:24.499160Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:24.499193Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:24.499916Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6580, port: 6580 2025-04-10T11:01:24.499994Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:24.570751Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:24.571359Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:6580 return no entries 2025-04-10T11:01:24.571979Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****C-pQ (901FA819) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:6580 return no entries)' 2025-04-10T11:01:29.246866Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491637963758233962:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:29.248061Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpRkYj8y/pdisk_1.dat 2025-04-10T11:01:29.651466Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.697390Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:29.700083Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:29.707631Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25504, node 5 2025-04-10T11:01:29.878434Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:29.878458Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:29.878465Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:29.878728Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:30.169731Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:30.178272Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:30.178307Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:30.179014Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6597, port: 6597 2025-04-10T11:01:30.179085Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:30.280625Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:30.327150Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:30.335961Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:30.336059Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:30.382586Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:30.426862Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:30.428616Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****9Khw 
(AAAA38F8) () has now valid token of ldapuser@ldap 2025-04-10T11:01:33.432660Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****9Khw (AAAA38F8) 2025-04-10T11:01:33.432744Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6597, port: 6597 2025-04-10T11:01:33.432813Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:33.510634Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:33.558268Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:33.559064Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:33.559102Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.602578Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.647156Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:33.648538Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****9Khw (AAAA38F8) () has now valid token of ldapuser@ldap 2025-04-10T11:01:34.255240Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491637963758233962:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.255369Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:38.442919Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****9Khw (AAAA38F8) 2025-04-10T11:01:38.443047Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6597, port: 6597 2025-04-10T11:01:38.443135Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:38.530883Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:38.579397Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:38.579992Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:38.580030Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:38.630371Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:38.682164Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:38.684499Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****9Khw (AAAA38F8) () has now valid token of ldapuser@ldap 2025-04-10T11:01:41.320734Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638015236393615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:41.320826Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158a/r3tmp/tmpLukBDn/pdisk_1.dat 2025-04-10T11:01:41.617598Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:41.656982Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:41.657089Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:41.658636Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63241, node 6 2025-04-10T11:01:41.737351Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:41.737378Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:41.737392Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:41.737565Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:41.826141Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:41.828606Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:41.828642Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:41.829585Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15278, port: 15278 2025-04-10T11:01:41.829698Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:41.898975Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:41.948045Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****HJzA (E3C355EA) () has now valid token of ldapuser@ldap 2025-04-10T11:01:46.321989Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638015236393615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:46.322071Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:47.362652Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****HJzA (E3C355EA) 2025-04-10T11:01:47.362995Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15278, port: 15278 2025-04-10T11:01:47.363086Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:47.427423Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:47.479296Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****HJzA 
(E3C355EA) () has now valid token of ldapuser@ldap 2025-04-10T11:01:51.370388Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****HJzA (E3C355EA) 2025-04-10T11:01:51.370527Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15278, port: 15278 2025-04-10T11:01:51.370648Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:51.446875Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:51.499176Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****HJzA (E3C355EA) () has now valid token of ldapuser@ldap >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-04-10T11:01:47.919763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:47.920001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:47.920205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001526/r3tmp/tmpQU7iMH/pdisk_1.dat 2025-04-10T11:01:48.502502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.554361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:48.595897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:48.596011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:48.607810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:48.705347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.084983Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-04-10T11:01:49.085282Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-10T11:01:49.179194Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.093422s, errors=0 2025-04-10T11:01:49.179291Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-04-10T11:01:53.408047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:53.408258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:53.408489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001526/r3tmp/tmpqbIOXC/pdisk_1.dat 2025-04-10T11:01:53.723823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:01:53.768564Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:53.815665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:53.815792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:53.830411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:53.919127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:54.190681Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-10T11:01:54.190908Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-10T11:01:54.260830Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.069415s, errors=0 2025-04-10T11:01:54.260928Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> OperationMapping::IndexBuildSuccess |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingUserId >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> FolderServiceTest::TFolderServiceAdapter >> IndexBuildTest::RejectsDropIndex [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> 
IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> OperationMapping::IndexBuildCanceled >> OperationMapping::IndexBuildCanceled [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |71.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:54.130728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:54.130807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:54.130858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:54.130891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:54.130925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:54.130948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:54.130993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:54.131069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:54.131310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:54.205534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:54.205583Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:54.220065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:54.220315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:54.220487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:54.228236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:54.228496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:54.229135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:54.229522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:54.231886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:54.233237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:54.233311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:54.233494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:54.233539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:54.233578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:54.233717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:54.240801Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:54.406516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:54.406767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:54.407006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:54.407244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:54.407297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:54.410768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:54.410924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:54.411135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:54.411194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:54.411247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:54.411299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:54.415935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:01:54.416009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:01:54.416048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:01:54.418032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.418079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.418130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.418215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.422114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:01:54.424335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:01:54.424507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:01:54.425556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:54.425718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:54.425789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.426108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:01:54.426200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.426385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:01:54.426467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:01:54.428814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:54.428989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:54.429170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:54.429216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:01:54.429488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.429534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:01:54.429642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:54.429695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.429736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:54.429793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.429830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:01:54.429883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.429920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:01:54.429955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:01:54.430019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:01:54.430063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:01:54.430101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:01:54.432540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:54.432668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:54.432706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
tId, TxId: 109, tablet: 72075186233409548, partId: 0
2025-04-10T11:01:56.889704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 236 } }
2025-04-10T11:01:56.889844Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 236 } }
2025-04-10T11:01:56.889893Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944
2025-04-10T11:01:56.890074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.890126Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128
2025-04-10T11:01:56.899962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.900297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.900386Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:01:56.900507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1
2025-04-10T11:01:56.900753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:01:56.907100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816
2025-04-10T11:01:56.907324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 109 at step: 5000008
FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008
FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008
2025-04-10T11:01:56.908720Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:56.908920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:56.909008Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944
2025-04-10T11:01:56.909332Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129
2025-04-10T11:01:56.909555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008
2025-04-10T11:01:56.921108Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:56.921203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-04-10T11:01:56.921641Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:56.921710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 109, path id: 4
2025-04-10T11:01:56.921856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.921922Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944
2025-04-10T11:01:56.926071Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109
2025-04-10T11:01:56.926302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109
2025-04-10T11:01:56.926358Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109
2025-04-10T11:01:56.926415Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4
2025-04-10T11:01:56.926473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-04-10T11:01:56.926636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true
FAKE_COORDINATOR: Erasing txId 109
2025-04-10T11:01:56.936064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109
2025-04-10T11:01:56.943293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 5119 } }
2025-04-10T11:01:56.943370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0
2025-04-10T11:01:56.943553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 5119 } }
2025-04-10T11:01:56.943707Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 5119 } }
2025-04-10T11:01:56.945317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2
2025-04-10T11:01:56.945425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0
2025-04-10T11:01:56.945598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2
2025-04-10T11:01:56.945660Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-04-10T11:01:56.945797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2
2025-04-10T11:01:56.945887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:56.945943Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.945994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944
2025-04-10T11:01:56.946055Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240
2025-04-10T11:01:56.978878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.979611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.980047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944
2025-04-10T11:01:56.980110Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState
2025-04-10T11:01:56.980290Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1
2025-04-10T11:01:56.980340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1
2025-04-10T11:01:56.980410Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1
2025-04-10T11:01:56.980474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1
2025-04-10T11:01:56.980525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true
2025-04-10T11:01:56.980625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 109
2025-04-10T11:01:56.980705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1
2025-04-10T11:01:56.980762Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0
2025-04-10T11:01:56.980814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0
2025-04-10T11:01:56.980986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-10T11:01:56.983623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult
2025-04-10T11:01:56.983691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:784:2729]
TestWaitNotification: OK eventTxId 109
>> OperationMapping::IndexBuildRejected [GOOD]
|71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:01:52.851330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:01:52.851435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:01:52.851483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:01:52.851514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:01:52.851553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:01:52.851579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:01:52.851633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:01:52.851705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:01:52.852012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:01:52.943010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:52.943066Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:52.959425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:01:52.959749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:01:52.959918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:01:52.967612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:01:52.967899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:01:52.968664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:52.969039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:01:52.971170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:52.972538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:52.972613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:52.972781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:01:52.972831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:52.972873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:01:52.973014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:01:52.980196Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:01:53.114088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:01:53.114347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.114569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:01:53.114846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:01:53.114904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.119621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:53.119835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:01:53.120108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.120187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:01:53.120235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:01:53.120288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:01:53.127368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.127522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:01:53.127583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:01:53.135388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.135486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.135550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:53.135641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:01:53.147201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:01:53.155449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:01:53.155795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:01:53.157122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:53.157347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:53.157440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:53.157854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:01:53.157925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:53.158224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:01:53.158358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:01:53.167837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:53.167925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:53.168213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:53.168274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:01:53.168661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:53.168736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:01:53.168888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:53.168951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:53.169019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:53.169067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:53.169120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:01:53.169179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:53.169231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:01:53.169308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:01:53.169443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:01:53.169505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:01:53.169554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:01:53.173082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:53.173243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:53.173295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004
FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004
2025-04-10T11:01:57.101836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:57.102049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:57.102122Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId# 107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944
2025-04-10T11:01:57.102235Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136
2025-04-10T11:01:57.107914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.108009Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944
2025-04-10T11:01:57.108074Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation
2025-04-10T11:01:57.108114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004
FAKE_COORDINATOR: Erasing txId 107
2025-04-10T11:01:57.109504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 745 } }
2025-04-10T11:01:57.109548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0
2025-04-10T11:01:57.109687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 745 } }
2025-04-10T11:01:57.109792Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 745 } }
2025-04-10T11:01:57.110722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2
2025-04-10T11:01:57.110776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0
2025-04-10T11:01:57.110912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2
2025-04-10T11:01:57.110959Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944
2025-04-10T11:01:57.113094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.113172Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944
2025-04-10T11:01:57.113248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1
2025-04-10T11:01:57.113300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1
2025-04-10T11:01:57.113422Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944
2025-04-10T11:01:57.113570Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129
2025-04-10T11:01:57.113704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:01:57.113777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-10T11:01:57.115553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.115696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.117066Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:57.117121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:57.117288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-10T11:01:57.117577Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:57.117647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1
2025-04-10T11:01:57.117702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2
2025-04-10T11:01:57.118162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.118225Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944
2025-04-10T11:01:57.118343Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.118395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944
2025-04-10T11:01:57.118440Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240
2025-04-10T11:01:57.120070Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107
2025-04-10T11:01:57.120175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107
2025-04-10T11:01:57.120220Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107
2025-04-10T11:01:57.120261Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9
2025-04-10T11:01:57.120308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:01:57.121942Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107
2025-04-10T11:01:57.122038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107
2025-04-10T11:01:57.122071Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107
2025-04-10T11:01:57.122103Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-10T11:01:57.122136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-10T11:01:57.122233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true
2025-04-10T11:01:57.125009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-04-10T11:01:57.125067Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:01:57.125419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-10T11:01:57.125589Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1
2025-04-10T11:01:57.125638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-04-10T11:01:57.125683Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1
2025-04-10T11:01:57.125722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-04-10T11:01:57.125765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true
2025-04-10T11:01:57.125843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:376:2344] message: TxId: 107
2025-04-10T11:01:57.125889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-04-10T11:01:57.125938Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0
2025-04-10T11:01:57.126000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0
2025-04-10T11:01:57.126121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-10T11:01:57.127710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-04-10T11:01:57.128383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-04-10T11:01:57.129676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult
2025-04-10T11:01:57.129732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:581:2541]
TestWaitNotification: OK eventTxId 107
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:01:53.889029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:01:53.889136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:01:53.889187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:01:53.889225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:01:53.889270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:01:53.889300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:01:53.889360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:01:53.889462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:01:53.889854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:01:54.004187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:54.004248Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:54.023343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:01:54.023676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:01:54.023883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:01:54.032499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:01:54.032845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:01:54.033600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:54.034043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:01:54.036475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:54.038213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:54.038300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:54.038535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:01:54.038590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:54.038627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:01:54.038741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.046567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:01:54.171768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:01:54.171972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.172162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:01:54.172364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:01:54.172406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.175066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:54.175205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:01:54.175391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.175448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:01:54.175485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:01:54.175533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:01:54.178060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.178129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:01:54.178192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:01:54.180877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.180946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.181007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.181077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.184746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:01:54.186882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:01:54.187055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:01:54.188152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:01:54.188318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:54.188383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.188705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:01:54.188768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:01:54.189035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:01:54.189144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:01:54.194820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:01:54.194881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:01:54.195072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:01:54.195117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:01:54.195353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:01:54.195404Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:01:54.195502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:54.195563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.195615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:01:54.195656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.195695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:01:54.195742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:01:54.195803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:01:54.195838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:01:54.195911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:01:54.195955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:01:54.195996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:01:54.198067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:54.198256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:01:54.198294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
on: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:57.325731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-10T11:01:57.325916Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 206us result status StatusSuccess
2025-04-10T11:01:57.326513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:01:57.326964Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-10T11:01:57.327273Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 304us result status StatusSuccess
2025-04-10T11:01:57.327861Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false
IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady |71.6%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IndexBuildTest::CancellationNotEnoughRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 16127, MsgBus: 20957 2025-04-10T11:00:11.434469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637631846932253:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:11.434598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5f/r3tmp/tmpwGZnbm/pdisk_1.dat 2025-04-10T11:00:11.908073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:11.926335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:11.926443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:11.928752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16127, node 1 2025-04-10T11:00:12.024958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:12.024998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:12.025030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:12.025223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20957 TClient is connected to server localhost:20957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:12.765454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:12.808739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:00:13.010548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:13.171826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:13.266951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.016216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637649026803199:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.016363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.407120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.446510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.496361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.554954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.590259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.668326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:15.727648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637649026803717:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.727741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.727909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637649026803722:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:15.731407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:15.742332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:00:15.742639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637649026803724:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:15.831865Z node 1 :TX_PROXY ERROR: Actor# [1:7491637649026803778:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:16.434071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637631846932253:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:16.434143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:16.884132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:17.193822Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:17.194790Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:17.194883Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637657616738908:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:17.195012Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:17.195079Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637657616738908:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:17.195344Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:17.195388Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637657616738908:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, Initiate ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-10T11:01:02.876115Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:02.876164Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.876269Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715765 2025-04-10T11:01:02.876322Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.876595Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:02.876637Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.877734Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715765, status: StatusAccepted 2025-04-10T11:01:02.877868Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715765 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:01:02.879760Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:02.879817Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.883877Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715765, buildInfoId: 281474976710673 2025-04-10T11:01:02.883972Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715765, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.884288Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:02.884340Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.884369Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:01:02.884656Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:02.884737Z node 2 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637848287700647:2528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 26, upload bytes: 655, read rows: 30, read bytes: 575 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:02.884750Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710673, subscribers count# 1 2025-04-10T11:01:02.885774Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-04-10T11:01:02.885996Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:01:02.898491Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:01:02.898532Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:01:02.898553Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:01:08.859675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:08.859710Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:12.554321Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> IndexBuildTest::CheckLimitWithDroppedIndex >> FolderServiceTest::TFolderServiceAdapter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-10T11:01:16.405959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637911054408806:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:16.422606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmpClnNyg/pdisk_1.dat 2025-04-10T11:01:17.517065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:17.531595Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:17.546410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:17.549890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:17.550762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 6237, node 1 2025-04-10T11:01:17.837292Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:17.837316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:17.837324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:17.837475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:18.225729Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:18.231707Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:18.231743Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:18.239393Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:20548, port: 20548 2025-04-10T11:01:18.239491Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:18.326665Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:18.378057Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:18.378997Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:18.379066Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:18.429608Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:18.481793Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:18.515947Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****o7-Q (A64DB887) () has now valid token of ldapuser@ldap 2025-04-10T11:01:21.405153Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637911054408806:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:21.405252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:22.522407Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket 
eyJh****o7-Q (A64DB887) 2025-04-10T11:01:22.522509Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:20548, port: 20548 2025-04-10T11:01:22.522615Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:22.610847Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:22.616958Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:20548 return no entries 2025-04-10T11:01:22.617592Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****o7-Q (A64DB887) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:20548 return no entries)' 2025-04-10T11:01:27.538498Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****o7-Q (A64DB887) 2025-04-10T11:01:29.429426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637963866245838:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:29.429478Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmpoFw82G/pdisk_1.dat 2025-04-10T11:01:29.739838Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.781024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:29.781126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:29.785859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28728, node 2 2025-04-10T11:01:30.031206Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:30.031242Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:30.031254Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:30.031493Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:30.478319Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:30.479343Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:30.479362Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:30.480025Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:14747, port: 14747 2025-04-10T11:01:30.480108Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:30.559530Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:30.560766Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. 
Server is busy 2025-04-10T11:01:30.561717Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****DsBA (059866D8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. Server is busy)' 2025-04-10T11:01:30.562007Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:30.562028Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:30.563005Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:14747, port: 14747 2025-04-10T11:01:30.563094Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:30.645858Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:30.648015Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. Server is busy 2025-04-10T11:01:30.648517Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****DsBA (059866D8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. Server is busy)' 2025-04-10T11:01:32.451223Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DsBA (059866D8) 2025-04-10T11:01:32.451524Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:32.451543Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:32.463390Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:14747, port: 14747 2025-04-10T11:01:32.463495Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:32.630386Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:32.631122Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. Server is busy 2025-04-10T11:01:32.631898Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****DsBA (059866D8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:14747. 
Server is busy)' 2025-04-10T11:01:34.429875Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637963866245838:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:34.429950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:35.457963Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DsBA (059866D8) 2025-04-10T11:01:35.458297Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:35.458316Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:35.459095Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:14747, port: 14747 2025-04-10T11:01:35.459158Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:35.542756Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:35.598561Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:35.602974Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:35.603035Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:35.646650Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: ... 
11:01:39.638546Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:39.690514Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:39.692205Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****DsBA (059866D8) () has now valid token of ldapuser@ldap 2025-04-10T11:01:41.517284Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638015596015436:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmpixHcWW/pdisk_1.dat 2025-04-10T11:01:41.673031Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:41.762007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:41.762093Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:41.763514Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:41.764547Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23159, node 3 2025-04-10T11:01:41.908795Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:41.908818Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:41.908827Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:41.908972Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:42.086337Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:42.089050Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:42.089080Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:42.089805Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18850, port: 18850 2025-04-10T11:01:42.089895Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:42.138545Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:42.183964Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:42.233500Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:42.280385Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****pP2A (C962AC08) () has now valid token of ldapuser@ldap 2025-04-10T11:01:46.381344Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638036534740308:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:46.381418Z node 4 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmpTq22Wd/pdisk_1.dat 2025-04-10T11:01:46.692317Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:46.752316Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:46.752416Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:46.754031Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28044, node 4 2025-04-10T11:01:46.926997Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:46.927032Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:46.927042Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:46.927193Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:47.198351Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:47.201727Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:47.201748Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:47.202602Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27789, port: 27789 2025-04-10T11:01:47.202713Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:47.217936Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:47.267170Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:47.315091Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****rcug (DDB335F7) () has now valid token of ldapuser@ldap 2025-04-10T11:01:51.326994Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491638060775665894:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmplXqVy2/pdisk_1.dat 2025-04-10T11:01:51.428185Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:51.561139Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:51.565162Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:51.565260Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:51.567256Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2668, node 5 2025-04-10T11:01:51.748947Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:51.748970Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:51.748978Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-10T11:01:51.749141Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:51.970287Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:51.973205Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:51.973234Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:51.973999Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17189, port: 17189 2025-04-10T11:01:51.974088Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:52.001853Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:52.050748Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-10T11:01:52.094986Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:52.095796Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:52.095848Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:52.142498Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:52.190479Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-10T11:01:52.192261Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****T_og (9435BA53) () has now valid token of ldapuser@ldap 2025-04-10T11:01:56.168591Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638079461939488:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:56.168638Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00153f/r3tmp/tmpGr7zjK/pdisk_1.dat 2025-04-10T11:01:56.486367Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:56.503596Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:56.503692Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:56.508525Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15776, node 6 2025-04-10T11:01:56.699105Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:56.699131Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:56.699140Z node 6 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-10T11:01:56.699322Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:56.874377Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:56.878596Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:56.878639Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:56.879682Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23735, port: 23735 2025-04-10T11:01:56.879779Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:56.925645Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:56.978743Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-10T11:01:56.978859Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:23735. Bad search filter 2025-04-10T11:01:56.979426Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****S5YA (7B805C06) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:23735. Bad search filter)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 26714, MsgBus: 21919 2025-04-10T11:01:44.534005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638031182614341:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:44.534598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000afa/r3tmp/tmp8ZJayE/pdisk_1.dat 2025-04-10T11:01:45.154395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:45.156357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:45.156445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:45.168459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26714, node 1 2025-04-10T11:01:45.343170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:45.343194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:45.343204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:45.343304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21919 TClient is connected to server localhost:21919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:46.269216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:46.346180Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:01:48.622397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638048362484241:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.622562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.924228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.114715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.187255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.238980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.326940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638052657451858:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.327026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.327275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638052657451863:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:49.333558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-10T11:01:49.367802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638052657451865:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-10T11:01:49.460167Z node 1 :TX_PROXY ERROR: Actor# [1:7491638052657451919:2576] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:49.537030Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638031182614341:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:49.537126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29418, MsgBus: 6623 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000afa/r3tmp/tmpqdyKjH/pdisk_1.dat 2025-04-10T11:01:50.780424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:50.795090Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:50.806140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:50.806254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:50.808230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29418, node 2 2025-04-10T11:01:50.922908Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:50.922950Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:50.922961Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:50.923104Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6623 TClient is connected to server localhost:6623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:51.579997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:01:51.645825Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:01:54.778290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638072506010720:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:54.778394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:54.862251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:01:54.920910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:01:55.014667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:55.069919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:55.165150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638076800978336:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:55.165260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:55.165626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638076800978341:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:55.169768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-10T11:01:55.192580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638076800978343:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-10T11:01:55.255950Z node 2 :TX_PROXY ERROR: Actor# [2:7491638076800978397:2575] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:55.477498Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
: Info: Success, code: 4 Trying to start YDB, gRPC: 5382, MsgBus: 7426 2025-04-10T11:01:56.372557Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638081143940047:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:56.372822Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000afa/r3tmp/tmpaZxZWv/pdisk_1.dat 2025-04-10T11:01:56.597843Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:56.602034Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:56.602116Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:56.631968Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5382, node 3 2025-04-10T11:01:56.802733Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:56.802759Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:56.802766Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:56.802902Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7426 TClient is connected to server localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:57.566896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:01:57.578107Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:57.596230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 >> IndexBuildTest::WithFollowers >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-04-10T11:01:57.739837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638083758145825:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:57.746461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011e4/r3tmp/tmpgRg453/pdisk_1.dat 2025-04-10T11:01:58.304440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:58.320632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:58.320753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:58.324809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:58.725139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:01:58.748532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:58.846370Z node 1 :GRPC_CLIENT DEBUG: [51700001a888] Connect to grpc://localhost:16375 2025-04-10T11:01:58.862774Z node 1 :GRPC_CLIENT DEBUG: [51700001a888] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-10T11:01:59.060233Z node 1 :GRPC_CLIENT DEBUG: [51700001a888] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-04-10T11:01:59.063429Z node 1 :GRPC_CLIENT DEBUG: [517000069488] Connect to grpc://localhost:63647 2025-04-10T11:01:59.064312Z node 1 :GRPC_CLIENT DEBUG: [517000069488] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-10T11:01:59.099042Z node 1 :GRPC_CLIENT DEBUG: [517000069488] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-04-10T11:01:59.099732Z node 1 :GRPC_CLIENT DEBUG: [517000069488] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-10T11:01:59.106227Z node 1 :GRPC_CLIENT DEBUG: [517000069488] Status 5 Not Found 2025-04-10T11:01:59.107036Z node 1 :GRPC_CLIENT DEBUG: [51700001a888] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-10T11:01:59.109284Z node 1 :GRPC_CLIENT DEBUG: [51700001a888] Status 5 Not Found >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> ExternalIndex::Simple [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> TableWriter::Backup [GOOD] |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 18995, MsgBus: 15905 2025-04-10T11:00:20.866437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637669692973666:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:20.872130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e33/r3tmp/tmpW6UkAX/pdisk_1.dat 2025-04-10T11:00:21.453990Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:21.456118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:21.456220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:21.467716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18995, node 1 2025-04-10T11:00:21.573866Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:21.573902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:21.573917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:21.574078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15905 TClient is connected to server localhost:15905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:22.182098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.250048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.407081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.598031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:22.685832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:24.751576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637686872844535:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:24.751706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.114798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.159571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.243673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.282076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.320347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.378255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:25.447108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637691167812350:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.447239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.447481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637691167812355:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:25.451809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:25.464712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637691167812357:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:25.561047Z node 1 :TX_PROXY ERROR: Actor# [1:7491637691167812412:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:25.850726Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637669692973666:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:25.850794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:26.636717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.957769Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:26.958756Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:26.958842Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637695462780244:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:26.958979Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:26.959038Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637695462780244:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:26.959318Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:26.959364Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637695462780244:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, Unl ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-10T11:01:12.876164Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:12.876214Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.876307Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715765 2025-04-10T11:01:12.876356Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, 
InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.876659Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:12.876715Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.877639Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715765, status: StatusAccepted 2025-04-10T11:01:12.877772Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715765 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:01:12.878621Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:12.878671Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: 
EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.885723Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715765, buildInfoId: 281474976710673 2025-04-10T11:01:12.885811Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715765, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.886256Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:12.886329Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.886357Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:01:12.887007Z node 2 
:BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:12.887066Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637893911351573:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 633, read rows: 32, read bytes: 597 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:12.887080Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710673, subscribers count# 1 2025-04-10T11:01:12.887476Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-04-10T11:01:12.887670Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:01:12.891801Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:01:12.891838Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:01:12.891854Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:01:18.580033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:18.580069Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:22.686536Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpIndexes::SecondaryIndexInsert1 |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-04-10T11:00:59.150647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:00:59.150754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:00:59.150811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:00:59.150847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:00:59.150895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:00:59.150925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:00:59.151015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:00:59.151092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:00:59.151493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:00:59.274168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:59.274245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:59.290407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:00:59.291126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:00:59.291334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:00:59.300943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:00:59.301163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:00:59.301909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:59.302544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:00:59.305657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:59.307568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:00:59.307634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:59.307811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:00:59.307871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:59.307920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:00:59.308133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.315810Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:349:2060] recipient: [1:17:2064] 2025-04-10T11:00:59.455610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:00:59.455985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.456258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:00:59.456505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:00:59.456573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.459420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:59.459581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:00:59.459822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.459888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:00:59.459951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:00:59.460005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:00:59.462407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.462477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:00:59.462519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:00:59.464570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.464634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.464681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:59.464741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:00:59.468604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:00:59.471188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:00:59.471429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:00:59.472468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:59.472615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 242 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:59.472666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:59.472949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:00:59.473015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:59.473219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:00:59.473302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:00:59.476230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:00:59.476281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:59.476501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:59.476544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:316:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:00:59.476887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:59.476961Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:00:59.477082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:00:59.477120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:59.477178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:00:59.477212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:59.477250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:00:59.477299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:59.477416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:00:59.477454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:00:59.477548Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:00:59.477593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:00:59.477637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:00:59.481442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:00:59.481591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:00:59.481649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hemaChanged> complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-10T11:02:03.164046Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:03.164087Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2025-04-10T11:02:03.164220Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:971:2740] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-10T11:02:03.164349Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:236:2154], Recipient [7:971:2740]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-10T11:02:03.164388Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:02:03.164438Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-04-10T11:02:03.164508Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-04-10T11:02:03.164808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.164841Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:03.164869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-04-10T11:02:03.164920Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2745] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-10T11:02:03.165003Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:236:2154], Recipient [7:978:2745]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-10T11:02:03.165043Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:02:03.165086Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-04-10T11:02:03.165124Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-04-10T11:02:03.165309Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:236:2154], Recipient [7:236:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:02:03.165348Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:02:03.165420Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-10T11:02:03.165467Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-04-10T11:02:03.165580Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:03.165611Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-04-10T11:02:03.165668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-10T11:02:03.165715Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-04-10T11:02:03.165813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-10T11:02:03.165854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-10T11:02:03.168154Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:236:2154], Recipient [7:236:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:02:03.168224Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:02:03.168297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.168344Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:02:03.168425Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:03.168451Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-10T11:02:03.168477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:03.168511Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-10T11:02:03.168534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:03.168563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-10T11:02:03.168644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:575:2401] message: TxId: 104 2025-04-10T11:02:03.168694Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:03.168734Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:02:03.168772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:02:03.168915Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-10T11:02:03.168973Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-10T11:02:03.169006Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-10T11:02:03.169037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-04-10T11:02:03.169058Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-10T11:02:03.169076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-10T11:02:03.169132Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-10T11:02:03.172570Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:03.172704Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:03.172827Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:575:2401] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-04-10T11:02:03.173007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:02:03.173069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1028:2781] 2025-04-10T11:02:03.173323Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1030:2783], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:03.173386Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:03.173433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T11:02:03.174590Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:236:2154] 2025-04-10T11:02:03.174647Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:02:03.177314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:03.177904Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:02:03.177963Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:02:03.181065Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:03.184572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:03.184825Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 
72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-04-10T11:02:03.184893Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T11:02:03.185512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T11:02:03.185567Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T11:02:03.186034Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1100:2853], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:03.186096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:03.186134Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:02:03.186336Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:575:2401], Recipient [7:236:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-04-10T11:02:03.186374Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:02:03.186464Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T11:02:03.186583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:02:03.186657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1098:2851] 2025-04-10T11:02:03.186898Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1100:2853], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:03.186940Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:03.186983Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |71.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |71.7%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IndexBuildTest::WithFollowers [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> TableWriter::Restore [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata |71.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:03.092114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:03.092217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:03.092282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:03.092327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:03.092374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:03.092403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:03.092502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:03.092616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:03.092971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:03.205328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-10T11:02:03.205382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:03.230679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:03.230937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:03.231108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:03.244823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:03.245176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:03.246015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:03.246703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:03.249121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:03.250796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:03.250866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:03.251086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:03.251142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:03.251184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:03.251359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.266950Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:03.413527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:03.413815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.414075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:03.414358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:03.414417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.417283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:03.417460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T11:02:03.417687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.417749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:03.417808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:03.417849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:03.420324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.420395Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:03.420435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:03.423436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.423492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.423534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:03.423604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:03.427507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:03.432757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:03.433014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:03.434093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:03.434314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:03.434371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:03.434690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:03.434746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:03.434952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:03.435051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:03.441449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:03.441505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:03.441676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:03.441710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:03.441922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:03.441957Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:03.442048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:03.442077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:03.442118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:03.442141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:03.442200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:03.442240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:03.442280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:03.442307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:03.442379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:03.442410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:03.442437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:03.444433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:03.444533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:03.444560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1:02:04.445542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:1, at schemeshard: 72057594046678944 2025-04-10T11:02:04.445591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:1 ProgressState 2025-04-10T11:02:04.445720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-04-10T11:02:04.445763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-10T11:02:04.445805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-04-10T11:02:04.445843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-10T11:02:04.445883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-04-10T11:02:04.447071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.447226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.447264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:04.447301Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:02:04.447355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:02:04.448342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.448527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.448566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:04.448606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:02:04.448641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:04.449828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.449917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.449953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:04.449986Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-10T11:02:04.450020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:02:04.451503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.451594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.451624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:04.452227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.452293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:04.452325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:04.452354Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:02:04.452390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:02:04.452462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-10T11:02:04.453590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-10T11:02:04.453664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:04.454075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:02:04.456325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-10T11:02:04.456381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:04.456444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-10T11:02:04.456482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:04.456519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-10T11:02:04.456591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 104 2025-04-10T11:02:04.456644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:04.456691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:02:04.456732Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:02:04.456919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:02:04.456972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-10T11:02:04.456997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-10T11:02:04.457035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:02:04.457091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-10T11:02:04.457127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-10T11:02:04.457187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:02:04.458864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:04.460484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:04.460564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:04.460609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:04.462813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:04.463039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:02:04.463116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:696:2653] TestWaitNotification: OK eventTxId 104 2025-04-10T11:02:04.463924Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:04.464253Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 305us result status StatusSuccess 2025-04-10T11:02:04.464788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" 
TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] |71.7%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> AssignTxId::Basic [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> IndexBuildTest::DropIndex [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:01.434545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:01.434642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.434702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:01.434740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:01.434785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:01.434817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:01.434871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.434946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:01.435301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:01.524481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:01.524545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:01.551232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:01.551539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:01.551680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:01.560651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:01.560921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:01.561586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:01.561897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:01.563678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:01.564783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:01.564832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:01.564993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:01.565029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:01.565059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:01.565174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.571753Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:01.809430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:01.809756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.810023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:01.810378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:01.810454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.816579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:01.816775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:01.817005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.817060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:01.817095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:01.817141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:01.820439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.820533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:01.820575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:01.827426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.827504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.827556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:01.827606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:01.835556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-04-10T11:02:01.839580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:01.839789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:01.841002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:01.841172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:01.841225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:01.841518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:01.841571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:01.841754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:01.841856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:01.846085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:01.846179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:01.846391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:01.846441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:01.846795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.846846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:01.846947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:01.846988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:01.847045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:01.847083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:01.847121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:01.847169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:01.847217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-04-10T11:02:01.847250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:01.847326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:01.847364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:01.847399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:01.854468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:01.854644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:01.854758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athId: 3] was 2 2025-04-10T11:02:06.149464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:06.149907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.149981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.150354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.150532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.150673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.150908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.151025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.151378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.151787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:06.152063Z node 1 :BUILD_INDEX DEBUG: AddShardStatus id# 102 shard 72057594046678944:11 range { From: -inf, To: inf } 2025-04-10T11:02:06.152166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.152226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.152295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.161305Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:02:06.161459Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, 
IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:06.161510Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-04-10T11:02:06.170595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:06.170692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:06.177590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:06.177700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:06.177774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:06.180342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:3104:4899] sender: [1:3162:2058] recipient: [1:15:2062] 2025-04-10T11:02:06.228500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:06.228844Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 383us result status StatusSuccess 2025-04-10T11:02:06.230175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:02.667976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:02.668065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:02.668102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2025-04-10T11:02:02.668129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:02.668174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:02.668198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:02.668239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:02.668298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:02.668591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:02.756328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:02.756386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:02.773331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:02.773656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:02.773849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:02.799417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:02.799671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:02.800325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.800701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:02.802626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.803930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:02.804001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.804175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:02.804217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:02.804256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:02.804393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.811375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:02.955266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:02.955494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain 
Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.955717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:02.955969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:02.956026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.959694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.959891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:02.960208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.960299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:02.960353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:02.960419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:02.968373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.968478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:02.968543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:02.974094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.974178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.974232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.974299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.978402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:02.980579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:02.980796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:02.981777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 
1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.981919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:02.981969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.982277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:02.982334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.982532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:02.982613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:02.984782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:02.984828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:02.984987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.985049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:02.985288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.985334Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:02.985452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.985499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.985539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.985570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.985604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:02.985642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.985692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:02.985724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:02.985778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:02.985816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:02.985849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:02.988144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:02.988252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:02.988287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 09550, at schemeshard: 72057594046678944 2025-04-10T11:02:06.664175Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-10T11:02:06.664646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-10T11:02:06.664693Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-10T11:02:06.664795Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-10T11:02:06.664834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-10T11:02:06.664873Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-10T11:02:06.664909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-10T11:02:06.664949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-04-10T11:02:06.665133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-10T11:02:06.665164Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-04-10T11:02:06.665210Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-10T11:02:06.665234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:02:06.665263Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-04-10T11:02:06.665798Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.665906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.665942Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.665983Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-10T11:02:06.666032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-04-10T11:02:06.666525Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.666609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.666638Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.666670Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-04-10T11:02:06.666699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-10T11:02:06.668242Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.668318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.668344Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.668374Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-04-10T11:02:06.668403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:06.669644Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.669721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.669746Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.670824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T11:02:06.670878Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:06.671132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-04-10T11:02:06.671252Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-10T11:02:06.671291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-10T11:02:06.671330Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-10T11:02:06.671365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-10T11:02:06.671409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2025-04-10T11:02:06.671946Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 
PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.672014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.672038Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.672897Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.672965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:02:06.672990Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:02:06.673020Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-10T11:02:06.673050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-04-10T11:02:06.673139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-04-10T11:02:06.674794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.674867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.675483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-10T11:02:06.675527Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:06.675725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-10T11:02:06.675813Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-10T11:02:06.675838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-10T11:02:06.675869Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-10T11:02:06.675892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-10T11:02:06.675918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-04-10T11:02:06.675983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:415:2372] message: TxId: 105 2025-04-10T11:02:06.676036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-10T11:02:06.676079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:02:06.676114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T11:02:06.676208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:02:06.676248Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-10T11:02:06.676269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-10T11:02:06.676295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-10T11:02:06.676334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-04-10T11:02:06.676354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-04-10T11:02:06.676391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-10T11:02:06.677104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.677153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.677254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.678613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:02:06.679929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:02:06.679980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:939:2863] TestWaitNotification: OK eventTxId 105 >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 28627, MsgBus: 3209 2025-04-10T11:00:16.930485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637650752966708:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:16.943514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e50/r3tmp/tmp7cH32j/pdisk_1.dat 2025-04-10T11:00:17.467307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:17.473889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:17.474001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:17.479492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28627, node 1 2025-04-10T11:00:17.598553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:17.598576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:17.598596Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:17.598731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3209 TClient is connected to server localhost:3209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:18.422560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.461649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:18.603502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:18.761092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:00:18.875025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:20.731912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637667932837522:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:20.732010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.031182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.071915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.146488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.184126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.226847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.323089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:21.390657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637672227805337:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.390747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.390982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637672227805342:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:21.394990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:21.413180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637672227805344:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:21.494861Z node 1 :TX_PROXY ERROR: Actor# [1:7491637672227805399:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:21.924879Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637650752966708:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:21.924951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:22.565981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:23.020826Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:23.021491Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:23.021549Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637680817740549:2522], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:23.021654Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:23.021692Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637680817740549:2522], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, 
SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:23.021872Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:23.021903Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637680817740549:2522], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, Unlock ... :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Applying to Unlocking 2025-04-10T11:01:10.270940Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:10.271007Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.271120Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715764 2025-04-10T11:01:10.271171Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, 
InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.271463Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:10.271517Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.272545Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715764, status: StatusAccepted 2025-04-10T11:01:10.272684Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715764 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:01:10.273388Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:10.273452Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: 
EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.275683Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715764, buildInfoId: 281474976710673 2025-04-10T11:01:10.275741Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715764, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.276674Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:10.276731Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.276767Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:01:10.277046Z node 2 
:BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:10.277125Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637878045784875:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715763, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 24, upload bytes: 557, read rows: 32, read bytes: 673 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:10.277143Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710673, subscribers count# 1 2025-04-10T11:01:10.277709Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-04-10T11:01:10.277928Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:01:10.338445Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:01:10.338890Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:01:10.338915Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:01:14.968241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:14.968274Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:19.744697Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-04-10T11:02:01.016167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638101499380342:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:01.016666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d8d/r3tmp/tmp5HJcN8/pdisk_1.dat 2025-04-10T11:02:01.654419Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T11:02:01.661239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:01.661325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:01.673348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26858 TServer::EnableGrpc on GrpcPort 18056, node 1 2025-04-10T11:02:02.426496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:02.426521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:02.426529Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:02.426688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:02.889188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:02.921264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:05.137231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638118679250198:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:05.137420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:05.603880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:05.629428Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-04-10T11:02:05.629507Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-04-10T11:02:05.637731Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-04-10T11:02:05.637830Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-04-10T11:02:05.638084Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-04-10T11:02:05.638094Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-04-10T11:02:05.676690Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:18056" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-10T11:02:05.680793Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:18056" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-10T11:02:05.680921Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:02:05.690331Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-04-10T11:02:05.692346Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:02:05.693689Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:02:05.693791Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-04-10T11:02:05.693802Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-04-10T11:02:05.693812Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-04-10T11:02:05.694907Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-04-10T11:02:05.694939Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 2025-04-10T11:02:05.738208Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-10T11:02:05.738262Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found } 2025-04-10T11:02:05.738425Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-10T11:02:05.738549Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-10T11:02:05.738582Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-04-10T11:02:05.739672Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete TClient::Ls request: /Root/replication TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282925742 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns... (TRUNCATED) 2025-04-10T11:02:05.757229Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-04-10T11:02:05.757301Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-04-10T11:02:05.757336Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-04-10T11:02:05.757437Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:02:05.757475Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-04-10T11:02:05.758378Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-10T11:02:05.758853Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-04-10T11:02:05.758889Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-10T11:02:05.758950Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-10T11:02:05.759441Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-04-10T11:02:05.759468Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-10T11:02:05.759491Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-10T11:02:05.759990Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-04-10T11:02:05.760033Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-10T11:02:05.760597Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 
2025-04-10T11:02:05.761075Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-04-10T11:02:05.761106Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-04-10T11:02:05.761134Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-10T11:02:05.761589Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-04-10T11:02:05.761618Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-04-10T11:02:05.762002Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2025-04-10T11:02:05.762074Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:02:05.762093Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-04-10T11:02:05.762114Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-04-10T11:02:05.762526Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-04-10T11:02:05.762551Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-04-10T11:02:05.762975Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-04-10T11:02:06.017329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638101499380342:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:06.017427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |71.8%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |71.8%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild |71.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2025-04-10T11:01:06.619491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637865422757628:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:06.619534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001589/r3tmp/tmpSJ8ob2/pdisk_1.dat 2025-04-10T11:01:07.514613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:07.517332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:07.517443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:07.521681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24773, node 1 2025-04-10T11:01:07.870955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:07.870978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:07.870989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:07.871131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:08.368826Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:08.373736Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:08.373771Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:08.374623Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26768, port: 26768 2025-04-10T11:01:08.375412Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:08.410398Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:08.459126Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****qBaA (352EE8EE) () has now valid token of ldapuser@ldap 2025-04-10T11:01:15.010701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637901827430924:2227];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:15.010917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001589/r3tmp/tmpLtfANC/pdisk_1.dat 2025-04-10T11:01:15.222322Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:15.239436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:15.239548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:15.245338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 26937, node 2 2025-04-10T11:01:15.470835Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:15.470869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:15.470881Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:15.471034Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:15.711490Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:15.718577Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:15.718616Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:15.719449Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14536, port: 14536 2025-04-10T11:01:15.719513Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:15.755204Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:15.806742Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:15.807444Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:14536 return no entries 2025-04-10T11:01:15.808031Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****WlPQ (189FBB46) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:14536 return no entries)' 2025-04-10T11:01:20.406591Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637926562881526:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001589/r3tmp/tmpgsXU3p/pdisk_1.dat 2025-04-10T11:01:20.505036Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:20.648952Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:20.683122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:20.683201Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:20.688148Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15382, node 3 2025-04-10T11:01:20.864592Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:20.864620Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:20.864627Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:20.864775Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:21.038318Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:21.042540Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:21.042578Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, 
target database candidates(1): /Root 2025-04-10T11:01:21.043247Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1443, port: 1443 2025-04-10T11:01:21.043314Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:21.064117Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:21.112807Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:21.158636Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:21.159412Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:21.159504Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:21.206513Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:21.262487Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:21.263903Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****-o_A (521E9749) () has now valid token of ldapuser@ldap 2025-04-10T11:01:24.391225Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****-o_A (521E9749) 2025-04-10T11:01:24.391349Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1443, port: 1443 2025-04-10T11:01:24.391414Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:24.424077Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:24.467145Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:24.516183Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:24.516909Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:24.516945Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:24.562485Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:24.607212Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:24.608329Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****-o_A (521E9749) () has now valid token of ldapuser@ldap 2025-04-10T11:01:25.372662Z node 3 :METADATA_PROVIDER 
ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491637926562881526:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:25.372792Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:27.402286Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****-o_A (521E9749) 2025-04-10T11:01:27.402393Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1443, port: 1443 2025-04-10T11:01:27.402465Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:27.446324Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:27.498689Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:27.542779Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:27.543406Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:27.543439Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:27.591519Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: base ... State: Unknown -> Disconnected 2025-04-10T11:01:44.542730Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:44.544479Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21583, node 5 2025-04-10T11:01:44.655002Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:44.655036Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:44.655046Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:44.655201Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:44.746352Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:44.750544Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:44.750588Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:44.751421Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4713, port: 4713 2025-04-10T11:01:44.751496Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:44.777941Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:44.822928Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:44.867288Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:01:44.869076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:01:44.869137Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:44.911760Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:44.954577Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:01:44.955848Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Ugfw (22C17F23) () has now valid token of ldapuser@ldap 2025-04-10T11:01:49.299868Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491638031721851385:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:49.299969Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:50.328070Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Ugfw (22C17F23) 2025-04-10T11:01:50.328274Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4713, port: 4713 2025-04-10T11:01:50.328365Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:50.352404Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:50.404524Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:50.406445Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:4713 return no entries 2025-04-10T11:01:50.407091Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Ugfw (22C17F23) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:4713 return no entries)' 2025-04-10T11:01:53.332754Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Ugfw (22C17F23) 2025-04-10T11:01:55.960250Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638076084256091:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:55.960312Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001589/r3tmp/tmpHU9CfI/pdisk_1.dat 2025-04-10T11:01:56.170610Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:56.196252Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:56.197379Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:56.199412Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16093, node 6 2025-04-10T11:01:56.303133Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:56.303171Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:56.303180Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:56.303359Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:56.530420Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:01:56.534643Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:56.534696Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:56.535681Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18181, port: 18181 2025-04-10T11:01:56.535807Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:56.563541Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:56.610745Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:56.611450Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. Server is busy 2025-04-10T11:01:56.612048Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****CnOg (E1E249E3) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. 
Server is busy)' 2025-04-10T11:01:56.612432Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:56.612473Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:56.613655Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18181, port: 18181 2025-04-10T11:01:56.613767Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:56.654974Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:56.698804Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:56.699382Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. Server is busy 2025-04-10T11:01:56.713092Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****CnOg (E1E249E3) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. Server is busy)' 2025-04-10T11:01:58.979793Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****CnOg (E1E249E3) 2025-04-10T11:01:58.980198Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:01:58.980225Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:01:59.011798Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18181, port: 18181 2025-04-10T11:01:59.011956Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:01:59.035644Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:01:59.080703Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:01:59.090409Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. Server is busy 2025-04-10T11:01:59.091123Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****CnOg (E1E249E3) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:18181. 
Server is busy)' 2025-04-10T11:02:00.960641Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638076084256091:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:00.960760Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:01.990321Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****CnOg (E1E249E3) 2025-04-10T11:02:01.990641Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:02:01.990662Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:02:01.995048Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18181, port: 18181 2025-04-10T11:02:01.995175Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-10T11:02:02.039467Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:02:02.087227Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-10T11:02:02.130705Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-10T11:02:02.131254Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-10T11:02:02.131316Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:02:02.181937Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:02:02.227861Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-10T11:02:02.229461Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****CnOg (E1E249E3) () has now valid token of ldapuser@ldap |71.8%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |71.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |71.8%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.8%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> TContinuousBackupTests::TakeIncrementalBackup >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributesExtSubdomain |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 13953, MsgBus: 8836 2025-04-10T11:01:42.713526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638022864380264:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:42.713965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b1e/r3tmp/tmp1HFlCT/pdisk_1.dat 2025-04-10T11:01:43.275242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:43.281392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:43.282168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:43.285034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13953, node 1 2025-04-10T11:01:43.462324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:43.462352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:43.462363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:43.462507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8836 TClient is connected to server localhost:8836 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:44.403153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:44.434389Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:44.462196Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:01:46.996513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638040044250019:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:46.996625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:47.356832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:01:47.575601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:01:47.624708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:47.664978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:47.697706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638022864380264:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:47.697774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:47.721672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638044339217629:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:47.721738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:47.721937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638044339217634:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:47.725898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-10T11:01:47.740601Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-04-10T11:01:47.740786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638044339217636:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-10T11:01:47.802981Z node 1 :TX_PROXY ERROR: Actor# [1:7491638044339217690:2575] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17690, MsgBus: 5108 2025-04-10T11:01:48.988778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638047854409301:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b1e/r3tmp/tmpsm38j6/pdisk_1.dat 2025-04-10T11:01:49.097114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:49.154110Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:49.185919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:49.186016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:49.194101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17690, node 2 2025-04-10T11:01:49.306634Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:49.306662Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:49.306672Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:49.306825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5108 TClient is connected to server localhost:5108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:50.095196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:50.111542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:01:50.200407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:50.458537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:50.572945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:53.394372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638069329247371:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:53.394487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:53.450549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:53.503073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 720575 ... don't have access permissions } 2025-04-10T11:01:53.807088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638069329247886:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:53.814314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:01:53.834652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638069329247889:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:01:53.906755Z node 2 :TX_PROXY ERROR: Actor# [2:7491638069329247944:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:53.985572Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638047854409301:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:53.985635Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:54.833226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-10T11:01:55.716232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T11:01:56.251683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-10T11:01:57.010110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:01:57.659687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-10T11:01:58.307471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-10T11:01:58.877711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:01:58.965088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:01.241202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715713:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13178, MsgBus: 3193 2025-04-10T11:02:02.738653Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638108958632033:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:02.738706Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b1e/r3tmp/tmpqSZ0sg/pdisk_1.dat 2025-04-10T11:02:03.003661Z node 3 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T11:02:03.009131Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:03.009256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:03.011304Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13178, node 3 2025-04-10T11:02:03.094715Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:03.094738Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:03.094747Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:03.094883Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3193 TClient is connected to server localhost:3193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:03.720511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:03.816292Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:02:03.833058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:03.941037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:04.146595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:04.272427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:07.187414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638130433470300:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:07.187550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:07.249731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.298168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.354725Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.411806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.464702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.550975Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:02:07.679981Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638130433470822:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:07.680104Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:07.680628Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638130433470828:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:07.694969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:02:07.714528Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:02:07.714882Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638130433470830:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:02:07.739505Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491638108958632033:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:07.742539Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:07.805818Z node 3 :TX_PROXY ERROR: Actor# [3:7491638130433470888:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |71.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> IndexBuildTest::CancelBuild [GOOD] |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |71.9%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:10.866141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:10.866276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:10.866322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:10.866390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:10.866446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:10.866477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:10.866537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-10T11:02:10.866624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:10.866998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:10.980836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:10.980913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:11.007505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:11.007914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:11.008116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:11.054983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:11.055399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:11.056270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.056919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:11.062768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.064178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:11.064242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.064402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:11.064456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:11.064493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:11.064621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.079994Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:11.212335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:11.212662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.213009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:11.213305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:11.213375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.216446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.216617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:11.216880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.216947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:11.216987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:11.217030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:11.219679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.219747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:11.219792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:11.222298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.222371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.222436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.222491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.226405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:11.235300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:11.235605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:11.236634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.236788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:11.236843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.237179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-10T11:02:11.237233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.237474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:11.237559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:11.240635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:11.240702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:11.240924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.240967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:11.241264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.241320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:11.241437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:11.241472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.241512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:11.241544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.241606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:11.241685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.241759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:11.241796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:11.241886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:11.241927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:11.241961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:11.244714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:11.244854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:11.244899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
33409546, at schemeshard: 72057594046678944 2025-04-10T11:02:12.331637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-04-10T11:02:12.341354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-10T11:02:12.341947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-10T11:02:12.342089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-10T11:02:12.342159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-04-10T11:02:12.342242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-10T11:02:12.342279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-10T11:02:12.342340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-10T11:02:12.342370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-10T11:02:12.342405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-04-10T11:02:12.342500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 103 2025-04-10T11:02:12.342580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-10T11:02:12.342632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:02:12.342672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:02:12.342779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:02:12.342817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-10T11:02:12.342842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-10T11:02:12.342925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:02:12.342966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-10T11:02:12.342989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-10T11:02:12.343048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:02:12.343097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-10T11:02:12.343123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-10T11:02:12.343213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:02:12.352016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:02:12.352096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:717:2620] TestWaitNotification: OK eventTxId 103 2025-04-10T11:02:12.352886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:12.353232Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 370us result status StatusSuccess 2025-04-10T11:02:12.353801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:12.354530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:12.354807Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 248us result status StatusSuccess 2025-04-10T11:02:12.355421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { 
Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:12.356755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:12.357034Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 292us result status StatusSuccess 2025-04-10T11:02:12.357531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> KqpIndexes::SecondaryIndexInsert1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:53.470776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:53.470898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:53.470953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:53.470990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:53.471039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:53.471072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:53.471136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:53.471221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:53.471578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-10T11:01:53.564529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:53.564584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:53.589924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:53.590287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:53.590456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:53.601905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:53.602210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:53.602983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.603393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:53.605637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.607007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:53.607089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.607278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:53.607325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:53.607364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:53.607519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.615404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:53.793472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:53.793707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.793922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:53.794220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:53.794284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.798179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.798342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:53.798550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.798606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:53.801548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:53.801683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:53.804552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.804641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:01:53.804690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:01:53.807178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.807244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.807295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.807381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.811146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:53.813344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:01:53.813540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:01:53.814559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.814693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:53.814761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.815030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:01:53.815084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.815266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-10T11:01:53.815339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:01:53.817840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:53.817885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:53.818069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.818108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:01:53.818372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.818417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:01:53.818516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:53.818613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.818652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:53.818680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.818715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:01:53.818807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.818845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:01:53.818874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:01:53.818952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:53.818987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:01:53.819068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:01:53.821365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:53.821490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:53.821525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
on: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-10T11:02:13.251801Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:13.251904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:13.251966Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-10T11:02:13.252011Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-10T11:02:13.256047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:02:13.256097Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T11:02:13.256201Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:02:13.256231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:02:13.256305Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:02:13.256353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:02:13.256401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-10T11:02:13.256465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:124:2150] message: TxId: 281474976710760 2025-04-10T11:02:13.256523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:02:13.256569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-10T11:02:13.256600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T11:02:13.256672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 2025-04-10T11:02:13.259757Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T11:02:13.259836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T11:02:13.259901Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-10T11:02:13.259980Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: 
[2:1174:3027], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:13.262115Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:02:13.262227Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1174:3027], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:13.262286Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-10T11:02:13.264208Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:02:13.264300Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1174:3027], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:13.264345Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T11:02:13.264524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:02:13.264571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [2:1270:3112] TestWaitNotification: OK eventTxId 102 2025-04-10T11:02:13.267589Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-10T11:02:13.267857Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 } 2025-04-10T11:02:13.271801Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:13.272064Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 286us result status StatusSuccess 2025-04-10T11:02:13.272639Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-10T11:02:13.275345Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:13.275575Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 258us result status StatusPathDoesNotExist 2025-04-10T11:02:13.275746Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |71.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> VectorIndexBuildTest::BaseCase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 8740, MsgBus: 61405 2025-04-10T11:00:27.499075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637697467301489:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:27.499528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e26/r3tmp/tmpjRnFy9/pdisk_1.dat 2025-04-10T11:00:28.027005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:28.029141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:28.029253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:28.036138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8740, node 1 2025-04-10T11:00:28.150447Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:28.150477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:28.150483Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:28.150586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61405 TClient is connected to server localhost:61405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:28.845431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:28.865320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:28.874010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.085280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.266489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.372854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:31.496342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637714647172304:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:31.547549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:31.784816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.837235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.884305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:31.971803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.054692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.137210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.258936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637718942140121:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.259085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.259447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637718942140126:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:32.265019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:32.292131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637718942140128:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:32.389761Z node 1 :TX_PROXY ERROR: Actor# [1:7491637718942140184:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:32.503552Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637697467301489:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:32.520451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:34.433651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:34.892496Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:34.893691Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:34.893798Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637727532075368:2526], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:34.893971Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:34.894040Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637727532075368:2526], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0,
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:34.894965Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:34.895066Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637727532075368:2526], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, ... CreateSender: [2:7491637928387020266:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:20.963379Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:01:20.964310Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:01:20.964369Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7491637928387020266:2531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:20.964384Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710673, subscribers count# 1 2025-04-10T11:01:20.965946Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-04-10T11:01:20.966183Z node 2 
:BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:01:20.971194Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:01:20.971228Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-10T11:01:20.973106Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-10T11:01:20.975506Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-10T11:01:22.922738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:01:22.922781Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:30.554355Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 Trying to start YDB, gRPC: 11930, MsgBus: 3270 2025-04-10T11:02:04.696786Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638114596905422:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:04.696890Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e26/r3tmp/tmpV661a5/pdisk_1.dat 2025-04-10T11:02:04.938181Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:04.973081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:04.973224Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:04.975678Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11930, node 3 2025-04-10T11:02:05.042822Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:05.042853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:05.042861Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:05.043030Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3270 TClient is connected to server localhost:3270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:05.695721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:05.746664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:05.879127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:06.119019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:02:06.222506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.698676Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638136071743672:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:09.698790Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491638114596905422:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:09.698877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:09.698936Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:09.755943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.800841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.865133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.949186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.011160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.095776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.226370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638140366711497:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.226539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.230391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638140366711502:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.236838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:02:10.253926Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638140366711504:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:02:10.315951Z node 3 :TX_PROXY ERROR: Actor# [3:7491638140366711559:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:12.308750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:02:13.166004Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> FolderServiceTest::TFolderService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:01:53.615524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:01:53.615641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:53.615709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:01:53.615745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:01:53.615795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:01:53.615827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:01:53.615895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:01:53.615978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:01:53.616353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:01:53.695305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:53.695380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:53.729906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:01:53.730353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:01:53.730572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:01:53.739439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:01:53.739741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:01:53.740542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.740947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:01:53.743324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.745056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:53.745128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.745422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:01:53.745483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:53.745523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:01:53.745694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.753832Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:01:53.942139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:01:53.942412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.942636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:01:53.942930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:01:53.942990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.945481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.945640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:01:53.945868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.945926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:01:53.945973Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:01:53.946009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:01:53.948091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.948164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:01:53.948200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:01:53.950075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.950122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.950186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.950240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.954121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:53.958600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:01:53.958805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:01:53.959965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:01:53.960146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:01:53.960227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.960526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:01:53.960587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:01:53.960791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:01:53.960890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:01:53.963400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:01:53.963460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:01:53.963648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:01:53.963690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:01:53.964094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:01:53.964150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:01:53.964250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:53.964288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.964353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:01:53.964385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.964423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:01:53.964475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:01:53.964515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:01:53.964546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:01:53.964638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:01:53.964679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:01:53.964716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:01:53.967307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:53.967467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:01:53.967508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ssing event TEvDataShard::TEvStateChangedResult 2025-04-10T11:02:15.342952Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409567 state Offline 2025-04-10T11:02:15.343217Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4560:6225], Recipient [1:4569:6232]: NKikimr::TEvTablet::TEvTabletDead 2025-04-10T11:02:15.343567Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409568 2025-04-10T11:02:15.343717Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409568 2025-04-10T11:02:15.345815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72075186233409561 ShardLocalIdx: 8, at schemeshard: 72075186233409561 2025-04-10T11:02:15.346225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 7] was 1 2025-04-10T11:02:15.346831Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186233409567 2025-04-10T11:02:15.356387Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:4873:6525], Recipient [1:4445:6120]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409561 ClientId: [1:4873:6525] ServerId: [1:4874:6526] } 2025-04-10T11:02:15.356465Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed Forgetting tablet 72075186233409567 2025-04-10T11:02:15.357064Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4436:6113], Recipient [1:4445:6120]: NKikimr::TEvTablet::TEvTabletDead 2025-04-10T11:02:15.357375Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409567 2025-04-10T11:02:15.357540Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409567 2025-04-10T11:02:15.360094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-04-10T11:02:15.360167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 7], at schemeshard: 72075186233409561 2025-04-10T11:02:15.360276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 5 2025-04-10T11:02:15.361121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72075186233409561 ShardLocalIdx: 7, at schemeshard: 72075186233409561 2025-04-10T11:02:15.361418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 6] was 1 2025-04-10T11:02:15.367807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:8 2025-04-10T11:02:15.367892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:8 tabletId 72075186233409568 2025-04-10T11:02:15.370867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72075186233409561 2025-04-10T11:02:15.371162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-04-10T11:02:15.371241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72075186233409561, LocalPathId: 6], at schemeshard: 72075186233409561 2025-04-10T11:02:15.371337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 4 2025-04-10T11:02:15.371766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:7 2025-04-10T11:02:15.371839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:7 tabletId 72075186233409567 2025-04-10T11:02:15.374781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409561 2025-04-10T11:02:15.439911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409561 2025-04-10T11:02:15.440063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735762 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409563 TabletID: 72075186233409561, at schemeshard: 72075186233409561 2025-04-10T11:02:15.440129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDropLock TPropose opId# 281474976735762:0 HandleReply TEvOperationPlan: step# 1900 2025-04-10T11:02:15.440180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735762:0 128 -> 240 2025-04-10T11:02:15.443376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735762:0, at schemeshard: 72075186233409561 2025-04-10T11:02:15.443452Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDone opId# 281474976735762:0 ProgressState 2025-04-10T11:02:15.443550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-10T11:02:15.443581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-10T11:02:15.443619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-10T11:02:15.443660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-10T11:02:15.443697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735762, ready parts: 1/1, is published: true 2025-04-10T11:02:15.443765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3312:5057] message: TxId: 281474976735762 2025-04-10T11:02:15.443806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-10T11:02:15.443839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735762:0 2025-04-10T11:02:15.443867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735762:0 2025-04-10T11:02:15.443941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409561, LocalPathId: 2] was 4 2025-04-10T11:02:15.449067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735762 2025-04-10T11:02:15.449158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735762 2025-04-10T11:02:15.449228Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfoId: 115 2025-04-10T11:02:15.449335Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : 
TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfo: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4185:5885], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:15.452311Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-10T11:02:15.452411Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4185:5885], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:15.452459Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:02:15.454531Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-10T11:02:15.454622Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4185:5885], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 
}} 2025-04-10T11:02:15.454670Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-04-10T11:02:15.454810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-04-10T11:02:15.454853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:4317:5995] TestWaitNotification: OK eventTxId 115 2025-04-10T11:02:15.465677Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-04-10T11:02:15.466007Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |71.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |71.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2025-04-10T10:56:27.286451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:27.286905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:27.287054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2025-04-10T10:56:27.287139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0019f4/r3tmp/tmps5IUSZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24283, node 1 TClient is connected to server localhost:9944 2025-04-10T10:56:27.982695Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-04-10T10:56:27.982972Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-04-10T10:56:27.988252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.032389Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:28.037652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:28.037726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:28.037761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:28.038039Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:56:28.079081Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:28.080214Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:28.080629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:28.080779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:28.095683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:28.245907Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-04-10T10:56:28.246006Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:28.246246Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:678:2571] 2025-04-10T10:56:28.354245Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32"
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:28.354382Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:28.355132Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:28.355250Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:56:28.355643Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:28.355909Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T10:56:28.356028Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:56:28.361247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:28.362037Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:56:28.363169Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:56:28.363310Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# [1:677:2570] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-10T10:56:28.490953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:749:2632];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T10:56:28.518224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:749:2632];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T10:56:28.518654Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-10T10:56:28.528409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:28.528687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:28.529036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:28.529199Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:28.529350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:28.529523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:28.529658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:28.529812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:28.529988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:28.530129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:56:28.530302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:28.530499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:749:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:28.557827Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-10T10:56:28.558826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:28.558913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:28.559120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.559297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:28.559395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:28.559448Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:28.559555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:56:28.559628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:28.559685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:28.559738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:28.559962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:28.560040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute; ... 17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"cc4c8328-921e3591-90cd96b9-949e3aab")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2025-04-10T11:01:58.490442Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.489 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '338) '('"_id" '"8ffc7b0a-11cfac15-4f1a2552-eb67dbaa") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"cc4c8328-921e3591-90cd96b9-949e3aab")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2025-04-10T11:01:58.505728Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.505 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:466: Register async execution for node #268 2025-04-10T11:01:58.505896Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 
11:01:58.505 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {3}, callable #277 2025-04-10T11:01:58.506018Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.505 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #277 finished execution 2025-04-10T11:01:58.506091Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.506 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:594: Node #277 created 0 trackable nodes: 2025-04-10T11:01:58.506199Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.506 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:87: Finish, output #280, status: Async 2025-04-10T11:01:58.506947Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.506 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:133: Completed async execution for node #268 2025-04-10T11:01:58.507035Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #268 2025-04-10T11:01:58.507112Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:59: Begin, root #280 2025-04-10T11:01:58.507177Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #280, status: Ok 2025-04-10T11:01:58.507246Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-04-10T11:01:58.507312Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-04-10T11:01:58.507377Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-04-10T11:01:58.507487Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-04-10T11:01:58.507558Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {4}, callable #268 2025-04-10T11:01:58.507772Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, 
tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #268 finished execution 2025-04-10T11:01:58.507845Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes: 2025-04-10T11:01:58.507914Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-04-10T11:01:58.507978Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.507 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #275 finished execution 2025-04-10T11:01:58.508053Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-04-10T11:01:58.508306Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #278 finished execution 2025-04-10T11:01:58.508376Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes: 2025-04-10T11:01:58.508453Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-04-10T11:01:58.508544Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #279 finished execution 2025-04-10T11:01:58.508611Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes: 2025-04-10T11:01:58.508683Z node 1 :KQP_YQL TRACE: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 TRACE ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-04-10T11:01:58.508756Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:577: Node #280 finished execution 2025-04-10T11:01:58.508822Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes: 2025-04-10T11:01:58.508885Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:87: Finish, output #280, 
status: Ok 2025-04-10T11:01:58.508947Z node 1 :KQP_YQL INFO: TraceId: 01jrfnt79g6czdstt63cdzj9ge, SessionId: CompileActor 2025-04-10 11:01:58.508 INFO ydb-services-ext_index-ut(pid=1123770, tid=0x00007F67085CFD00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #280 2025-04-10T11:01:58.533141Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-04-10T11:01:58.533222Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716232 ProcessProposeKqpTransaction 2025-04-10T11:01:58.543643Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-04-10T11:01:58.543723Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716233 ProcessProposeKqpTransaction 2025-04-10T11:01:58.733938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:749:2632];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:01:58.734338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:756:2635];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:01:58.734411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:761:2638];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:01:58.734473Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:766:2642];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:01:58.747132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:749:2632];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-10T11:01:58.747339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:756:2635];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-04-10T11:01:58.747454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:761:2638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-10T11:01:58.747531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:766:2642];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |72.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.0%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> TContinuousBackupTests::Basic >> GroupWriteTest::Simple >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> GroupWriteTest::WriteHardRateDispatcher >> YdbTableSplit::SplitByLoadWithDeletes |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |72.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |72.0%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> TAccessServiceTest::PassRequestId |72.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> FolderServiceTest::TFolderServiceTransitional >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> TAccessServiceTest::Authenticate >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> TContinuousBackupTests::Basic [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> TUserAccountServiceTest::Get [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:19.910682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:19.910789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:19.910837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:19.910899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:19.910956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:19.910988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:19.911069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:19.911148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:19.911478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:20.004494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:20.004559Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:20.026044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:20.026464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:20.026678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:20.059755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-10T11:02:20.060057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:20.060877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:20.061376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:20.063831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:20.065271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:20.065343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:20.065559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:20.065646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:20.065702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:20.065879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.077986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:20.230451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:20.230755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.231014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:20.231306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:20.231394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.237715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:20.237914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:20.238197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.238286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:20.238330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:20.238366Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:20.243537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.243642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:20.243690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:20.248558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.248650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.248723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:20.248784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:20.253144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:20.263410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:20.263738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:20.264916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:20.265114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:20.265191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:20.265596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:20.265675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:20.265888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:20.265969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:20.276583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:20.276655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T11:02:20.276880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:20.276936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:20.277200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:20.277267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:20.277380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:20.277443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:20.277502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:20.277543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:20.277620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:20.277671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:20.277714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:20.277786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:20.277880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:20.277927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:20.277981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:20.280830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:20.281003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:20.281055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 104, ready parts: 2/3, is published: true 2025-04-10T11:02:21.220204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:21.220271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:02:21.220303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:02:21.220330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-10T11:02:21.223238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:21.223324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:02:21.225632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:02:21.225742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:21.225991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:21.226075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:21.226123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:02:21.251863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 949 } } 2025-04-10T11:02:21.251945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-10T11:02:21.252108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 949 } } 2025-04-10T11:02:21.252216Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 949 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:02:21.254469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:02:21.254539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 
72075186233409546, partId: 0 2025-04-10T11:02:21.254719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:02:21.254782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:02:21.254900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:02:21.254991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:21.255044Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.255088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:02:21.255142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-10T11:02:21.257363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.257975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.258438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.258492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:02:21.258607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-10T11:02:21.258649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:21.258696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-10T11:02:21.258738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:21.258787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-10T11:02:21.258869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 104 2025-04-10T11:02:21.258933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-10T11:02:21.258987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:02:21.259036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:02:21.259190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:02:21.259247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-10T11:02:21.259271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-10T11:02:21.259320Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:02:21.259351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-10T11:02:21.259374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-10T11:02:21.259461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:02:21.259885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:02:21.259939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:02:21.260023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:02:21.260078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:02:21.260118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:02:21.264251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:02:21.264309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:726:2640] 2025-04-10T11:02:21.264739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-10T11:02:21.265334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:21.265608Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 303us result status StatusPathDoesNotExist 2025-04-10T11:02:21.265783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:02:21.266322Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:21.266597Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 232us result status StatusPathDoesNotExist 2025-04-10T11:02:21.266766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TServiceAccountServiceTest::Get >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> TServiceAccountServiceTest::Get [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:235:2060] recipient: [1:218:2142] 2025-04-10T11:00:58.563343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:00:58.563436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:00:58.563470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:00:58.563505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:00:58.563545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:00:58.563571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:00:58.563629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:00:58.563695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:00:58.564054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:00:58.651001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-04-10T11:00:58.651068Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:58.665361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:00:58.669833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:00:58.670024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:00:58.676969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:00:58.677344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:00:58.678065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:58.678640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:00:58.680901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:58.682086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:00:58.682135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:58.682309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:00:58.682346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:58.682377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:00:58.682484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.687486Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:350:2060] recipient: [1:17:2064] 2025-04-10T11:00:58.830681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:00:58.830958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.831307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:00:58.831563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:00:58.831635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.839247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:58.839420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T11:00:58.839645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.839702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:00:58.839742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:00:58.839785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:00:58.843557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.843643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:00:58.843683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:00:58.847255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.847315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.847382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:58.847450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:00:58.851672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:00:58.857384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:00:58.857670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:00:58.858851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:00:58.859010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:00:58.859071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:58.859420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:00:58.859488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:00:58.859683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:00:58.859773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:00:58.863008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:00:58.863063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:00:58.863268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:00:58.863308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:317:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:00:58.863698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:00:58.863759Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:00:58.863936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:00:58.863991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:58.864039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:00:58.864074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:58.864112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:00:58.864177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:00:58.864222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:00:58.864269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:00:58.864347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:00:58.864388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:00:58.864420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:00:58.866503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:00:58.866662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:00:58.866709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:02:21.201157Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:02:21.201190Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-10T11:02:21.201225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:02:21.201312Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-10T11:02:21.201366Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:21.204119Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:21.205663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:02:21.205711Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:02:21.206996Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:02:21.207038Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:02:21.207412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T11:02:21.207485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T11:02:21.207946Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:682:2508], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.208005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.208056Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:02:21.208256Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:577:2403], Recipient [7:236:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-10T11:02:21.208297Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:02:21.208386Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T11:02:21.208534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:02:21.208588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:680:2506] 2025-04-10T11:02:21.208830Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:682:2508], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.208872Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.208923Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-04-10T11:02:21.209468Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:236:2154] 2025-04-10T11:02:21.209528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:02:21.212700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 551 RawX2: 34359740470 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:21.213096Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.213248Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:02:21.213546Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:21.220938Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:21.221288Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-10T11:02:21.221371Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-10T11:02:21.221922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-10T11:02:21.221984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-10T11:02:21.222483Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:688:2514], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.222554Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.222604Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:02:21.222807Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:577:2403], Recipient [7:236:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-04-10T11:02:21.222848Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:02:21.222950Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
2025-04-10T11:02:21.223091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T11:02:21.223151Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:686:2512] 2025-04-10T11:02:21.223377Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:688:2514], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.223494Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.223541Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-10T11:02:21.224110Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:236:2154] 2025-04-10T11:02:21.224174Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:02:21.227195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 551 RawX2: 34359740470 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:21.227593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:02:21.227664Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-04-10T11:02:21.227932Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:02:21.231297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:21.231579Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-10T11:02:21.231663Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-10T11:02:21.232189Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-10T11:02:21.232244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-10T11:02:21.232689Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:694:2520], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.232755Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:02:21.232804Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:02:21.233001Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:577:2403], Recipient [7:236:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-04-10T11:02:21.233041Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:02:21.233147Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-10T11:02:21.233282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T11:02:21.233336Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:692:2518] 2025-04-10T11:02:21.233572Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:694:2520], Recipient [7:236:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.233611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:02:21.233659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |72.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |72.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> FolderServiceTest::TFolderService [GOOD] |72.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 11035, MsgBus: 27017 2025-04-10T10:55:46.988320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636492943630242:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:46.988379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017af/r3tmp/tmpNeITZv/pdisk_1.dat 2025-04-10T10:55:47.610129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:47.652262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:47.652383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:47.684136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11035, node 1 2025-04-10T10:55:47.894741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:47.894761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:47.894766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:47.894859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27017 TClient is connected to server localhost:27017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:48.855505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.015925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.644757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:55:49.843776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.047888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:51.457261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636514418468354:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:51.457375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:51.989475Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636492943630242:2188];send_to=[0:7307199536658146131:7762515];
2025-04-10T10:55:51.989577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T10:55:52.767788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T10:55:52.809357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T10:55:52.900455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T10:55:52.941254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T10:55:52.979647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T10:55:53.014468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T10:55:53.089053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523008403471:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:53.089136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:53.097174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636523008403478:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T10:55:53.120218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T10:55:53.142464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636523008403480:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:53.268910Z node 1 :TX_PROXY ERROR: Actor# [1:7491636523008403539:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.236088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24037, MsgBus: 3357 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017af/r3tmp/tmpCN9T5k/pdisk_1.dat 2025-04-10T10:55:58.158425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:58.166200Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:58.214880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:58.214978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:58.223344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24037, node 2 2025-04-10T10:55:58.338198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:58.338221Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:58.338228Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:58.338365Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3357 TClient is connected to server localhost:3357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:58.901408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
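The node-1 sequence just above shows the usual lazy bootstrap of the default workload-manager resource pool: TPoolFetcherActor gets NOT_FOUND, an ESchemeOpCreateResourcePool is proposed, TPoolCreatorActor schedules a "doublechecking" retry, and the TX_PROXY error "path exist, request accepts it" is treated as success. A self-contained sketch of that create-if-missing pattern, with in-memory stubs in place of the real scheme operations (all names here are hypothetical, not YDB's API):

    #include <set>
    #include <string>

    enum class EStatus { Ok, NotFound, AlreadyExists };

    // In-memory stubs standing in for the scheme operations seen in the trace.
    static std::set<std::string> g_paths;

    EStatus FetchPool(const std::string& path) {
        return g_paths.count(path) ? EStatus::Ok : EStatus::NotFound;
    }

    EStatus CreatePool(const std::string& path) { // ~ ESchemeOpCreateResourcePool
        return g_paths.insert(path).second ? EStatus::Ok : EStatus::AlreadyExists;
    }

    // Race-tolerant lazy bootstrap: a loser of the creation race treats
    // AlreadyExists as success and re-checks ("doublechecking"), mirroring
    // "path exist, request accepts it" in the TX_PROXY line above.
    EStatus EnsureDefaultPool(const std::string& path) {
        if (FetchPool(path) == EStatus::Ok) return EStatus::Ok;
        const EStatus created = CreatePool(path);
        if (created == EStatus::Ok || created == EStatus::AlreadyExists)
            return FetchPool(path); // doublecheck visibility
        return created;
    }

    int main() {
        return EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default")
                   == EStatus::Ok ? 0 : 1;
    }
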
2025-04-10T10:55:58.919990Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:55:58.925176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.072901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.297021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:59.419112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... jgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsvgq0g0yqq31zt933y4h, Create QueryResponse for error on request, msg: 2025-04-10T11:01:47.032205Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsvz1a6et1r5rcpz8mz16, Create QueryResponse for error on request, msg: 2025-04-10T11:01:47.757655Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnswnf9n9d5d97cw9wdbzw, Create QueryResponse for error on request, msg: 2025-04-10T11:01:48.210861Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsx3m71a4a4kaeh4rvssq, Create QueryResponse for error on request, msg: 2025-04-10T11:01:49.029520Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsxx729w7gbnptkw4gw8a, Create QueryResponse for error on request, msg: 2025-04-10T11:01:49.501083Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsybt1yqy1de1c271fj9f, Create QueryResponse for error on request, msg: 2025-04-10T11:01:49.981876Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsytq0xztp5yz2rpfrk97, Create QueryResponse for error on request, msg: 2025-04-10T11:01:50.458846Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnsz9s8gnye74fk5x4s5c6, Create QueryResponse for error on request, msg: 2025-04-10T11:01:50.974695Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnszst54f9f694q2n1xbw3, Create QueryResponse for error on 
request, msg: 2025-04-10T11:01:51.450405Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt08k8mfp783d7zfr75qr, Create QueryResponse for error on request, msg: 2025-04-10T11:01:51.973053Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt0rz4daymz8w5n02gaf8, Create QueryResponse for error on request, msg: 2025-04-10T11:01:52.442570Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt17gffambb9am9jntbg6, Create QueryResponse for error on request, msg: 2025-04-10T11:01:53.347782Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt23h3rxdcndsh34djkzh, Create QueryResponse for error on request, msg: 2025-04-10T11:01:53.824500Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt2jhb7z4xj5ykab184t8, Create QueryResponse for error on request, msg: 2025-04-10T11:01:54.630451Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt3bm647mvnr8mbrv5j5e, Create QueryResponse for error on request, msg: 2025-04-10T11:01:55.106134Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt3tsd3418n5j4xffhxrk, Create QueryResponse for error on request, msg: 2025-04-10T11:01:55.796748Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt4g527n5c6sbf8dy6bjj, Create QueryResponse for error on request, msg: 2025-04-10T11:01:56.272020Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt4yy9qg21002kxs2g9yk, Create QueryResponse for error on request, msg: 2025-04-10T11:01:57.087115Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt5r5azyww1wj1v1bpjjv, Create QueryResponse for error on request, msg: 2025-04-10T11:01:57.573804Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt67e1q2fvze1xn88gz8y, Create QueryResponse for error on request, msg: 2025-04-10T11:01:58.309902Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt6yd7z1fj079rnv1tw62, Create QueryResponse for error on request, msg: 
2025-04-10T11:01:58.806544Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt7e00tyypksx4m4y7wf2, Create QueryResponse for error on request, msg: 2025-04-10T11:01:59.538071Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt84qfceevbpach895je5, Create QueryResponse for error on request, msg: 2025-04-10T11:02:00.032037Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt8m45pp4vda1cbx9y9sp, Create QueryResponse for error on request, msg: 2025-04-10T11:02:00.559029Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt94f7z66b5k4ysk8qp0r, Create QueryResponse for error on request, msg: 2025-04-10T11:02:01.048228Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnt9kt2s613sn7anmctn0y, Create QueryResponse for error on request, msg: 2025-04-10T11:02:01.836845Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntacf4947qhznnb66jycx, Create QueryResponse for error on request, msg: 2025-04-10T11:02:02.346444Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntaw63fdd3fgp2abteapa, Create QueryResponse for error on request, msg: 2025-04-10T11:02:03.313963Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntbtg6g2a9zvs4c254z7e, Create QueryResponse for error on request, msg: 2025-04-10T11:02:03.822496Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntca6aczhm15j00pks5zp, Create QueryResponse for error on request, msg: 2025-04-10T11:02:04.501812Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntczceknthc40xpbfsm7t, Create QueryResponse for error on request, msg: 2025-04-10T11:02:05.012622Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntdfc5zf811wqekmvwz15, Create QueryResponse for error on request, msg: 2025-04-10T11:02:05.666487Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnte3n3djxjw2qajc19yjj, Create QueryResponse for error on request, msg: 
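The half-second cadence of KQP_SESSION warnings above belongs to KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead; judging by the test name and the distinct TraceIds, it re-runs the same read with a stepped cancel-after deadline so cancellation lands in a different execution phase each time, and every cancelled attempt logs "Create QueryResponse for error on request". A self-contained sketch of that sweep, with a fake query standing in for the real KQP session API (timings and names are illustrative assumptions):

    #include <algorithm>
    #include <chrono>
    #include <cstdio>
    #include <thread>

    using namespace std::chrono;

    // Stand-in for a query whose full execution takes ~50 ms; returns false
    // when the deadline fires first (the case the session logs as an error).
    bool RunQueryWithCancelAfter(milliseconds cancelAfter) {
        const milliseconds fullExecution{50};
        std::this_thread::sleep_for(std::min(cancelAfter, fullExecution));
        return cancelAfter >= fullExecution;
    }

    int main() {
        // Sweep the cancellation deadline so it lands in a different
        // execution phase on every iteration.
        for (milliseconds t{5}; t <= milliseconds{60}; t += milliseconds{5}) {
            if (!RunQueryWithCancelAfter(t)) {
                std::printf("cancelled at %lld ms\n",
                            static_cast<long long>(t.count()));
            }
        }
        return 0;
    }
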
2025-04-10T11:02:06.165737Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntekd45q5p45tpyrfy03k, Create QueryResponse for error on request, msg: 2025-04-10T11:02:07.080669Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntffw5f5g80vkk1tv0hg6, Create QueryResponse for error on request, msg: 2025-04-10T11:02:08.078989Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntgf10as2pme462x7gv30, Create QueryResponse for error on request, msg: 2025-04-10T11:02:08.587570Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntgyv56204gvp3j2kzr7r, Create QueryResponse for error on request, msg: 2025-04-10T11:02:09.098556Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfnthev6118s4bs5ecgnb27, Create QueryResponse for error on request, msg: 2025-04-10T11:02:09.705830Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntj1g1smyare0wx5hw9dp, Create QueryResponse for error on request, msg: 2025-04-10T11:02:10.677955Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntk028yab7bq9zzbc89pg, Create QueryResponse for error on request, msg: 2025-04-10T11:02:11.191954Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTlhMGEtMjU1OTFkYjYtOGM4YzNkOGItNGE5YWZmYTI=, ActorId: [4:7491637612962567777:2501], ActorState: ExecuteState, TraceId: 01jrfntkg2fv272wf7at7s565k, Create QueryResponse for error on request, msg: >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-04-10T11:02:17.803102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638173017951533:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:17.803237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d1/r3tmp/tmp8UdYut/pdisk_1.dat 2025-04-10T11:02:18.763816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:18.864828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:18.871128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:02:18.871234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:18.877428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:19.323507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:19.347905Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 16137, MsgBus: 21702 2025-04-10T11:01:35.446761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637992033365449:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:35.447224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b70/r3tmp/tmpAozYnE/pdisk_1.dat 2025-04-10T11:01:36.363167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:36.363293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:36.364692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:36.383076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16137, node 1 2025-04-10T11:01:36.576073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:36.576123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:36.576135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:36.576312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21702 TClient is connected to server localhost:21702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:37.231523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:37.275621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:01:37.470426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:01:37.661160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:37.748798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:39.932069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638009213236262:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:01:39.932329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:01:40.408508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.419927Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637992033365449:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:01:40.419990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:01:40.459660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.504875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.548422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.627849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.685224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:01:40.784447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638013508204080:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:01:40.784553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:01:40.784885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638013508204085:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:01:40.789628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:01:40.803790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T11:01:40.804471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638013508204087:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:01:40.875362Z node 1 :TX_PROXY ERROR: Actor# [1:7491638013508204144:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:41.988608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-10T11:01:42.892599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:01:43.448700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-10T11:01:44.184231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:01:44.721621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:01:45.315952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-10T11:01:45.917629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:01:45.988394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.333163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12183, MsgBus: 25718 2025-04-10T11:01:49.360504Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638052140508665:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:49.361187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b70/r3tmp/tmpryqamb/pdisk_1.dat 2025-04-10T11:01:49.502204Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:49.528172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:49.528282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:49.530197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 12183, node 2 2025-04-10T11:01:49.582697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:49.582727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:49.582737Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:49.582882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25718 TClient is connected to server localhost:25718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Na ... sed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T11:01:56.399741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-10T11:01:57.108194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-10T11:01:57.779402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-10T11:01:58.481572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-10T11:01:59.127843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:01:59.253335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:03.184255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715738:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4294, MsgBus: 11064 2025-04-10T11:02:04.409930Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638114588528721:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:04.410026Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b70/r3tmp/tmprZShXw/pdisk_1.dat 2025-04-10T11:02:04.611157Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:04.632918Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:04.633026Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:04.634491Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 4294, node 3 2025-04-10T11:02:04.686632Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:04.686663Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:04.686675Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:04.686857Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11064 TClient is connected to server localhost:11064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:02:05.312305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:05.334258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:05.464368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:05.745821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:02:05.858761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:02:08.900269Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638131768399667:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:02:08.900372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:02:08.971341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.042463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.088668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.136153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.216624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.294115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:02:09.361228Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638136063367484:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:02:09.361383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:02:09.365225Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638136063367490:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:02:09.368945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:02:09.385807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638136063367492:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:02:09.410325Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491638114588528721:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:09.410424Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:09.476548Z node 3 :TX_PROXY ERROR: Actor# [3:7491638136063367550:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:10.966796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-10T11:02:11.862884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:02:12.708552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-10T11:02:13.560895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:02:14.337884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:02:15.174245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-10T11:02:16.065671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:02:16.184949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:19.586067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:02:19.586107Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.207781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710728:0, at schemeshard: 72057594046644480 |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |72.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> YdbSdkSessionsPool::Get1Session ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> 
FolderServiceTest::TFolderService [GOOD] Test command err: 2025-04-10T11:02:17.481550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638173676307061:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:17.481672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011b3/r3tmp/tmp8n09Tj/pdisk_1.dat 2025-04-10T11:02:18.097943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:18.122166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:18.122294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:18.129520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:18.940937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:19.006271Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:27645 2025-04-10T11:02:19.025981Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-10T11:02:19.158521Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27645: Failed to connect to remote host: Connection refused 2025-04-10T11:02:19.162735Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-10T11:02:19.176186Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27645: Failed to connect to remote host: Connection refused 2025-04-10T11:02:20.178650Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-10T11:02:20.188850Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-04-10T11:02:20.189630Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-10T11:02:20.194552Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> YdbSdkSessionsPool::StressTestSync1 >> LocalPartitionReader::FeedSlowly >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::DirectReadRestartPQRB >> SysViewQueryHistory::AggrMergeDedup >> TAccessServiceTest::PassRequestId [GOOD] >> TAccessServiceTest::Authenticate [GOOD] >> LocalPartitionReader::FeedSlowly [GOOD] >> SysViewQueryHistory::AggrMergeDedup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 8794, MsgBus: 23734 2025-04-10T11:01:43.082083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638025052368819:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:43.082176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b18/r3tmp/tmpyTYOBV/pdisk_1.dat 2025-04-10T11:01:43.668511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:43.668620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:43.675455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:43.760908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8794, node 1 2025-04-10T11:01:43.990876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:43.990899Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:43.990908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:43.991029Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:23734 TClient is connected to server localhost:23734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:44.863870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:44.889672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:45.129421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:45.359347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:45.484120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:47.622654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638042232239665:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:47.622806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.004757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.060176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.084180Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638025052368819:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:48.084266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:48.131662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.212375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.278527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.335250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:01:48.400322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638046527207484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.400477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.400827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638046527207489:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:48.405203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:01:48.417238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638046527207491:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:01:48.518106Z node 1 :TX_PROXY ERROR: Actor# [1:7491638046527207547:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:49.566921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.580971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:01:49.592307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710672, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11137, MsgBus: 21908 2025-04-10T11:01:50.565637Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638054452871912:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:50.565688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b18/r3tmp/tmpa0TXLo/pdisk_1.dat 2025-04-10T11:01:50.706031Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:50.743541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:50.743634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11137, node 2 2025-04-10T11:01:50.747865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:50.830815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:50.830840Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:50.830849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:50.830983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21908 TClient is connected to server localhost:21908 WaitRootIsUp 'Root'... 
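The `default` resource-pool bootstrap in the trace above follows a create-if-missing pattern: the fetch returns NOT_FOUND, a creator actor issues the create (with a "doublechecking" retry), and an attempt that loses the race gets `path exist, request accepts it`, which is treated as success. A hedged sketch of that idempotent-create convention — the statuses and the toy catalog below are illustrative, not the actual KQP workload-service API:

```cpp
#include <iostream>
#include <mutex>
#include <set>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Toy catalog standing in for the scheme board.
std::mutex g_lock;
std::set<std::string> g_paths;

EStatus CreatePool(const std::string& path) {
    std::lock_guard<std::mutex> guard(g_lock);
    return g_paths.insert(path).second ? EStatus::Ok : EStatus::AlreadyExists;
}

// Idempotent bootstrap: losing the creation race is not an error,
// mirroring "path exist, request accepts it" in the trace above.
bool EnsurePool(const std::string& path) {
    EStatus st = CreatePool(path);
    return st == EStatus::Ok || st == EStatus::AlreadyExists;
}

int main() {
    std::cout << EnsurePool("/Root/.metadata/workload_manager/pools/default") << "\n";  // 1
    std::cout << EnsurePool("/Root/.metadata/workload_manager/pools/default") << "\n";  // 1 again
}
```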
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:51.399775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:51.433294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:51.571441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... de 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:01:57.814121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480 2025-04-10T11:01:58.471283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:01:59.161179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:01:59.780130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-10T11:02:00.409618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:02:00.463796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:02.911185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-10T11:02:02.925522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 
281474976710716:0, at schemeshard: 72057594046644480 2025-04-10T11:02:02.936881Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710716, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1088, MsgBus: 1951 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b18/r3tmp/tmpzjYymL/pdisk_1.dat 2025-04-10T11:02:05.333375Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:05.470894Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:05.492332Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:05.492452Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:05.495185Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1088, node 3 2025-04-10T11:02:05.611286Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:05.611317Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:05.611326Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:05.611452Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1951 TClient is connected to server localhost:1951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:06.330642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:06.336747Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:06.344037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:06.414330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:06.618101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:06.716929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:09.783328Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638135924672287:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:09.783414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:09.850840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.897495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.947625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.988503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.032876Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.071831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:02:10.198354Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638140219640100:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.198552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.202324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638140219640105:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:10.207346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:02:10.225869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638140219640108:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:02:10.285871Z node 3 :TX_PROXY ERROR: Actor# [3:7491638140219640163:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:11.903790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-10T11:02:12.776487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:02:13.568640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-10T11:02:14.389043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:02:15.163702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:02:15.829218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-10T11:02:16.738596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:02:16.804615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:20.456463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:02:20.456494Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.522545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710728:0, at schemeshard: 72057594046644480 |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> LocalPartitionReader::Booting [GOOD] |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] |72.1%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> FolderServiceTest::TFolderServiceTransitional [GOOD] |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TBlobStorageHullDecimal::TestMult [GOOD] |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-10T11:02:20.679598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638182831979367:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:20.680073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001184/r3tmp/tmp1UD3GF/pdisk_1.dat 2025-04-10T11:02:21.303459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.385362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.385820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.391798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:22.059924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:22.074937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:22.267997Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Connect to grpc://localhost:5131 2025-04-10T11:02:22.269305Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-10T11:02:22.312772Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMult [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-10T11:02:20.864642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638186325233653:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:20.864715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00117d/r3tmp/tmpxHMNR7/pdisk_1.dat 2025-04-10T11:02:21.449871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.449988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.452766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:21.486535Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:21.945807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
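In the `PassRequestId` trace above, every GRPC_CLIENT log line carries the caller-supplied tag `{trololo}` next to the client handle, so one request id can be followed from the call site through the wire-level logs. A minimal sketch of that logging convention (purely illustrative; not the actual GRPC_CLIENT logger):

```cpp
#include <iostream>
#include <string>

// Minimal logger that threads a per-request tag through every line,
// in the spirit of the "[handle]{request_id}" prefix in the trace above.
class TRequestLog {
public:
    TRequestLog(std::string clientId, std::string requestId)
        : Prefix_("[" + std::move(clientId) + "]{" + std::move(requestId) + "} ") {}

    void Debug(const std::string& msg) const {
        std::cout << Prefix_ << msg << "\n";
    }

private:
    std::string Prefix_;
};

int main() {
    TRequestLog log("51700007c508", "trololo");
    log.Debug("Connect to grpc://localhost:5131");
    log.Debug("Request AuthenticateRequest { iam_token: \"**** (717F937C)\" }");
}
```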
2025-04-10T11:02:22.121582Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Connect to grpc://localhost:24671 2025-04-10T11:02:22.122965Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-04-10T11:02:22.144136Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Status 7 Permission Denied 2025-04-10T11:02:22.146787Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-04-10T11:02:22.162363Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Response AuthenticateResponse { subject { user_account { id: "1234" } } } |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |72.1%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |72.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |72.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> TBlobStorageHullDecimal::TestToUi64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-04-10T11:02:20.807280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638185506062077:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:20.823256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00118f/r3tmp/tmpIjOLAh/pdisk_1.dat 2025-04-10T11:02:21.508038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.512357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.512474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.524957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2137 WaitRootIsUp 'Root'... 
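Note how the traces above never print the IAM token itself: it is logged as `**** (047D44F1)` — a fixed mask plus a short checksum, so two different secrets stay distinguishable in the log without being leaked. A sketch of that convention, assuming a CRC-32 fingerprint (the actual hash YDB uses is not visible in the log):

```cpp
#include <cstdint>
#include <cstdio>
#include <string>

// CRC-32 (reflected, polynomial 0xEDB88320), computed bytewise.
uint32_t Crc32(const std::string& data) {
    uint32_t crc = 0xFFFFFFFFu;
    for (unsigned char c : data) {
        crc ^= c;
        for (int i = 0; i < 8; ++i)
            crc = (crc >> 1) ^ (0xEDB88320u & (0u - (crc & 1u)));
    }
    return ~crc;
}

// Mask a secret for logging: hide the value, keep a short fingerprint.
std::string MaskSecret(const std::string& secret) {
    char buf[16];  // "**** (" + 8 hex digits + ")" + NUL
    std::snprintf(buf, sizeof(buf), "**** (%08X)", (unsigned)Crc32(secret));
    return buf;
}

int main() {
    std::printf("iam_token: %s\n", MaskSecret("user1-token").c_str());
}
```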
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:22.089916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:22.136020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:22.151167Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:14018 2025-04-10T11:02:22.295494Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-10T11:02:22.339547Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14018: Failed to connect to remote host: Connection refused 2025-04-10T11:02:22.341025Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-10T11:02:22.343087Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14018: Failed to connect to remote host: Connection refused 2025-04-10T11:02:23.343995Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-10T11:02:23.358882Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-04-10T11:02:23.362531Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-10T11:02:23.371261Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark >> TBlobStorageHullStorageRatio::Test [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |72.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> 
TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random32 >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> SemiSortedDeltaAndVarLengthCodec::Random32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 >> RunLengthCodec::Random32 >> RunLengthCodec::Random32 [GOOD] >> RunLengthCodec::Random64 >> SemiSortedDeltaAndVarLengthCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::BasicTest32 [GOOD] >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> TOlap::CreateStoreWithDirs >> TOlapNaming::CreateColumnStoreOk >> RunLengthCodec::Random64 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> TOlapNaming::AlterColumnTableOk >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] >> FormatTimes::DurationMs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> TConsoleTests::TestRemoveServerlessTenant [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-04-10T11:02:18.968890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638175988976906:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:19.045527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001198/r3tmp/tmp2nd8LZ/pdisk_1.dat 2025-04-10T11:02:19.468620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:19.472097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:19.472201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:19.489072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21858 WaitRootIsUp 'Root'... 
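The synclog codec tests listed above (`RunLengthCodec::Random32/Random64`, `SemiSortedDeltaCodec::BasicTest32/64`, `SemiSortedDeltaAndVarLengthCodec::*`) are round-trip checks: encode a sequence, decode it, and require equality. A minimal run-length round trip in the same spirit — not the actual ydb/core/blobstorage/vdisk/synclog codec:

```cpp
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

using Runs = std::vector<std::pair<uint32_t, size_t>>;  // (value, run length)

Runs RunLengthEncode(const std::vector<uint32_t>& in) {
    Runs out;
    for (uint32_t v : in) {
        if (!out.empty() && out.back().first == v)
            ++out.back().second;   // extend the current run
        else
            out.push_back({v, 1}); // start a new run
    }
    return out;
}

std::vector<uint32_t> RunLengthDecode(const Runs& in) {
    std::vector<uint32_t> out;
    for (const auto& [value, len] : in)
        out.insert(out.end(), len, value);
    return out;
}

int main() {
    std::vector<uint32_t> data = {7, 7, 7, 0, 0, 42, 7};
    Runs encoded = RunLengthEncode(data);
    assert(encoded.size() == 4);               // {7x3, 0x2, 42x1, 7x1}
    assert(RunLengthDecode(encoded) == data);  // lossless round trip
}
```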
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:19.857605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001198/r3tmp/tmpbdhKdn/pdisk_1.dat 2025-04-10T11:02:23.577844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:23.664925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:23.665010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:23.667320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:23.673501Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:24.008203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
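Each `Test command err:` block above repeats the same bootstrap: the test client connects, issues `TClient::Ls` for `Root`, and loops until the response reports SUCCESS (`WaitRootIsUp 'Root' success.`). A generic readiness poll of that shape, with the describe call left abstract (illustrative, not the actual test_client.cpp helper):

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Poll a describe-like call until the root path reports SUCCESS,
// the shape of "WaitRootIsUp 'Root'..." in the traces above.
bool WaitRootIsUp(const std::function<bool()>& lsSucceeds,
                  std::chrono::milliseconds timeout = std::chrono::seconds(10),
                  std::chrono::milliseconds step = std::chrono::milliseconds(100)) {
    auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (lsSucceeds())
            return true;
        std::this_thread::sleep_for(step);
    }
    return false;
}

int main() {
    int attempts = 0;
    bool up = WaitRootIsUp([&attempts] { return ++attempts >= 3; });
    std::cout << (up ? "WaitRootIsUp 'Root' success." : "timed out") << "\n";
}
```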
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> StatsFormat::FullStat [GOOD] |72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |72.2%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnStoreFailed >> FormatTimes::DurationUs [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] >> Config::IncludeScope [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] >> Config::ExcludeScope [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-04-10T11:02:21.072262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638187380530002:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:21.073245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00118c/r3tmp/tmph8S8ti/pdisk_1.dat 2025-04-10T11:02:21.806996Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.890016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.890120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.907839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:22.459909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:22.477559Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00118c/r3tmp/tmpryPEd6/pdisk_1.dat 2025-04-10T11:02:25.857505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:25.953655Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:25.982033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:25.982393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:25.985192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:02:26.273649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:26.286241Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] >> TOlapNaming::CreateColumnStoreFailed [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |72.2%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl |72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |72.2%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::PostgreSQLOnPremSelectAll |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:29.522846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:29.522955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.523019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:29.523059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:29.523107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:29.523141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:29.523218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.523304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:29.523690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:29.669152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:29.669211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:29.720101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:29.720497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:29.720732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:29.766627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:29.766952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:29.767647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.768175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:29.774952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.781420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:29.781511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-10T11:02:29.781684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:29.781742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:29.781780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:29.781931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.827218Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:30.070905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:30.071165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.071404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:30.071651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:30.071721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.081467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:30.081673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:30.081923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.081993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:30.082069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:30.082117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:30.093322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.093424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:30.093478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:30.099791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.099883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.099966Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:30.100022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:30.108548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:30.111683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:30.111971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:30.113079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:30.113267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:30.113322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:30.113697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:30.113790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:30.113984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:30.114081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:30.125483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:30.125558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:30.125773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:30.125817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:30.126076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:30.126163Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:30.126307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:30.126350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
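The `CreateColumnStoreFailed` trace above walks one scheme transaction through the schemeshard state machine: propose is accepted, then `Change state for txid 1:0 2 -> 3` (CreateParts), `3 -> 128` (ConfigureParts to Propose), a coordinator plan step at 5000001, `128 -> 240` (Done), and finally publication acks. A sketch of that transition table as read off the trace — the numeric codes are taken from the log, the state names are inferred from the surrounding messages:

```cpp
#include <iostream>
#include <map>

// State codes observed in the trace; names inferred from the log messages.
enum ETxState {
    CreateParts    = 2,    // "TCreateParts ... ProgressState"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts"
    Propose        = 128,  // "NSubDomainState::TPropose", waits for the plan step
    Done           = 240,  // "TDone opId# 1:0 ProgressState"
};

int main() {
    // The "Change state for txid 1:0 A -> B" lines, as a transition table.
    const std::map<ETxState, ETxState> next = {
        {CreateParts, ConfigureParts},   // 2 -> 3
        {ConfigureParts, Propose},       // 3 -> 128
        {Propose, Done},                 // 128 -> 240 (after TEvOperationPlan)
    };
    for (ETxState s = CreateParts; next.count(s); s = next.at(s))
        std::cout << s << " -> " << next.at(s) << "\n";
}
```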
2025-04-10T11:02:30.126396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:30.126444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:30.126489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:30.126536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:30.126594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:30.126637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:30.126724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:30.126777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:30.126825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:30.132523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:30.132727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:30.132783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.531544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:31.531656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:31.531925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.532002Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:31.532038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:31.532069Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:31.533998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.534065Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:31.534104Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:31.535837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.535887Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.535932Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:31.535980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:31.536121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:31.537611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:31.537780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:31.538634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:31.539026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:31.539089Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:31.539361Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:31.539423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:31.539610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:31.539701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:31.542099Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:31.542168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:31.542394Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:31.542466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:31.542906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.542964Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:31.543068Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:31.543107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:31.543150Z node 2 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:31.543237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:31.543285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:31.543337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:31.543378Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:31.543417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:31.543489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:31.543532Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:31.543571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:31.544470Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:31.544589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:31.544630Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:02:31.544722Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:02:31.544766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:31.544868Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:02:31.548136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:02:31.548729Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:02:31.549589Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-10T11:02:31.602143Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-10T11:02:31.605821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 1 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "data" Type: "Utf8" } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "timestamp" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:31.610513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /MyRoot/OlapStore, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:31.610789Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose 
status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-10T11:02:31.611949Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:02:31.627280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:31.627481Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN STORE, path: /MyRoot/OlapStore 2025-04-10T11:02:31.631384Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:02:31.631684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:02:31.631737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:02:31.632166Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:02:31.632282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:02:31.632321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:284:2275] TestWaitNotification: OK eventTxId 101 2025-04-10T11:02:31.632787Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:31.632972Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 228us result status StatusPathDoesNotExist 2025-04-10T11:02:31.633180Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpPg::InsertFromSelect_Simple+useSink >> KqpPg::CreateTableBulkUpsertAndRead >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::EmptyQuery+useSink >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> KqpPg::InsertNoTargetColumns_Simple+useSink >> KqpPg::NoTableQuery+useSink >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel >> TOlap::CreateTableTtl [GOOD] >> KqpPg::CreateTableSerialColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:29.216741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:29.216892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.216955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:29.217007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:29.217094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:29.217139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:29.217213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.217662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:29.218119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:29.318678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:29.318769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:29.348317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:29.348818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:29.349069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:29.359808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:29.360213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:29.361164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.361890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:29.377759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.379965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:29.380073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.380312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:29.380385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:29.380474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:29.380702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.390804Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:29.574909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:29.575308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.575612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:29.575950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:29.576072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.579886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.580107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:29.580356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.580417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:29.580482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:29.580518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:29.590298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.590392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:29.590434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:29.596134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.596245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.596316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.596381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-10T11:02:29.600354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:29.603659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:29.603871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:29.604812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.605040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:29.605089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.605370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:29.605449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.605652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:29.605746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:29.611153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:29.611225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:29.611457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.611507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:29.611810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.611885Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:29.612009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:29.612071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.612127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:29.612173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.612218Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:29.612266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.612325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:29.612369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:29.612492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:29.615731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:29.615830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:29.618685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:29.618824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:29.618867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... SHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-10T11:02:34.033813Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-10T11:02:34.034000Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:02:34.034068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:02:34.034132Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:02:34.034195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:02:34.034258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-10T11:02:34.034365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:340:2319] message: TxId: 105 2025-04-10T11:02:34.034437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:02:34.034528Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:02:34.034584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T11:02:34.034773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-10T11:02:34.037901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:02:34.038014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:519:2490] TestWaitNotification: OK eventTxId 105 2025-04-10T11:02:34.039043Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-10T11:02:34.039498Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 523us result status StatusSuccess 2025-04-10T11:02:34.040284Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-10T11:02:34.045861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:34.046456Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 
2025-04-10T11:02:34.047067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T11:02:34.047163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2025-04-10T11:02:34.047562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-10T11:02:34.047984Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:34.048054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:02:34.048231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:02:34.048328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-10T11:02:34.063697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 7, at schemeshard: 72057594046678944 2025-04-10T11:02:34.063980Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-04-10T11:02:34.064429Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:34.064501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:02:34.064848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-10T11:02:34.065010Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:34.065068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-10T11:02:34.065149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 7 2025-04-10T11:02:34.065906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:02:34.065985Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2025-04-10T11:02:34.067934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-10T11:02:34.069124Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 
2025-04-10T11:02:34.069271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:02:34.069324Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:02:34.069388Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-10T11:02:34.069469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:02:34.070697Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:02:34.070810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:02:34.070848Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:02:34.070896Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 1 2025-04-10T11:02:34.070936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:02:34.071047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-10T11:02:34.073963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-10T11:02:34.074170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-10T11:02:34.076750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:02:34.076899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> KqpPg::TypeCoercionBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-04-10T11:00:51.168624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:51.168699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:51.250353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:52.562931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-10T11:00:52.730601Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:52.731394Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:52.732962Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12972813681535975924 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:52.793085Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:52.793723Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:52.794009Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15020481385447754600 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:52.797598Z node 3 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T11:00:52.909813Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:52.910488Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:52.910675Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13041134302087475743 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:52.913425Z node 4 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 
BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpruRyeN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T11:00:53.004862Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:53.005434Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2 ... ild/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:02:22.202736Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17606121417665119190 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:02:22.216457Z node 147 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# 
true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T11:02:22.326852Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:02:22.327423Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:02:22.327651Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17861689766410643975 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:02:22.383700Z node 153 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:02:22.384340Z node 153 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:02:22.384562Z node 153 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpE7Hzg9/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8496181440925343568 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:02:22.783933Z node 145 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:22.784042Z node 145 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:22.787737Z node 145 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-04-10T11:02:26.595108Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:26.595214Z node 154 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:26.697886Z node 154 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:02:31.007138Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:31.007244Z node 163 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:31.126306Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:02:32.415780Z node 170 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:02:32.416530Z node 170 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpR1JAxz/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:02:32.416893Z node 170 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpR1JAxz/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c6e/r3tmp/tmpR1JAxz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7342528698314451093 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |72.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> KqpPg::ReadPgArray >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> YdbSdkSessionsPool::StressTestSync1 [GOOD] |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2025-04-10T11:02:25.599190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638206273348212:2279];send_to=[0:7307199536658146131:7762515]; 
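
Aside: the TPDiskConfig and VDisk status dumps above serialize every field as flat "Name# value" tokens, with nested groups in braces (for example SchedulerCfg# {TPDiskSchedulerConfig ...}). Below is a minimal C++ sketch of pulling the top-level scalar fields out of such a line for quick triage. This is a hypothetical helper, not part of YDB; it deliberately skips brace groups and does not handle quirks such as the detached hash in "EnableSectorEncryption # 0".

    #include <algorithm>
    #include <iostream>
    #include <map>
    #include <sstream>
    #include <string>

    // Collect top-level "Name# value" pairs; anything inside { } is skipped.
    std::map<std::string, std::string> ParseDump(const std::string& line) {
        std::map<std::string, std::string> fields;
        std::istringstream in(line);
        std::string tok, key;
        long depth = 0;
        while (in >> tok) {
            const long opens  = std::count(tok.begin(), tok.end(), '{');
            const long closes = std::count(tok.begin(), tok.end(), '}');
            if (depth > 0) {              // inside a { } group: only track depth
                depth += opens - closes;
                continue;
            }
            if (opens > 0) {              // token opens a group; skip it wholly
                depth += opens - closes;
                key.clear();
                continue;
            }
            if (!key.empty()) { fields[key] = tok; key.clear(); continue; }
            if (tok.size() > 1 && tok.back() == '#')
                key = tok.substr(0, tok.size() - 1);   // "PDiskId#" -> "PDiskId"
        }
        return fields;
    }

    int main() {
        auto f = ParseDump("PDiskId# 1000 ChunkSize# 134217728 SectorSize# 4096");
        std::cout << f["ChunkSize"] << "\n";   // prints 134217728
    }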
2025-04-10T11:02:25.599240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001820/r3tmp/tmpjRaVMU/pdisk_1.dat 2025-04-10T11:02:26.463578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:26.487265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:26.487403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:26.507271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18891, node 1 2025-04-10T11:02:26.726933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:26.726964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:26.726977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:26.727109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:27.228780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
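
Aside: the TTableExistsActor records in this block first report event=undelivered and, a few seconds later (see the timeout record that follows), event=timeout, after which the metadata provider degrades to "cannot detect path existence". The shape of that pattern is a probe with a deadline and a graceful fallback. A self-contained sketch of the same idea using plain std::future, illustrative only and not the actor code:

    #include <chrono>
    #include <future>
    #include <iostream>
    #include <optional>
    #include <thread>

    // A probe whose reply may never arrive (the "undelivered" case) is modeled
    // as a task that outlives the deadline; the caller maps that to "unknown".
    std::optional<bool> ProbePathExists() {
        auto reply = std::async(std::launch::async, [] {
            std::this_thread::sleep_for(std::chrono::seconds(2));  // reply too late
            return true;
        });
        if (reply.wait_for(std::chrono::milliseconds(500)) != std::future_status::ready)
            return std::nullopt;   // corresponds to "event=timeout" in the log
        return reply.get();
    }

    int main() {
        if (!ProbePathExists())
            std::cout << "cannot detect path existence (timeout)\n";  // the fallback
    }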
2025-04-10T11:02:30.602387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638206273348212:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:30.602491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TOlapNaming::AlterColumnTableOk [GOOD] >> TOlapNaming::AlterColumnTableFailed >> GroupWriteTest::Simple [GOOD] |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |72.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 4436624708555719165 2025-04-10T11:02:21.059235Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:02:21.085181Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:02:21.085266Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:02:21.088333Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-10T11:02:21.103888Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:02:21.106917Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:02:40.765192Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:02:40.765308Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:02:40.765365Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:02:40.765427Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 
Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:02:40.840115Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-10T11:02:40.840237Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-10T11:01:48.534452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638047406483706:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:48.534512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:48.582904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638048552379243:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:48.582941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00191a/r3tmp/tmph1sUlh/pdisk_1.dat 2025-04-10T11:01:48.862240Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:01:48.863980Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:01:49.165617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:49.165766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:49.167393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:49.167474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:49.170208Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:01:49.170375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:49.171359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:49.216026Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25611, node 1 2025-04-10T11:01:49.249621Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:01:49.282701Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:01:49.442080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00191a/r3tmp/yandexSgwWjN.tmp 2025-04-10T11:01:49.442110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00191a/r3tmp/yandexSgwWjN.tmp 2025-04-10T11:01:49.442266Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/2gt4/00191a/r3tmp/yandexSgwWjN.tmp 2025-04-10T11:01:49.442390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:49.578430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:49.666831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:49.752060Z INFO: TTestServer started on Port 61438 GrpcPort 25611 TClient is connected to server localhost:61438 PQClient connected to localhost:25611 === TenantModeEnabled() = 0 === Init PQ - start server on port 25611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:01:50.471865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:01:50.472070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.472288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:01:50.472535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:01:50.472579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.480486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:01:50.480623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:01:50.480790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:01:50.481624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:01:50.481649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:01:50.481664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T11:01:50.484236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.484284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:01:50.484303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:01:50.486401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:50.486478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:01:50.486516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:01:50.487911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.487974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.488020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:01:50.488060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:01:50.492996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:01:50.500751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:01:50.501000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:01:50.506643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282910545, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:01:50.506855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282910545 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:01:50.506913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:01:50.507319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T11:01:50.507363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:01:50.507602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:01:50.508085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:01:50.511136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:01:50.511192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:01:50.511471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:01:50.511511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491638051701451745:2449], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T11:01:50.511587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:50.511636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T11:01:50.511779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:01:50.511799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:01:50.511835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:01:50.511855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:01:50. ... 
-topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-10T11:02:37.052432Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 sending to client partition status 2025-04-10T11:02:37.057094Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-10T11:02:37.057283Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-10T11:02:37.057836Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-10T11:02:37.057865Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-10T11:02:37.057925Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-10T11:02:37.057942Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1TEvPartitionReady. Aval parts: 1 2025-04-10T11:02:37.058001Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 performing read request: guid# 1118242f-d0a0e735-1ad8f62e-2cb54830, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-10T11:02:37.058130Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 1118242f-d0a0e735-1ad8f62e-2cb54830 2025-04-10T11:02:37.060401Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744282956882 CreateTimestampMS: 1744282956878 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744282956888 CreateTimestampMS: 1744282956878 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744282956889 CreateTimestampMS: 1744282956878 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-04-10T11:02:37.060640Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-10T11:02:37.060692Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 1118242f-d0a0e735-1ad8f62e-2cb54830 has messages 1 2025-04-10T11:02:37.060800Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 read done: guid# 1118242f-d0a0e735-1ad8f62e-2cb54830, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-04-10T11:02:37.060833Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 response to read: guid# 1118242f-d0a0e735-1ad8f62e-2cb54830 2025-04-10T11:02:37.061127Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 Process answer. Aval parts: 0 2025-04-10T11:02:37.062443Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-10T11:02:37.062613Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-10T11:02:37.062950Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-10T11:02:37.062997Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] Returning serverBytesSize = 490 to budget 2025-04-10T11:02:37.063032Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-04-10T11:02:37.063298Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-10T11:02:37.063777Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-04-10T11:02:37.063903Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 got read request: guid# 488fe5a3-27e357ba-acd355e2-37d68479 2025-04-10T11:02:37.066330Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-10T11:02:37.066416Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-10T11:02:37.066457Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-10T11:02:37.066522Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-10T11:02:37.066606Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] Returning serverBytesSize = 0 to budget 2025-04-10T11:02:37.066880Z :INFO: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] Closing read session. 
Close timeout: 0.000000s 2025-04-10T11:02:37.066937Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-10T11:02:37.067002Z :INFO: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] Counters: { Errors: 0 CurrentSessionLifetimeMs: 80 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:02:37.067140Z :NOTICE: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "<main>: Error: Close with zero timeout " } 2025-04-10T11:02:37.067211Z :DEBUG: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] [] Abort session to cluster 2025-04-10T11:02:37.068531Z :NOTICE: [] [] [3b7aa037-c9cb7bd6-604b20e4-ecd57fca] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "<main>
: Error: Aborted " } 2025-04-10T11:02:37.068772Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 grpc read done: success# 0, data# { } 2025-04-10T11:02:37.068813Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 grpc read failed 2025-04-10T11:02:37.068848Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 grpc closed 2025-04-10T11:02:37.068893Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_7251908049571431520_v1 is DEAD 2025-04-10T11:02:37.071091Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_7251908049571431520_v1 2025-04-10T11:02:37.070575Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7491638256129855202:2660] disconnected; active server actors: 1 2025-04-10T11:02:37.070644Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7491638256129855202:2660] client cli disconnected session shared/cli_5_1_7251908049571431520_v1 2025-04-10T11:02:37.250968Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 10] state 'Ready' dataSize 824 rowCount 2 cpuUsage 0 2025-04-10T11:02:37.352002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T11:02:37.352137Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 10 shard idx 72057594046644480:2 data size 824 row count 2 2025-04-10T11:02:37.352207Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037889 maps to shardIdx: 72057594046644480:2 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], pathId map=Cluster, is column=0, is olap=0, RowCount 2, DataSize 824 2025-04-10T11:02:37.354307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T11:02:37.391277Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 11] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:02:37.491684Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T11:02:37.491837Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 11 shard idx 72057594046644480:3 data size 0 row count 0 2025-04-10T11:02:37.491913Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], pathId map=Topics, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:02:37.492240Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T11:02:37.662618Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 12] state 'Ready' dataSize 656 rowCount 2 cpuUsage 0 2025-04-10T11:02:37.766739Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 
72057594046644480, queue size# 1 2025-04-10T11:02:37.766942Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 12 shard idx 72057594046644480:4 data size 656 row count 2 2025-04-10T11:02:37.767038Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], pathId map=Versions, is column=0, is olap=0, RowCount 2, DataSize 656 2025-04-10T11:02:37.768771Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink |72.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> IndexBuildTest::RejectsCancel [GOOD] |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |72.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> AnalyzeColumnshard::AnalyzeDeadline |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:01.796184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:01.796305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.796368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:01.796409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:01.796464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:01.796499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-04-10T11:02:01.796567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.796661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:01.797082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:01.895894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:01.895970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:01.921655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:01.922057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:01.922335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:01.960263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:01.960593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:01.961639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:01.962119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:01.964631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:01.966280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:01.966375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:01.966616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:01.966675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:01.966727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:01.966928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:01.976560Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:02.152927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:02.153225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.153519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:02.153797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:02.153863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.156528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.156684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:02.156953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.157030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:02.157103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:02.157174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:02.159683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.159760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:02.159810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:02.168946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.169022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.169074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.169153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.196216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:02.200383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:02.200645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:02.201861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.202036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
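
Aside: the schemeshard transitions just above ("Change state for txid 1:0 2 -> 3", "3 -> 128"; "128 -> 240" follows in the next record) pair up with the ProgressState messages printed next to them, which suggests the numeric suboperation states sketched below. This is a reading of the log, not the authoritative enum from schemeshard source:

    #include <cstdint>
    #include <iostream>

    // State numbers inferred from the adjacent ProgressState messages.
    enum class ETxState : uint8_t {
        CreateParts    = 2,    // "TCreateParts opId# ... ProgressState"
        ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
        Propose        = 128,  // "NSubDomainState::TPropose ProgressState"
        Done           = 240,  // "TDone opId# ... ProgressState"
    };

    int main() {
        const ETxState path[] = {ETxState::CreateParts, ETxState::ConfigureParts,
                                 ETxState::Propose, ETxState::Done};
        // Mirrors "2 -> 3", "3 -> 128", "128 -> 240" from the records above.
        for (int i = 0; i + 1 < 4; ++i)
            std::cout << static_cast<int>(path[i]) << " -> "
                      << static_cast<int>(path[i + 1]) << "\n";
    }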
2025-04-10T11:02:02.202097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.202471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:02.202547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.202767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:02.202872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:02.205824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:02.205886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:02.206086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.206138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:02.206464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.206521Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:02.206646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.206710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.206762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.206801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.206843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:02.206903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.206946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:02.206979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:02.207052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:02.207095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:02.207131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:02.212417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:02.212585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
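
Aside: the tail of this record group shows the publication handshake: the transaction stays "Publication still in progress, tx: 1, publications: 1, subscribers: 0" until a TEvUpdateAck for LocalPathId 1 at Version 3 arrives and TTxAckPublishToSchemeBoard retires the pending entry. A sketch of that bookkeeping pattern, with hypothetical types rather than the schemeshard implementation:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <utility>

    // Pending (owner, pathId) -> version still awaiting acknowledgement.
    struct TPublicationTracker {
        std::map<std::pair<uint64_t, uint64_t>, uint64_t> Pending;

        void Announce(uint64_t owner, uint64_t pathId, uint64_t version) {
            Pending[{owner, pathId}] = version;
        }
        // An ack at or above the announced version retires the entry,
        // matching "TEvUpdateAck ... Version: 3" in the log above.
        void Ack(uint64_t owner, uint64_t pathId, uint64_t version) {
            auto it = Pending.find({owner, pathId});
            if (it != Pending.end() && version >= it->second)
                Pending.erase(it);
        }
        bool Done() const { return Pending.empty(); }
    };

    int main() {
        TPublicationTracker tx1;
        tx1.Announce(72057594046678944ULL, 1, 3);  // "Publication details: tx: 1, [... LocalPathId: 1], 3"
        std::cout << (tx1.Done() ? "done" : "publication in progress") << "\n";
        tx1.Ack(72057594046678944ULL, 1, 3);       // TEvUpdateAck ... Version: 3
        std::cout << (tx1.Done() ? "publication complete" : "in progress") << "\n";
    }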
2025-04-10T11:02:02.212631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... TxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1177:3031], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:42.890484Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:02:42.892704Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:02:42.892798Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1177:3031], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:42.892855Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T11:02:42.893043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:02:42.893112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1271:3114] TestWaitNotification: OK eventTxId 102 2025-04-10T11:02:42.895843Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-10T11:02:42.896006Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-04-10T11:02:42.898532Z node 2 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-10T11:02:42.898803Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 } 2025-04-10T11:02:42.901380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:42.901656Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 297us result status StatusSuccess 2025-04-10T11:02:42.903599Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:42.906724Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:02:42.907045Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 349us result status StatusSuccess 2025-04-10T11:02:42.908029Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] |72.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |72.4%| [LD] {RESULT} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth >> YdbOlapStore::LogExistingUserId [GOOD] |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> 
TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable |72.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 6833, MsgBus: 19553 2025-04-10T11:00:25.086950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637688993180769:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:25.087006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e2c/r3tmp/tmpO7OdQY/pdisk_1.dat 2025-04-10T11:00:25.550784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:25.550898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:25.552512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:25.553226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6833, node 1 2025-04-10T11:00:25.659215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:25.659238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:25.659247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:25.661807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19553 TClient is connected to server localhost:19553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:26.343317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:26.500088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:00:26.719704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:00:26.940843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:27.049204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.211182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637706173051589:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:29.211280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:29.692242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.750621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.832326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.908609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:29.955526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.013577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.080703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637710468019402:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.080797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.080998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637710468019407:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.085826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:30.087142Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637688993180769:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:30.087207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:30.102745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637710468019409:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:30.179023Z node 1 :TX_PROXY ERROR: Actor# [1:7491637710468019465:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:31.610692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.145086Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:32.146269Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:32.146388Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637719057954634:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:32.146535Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:32.146594Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637719057954634:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-10T11:00:32.146873Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:32.146931Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637719057954634:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, App ... ildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.570231Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715673, txId# 281474976710766 2025-04-10T11:01:56.570317Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.570704Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:01:56.570785Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: 
[OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.571878Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715673, cookie: 281474976715673, txId: 281474976710766, status: StatusAccepted 2025-04-10T11:01:56.572021Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710766 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:01:56.572517Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:01:56.572594Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { 
upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.579327Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710766, buildInfoId: 281474976715673 2025-04-10T11:01:56.579413Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710766, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.580045Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-04-10T11:01:56.580073Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-04-10T11:01:56.580087Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-04-10T11:01:56.580204Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:01:56.580317Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.580353Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:01:56.580535Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-04-10T11:01:56.580845Z 
node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-04-10T11:01:56.580923Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638077686078278:2527], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:01:56.580949Z node 3 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715673, subscribers count# 1 2025-04-10T11:01:56.584561Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715673 2025-04-10T11:01:56.584816Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:02:02.686879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:02:02.686916Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:06.140349Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715673 |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-04-10T11:02:20.607317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638185314283812:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:20.646358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dc4/r3tmp/tmpJxyyRR/pdisk_1.dat 2025-04-10T11:02:21.486703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.516504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.516592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.523093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24648, node 1 2025-04-10T11:02:21.717846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:21.717866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:21.717875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:21.717981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:22.272336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21114 2025-04-10T11:02:25.107006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638206789121198:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.107138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.504081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:25.607650Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638185314283812:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:25.607700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:25.748309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638206789121369:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.748387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.791417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945657 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945657 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:02:26.022506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088782:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.022589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.025091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088789:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.046420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088793:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.050249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088796:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.050344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.054263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:02:26.054591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:02:26.054678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-10T11:02:26.054786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:02:26.054809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-10T11:02:26.054897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:02:26.054961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-10T11:02:26.055315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-04-10T11:02:26.055628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:02:26.055644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:02:26.062645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088830:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.128736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:0, path# /Root/.metadata/workload_manager/pools/default 2025-04-10T11:02:26.138457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), at schemeshard: 72057594046644480 2025-04-10T11:02:26.154545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088849:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.154608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638211084088851:2400], DatabaseId: /Roo ... n/3?node_id=1&id=NWNiZWYzM2QtZjg1YWMxMmQtMTYwODllNGQtZmM2Yzk1NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.727036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714007. Ctx: { TraceId: 01jrfnvnp13sbdz6n6xmsajcx7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4ZmFjNjEtMjQ5ZWQwNzUtMjA5Y2VkMGMtMzkwOWI2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.728813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714008. Ctx: { TraceId: 01jrfnvnp16yb2afrbtej7447n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYjkwNDQtYzlmNzU4MzktNmRjNjk2NWEtOTQ0Y2I0NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.741489Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714009. Ctx: { TraceId: 01jrfnvnpjd0183n0aefeyn3m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNhMDk4ODEtNDM0NTc3M2UtOTU2MTIxZDYtOGUzZjMxZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.743235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714010. Ctx: { TraceId: 01jrfnvnpj82vmteh3sgwmzqek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk2YzBhNDAtOGI1YjExMjAtNjY4NThkMzMtNjA0M2M1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.752374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714011. Ctx: { TraceId: 01jrfnvnpmdscfqn9tp831rwxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0MGI0Y2UtYWU4MGJhMDQtNjZlOGRlNjgtYzU0MjhiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.759713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714012. Ctx: { TraceId: 01jrfnvnqg62rhdmttx9wx9zmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhmMjAxNzctY2M2NWYxNzgtYTc3ZTA1ODYtM2Q3ZDUyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.761661Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714013. Ctx: { TraceId: 01jrfnvnqt6sxkyh2wy8q09vrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNiODA1N2QtZDUwNDc2NzAtMmE1NWFmOGYtNjhmNTY2NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.770516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714014. Ctx: { TraceId: 01jrfnvnqya08spdzjdnszrhaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdiODAzZTAtZDk5N2I1YTUtNzM5NGQ5YWYtYTdiZmUyMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.779897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714015. Ctx: { TraceId: 01jrfnvnr23v2ww5kdqtwbzrfk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y2YjUxOWYtYTQxNDI4YWMtNDg4ZDQxY2ItMWQxMmYxNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.798773Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714016. 
Ctx: { TraceId: 01jrfnvnr9br9wt8efybz32cv0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYjkwNDQtYzlmNzU4MzktNmRjNjk2NWEtOTQ0Y2I0NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.801281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714017. Ctx: { TraceId: 01jrfnvnrf7wxmyspgfeh1c034, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4ZmFjNjEtMjQ5ZWQwNzUtMjA5Y2VkMGMtMzkwOWI2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.812813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714018. Ctx: { TraceId: 01jrfnvnrs82kw2b717n287qrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNhMDk4ODEtNDM0NTc3M2UtOTU2MTIxZDYtOGUzZjMxZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.820316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714019. Ctx: { TraceId: 01jrfnvnrs6x219w7c1r0w4qh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk2YzBhNDAtOGI1YjExMjAtNjY4NThkMzMtNjA0M2M1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.823433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714021. Ctx: { TraceId: 01jrfnvns794985bq8q6e7x50x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhmMjAxNzctY2M2NWYxNzgtYTc3ZTA1ODYtM2Q3ZDUyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.825013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714022. Ctx: { TraceId: 01jrfnvns7dpny3k35fwcnefqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0MGI0Y2UtYWU4MGJhMDQtNjZlOGRlNjgtYzU0MjhiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.826236Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714020. Ctx: { TraceId: 01jrfnvnrv3vsad4dqj09pzmcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNiZWYzM2QtZjg1YWMxMmQtMTYwODllNGQtZmM2Yzk1NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.839975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714023. Ctx: { TraceId: 01jrfnvnsmc659mdnr74gtaz1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNiODA1N2QtZDUwNDc2NzAtMmE1NWFmOGYtNjhmNTY2NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.859733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714024. Ctx: { TraceId: 01jrfnvnsy2e1bjtxjx3h87t9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y2YjUxOWYtYTQxNDI4YWMtNDg4ZDQxY2ItMWQxMmYxNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.860136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714025. Ctx: { TraceId: 01jrfnvnsrc14xp2vpjncyhehx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdiODAzZTAtZDk5N2I1YTUtNzM5NGQ5YWYtYTdiZmUyMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.886053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714026. 
Ctx: { TraceId: 01jrfnvntrbjzyfm5j7szn91yw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYjkwNDQtYzlmNzU4MzktNmRjNjk2NWEtOTQ0Y2I0NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.887696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714027. Ctx: { TraceId: 01jrfnvnty2292rvrsb1vvc5c9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4ZmFjNjEtMjQ5ZWQwNzUtMjA5Y2VkMGMtMzkwOWI2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.889088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714028. Ctx: { TraceId: 01jrfnvnvk1wgc3yzkddx58wtc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk2YzBhNDAtOGI1YjExMjAtNjY4NThkMzMtNjA0M2M1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.889735Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714029. Ctx: { TraceId: 01jrfnvnvkc4jvpg25xmh7gg54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhmMjAxNzctY2M2NWYxNzgtYTc3ZTA1ODYtM2Q3ZDUyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.898957Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714030. Ctx: { TraceId: 01jrfnvntz5x7ns4y1x11b7nhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNhMDk4ODEtNDM0NTc3M2UtOTU2MTIxZDYtOGUzZjMxZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945657 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:02:45.915507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714032. Ctx: { TraceId: 01jrfnvnwbe0j3hew35zckf1ea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNiZWYzM2QtZjg1YWMxMmQtMTYwODllNGQtZmM2Yzk1NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.918897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714031. Ctx: { TraceId: 01jrfnvnwbcyhdjzjxbn28gttp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0MGI0Y2UtYWU4MGJhMDQtNjZlOGRlNjgtYzU0MjhiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.921094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714033. Ctx: { TraceId: 01jrfnvnweajwwkq2y8re1bajt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNiODA1N2QtZDUwNDc2NzAtMmE1NWFmOGYtNjhmNTY2NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.936653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714034. 
Ctx: { TraceId: 01jrfnvnwmc5xycck4v9tbvmyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y2YjUxOWYtYTQxNDI4YWMtNDg4ZDQxY2ItMWQxMmYxNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:45.957264Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714035. Ctx: { TraceId: 01jrfnvnww7216w9sg3vtarv2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdiODAzZTAtZDk5N2I1YTUtNzM5NGQ5YWYtYTdiZmUyMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945657 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |72.5%| [LD] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> TOlapNaming::AlterColumnTableFailed [GOOD] >> 
TSchemeShardSubDomainTest::SimultaneousCreateTenantTable |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |72.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:29.453626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:29.453735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.453778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:29.453830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:29.453876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:29.453910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:29.453972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:29.454091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:29.454550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:29.613985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:29.614043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:29.650737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:29.651117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:29.658456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:29.667581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:29.667869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:29.668650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, 
SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.669091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:29.671204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.672836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:29.672902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.673073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:29.673126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:29.673171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:29.673342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.692187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:29.891050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:29.891353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.891610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:29.891932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:29.892004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.899052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.899237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:29.899471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.899538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:29.899598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:29.899638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:29.902182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:02:29.902251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:29.902293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:29.904905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.904973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.905032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.905085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.909718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:29.913562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:29.913816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:29.914959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:29.915134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:29.915197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.915523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:29.915596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:29.915780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:29.915861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:29.919749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:29.919804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:29.920019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:29.920072Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:29.920373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:29.920449Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:29.920591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:29.920635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.920685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:29.920763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.920806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:29.920858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:29.920910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:29.920961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:29.921049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:29.921089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:29.921127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:29.934803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:29.934976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:29.935036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:02:51.850352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.850582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.850675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.850781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.851038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.851188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.851306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.851716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.852951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.864273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.864550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.864710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.864849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.864984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.865122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.865278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.865457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.866821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.866967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.867585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.871908Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:02:51.872032Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:51.872083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:51.872139Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:51.872178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:51.872221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:02:51.872311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2776:4041] message: TxId: 101 2025-04-10T11:02:51.872376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:51.872457Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:02:51.872499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:02:51.873944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-10T11:02:51.879698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:02:51.879766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:2777:4042] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:02:51.885982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TestTable" AlterSchema { AddColumns { Name: "New Column" Type: "Int32" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:51.886259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:02:51.886513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-10T11:02:51.890369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "update parse error: Invalid name for column \'New Column\'. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:51.890537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TestTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:02:51.890930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:02:51.890976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:02:51.891683Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:02:51.891842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:02:51.891885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:3580:4774] TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::RmDir |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:52.870096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:52.870240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:52.870318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:52.870392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:52.870452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:52.870487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:52.870569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:52.870702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:52.871193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:53.006089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:53.007809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:53.033580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:53.034016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-10T11:02:53.034242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:53.055553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:53.055931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:53.057181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:53.057800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:53.061539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:53.063582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:53.063680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:53.063953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:53.064028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:53.064110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:53.064342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.075193Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:53.274233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:53.274614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.274959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:53.275355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:53.275440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.278762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:53.278928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:53.279150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.279214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:53.279272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:53.279306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:53.281958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.282041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:53.282110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:53.284733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.284808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.284862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:53.284949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:53.290382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:53.293198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:53.293466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:53.294474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:53.294630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:53.294684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:53.295035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:53.295103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:53.295304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:53.295447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:53.298120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-10T11:02:53.298188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:53.298438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:53.298497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:53.298848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.298897Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:53.299026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:53.299068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:53.299129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:53.299168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:53.299220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:53.299291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:53.299334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:53.299370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:53.299442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:53.299479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:53.299508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:53.307796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:53.307989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:53.308031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T11:02:53.429633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:02:53.430048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.430105Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:53.430212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-10T11:02:53.430405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:53.431316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.431446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.431492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:02:53.431559Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:02:53.431612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:02:53.435224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.435370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.435419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:02:53.435480Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:02:53.435525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:02:53.435640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:02:53.439435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:02:53.439647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at 
step: 5000003 2025-04-10T11:02:53.440450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:53.440639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:53.440722Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:02:53.440944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:02:53.441182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:02:53.441283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:02:53.442653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:02:53.444432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:02:53.446278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:53.446315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:02:53.446477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:02:53.446574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:53.446622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:02:53.446659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:02:53.446987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.447038Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:02:53.447162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:53.447198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:53.447241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:53.447274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:53.447314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:02:53.447362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:53.447395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-10T11:02:53.447427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:02:53.447529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:02:53.447586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:02:53.447633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:02:53.447660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T11:02:53.448254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.448331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.448370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:02:53.448421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:02:53.448484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:02:53.449284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.449377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:53.449428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:02:53.449458Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:02:53.449488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:02:53.449580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:02:53.454783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:02:53.456870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:02:53.460765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:53.461140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:02:53.461249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-04-10T11:02:53.461380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-04-10T11:02:53.464409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:53.464657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |72.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut/ydb-core-base-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |72.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] |72.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 18996, MsgBus: 18110 2025-04-10T11:00:25.902251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637690159811047:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:25.902314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e29/r3tmp/tmpB0dQ99/pdisk_1.dat 2025-04-10T11:00:26.383463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:26.383559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:26.385333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18996, node 1 2025-04-10T11:00:26.428847Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:26.429812Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:26.456341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:26.531850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:26.531874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:26.531884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:26.532022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18110 TClient is connected to server localhost:18110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:27.230093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:27.245327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:00:27.259701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:27.426453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:27.614583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:27.693097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:00:29.891941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637707339682012:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:29.892024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.219574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.309273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.367670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.410688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.453449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.490302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:00:30.558615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637711634649821:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.558704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.559046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637711634649826:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:00:30.564043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:00:30.584602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637711634649828:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:00:30.654721Z node 1 :TX_PROXY ERROR: Actor# [1:7491637711634649882:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:00:30.909964Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637690159811047:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:30.910025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:00:31.693745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:00:32.167172Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-04-10T11:00:32.168042Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:32.168121Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637720224585040:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:32.168246Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-04-10T11:00:32.168309Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7491637720224585040:2523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:00:32.168562Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:00:32.168600Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellatio ... ildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.260441Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715766 2025-04-10T11:02:03.260519Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.260893Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:02:03.260951Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ 
IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.262138Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715766, status: StatusAccepted 2025-04-10T11:02:03.262806Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 16 2025-04-10T11:02:03.263311Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:02:03.263374Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 
0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.266714Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710673 2025-04-10T11:02:03.266815Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.267275Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:02:03.267365Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.267400Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:02:03.267993Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-04-10T11:02:03.268055Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , 
SubscribersCount: 1, CreateSender: [3:7491638106576516068:2525], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 102, upload bytes: 2700, read rows: 99, read bytes: 2892 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:02:03.268076Z node 3 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710673, subscribers count# 1 2025-04-10T11:02:03.268361Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-04-10T11:02:03.268599Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 } 2025-04-10T11:02:03.274031Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-04-10T11:02:03.274077Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-04-10T11:02:03.274098Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-04-10T11:02:03.274122Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-04-10T11:02:10.101952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:02:10.101979Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:12.826866Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 >> TSchemeShardSubDomainTest::RmDir [GOOD] |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/ut/ydb-core-util-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/util/ut/ydb-core-util-ut |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |72.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy >> TBackupCollectionTests::HiddenByFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-04-10T11:02:24.969675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638199880956980:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00182c/r3tmp/tmptvTSrE/pdisk_1.dat 2025-04-10T11:02:25.206052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:02:25.546932Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:25.565508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:25.565620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:25.592736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9603, node 1 2025-04-10T11:02:26.071933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:26.071954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:26.071962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:26.072081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:26.772611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:30.739572Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638227815922620:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:30.739632Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00182c/r3tmp/tmpqob0Br/pdisk_1.dat 2025-04-10T11:02:31.079439Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:31.159685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:31.159772Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:31.171587Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1381, node 4 2025-04-10T11:02:31.322770Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:31.322805Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:31.322815Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:31.322950Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:31.606847Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:35.746775Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491638227815922620:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:35.746897Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:46.054429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:02:46.054478Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:54.110428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:54.110583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:54.110662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:54.110724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:54.110782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:54.110814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:54.110888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:54.110998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:54.111476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:54.232328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:54.232416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:54.264463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:54.264905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:54.265160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:54.275962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:54.276341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:54.277215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:54.277807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:54.280520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:54.282240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:54.282311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:54.282577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:54.282646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:54.282697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:54.282900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.292311Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:54.494522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:54.494912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.495299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:54.495675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:54.495770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.504001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:54.504271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:54.504843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.504939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:54.504987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:54.505106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:54.514372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.514476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-10T11:02:54.514534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:54.517823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.517897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.517948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:54.518027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:54.526787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:54.529856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:54.530101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:54.531353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:54.531543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:54.531605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:54.531983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:54.532049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:54.532267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:54.532459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:54.536150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:54.536243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:54.536486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:54.536534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:54.536818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:54.536865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:54.536999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:54.537037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:54.537099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:54.537137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:54.537179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:54.537236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:54.537277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:54.537311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:54.537436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:54.537489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:54.537524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:54.541026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:54.541194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:54.541238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rsion: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:55.136572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:02:55.136610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:02:55.136646Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:02:55.136684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:02:55.136769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:02:55.137527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2949 } } 2025-04-10T11:02:55.137577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-10T11:02:55.137768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2949 } } 2025-04-10T11:02:55.137896Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2949 } } 2025-04-10T11:02:55.139216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 625 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:02:55.139277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-10T11:02:55.139456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 625 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:02:55.139520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:02:55.139698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 625 
RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:02:55.139783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:55.139830Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.139898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-10T11:02:55.139952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:02:55.143652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:02:55.153780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:02:55.153987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.159454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.159941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.160001Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:02:55.160133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:55.160180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:55.160233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:02:55.160268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:55.160322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:02:55.160442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:275:2266] message: TxId: 101 2025-04-10T11:02:55.160504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:02:55.160579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:02:55.160639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:02:55.160792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:02:55.171251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:02:55.171343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:276:2267] TestWaitNotification: OK eventTxId 101 2025-04-10T11:02:55.172121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-04-10T11:02:55.172460Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 356us result status StatusSuccess 2025-04-10T11:02:55.173544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:55.174566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:55.174889Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 308us result status StatusSuccess 2025-04-10T11:02:55.175361Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |72.7%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-04-10T10:59:21.942465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637417183554586:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:21.942561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d60/r3tmp/tmpXA1U8A/pdisk_1.dat 2025-04-10T10:59:22.760792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:22.760880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:22.769913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:22.773297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29947, node 1 2025-04-10T10:59:23.127263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:23.127297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:23.127305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:23.127456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19418 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:23.797699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19418 2025-04-10T10:59:24.371096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:59:24.685440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:59:24.685719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:59:24.686038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:59:24.686737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:59:24.686978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:59:24.687189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:59:24.687346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:59:24.687497Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:59:24.687685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:59:24.687806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:59:24.687969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:59:24.688108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491637430068457561:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:59:24.735463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:59:24.735545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:59:24.735893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:59:24.736044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:59:24.736162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:59:24.736301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:59:24.736455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:59:24.736608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:59:24.736762Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:59:24.736893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T10:59:24.737010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:59:24.737171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491637430068457557:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:59:24.751925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:59:24.752000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:59:24.752107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:59:24.752128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:59:24.752308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:59:24.752337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:59:24.752403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:59:24.752439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:59:24.752511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:59:24.752528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:59:24.752555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-0 ... 
don't wait for ack delivery in input channelId: 37, seqNo: [1] 2025-04-10T11:02:43.250823Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2025-04-10T11:02:43.250840Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2025-04-10T11:02:43.250858Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2025-04-10T11:02:43.250877Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2025-04-10T11:02:43.250899Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2025-04-10T11:02:43.250927Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2025-04-10T11:02:43.250947Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2025-04-10T11:02:43.250967Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2025-04-10T11:02:43.250987Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2025-04-10T11:02:43.251007Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2025-04-10T11:02:43.251027Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2025-04-10T11:02:43.251049Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2025-04-10T11:02:43.251069Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2025-04-10T11:02:43.251088Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2025-04-10T11:02:43.251110Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2025-04-10T11:02:43.251133Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2025-04-10T11:02:43.251157Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2025-04-10T11:02:43.251178Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2025-04-10T11:02:43.251198Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2025-04-10T11:02:43.251219Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2025-04-10T11:02:43.251239Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2025-04-10T11:02:43.251287Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2025-04-10T11:02:43.251316Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2025-04-10T11:02:43.251334Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2025-04-10T11:02:43.251351Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2025-04-10T11:02:43.251371Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2025-04-10T11:02:43.251392Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-04-10T11:02:43.251430Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. Tasks execution finished 2025-04-10T11:02:43.251483Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7491638268782830132:3214], TxId: 281474976710670, task: 65. Ctx: { TraceId : 01jrfnvfnn580pnq1haq53ye44. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-10T11:02:43.251807Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 65. pass away 2025-04-10T11:02:43.252142Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:02:43.252561Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7491638268782830132:3214], TxId: 281474976710670, task: 65. Ctx: { TraceId : 01jrfnvfnn580pnq1haq53ye44. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [28:7491638268782830038:3141] TaskId: 65 Stats: CpuTimeUs: 63352 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 966 FinishTimeMs: 1744282963249 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 408 BuildCpuTimeUs: 558 HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744282960672 } MaxMemoryUsage: 1048576 2025-04-10T11:02:43.253826Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491638268782830038:3141] TxId: 281474976710670. Ctx: { TraceId: 01jrfnvfnn580pnq1haq53ye44, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7491638268782830132:3214], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 63352 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 966 FinishTimeMs: 1744282963249 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 408 BuildCpuTimeUs: 558 HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744282960672 } MaxMemoryUsage: 1048576 } 2025-04-10T11:02:43.253953Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jrfnvfnn580pnq1haq53ye44, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7491638268782830132:3214] 2025-04-10T11:02:43.256814Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491638268782830038:3141] TxId: 281474976710670. Ctx: { TraceId: 01jrfnvfnn580pnq1haq53ye44, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:02:43.256969Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491638268782830038:3141] TxId: 281474976710670. Ctx: { TraceId: 01jrfnvfnn580pnq1haq53ye44, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.057074s ReadRows: 0 ReadBytes: 0 ru: 38 rate limiter was not found force flag: 1 2025-04-10T11:02:43.257172Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-10T11:02:43.257812Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 2843.054 QueriesCount: 1 2025-04-10T11:02:43.257934Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T11:02:43.258130Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T11:02:43.258246Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, EndCleanup, isFinal: 1 2025-04-10T11:02:43.258386Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: ExecuteState, TraceId: 01jrfnvfnn580pnq1haq53ye44, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7491638182883480062:2278] 2025-04-10T11:02:43.258471Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: unknown state, TraceId: 01jrfnvfnn580pnq1haq53ye44, Cleanup temp tables: 0 RESULT: [[42000u;"nginx";"resource_6";"19";[2];["message"];["{\"auth\":{\"org_id\":7704,\"service\":{\"internal\":\"false\",\"ip\":\"258.258.258.258\"},\"type\":\"token\",\"user\":{\"id\":1000042,\"ip\":\"257.257.257.257\",\"is_cloud\":\"false\"}}}"]]] --------------------- STATS: total CPU: 1492 duration: 2823659 usec cpu: 1795012 usec { name: "/Root/OlapStore/log1" reads { rows: 50 bytes: 16000 } } 2025-04-10T11:02:43.274823Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7491638195768382816:2328];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:02:43.277722Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744282960000, txId: 18446744073709551615] shutting down 2025-04-10T11:02:43.278004Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmI1MDdjZmQtY2ZjNWE3NjEtNmQ0MWQ1OGQtOWVmY2ZkMzg=, ActorId: [28:7491638264487862708:3141], ActorState: unknown state, TraceId: 01jrfnvfnn580pnq1haq53ye44, Session actor destroyed 2025-04-10T11:02:43.279768Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;parent=[28:7491638195768382814:2327];fline=actor.cpp:33;event=skip_flush_writing; |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:55.341755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:55.341880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:55.341944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:55.341999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:55.342044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:55.342091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:55.342200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:55.342314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:55.342729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:55.447846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:55.447917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:55.480053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:55.480333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:55.480491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:55.490251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:55.490539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:55.491210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:55.491576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:55.493568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:55.494893Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:55.494951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:55.495135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:55.495178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:55.495216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:55.495378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.502957Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:55.730421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:55.730738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.730993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:55.731246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:55.731328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.741614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:55.741774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:55.744195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.744338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:55.744391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:55.744430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:55.753856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.753953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:55.753997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:55.756829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.756890Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.756934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:55.756992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:55.760845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:55.764771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:55.764964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:55.766034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:55.766224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:55.766285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:55.766590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:55.766645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:55.766824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:55.766952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:55.775392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:55.775461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:55.775665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:55.775711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:55.775959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:55.776004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:55.776110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-04-10T11:02:55.776158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:55.776227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:55.776280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:55.776329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:55.776376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:55.776411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:55.776441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:55.776672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:55.776721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:55.776764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:55.779375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:55.779500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:55.779556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
CHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:02:56.172884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-10T11:02:56.173041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:56.179419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:02:56.179575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:02:56.179987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:56.180138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:56.180189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:02:56.180554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:02:56.180619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:02:56.180780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:56.180872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-10T11:02:56.180954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:02:56.193016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:56.193080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:56.193264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:02:56.193380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:56.193455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:02:56.193497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T11:02:56.193903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:02:56.193961Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:02:56.194061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:02:56.194109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:02:56.194173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:02:56.194209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:02:56.194249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:02:56.194300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:02:56.194346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:02:56.194379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:02:56.194653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-10T11:02:56.194703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-10T11:02:56.194752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:02:56.194797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:02:56.195455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:02:56.195573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:02:56.195620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:02:56.195656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:02:56.195694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:56.196400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:02:56.196489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:02:56.196528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:02:56.196557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:02:56.196581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-10T11:02:56.196646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-10T11:02:56.196689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:563:2472] 2025-04-10T11:02:56.200155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:02:56.200944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:02:56.201052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:02:56.201084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:564:2473] TestWaitNotification: OK eventTxId 100 2025-04-10T11:02:56.201738Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:02:56.203461Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 1.74ms result status StatusSuccess 2025-04-10T11:02:56.203972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:02:56.206930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-10T11:02:56.207106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:02:56.207351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:02:56.211661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:56.211967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> TBlobStorageBarriersTreeTest::Tree [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] |72.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut/ydb-core-base-ut |72.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut |72.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut >> TBlobStorageIngressMatrix::VectorTestMinus [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |72.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator1 [GOOD] >> 
TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] >> DiscoveryConverterTest::AccountDatabase [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |72.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::CmWay [GOOD] |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |72.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> TPDiskErrorStateTests::Basic [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] Test command err: 2025-04-10T11:00:47.831522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:47.831603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:47.905694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:49.332978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-10T11:00:49.511498Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:49.512067Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:49.512745Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2404953778577865548 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:49.558551Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:49.559138Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:49.559363Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1344054314869693531 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:49.620510Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:49.621063Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:49.625739Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10783790731848397737 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:49.629527Z node 2 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T11:00:49.718837Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:00:49.719387Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:00:49.719600Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ce8/r3tmp/tmpWxLSMx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4645230127984403485 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:00:49.737766Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoCon ... istered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinPlanResolutionMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxChunksToDefragInflight was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingDryRun was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.MaxInProgressSyncCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.BucketSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakDurationMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.UseNoopSchedulerHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control PDiskControls.UseNoopSchedulerSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TableServiceControls.EnableMergeDatashardReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TestShardControls.DisableWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |72.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> TQueueBackpressureTest::CreateDelete [GOOD] >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |72.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |72.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |72.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> TConsoleTests::TestRemoveAttributes [GOOD] >> 
TConsoleTests::TestRemoveAttributesExtSubdomain >> TQueueBackpressureTest::PerfInFlight >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun >> TPersQueueTest::DirectReadRestartPQRB [GOOD] |73.0%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TPersQueueTest::DirectReadRestartTablet >> TActorTest::TestSendFromAnotherThread ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:57.647853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:57.647993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:57.648078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:57.648123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:57.648173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:57.648204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:57.648291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:57.648404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:57.648798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:57.782771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:57.782835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:57.804209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:57.804566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:57.804789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-04-10T11:02:57.816432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:57.816724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:57.817597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:57.818042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:57.820499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:57.821968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:57.822034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:57.822282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:57.822342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:57.822390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:57.822573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:57.830844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:58.010331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:58.010597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.010815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:58.011161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:58.011225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.014816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:58.014982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:58.015311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.015388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:58.015444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:58.015486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:58.017961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.018027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:58.018072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:58.020339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.020405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.020500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:58.020555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:58.024933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:58.027457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:58.027652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:58.028946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:58.029100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:58.029159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:58.029626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:58.029691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:58.029918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:58.030016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:58.032573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:58.032630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:58.032850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:58.032895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:58.033166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:58.033219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:58.033324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:58.033359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:58.033436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:58.033483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:58.033538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:58.033592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:58.033637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:58.033668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:58.033734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:58.033773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:58.033808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:58.036450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:58.036570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:58.036611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RACE: StateWork, received event# 274137603, Sender [6:209:2211], Recipient [6:133:2156]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-04-10T11:03:03.625012Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T11:03:03.625081Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:03:03.625154Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:03:03.625180Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:03:03.625218Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-04-10T11:03:03.625245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-04-10T11:03:03.625313Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-04-10T11:03:03.625351Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:03:03.626086Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [6:594:2551], Recipient [6:133:2156]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1370 } } 2025-04-10T11:03:03.626131Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-10T11:03:03.626284Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1370 } } 2025-04-10T11:03:03.626319Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-10T11:03:03.626478Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1370 } } 2025-04-10T11:03:03.626599Z node 6 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1370 } } 2025-04-10T11:03:03.626648Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:03:03.628954Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:659:2608], Recipient [6:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:03.629029Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:03.629066Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:03:03.629308Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:594:2551], Recipient [6:133:2156]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 594 RawX2: 25769806327 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-10T11:03:03.629351Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-10T11:03:03.629475Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 594 RawX2: 25769806327 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-10T11:03:03.629548Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-10T11:03:03.629749Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 594 RawX2: 25769806327 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-10T11:03:03.629822Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:03:03.629941Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 594 RawX2: 25769806327 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-10T11:03:03.630025Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:03:03.630080Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-10T11:03:03.630124Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:03:03.630194Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-04-10T11:03:03.630408Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:03:03.631524Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.631667Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.631801Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:03:03.631861Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.634439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:03:03.634485Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.634634Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:03:03.634662Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.634763Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-10T11:03:03.634804Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.636554Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-10T11:03:03.636616Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.636670Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-04-10T11:03:03.636808Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:594:2551] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-04-10T11:03:03.637268Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:133:2156], Recipient [6:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:03:03.637320Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:03:03.637407Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-10T11:03:03.637468Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-04-10T11:03:03.637620Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:03:03.637661Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-10T11:03:03.637736Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-10T11:03:03.637819Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-10T11:03:03.637868Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-10T11:03:03.637927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-04-10T11:03:03.638046Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:305:2296] message: TxId: 106 2025-04-10T11:03:03.638170Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-10T11:03:03.638230Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T11:03:03.638276Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-10T11:03:03.638373Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-10T11:03:03.638415Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2025-04-10T11:03:03.638435Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2025-04-10T11:03:03.638529Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-10T11:03:03.640748Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:03.640872Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:305:2296] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-04-10T11:03:03.641090Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:03:03.641176Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:624:2573] 2025-04-10T11:03:03.641454Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:626:2575], Recipient [6:133:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:03:03.641503Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:03:03.641533Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay >> TActorTest::TestSendAfterDelay [GOOD] >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> TActorTest::TestSendFromAnotherThread [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-10T11:02:07.014033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638126853802789:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:07.014076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:02:07.130917Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638129332556446:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:07.133006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:02:07.395593Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:02:07.397054Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0018e8/r3tmp/tmpTK8iEU/pdisk_1.dat 2025-04-10T11:02:08.046859Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:08.098143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:08.135801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:08.135935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:08.153452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
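The Hive lines just above walk each node through the VolatileState sequence Unknown -> Disconnected -> Connecting -> Connected (node 2 completes the sequence just below). A small validator for exactly the edges observed in this log; any other transition the real Hive may allow, such as a drop back to Disconnected, is outside what these lines show.

```cpp
#include <cstdint>

// Node states as printed by HIVE, e.g.
// "Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected".
enum class EVolatileState : uint8_t { Unknown, Disconnected, Connecting, Connected };

// Accepts only the forward edges observed in the log above; a sketch,
// not the Hive's actual transition rules.
bool IsObservedTransition(EVolatileState from, EVolatileState to) {
    switch (from) {
        case EVolatileState::Unknown:      return to == EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return to == EVolatileState::Connecting;
        case EVolatileState::Connecting:   return to == EVolatileState::Connected;
        case EVolatileState::Connected:    return false;  // no edge out of Connected is seen here
    }
    return false;
}
```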
2025-04-10T11:02:08.153543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:08.158773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:08.164646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:02:08.164981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:08.168527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6968, node 1 2025-04-10T11:02:08.499725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0018e8/r3tmp/yandexIXqU2d.tmp 2025-04-10T11:02:08.499759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0018e8/r3tmp/yandexIXqU2d.tmp 2025-04-10T11:02:08.499906Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0018e8/r3tmp/yandexIXqU2d.tmp 2025-04-10T11:02:08.500037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:02:08.561003Z INFO: TTestServer started on Port 15368 GrpcPort 6968 TClient is connected to server localhost:15368 PQClient connected to localhost:6968 === TenantModeEnabled() = 0 === Init PQ - start server on port 6968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
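The harness above blocks in WaitRootIsUp until TClient::Ls on 'Root' answers with SUCCESS. A generic poll-until-ready helper of that shape could look like the sketch below; the function name, parameters, and defaults are illustrative assumptions, not the actual test-framework API.

```cpp
#include <chrono>
#include <functional>
#include <thread>

// Poll `probe` until it reports readiness or the deadline passes.
// `probe` stands in for something like issuing TClient::Ls("Root") and
// checking for StatusCode SUCCESS; names and defaults here are
// illustrative assumptions, not the actual test-framework API.
bool WaitUntilUp(const std::function<bool()>& probe,
                 std::chrono::milliseconds timeout  = std::chrono::seconds(30),
                 std::chrono::milliseconds interval = std::chrono::milliseconds(100)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (probe()) {
            return true;   // corresponds to "WaitRootIsUp 'Root' success."
        }
        std::this_thread::sleep_for(interval);
    }
    return false;          // caller decides how to fail the test
}
```

A caller would wrap the Ls probe in a lambda, e.g. WaitUntilUp([&] { return LsSucceeded(client, "Root"); }), where LsSucceeded is a hypothetical helper checking the response status.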
2025-04-10T11:02:09.049942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:02:09.050191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.050465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:02:09.050912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:02:09.051447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.054767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:02:09.054907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:02:09.055139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.055203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:02:09.055231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:02:09.055244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
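The transition just logged (2 -> 3) starts the same progression seen in the backup-collection dump above, 2 -> 3 -> 128 -> 240, with 129 (ProposedWaitParts) appearing when shards must acknowledge; it continues below for txId 281474976710657. The phase names printed next to the numbers (TCreateParts, TConfigureParts, TPropose, TDone) suggest the following toy reconstruction; the enum values are read off the log, and everything else is an assumption rather than the schemeshard sources.

```cpp
#include <cstdint>
#include <iostream>

// State codes as printed in "Change state for txid N:0 A -> B"; the
// name-to-number mapping is read off adjacent log lines (TCreateParts,
// NSubDomainState::TConfigureParts, TPropose, ProposedWaitParts, TDone).
enum class ETxState : uint32_t {
    CreateParts       = 2,    // create shards; "no shards to create, do next state"
    ConfigureParts    = 3,    // push configuration to the created parts
    Propose           = 128,  // propose to the coordinator
    ProposedWaitParts = 129,  // wait for TEvSchemaChanged from each shard
    Done              = 240,  // operation finished
};

// Toy progression: the real schemeshard chooses the next state per
// operation type; this just replays the sequence visible in the log.
ETxState NextState(ETxState s, bool waitForShards) {
    switch (s) {
        case ETxState::CreateParts:       return ETxState::ConfigureParts;  // 2 -> 3
        case ETxState::ConfigureParts:    return ETxState::Propose;         // 3 -> 128
        case ETxState::Propose:           return waitForShards
                                                 ? ETxState::ProposedWaitParts  // 128 -> 129
                                                 : ETxState::Done;              // 128 -> 240
        case ETxState::ProposedWaitParts: return ETxState::Done;            // 129 -> 240
        case ETxState::Done:              return ETxState::Done;
    }
    return s;
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done;) {
        ETxState next = NextState(s, /*waitForShards=*/false);
        std::cout << "Change state " << static_cast<uint32_t>(s)
                  << " -> " << static_cast<uint32_t>(next) << "\n";
        s = next;
    }
}
```

Run as written, it prints 2 -> 3, 3 -> 128, 128 -> 240, the exact sequence the subdomain operations above go through.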
2025-04-10T11:02:09.057824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.057866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:02:09.057887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:02:09.060755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.060799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.060834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:02:09.060874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:02:09.067660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:09.068140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:09.068185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:02:09.068237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:09.069956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:02:09.070092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:02:09.075500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282929116, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:02:09.075696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282929116 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:02:09.075770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:02:09.076179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T11:02:09.076239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:02:09.076460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:02:09.076561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:02:09.080166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:02:09.080187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:02:09.080464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:02:09.080495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491638131148770749:2405], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T11:02:09.080574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:09.080631Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T11:02:09.080742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:02:09.080756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:02:09.080782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:02:09.080793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:02:09.080816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T11:02:09.080843Z node 1 :FLAT_TX_SC ... 8][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_14176260244880472873_v1" (Sender=[5:7491638359625207107:2643], Pipe=[5:7491638359625207110:2643], Partitions=[], ActiveFamilyCount=0) 2025-04-10T11:03:01.441545Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_14176260244880472873_v1" sender [5:7491638359625207107:2643] lock partition 0 for ReadingSession "shared/cli_5_1_14176260244880472873_v1" (Sender=[5:7491638359625207107:2643], Pipe=[5:7491638359625207110:2643], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-10T11:03:01.441637Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-10T11:03:01.441685Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000309s 2025-04-10T11:03:01.447182Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_14176260244880472873_v1" ClientId: "cli" PipeClient { RawX1: 7491638359625207110 RawX2: 4503621102209619 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-04-10T11:03:01.447315Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-10T11:03:01.447863Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7491638359625207112:2646] 2025-04-10T11:03:01.447927Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_14176260244880472873_v1:1 with generation 1 2025-04-10T11:03:01.453713Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1744282981422 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T11:03:01.453785Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-10T11:03:01.453883Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 sending to client partition status 2025-04-10T11:03:01.458287Z :INFO: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-04-10T11:03:01.459132Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-10T11:03:01.459378Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-10T11:03:01.459460Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-10T11:03:01.459501Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-10T11:03:01.459577Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-10T11:03:01.459598Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1TEvPartitionReady. Aval parts: 1 2025-04-10T11:03:01.459669Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 performing read request: guid# ebe78415-6b86c397-831dcdf4-615732fe, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-10T11:03:01.459836Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid ebe78415-6b86c397-831dcdf4-615732fe 2025-04-10T11:03:01.461235Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1744282981315 CreateTimestampMS: 1744282981313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1744282981321 CreateTimestampMS: 1744282981313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1744282981365 CreateTimestampMS: 1744282981313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-04-10T11:03:01.461472Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-10T11:03:01.461527Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid ebe78415-6b86c397-831dcdf4-615732fe has messages 1 2025-04-10T11:03:01.461622Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 read done: guid# ebe78415-6b86c397-831dcdf4-615732fe, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-04-10T11:03:01.461655Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 response to read: guid# ebe78415-6b86c397-831dcdf4-615732fe 2025-04-10T11:03:01.461972Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 Process answer. Aval parts: 0 2025-04-10T11:03:01.462366Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-10T11:03:01.462515Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-04-10T11:03:01.462786Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-10T11:03:01.462872Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] Returning serverBytesSize = 490 to budget 2025-04-10T11:03:01.462921Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-04-10T11:03:01.463171Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-10T11:03:01.464245Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-10T11:03:01.464306Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-10T11:03:01.464347Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-10T11:03:01.464408Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-10T11:03:01.464471Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] Returning serverBytesSize = 0 to budget 2025-04-10T11:03:01.464679Z :INFO: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] Closing read session. 
Close timeout: 0.000000s 2025-04-10T11:03:01.464724Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-10T11:03:01.464785Z :INFO: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] Counters: { Errors: 0 CurrentSessionLifetimeMs: 42 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:03:01.464905Z :NOTICE: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T11:03:01.464954Z :DEBUG: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] [] Abort session to cluster 2025-04-10T11:03:01.464954Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-04-10T11:03:01.465180Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 got read request: guid# 83b0d5c2-6b52f6b5-5869430f-7c3005cc 2025-04-10T11:03:01.465471Z :NOTICE: [] [] [e3398e7f-d3ee4917-1eb6a7b7-c2742446] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:03:01.468052Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 grpc read done: success# 0, data# { } 2025-04-10T11:03:01.468097Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 grpc read failed 2025-04-10T11:03:01.468127Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 grpc closed 2025-04-10T11:03:01.468189Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14176260244880472873_v1 is DEAD 2025-04-10T11:03:01.469880Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7491638359625207110:2643] disconnected; active server actors: 1 2025-04-10T11:03:01.469934Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7491638359625207110:2643] client cli disconnected session shared/cli_5_1_14176260244880472873_v1 2025-04-10T11:03:01.470710Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_14176260244880472873_v1 |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] >> TActorTest::TestWaitFuture >> TActorTest::TestWaitFuture [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> TActorTest::TestWaitFor >> TActorTest::TestWaitFor [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 |73.0%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> TQueueBackpressureTest::PerfInFlight [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... 
waiting for value = 42 (done) >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TRegistryTests::TestAddGet [GOOD] >> TRegistryTests::TestCheckConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroQueueWeight [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-04-10T10:59:44.038551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:59:44.038925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:44.039000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e95/r3tmp/tmpJ5ry3s/pdisk_1.dat 2025-04-10T10:59:44.807704Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27549, node 1 2025-04-10T10:59:45.313010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:45.313076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:45.313119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:45.313634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:59:45.316914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:59:45.455689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:45.455853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:45.473029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1479 2025-04-10T10:59:46.439735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:59:51.779083Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T10:59:51.851448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:51.851590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:51.892038Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T10:59:51.895091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:52.260151Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.260814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.261511Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.261698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.262007Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.262121Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.262320Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.262412Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.262487Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T10:59:52.475759Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:52.475870Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:52.494578Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:59:52.748464Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:52.908143Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T10:59:52.908254Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T10:59:52.974840Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T10:59:52.976167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T10:59:52.976386Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T10:59:52.976454Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T10:59:52.976519Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T10:59:52.976573Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T10:59:52.976663Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T10:59:52.976715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T10:59:52.977184Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T10:59:53.033012Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:59:53.033140Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1880:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T10:59:53.048874Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1894:2611] 2025-04-10T10:59:53.066505Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1925:2626] 2025-04-10T10:59:53.067802Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1925:2626], schemeshard id = 72075186224037897 2025-04-10T10:59:53.075060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T10:59:53.095831Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T10:59:53.095889Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T10:59:53.095949Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T10:59:53.107538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T10:59:53.115828Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T10:59:53.115994Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T10:59:53.428718Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T10:59:53.680571Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T10:59:53.759552Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T10:59:54.588558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T10:59:55.428775Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:55.639990Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T10:59:55.640055Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T10:59:55.640207Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2596:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T10:59:55.640865Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2597:2949] 2025-04-10T10:59:55.641520Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2597:2949], schemeshard id = 72075186224037899 2025-04-10T10:59:57.232473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2726:3247], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.232639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.269302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T10:59:57.643971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2956:3292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.644147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:59:57.709309Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2961:3296]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T10:59:57.709613Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T10:59:57.709850Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T10:59:57.709918Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2964:3299] 2025-04-10T10:59:57.709992Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2964:3299] 2025-04-10T10:59:57.710668Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2965:3135] 2025-04-10T10:59:57.710993Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2964:3299], server id = [2:2965:3135], tablet id = 72075186224037894, status = OK 2025-04-10T10:59:57.711206Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2965:3135], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T10:59:57.711282Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T10:59:57.711542Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T10:59:57.711622Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2961:3296], StatRequests.size() = 1 2025-04-10T10:59:57.720225Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2998:3308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T10:59:57.720454Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] Re ... 11:02:59.400306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:02:59.447242Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7833:5557]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:02:59.447510Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-10T11:02:59.447551Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7833:5557], StatRequests.size() = 1 2025-04-10T11:03:00.903290Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:03:00.903410Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:03:00.903496Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:03:00.903555Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:03:00.905852Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:03:00.943792Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:03:00.960997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7871:5584], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:00.961191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7881:5589], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:00.961480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:01.019809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:03:01.172598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7885:5592], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:03:01.280078Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7980:5640]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:01.280495Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:03:01.280546Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7980:5640], StatRequests.size() = 1 2025-04-10T11:03:01.366771Z node 2 :TX_PROXY ERROR: Actor# [2:7982:5642] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:01.502519Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:8011:5657]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:01.502924Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:03:01.503192Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-10T11:03:01.503258Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:03:01.503437Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:03:01.503549Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:8011:5657], StatRequests.size() = 1 2025-04-10T11:03:01.734558Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTlhOWVkYWItMTU5YzAyODQtMWY1ZTUxODQtMzczNDBkNg==, TxId: 2025-04-10T11:03:01.734651Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTlhOWVkYWItMTU5YzAyODQtMWY1ZTUxODQtMzczNDBkNg==, TxId: 2025-04-10T11:03:01.739167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:03:01.772113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:03:01.772206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:03:01.818988Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:03:01.819056Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:03:01.912869Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3234:3179], schemeshard count = 1 2025-04-10T11:03:02.258405Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-04-10T11:03:02.258471Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 208.000000s, at schemeshard: 72075186224037899 2025-04-10T11:03:02.258845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-04-10T11:03:02.275711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:03:03.350214Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8082:5701]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:03.350517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:03:03.350564Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8082:5701], StatRequests.size() = 1 2025-04-10T11:03:04.924859Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:03:04.936443Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:03:04.936503Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:03:04.936546Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-04-10T11:03:04.936586Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-10T11:03:04.936915Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:03:04.940162Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:03:04.965409Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2RmNWYwYjktMTJlMzUwZi1mYzQwNWI4LWZjZDcxZWY2, TxId: 2025-04-10T11:03:04.965474Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2RmNWYwYjktMTJlMzUwZi1mYzQwNWI4LWZjZDcxZWY2, TxId: 2025-04-10T11:03:04.965995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:03:04.986287Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-10T11:03:04.986376Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:03:05.052507Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8153:5744]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:05.052950Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:03:05.053024Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8153:5744], StatRequests.size() = 1 2025-04-10T11:03:06.733347Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8205:5775]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:06.733558Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:03:06.733589Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8205:5775], StatRequests.size() = 1 2025-04-10T11:03:08.186128Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-10T11:03:08.186351Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:03:08.186717Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:03:08.198047Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:03:08.198115Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:03:08.198178Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-10T11:03:08.198214Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:03:08.198503Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:03:08.201544Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:03:08.217696Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTdhNTNmOGUtNDU1NDE4MWUtZDA0YzI4ZDctNGVhNWE2ZDU=, TxId: 2025-04-10T11:03:08.217760Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTdhNTNmOGUtNDU1NDE4MWUtZDA0YzI4ZDctNGVhNWE2ZDU=, TxId: 2025-04-10T11:03:08.218474Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-10T11:03:06.000000Z 2025-04-10T11:03:08.218789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:03:08.233678Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:03:08.233738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:03:08.337010Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8271:5813]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:03:08.337529Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:03:08.337582Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8271:5813], StatRequests.size() = 1 >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-04-10T11:00:53.776036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:53.776120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:53.839839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:58.078573Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:58.078638Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:58.133315Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:02.353981Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:02.354058Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:02.495487Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:03.763703Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-10T11:01:03.972322Z node 24 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:03.972885Z node 24 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:03.973559Z node 24 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12932451175326336101 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:04.026080Z node 23 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:04.030863Z node 23 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:04.031107Z node 23 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10104110022764288944 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:04.096029Z node 20 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:04.096532Z node 20 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:04.096731Z node 20 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6411422800067424836 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:04.106411Z node 20 :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-10T11:01:04.169309Z node 27 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:04.169797Z node 27 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:04.169983Z node 27 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001c47/r3tmp/tmpH9IRIa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10274551360716914816 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotT ... 
ode 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-10T11:03:09.746958Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-10T11:03:09.747067Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-10T11:03:09.747158Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-10T11:03:09.747271Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-10T11:03:09.747371Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-04-10T11:03:09.747594Z node 163 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [163:1392:2621], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-04-10T11:03:09.747657Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-10T11:03:09.747763Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-10T11:03:09.747841Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-04-10T11:03:09.747965Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-04-10T11:03:09.748062Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-04-10T11:03:09.748161Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-04-10T11:03:09.756084Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-04-10T11:03:09.756245Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-04-10T11:03:09.756617Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-10T11:03:09.757130Z node 163 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T11:03:09.757245Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 72057594046578944 removed=1 2025-04-10T11:03:09.757304Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-04-10T11:03:09.757355Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2025-04-10T11:03:09.757594Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-04-10T11:03:09.757641Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 
2025-04-10T11:03:09.757849Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-04-10T11:03:09.757890Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:687:2240], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-04-10T11:03:09.759808Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-10T11:03:09.759921Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-10T11:03:09.759982Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-04-10T11:03:09.760118Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-04-10T11:03:09.760219Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-10T11:03:09.760436Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-04-10T11:03:09.760547Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [163:1938:2388] 2025-04-10T11:03:09.767070Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-04-10T11:03:09.767183Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-04-10T11:03:09.767290Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1392:2621], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-04-10T11:03:09.767413Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:03:09.767452Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:03:09.767634Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:03:09.767690Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:1592:2762], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-10T11:03:09.770645Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-04-10T11:03:09.770936Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-04-10T11:03:09.771041Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: 
"/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-04-10T11:02:20.444487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638186365577934:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:20.444547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001db3/r3tmp/tmp9Fg4Ci/pdisk_1.dat 2025-04-10T11:02:21.433630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:21.490346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:21.495373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:21.495476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:21.502620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30708, node 1 2025-04-10T11:02:21.723017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:21.723042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:21.723050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:21.723188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24384 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:22.362481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24384 2025-04-10T11:02:25.199469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638207840415467:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.199573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.445709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638186365577934:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:25.445786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:25.610832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:26.188785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135382997:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.188959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383023:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383025:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383024:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383020:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383026:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383021:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.189980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383022:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.193293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383053:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.193356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.195212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383055:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.202860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-10T11:02:26.229762Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383042:2791] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.230289Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383041:2790] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.230457Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383047:2796] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.230583Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383046:2795] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.230719Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383043:2792] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.234940Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383044:2793] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.240379Z node 1 :TX_PROXY ERROR: Actor# [1:7491638212135383084:2810] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:02:26.245908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383141:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.246558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383144:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.246640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638212135383145:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.246835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:26.263928Z node 1 :TX_PROXY ERROR: Actor# [1:749163821213 ... n/3?node_id=1&id=Y2ZlNWI2Y2EtNjRkYzQzNTItMjQyYjhjOTgtYTY2ODNiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.644524Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726320. Ctx: { TraceId: 01jrfnwc2n9h68htjmjd30gje0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkwMDMxYjgtNTk3NzNmODEtNDVhNDBjOTgtYWVkN2UxYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.650817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726321. Ctx: { TraceId: 01jrfnwc376p94h3cbyrbe2m58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1YTJhZTgtMWExNDk5NmItZWY2NzcwOGQtNDhkNGEyMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.651575Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726322. Ctx: { TraceId: 01jrfnwc38f9pagqewrww8ap6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4Njg1MTAtOWRkMDBlMDYtODk5YmI2NzAtZTA1NjllZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.660612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726323. Ctx: { TraceId: 01jrfnwc3d5xbk6ht8389sjt5j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg4NTc3OWEtNjIwZWZlZWEtZmZiYTQ4NjctMWI1N2QxMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.661267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726324. Ctx: { TraceId: 01jrfnwc3caqp7dmr1t7keescy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkNDQ2NGItN2I2NmI0ZjYtZWQ4OGIxYTktOTE1YzA4YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.663173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726325. Ctx: { TraceId: 01jrfnwc3dabsdamfgv3wsefzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI5M2E3MS0yZjU3MzMwMy0xMzQ0YzY0My05NGYyYTMwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.663591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726326. Ctx: { TraceId: 01jrfnwc3dczdwya3sy02n6edw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU0ZGEyZjctYWNlMjdmOTctMTliNzY1YjQtNzgwMzdjMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.665731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726327. Ctx: { TraceId: 01jrfnwc3f7z7na4ytetcqr6f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGExMDlmMWUtZjNlMzBiYmQtODhlNGVjZjctMmZiYjE1ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.672715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726328. Ctx: { TraceId: 01jrfnwc3pb8mfsbfv474847t1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwYzFkODktZGJhZWE0ODQtYmY1NzI3OTYtYWNiODVjNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.673501Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726329. 
Ctx: { TraceId: 01jrfnwc3p6skdmdvnm88zm0jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkwMDMxYjgtNTk3NzNmODEtNDVhNDBjOTgtYWVkN2UxYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.676277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726330. Ctx: { TraceId: 01jrfnwc3pey22xg13m32watyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZlNWI2Y2EtNjRkYzQzNTItMjQyYjhjOTgtYTY2ODNiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.677100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726331. Ctx: { TraceId: 01jrfnwc3x3sepgk3tkn5dr6b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1YTJhZTgtMWExNDk5NmItZWY2NzcwOGQtNDhkNGEyMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.677723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726332. Ctx: { TraceId: 01jrfnwc3z7av629754nbjwtj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4Njg1MTAtOWRkMDBlMDYtODk5YmI2NzAtZTA1NjllZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.678362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726333. Ctx: { TraceId: 01jrfnwc3z0z9drsj3mehjdmb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg4NTc3OWEtNjIwZWZlZWEtZmZiYTQ4NjctMWI1N2QxMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.690422Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726335. Ctx: { TraceId: 01jrfnwc4734vg7p4vaj0h91zq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI5M2E3MS0yZjU3MzMwMy0xMzQ0YzY0My05NGYyYTMwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.690553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726334. Ctx: { TraceId: 01jrfnwc472k3mt11qzshnekv0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkNDQ2NGItN2I2NmI0ZjYtZWQ4OGIxYTktOTE1YzA4YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.694844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726337. Ctx: { TraceId: 01jrfnwc49et3sq3q2esb2yvpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU0ZGEyZjctYWNlMjdmOTctMTliNzY1YjQtNzgwMzdjMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.695562Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726336. Ctx: { TraceId: 01jrfnwc490ghr4khn63hbg9xb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGExMDlmMWUtZjNlMzBiYmQtODhlNGVjZjctMmZiYjE1ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.700845Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726340. Ctx: { TraceId: 01jrfnwc4e1nky0c3hq0hz1d6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZlNWI2Y2EtNjRkYzQzNTItMjQyYjhjOTgtYTY2ODNiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.700952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726338. 
Ctx: { TraceId: 01jrfnwc4eabyqmwbfy8tbm22w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwYzFkODktZGJhZWE0ODQtYmY1NzI3OTYtYWNiODVjNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.701660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726339. Ctx: { TraceId: 01jrfnwc4edc1rjw01hgzrsys8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkwMDMxYjgtNTk3NzNmODEtNDVhNDBjOTgtYWVkN2UxYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.706976Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726342. Ctx: { TraceId: 01jrfnwc4qc0f8kg401n0dfy5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4Njg1MTAtOWRkMDBlMDYtODk5YmI2NzAtZTA1NjllZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.707182Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726341. Ctx: { TraceId: 01jrfnwc4k6rgzxq76v19damgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1YTJhZTgtMWExNDk5NmItZWY2NzcwOGQtNDhkNGEyMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-10T11:03:08.710398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726343. Ctx: { TraceId: 01jrfnwc4w3z55n0k0fb5je8de, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg4NTc3OWEtNjIwZWZlZWEtZmZiYTQ4NjctMWI1N2QxMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.710611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726344. Ctx: { TraceId: 01jrfnwc502q695seqerzkv1ns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkNDQ2NGItN2I2NmI0ZjYtZWQ4OGIxYTktOTE1YzA4YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945818 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:03:08.726058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726346. Ctx: { TraceId: 01jrfnwc5660b9r31rbhze4kdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGExMDlmMWUtZjNlMzBiYmQtODhlNGVjZjctMmZiYjE1ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.726169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726347. Ctx: { TraceId: 01jrfnwc5602np0rp8yctsbvkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU0ZGEyZjctYWNlMjdmOTctMTliNzY1YjQtNzgwMzdjMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.727416Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726345. 
Ctx: { TraceId: 01jrfnwc54aa7vpx6fan8c2tm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI5M2E3MS0yZjU3MzMwMy0xMzQ0YzY0My05NGYyYTMwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:08.729647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726348. Ctx: { TraceId: 01jrfnwc5k1x7qmcj6a344sw8p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwYzFkODktZGJhZWE0ODQtYmY1NzI3OTYtYWNiODVjNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945818 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 4 shards |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |73.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |73.2%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TFlatDatabasePgTest::BasicTypes [GOOD] |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] |73.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> NameserviceConfigValidatorTests::TestModifyIdForHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyResolveHost [GOOD] >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists |73.2%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestRemoveTooMany [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] >> NameserviceConfigValidatorTests::TestLongWalleDC [GOOD] >> NameserviceConfigValidatorTests::TestModifyClusterUUID [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |73.2%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedTaskName [GOOD] >> BootstrapTabletsValidatorTests::TestNoNodeForTablet [GOOD] >> BootstrapTabletsValidatorTests::TestRequiredTablet [GOOD] >> BootstrapTabletsValidatorTests::TestImportantTablet [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnknownQueue [GOOD] >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnlimitedResource [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-04-10T11:00:49.728303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637791597211053:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:49.739194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpX6Jl8Z/pdisk_1.dat 2025-04-10T11:00:50.461654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:50.467882Z node 1 :HTTP ERROR: (#26,[::1]:23469) connection closed with error: Connection refused 2025-04-10T11:00:50.468356Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:00:50.472543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:50.472702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:50.477505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:53.854371Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637812396286492:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:53.962267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpLZuvtq/pdisk_1.dat 2025-04-10T11:00:54.179801Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:54.181293Z node 2 :HTTP ERROR: (#28,[::1]:26073) connection closed with error: Connection refused 2025-04-10T11:00:54.182287Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:00:54.205812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:54.206545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:54.209330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:59.135547Z node 3 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637837754328663:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:59.135858Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpJskSYd/pdisk_1.dat 2025-04-10T11:00:59.453378Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:59.472643Z node 3 :HTTP ERROR: (#26,[::1]:23333) connection closed with error: Connection refused 2025-04-10T11:00:59.473907Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:00:59.475147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:59.475240Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:59.477485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmplSmdIK/pdisk_1.dat 2025-04-10T11:01:04.875109Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:04.875369Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:04.996242Z node 4 :HTTP ERROR: (#28,[::1]:3393) connection closed with error: Connection refused 2025-04-10T11:01:04.997671Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:04.998733Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:04.998819Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:05.007295Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpwT5RMn/pdisk_1.dat 2025-04-10T11:01:11.358166Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:11.457273Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:11.497351Z node 5 :HTTP ERROR: (#30,[::1]:13377) connection closed with error: Connection refused 2025-04-10T11:01:11.502884Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:11.502994Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:11.503429Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:11.518073Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:17.672135Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491637914559552066:2147];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpYgJFWq/pdisk_1.dat 2025-04-10T11:01:17.872923Z node 
6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:18.093310Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:18.094775Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:18.094862Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:18.096562Z node 6 :HTTP ERROR: (#32,[::1]:4767) connection closed with error: Connection refused 2025-04-10T11:01:18.096811Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:18.099626Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:22.933510Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491637936672266595:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:23.077812Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmp9zi7JW/pdisk_1.dat 2025-04-10T11:01:23.367439Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:23.409478Z node 7 :HTTP ERROR: (#34,[::1]:6461) connection closed with error: Connection refused 2025-04-10T11:01:23.412927Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:23.417789Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:23.417885Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:23.419651Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:28.778745Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491637963049400572:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:28.779383Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpsxn1j5/pdisk_1.dat 2025-04-10T11:01:29.178631Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.181485Z node 8 :HTTP ERROR: (#36,[::1]:7241) connection closed with error: Connection refused 2025-04-10T11:01:29.183038Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:29.209290Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:29.209419Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:29.215463Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:35.615257Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491637990845328933:2063];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:01:35.615315Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpAvLV0E/pdisk_1.dat 2025-04-10T11:01:35.993605Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:36.031676Z node 9 :HTTP ERROR: (#38,[::1]:28029) connection closed with error: Connection refused 2025-04-10T11:01:36.034872Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:36.035367Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:36.035455Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:36.044263Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:41.002851Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491638011407248218:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:41.003552Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpormgFV/pdisk_1.dat 2025-04-10T11:01:41.225153Z node 10 :HTTP ERROR: (#26,[::1]:5043) connection closed with error: Connection refused 2025-04-10T11:01:41.225691Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:01:41.256573Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:41.297660Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:41.297755Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:41.300532Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:46.603248Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=unde ... 
eme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpvgMmTu/pdisk_1.dat 2025-04-10T11:02:03.357037Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:03.391369Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:03.391503Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:03.391929Z node 14 :HTTP ERROR: (#34,[::1]:23196) connection closed with error: Connection refused 2025-04-10T11:02:03.392633Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:03.394951Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:09.395377Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7491638137976080610:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:09.395456Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpj6EJFk/pdisk_1.dat 2025-04-10T11:02:09.618274Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:09.648312Z node 15 :HTTP ERROR: (#36,[::1]:15994) connection closed with error: Connection refused 2025-04-10T11:02:09.652030Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:09.653466Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:09.653588Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:09.655450Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:15.683036Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7491638162856391149:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:15.683113Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmp0n1C6L/pdisk_1.dat 2025-04-10T11:02:15.968819Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:16.000158Z node 16 :HTTP ERROR: (#38,[::1]:27815) connection closed with error: Connection refused 2025-04-10T11:02:16.002812Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:16.008067Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:16.008222Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:16.014137Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:23.037348Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7491638196377436869:2137];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:02:23.052656Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmprmUBre/pdisk_1.dat 2025-04-10T11:02:23.365059Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:23.401227Z node 17 :HTTP ERROR: (#26,[::1]:22863) connection closed with error: Connection refused 2025-04-10T11:02:23.401798Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:23.402393Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:23.402516Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:23.408022Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:30.460324Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7491638225882550386:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:30.488601Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpudGOuA/pdisk_1.dat 2025-04-10T11:02:30.769940Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:30.790797Z node 18 :HTTP ERROR: (#28,[::1]:9821) connection closed with error: Connection refused 2025-04-10T11:02:30.797045Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:30.817528Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:30.817650Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:30.819034Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:37.910002Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7491638255315686712:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:37.951492Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpORiFEt/pdisk_1.dat 2025-04-10T11:02:38.224752Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:38.272369Z node 19 :HTTP ERROR: (#30,[::1]:24415) connection closed with error: Connection refused 2025-04-10T11:02:38.272878Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:38.274920Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:38.275085Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:38.277405Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:45.999261Z node 20 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7491638290463007112:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:46.006217Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmp7S7N70/pdisk_1.dat 2025-04-10T11:02:46.473493Z node 20 :HTTP ERROR: (#32,[::1]:11792) connection closed with error: Connection refused 2025-04-10T11:02:46.482645Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:46.484278Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:46.498119Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:46.498286Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:46.507981Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:55.339626Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7491638334789701951:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:55.339716Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpP1YDwZ/pdisk_1.dat 2025-04-10T11:02:55.827663Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:55.831239Z node 21 :HTTP ERROR: (#34,[::1]:14348) connection closed with error: Connection refused 2025-04-10T11:02:55.843596Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:02:55.845201Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:55.845360Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:55.847498Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:03.090732Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7491638367339986628:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:03.090798Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpondhmR/pdisk_1.dat 2025-04-10T11:03:03.306106Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:03.319055Z node 22 :HTTP ERROR: (#36,[::1]:18682) connection closed with error: Connection refused 2025-04-10T11:03:03.324682Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:03:03.348644Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:03.348788Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T11:03:03.351049Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:10.954563Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7491638397932457416:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:10.955969Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c91/r3tmp/tmpXZqTmD/pdisk_1.dat 2025-04-10T11:03:11.211723Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:11.241622Z node 23 :HTTP ERROR: (#38,[::1]:28431) connection closed with error: Connection refused 2025-04-10T11:03:11.242357Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:11.242507Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:11.243092Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:03:11.247197Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> TBlobStorageReplRecoveryMachine::BasicFunctionality |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> IndexBuildTest::CancellationNoTable [GOOD] |73.4%| [TA] $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.4%| [TA] {RESULT} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:01.980274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:01.980443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.980507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:01.980553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:01.980608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:01.980635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:01.980702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:01.980805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:01.981223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:02.113451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:02.113517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:02.141454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:02:02.141779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:02.141981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:02.158897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:02.159228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:02.159970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.160431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:02.166955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.168473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:02.168559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.168745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:02.168797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:02.168839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:02.169044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.176633Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:02.378978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:02.379258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.379492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:02.379729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:02.379791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.382329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.382482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:02.382667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:02:02.382728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:02.382772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:02.382827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:02.384914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.384978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:02.385017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:02.386786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.386839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.386887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.386951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.391720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:02.395399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:02.395628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:02.396729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:02.396908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:02.396968Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.397310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:02.397381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:02.397635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:02.397746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-10T11:02:02.400705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:02.400765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:02.400985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:02.401032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:02.401292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:02.401343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:02.401471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.401532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.401582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:02.401618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.401659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:02.401702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:02.401753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:02.401790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:02.401864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:02.401908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:02.401964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:02.404551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:02.404676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:02.404720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
shardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:03:22.198737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:03:22.198958Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.207550Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:124:2150] sender: [2:239:2058] recipient: [2:15:2062] 2025-04-10T11:03:22.225365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:03:22.225677Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.225924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:03:22.226214Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:03:22.226304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.230307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:03:22.230466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:03:22.230751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.230828Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:03:22.230895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:03:22.230949Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:03:22.234323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.234410Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:03:22.234480Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:03:22.246881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.246976Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.247056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:03:22.247141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:03:22.247358Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:03:22.250114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:03:22.250447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:03:22.251663Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:03:22.251822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:03:22.251886Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:03:22.252266Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:03:22.252348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:03:22.252573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:03:22.252674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:03:22.256193Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:03:22.256261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:03:22.256483Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:03:22.256540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:03:22.256978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:03:22.257052Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:03:22.257180Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:03:22.257230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:03:22.257278Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:03:22.257317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:03:22.257365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:03:22.257439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:03:22.257491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:03:22.257532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:03:22.257640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:03:22.257686Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:03:22.257730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:03:22.258524Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:03:22.258643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:03:22.258702Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:03:22.258750Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:03:22.258805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:03:22.258912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:03:22.262676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:03:22.263308Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-10T11:03:22.263842Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-10T11:03:22.296351Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-10T11:03:22.297110Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } 2025-04-10T11:03:22.297466Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 2025-04-10T11:03:22.302578Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:03:22.307361Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: 
path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-04-10T11:03:22.307914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:03:22.307971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:03:22.308520Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:03:22.308654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:03:22.308704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:281:2272] TestWaitNotification: OK eventTxId 101 2025-04-10T11:03:22.309270Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-04-10T11:03:22.309409Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-04-10T10:56:36.935748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:36.935992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:36.936209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f11/r3tmp/tmpZit7gZ/pdisk_1.dat 2025-04-10T10:56:37.329561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T10:56:37.427744Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:37.469626Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T10:56:37.471132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:37.471374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:37.471593Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T10:56:37.483663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:37.693906Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T10:56:37.693995Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T10:56:37.694245Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-04-10T10:56:37.844369Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T10:56:37.844528Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T10:56:37.845304Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T10:56:37.845482Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T10:56:37.845873Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T10:56:37.846338Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-10T10:56:37.846472Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T10:56:37.846886Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T10:56:37.849192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T10:56:37.853351Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T10:56:37.853478Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T10:56:37.922429Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:56:37.924011Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:56:37.924704Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574] 2025-04-10T10:56:37.925035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:56:37.975629Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:56:37.976681Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:56:37.976846Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:56:37.979193Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T10:56:37.979324Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T10:56:37.979405Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T10:56:37.979891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:56:37.980100Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:56:37.980239Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1 2025-04-10T10:56:37.991532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:56:38.021066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T10:56:38.021407Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:56:38.021608Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584] 2025-04-10T10:56:38.021666Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T10:56:38.021724Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T10:56:38.021781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T10:56:38.022083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:38.023401Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:56:38.023999Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T10:56:38.024156Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T10:56:38.024291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T10:56:38.024344Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:56:38.024413Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T10:56:38.024462Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T10:56:38.024505Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T10:56:38.024543Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T10:56:38.024595Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T10:56:38.024762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:677:2578], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:38.024808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:56:38.024901Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:677:2578], sessionId# [0:0:0] 2025-04-10T10:56:38.025473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:677:2578] 2025-04-10T10:56:38.025527Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:56:38.025699Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T10:56:38.025984Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T10:56:38.026051Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T10:56:38.026198Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T10:56:38.026270Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T10:56:38.026341Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T10:56:38.026392Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T10:56:38.026441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T10:56:38.026859Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T10:56:38.026906Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T10:56:38.026949Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T10:56:38.027006Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T10:56:38.027072Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T10:56:38.027111Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T10:56:38.027152Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T10:56:38.027210Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T10:56:38.027253Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T10:56:38.028927Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T10:56:38.028997Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T10:56 ... RoTransaction(std::__y1::unique_ptr>, NActors::TActorContext const&, NKikimr::NTabletFlatExecutor::TExecutorCounters*, NKikimr::TTabletCountersWithTxTypes*) /-S/ydb/core/tablet_flat/flat_executor_txloglogic.cpp:69:11 #14 0x209fbe8e in NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&) /-S/ydb/core/tablet_flat/flat_executor_txloglogic.cpp:298:13 #15 0x20844b93 in NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:3121:59 #16 0x207e1914 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4156:9 #17 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #18 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #19 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #20 0x36e9de50 in DispatchEvents /-S/ydb/library/actors/testlib/test_runtime.cpp:1091:16 #21 0x36e9de50 in NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&) /-S/ydb/library/actors/testlib/test_runtime.cpp:1082:16 #22 0x18fe2c4e in Ydb::Table::CreateSessionResponse NKikimr::NDataShard::NKqpHelpers::AwaitResponse(NActors::TTestActorRuntime&, NThreading::TFuture) /-S/ydb/core/tx/datashard/datashard_ut_common_kqp.h:37:21 #23 0x18eb4bbe in NKikimr::NDataShard::NKqpHelpers::CreateSessionRPC(NActors::TTestActorRuntime&, TBasicString> const&) /-S/ydb/core/tx/datashard/datashard_ut_common_kqp.h:50:25 #24 0x18f67e84 in NKikimr::NDataShard::NKqpHelpers::KqpSimpleSend(NActors::TTestActorRuntime&, TBasicString> const&, bool, TBasicString> const&) /-S/ydb/core/tx/datashard/datashard_ut_common_kqp.h:175:29 #25 0x18fa28be in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3953:27 #26 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18fb3b07 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7f2b570dad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x19e3a60e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20 #2 0x19e3a60e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38 #3 0x2ae05b65 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16 #4 0x2ae05b65 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56 #5 0x2ae05b65 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34 #6 0x2b215464 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13 #7 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #8 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #9 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #10 0x20817234 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13 #11 0x2081e42e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5 #12 0x207b4a9a in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #13 0x207b4a9a in 
NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #14 0x21e45361 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #15 0x21dedd4b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #16 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #17 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #18 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #19 0x36eab423 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #20 0x37073ae3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #21 0x37073082 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #22 0x37073082 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #23 0x3706b2e2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #24 0x3706aeba in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #25 0x18f9e247 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #26 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18fb3b07 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x199c51ac in NUnitTest::RunMain(int, char**) 
/-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7f2b570dad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |73.4%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> DataCleanup::CleanupDataNoTables [GOOD] >> DataCleanup::CleanupDataNoTablesWithRestart >> DataCleanup::CleanupDataNoTablesWithRestart [GOOD] >> DataCleanup::CleanupDataLog [GOOD] >> DataCleanup::CleanupData >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> DataCleanup::CleanupData [GOOD] >> DataCleanup::CleanupDataMultipleFamilies >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> DataCleanup::CleanupDataMultipleFamilies [GOOD] >> DataCleanup::CleanupDataMultipleTables >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> DataCleanup::CleanupDataMultipleTables [GOOD] >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable >> TIterator::Single >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations [GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::WideKey >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> Bloom::Conf [GOOD] >> Bloom::Hashes >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join 
[GOOD] >> TScreen::Sequential >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> Bloom::Hashes [GOOD] >> Bloom::Rater >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> DBase::WideKey [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TScreen::Sequential [GOOD] >> TScreen::Random >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TChargeBTreeIndex::NoNodes_Groups >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> 
TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::FewNodes >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> 
TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-04-10T10:59:01.180678Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T10:59:01.181643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T10:59:01.181687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:01.192213Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T10:59:01.192691Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T10:59:01.193065Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T10:59:01.233258Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T10:59:01.241134Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T10:59:01.241613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T10:59:01.243065Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T10:59:01.243135Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T10:59:01.243185Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T10:59:01.243524Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T10:59:01.243672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T10:59:01.243729Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T10:59:01.296966Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T10:59:01.340452Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T10:59:01.340643Z node 
1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T10:59:01.340752Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T10:59:01.340783Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T10:59:01.340810Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T10:59:01.340839Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.341038Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.341072Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.341278Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T10:59:01.341356Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T10:59:01.341461Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.341493Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T10:59:01.341538Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T10:59:01.341568Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T10:59:01.341605Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T10:59:01.341641Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T10:59:01.341676Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T10:59:01.341755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.341799Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.341841Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T10:59:01.344379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T10:59:01.344424Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T10:59:01.344523Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T10:59:01.344669Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T10:59:01.344722Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T10:59:01.344788Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T10:59:01.344840Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.344870Z node 1 :TX_DATASHARD 
TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T10:59:01.344898Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T10:59:01.344937Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.345184Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T10:59:01.345208Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T10:59:01.345235Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T10:59:01.345263Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.345296Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T10:59:01.345323Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T10:59:01.345357Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T10:59:01.345396Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.345439Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T10:59:01.357599Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T10:59:01.357674Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T10:59:01.357718Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T10:59:01.357762Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T10:59:01.357840Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T10:59:01.358415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.358499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T10:59:01.358537Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T10:59:01.358675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T10:59:01.358707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T10:59:01.358866Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T10:59:01.358924Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.359006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T10:59:01.359059Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T10:59:01.367184Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T10:59:01.367262Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T10:59:01.367493Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.367531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T10:59:01.367587Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T10:59:01.367630Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T10:59:01.367670Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T10:59:01.367724Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T10:59:01.367761Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T10:59:01.367810Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.367849Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T10:59:01.367893Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T10:59:01.367941Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T10:59:01.368113Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T10:59:01.368152Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T10:59:01.368178Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T10:59:01.368203Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T10:59:01.368227Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T10:59:01.368303Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T10:59:01.368336Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T10:59:01.368370Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T10:59:01.368405Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T10:59:01.368461Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T10:59:01.368499Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T10:59:01.368534Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T10:59:01.368600Z node 1 :TX_DATA ... 
c latency: 58 ms, propose latency: 58 ms, status: COMPLETE 2025-04-10T11:03:16.564541Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-04-10T11:03:16.564578Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-04-10T11:03:16.564615Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-04-10T11:03:16.564647Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-04-10T11:03:16.564707Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-04-10T11:03:16.564735Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-04-10T11:03:16.564767Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-04-10T11:03:16.598014Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:03:16.598099Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-04-10T11:03:16.598186Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:03:20.816393Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-04-10T11:03:20.816477Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-10T11:03:20.816938Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:651:2626], Recipient [3:236:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:20.816995Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:20.817048Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:650:2625], serverId# [3:651:2626], sessionId# [0:0:0] 2025-04-10T11:03:20.817292Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:236:2229]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-04-10T11:03:20.817330Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:03:20.817457Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:20.820887Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-04-10T11:03:20.840768Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-10T11:03:20.840844Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-04-10T11:03:20.840876Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:03:20.840913Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:03:20.840967Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-10T11:03:20.841020Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-04-10T11:03:20.841062Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-10T11:03:20.841093Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T11:03:20.841114Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:03:20.841138Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:20.845808Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:20.845979Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:20.846017Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:20.908184Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:20.908284Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:20.909275Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:20.913288Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:20.913530Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:20.913592Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:21.082097Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:21.082220Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:21.083241Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:21.127727Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-04-10T11:03:21.128131Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:21.128204Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:21.128785Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:21.128846Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:21.129811Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:21.415812Z node 
3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:21.416717Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:21.416797Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:21.706865Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:21.706974Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:21.708025Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:22.311864Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-04-10T11:03:22.313113Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:22.313208Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:22.568177Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:22.568270Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:22.569064Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:22.574490Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:22.574709Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:22.574767Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:22.611989Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:22.612102Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:22.613104Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:22.615454Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:22.615687Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:22.615753Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:22.634418Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:22.634525Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:22.636070Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:22.643483Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-10T11:03:22.643774Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-10T11:03:22.643856Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-10T11:03:23.164724Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:03:23.164839Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-10T11:03:23.165993Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-10T11:03:24.619245Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-04-10T11:03:24.619416Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:03:24.619525Z node 3 :TX_DATASHARD TRACE: Execution 
status for [0:11] at 9437184 is Executed 2025-04-10T11:03:24.619585Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:03:24.619635Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-04-10T11:03:24.619686Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-04-10T11:03:24.619766Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2025-04-10T11:03:24.619990Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-04-10T11:03:24.620086Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-04-10T11:03:24.620128Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-04-10T11:03:24.620168Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-04-10T11:03:24.620240Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-10T11:03:24.620277Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-04-10T11:03:24.620312Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-04-10T11:03:24.655289Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:03:24.655399Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-04-10T11:03:24.655479Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex >> DBase::VersionCompactedParts [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> 
DBase::SnapshotNewTable [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Garbage [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::Outer [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> Memtable::Wreck [GOOD] >> Memtable::Erased >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead |73.4%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.453995Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.023 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.023 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.024 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.024 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4} 00000.024 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.024 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.024 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.024 II| FAKE_ENV: All BS storage groups are stopped 00000.024 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.024 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.489065Z 00000.023 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.024 II| FAKE_ENV: Starting storage for BS group 0 00000.030 II| FAKE_ENV: Starting storage for BS group 1 00000.030 II| 
FAKE_ENV: Starting storage for BS group 2 00000.030 II| FAKE_ENV: Starting storage for BS group 3 00000.040 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.040 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.040 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.040 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.041 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8} 00000.041 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6} 00000.041 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.041 II| FAKE_ENV: All BS storage groups are stopped 00000.041 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.041 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.538021Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.062 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.062 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 76b} miss {0 0b} 00000.062 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.062 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2} 00000.063 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.063 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1181b, 13} 00000.063 II| FAKE_ENV: DS.1 gone, left {909b, 3}, put {1913b, 12} 00000.063 II| FAKE_ENV: All BS storage groups are stopped 00000.063 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.063 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.610955Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.045 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.046 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.046 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.046 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.046 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.046 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: All BS storage groups are stopped 00000.046 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.046 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.668353Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.098 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.099 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 
00000.100 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} 00000.100 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.101 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.101 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.101 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.101 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.101 II| FAKE_ENV: All BS storage groups are stopped 00000.101 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.101 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.778544Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.069 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.070 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.070 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.071 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23} 00000.071 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.071 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.071 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.071 II| FAKE_ENV: All BS storage groups are stopped 00000.071 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.071 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.860410Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.040 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.042 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} 00000.042 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.042 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.042 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.042 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: All BS storage groups are stopped 00000.042 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.042 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:30.909513Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.080 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.081 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.081 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.081 II| FAKE_ENV: DS.0 gone, left {42b, 
1}, put {1768b, 27} 00000.081 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.082 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.082 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.082 II| FAKE_ENV: All BS storage groups are stopped 00000.082 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.082 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.003309Z 00000.013 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.014 II| FAKE_ENV: Starting storage for BS group 0 00000.015 II| FAKE_ENV: Starting storage for BS group 1 00000.015 II| FAKE_ENV: Starting storage for BS group 2 00000.015 II| FAKE_ENV: Starting storage for BS group 3 00000.028 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.029 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.029 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.029 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.029 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.029 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: All BS storage groups are stopped 00000.029 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.029 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.038772Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.076 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.077 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.078 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.078 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.078 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.078 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.078 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.078 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.078 II| FAKE_ENV: All BS storage groups are stopped 00000.078 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.079 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.125246Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.027 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.028 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {753b, 11} 00000.028 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 0 ... 
t32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 
ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + 
BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} 
Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:14.953171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:14.953329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:14.953386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:14.953453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:14.953533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:14.953572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:14.953667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:14.953780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:14.954321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:15.050175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:15.050243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:15.077226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:15.077496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:15.077637Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:15.106256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:15.106683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:15.107525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.108024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:15.112820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.114938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:15.115042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.115286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:15.115350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:15.115407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:15.115639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.127567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:15.309382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:15.309723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.309978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:15.310306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:15.310376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.315613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.315784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:15.315985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.316050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-10T11:02:15.316090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:15.316140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:15.319080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.319183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:15.319230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:15.325130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.325198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.325271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.325330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.344518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:15.347254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:15.347463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:15.348606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.348767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:15.348820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.349152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:15.349208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.349425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:15.349530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:15.355836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:15.355894Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:15.356113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.356156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:15.356440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.356496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:15.356621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:15.356662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.356722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:15.356757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.356814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:15.356885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.356936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:15.356971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:15.357058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:15.357112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:15.357151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:15.359148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:15.359246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:15.359275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:03:38.068811Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:03:38.126208Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:357:2335]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:03:38.126298Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:03:38.126467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:357:2335], Recipient [3:357:2335]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:03:38.126508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:03:38.149749Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:357:2335]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:38.149830Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:38.149961Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:357:2335]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:38.149990Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:38.150018Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-10T11:03:38.150095Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-10T11:03:38.150202Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-10T11:03:38.268855Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:765:2651]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:38.268953Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:38.269073Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-04-10T11:03:38.269163Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:03:38.269208Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-04-10T11:03:38.269246Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-04-10T11:03:38.269279Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-04-10T11:03:38.269473Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:765:2651]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:03:38.269653Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-04-10T11:03:38.270046Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:765:2651], Recipient [3:898:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 32 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 120 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-10T11:03:38.270117Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:03:38.270200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0032 2025-04-10T11:03:38.270375Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:03:38.270432Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:03:38.286916Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:769:2654]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:38.286998Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:38.287125Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-04-10T11:03:38.287205Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:03:38.287244Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2025-04-10T11:03:38.287285Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-04-10T11:03:38.287325Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-04-10T11:03:38.287507Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:769:2654]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:03:38.287635Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-04-10T11:03:38.288009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:769:2654], Recipient [3:898:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 
RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 30 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 121 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-10T11:03:38.288065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:03:38.288120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.003 2025-04-10T11:03:38.288257Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:03:38.299953Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:898:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:03:38.300027Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:03:38.300128Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:898:2757], Recipient [3:898:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:03:38.300176Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:03:38.311580Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:898:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:38.311672Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:38.312008Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:898:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:38.312049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:38.312094Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-10T11:03:38.312177Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-10T11:03:38.312244Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-10T11:03:38.312425Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2039:3858], Recipient [3:898:2757]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-10T11:03:38.312468Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-10T11:03:38.339407Z node 3 :TX_DATASHARD TRACE: 
StateWork, received event# 269877761, Sender [3:2042:3861], Recipient [3:765:2651]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:38.339499Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:38.339564Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2041:3860], serverId# [3:2042:3861], sessionId# [0:0:0] 2025-04-10T11:03:38.339728Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2040:3859], Recipient [3:765:2651]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-04-10T11:03:38.340544Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2045:3864], Recipient [3:769:2654]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:38.340582Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:38.340627Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2044:3863], serverId# [3:2045:3864], sessionId# [0:0:0] 2025-04-10T11:03:38.340776Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2043:3862], Recipient [3:769:2654]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> KqpKv::ReadRows_SpecificKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> 
TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:14.070665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:14.070834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:14.070887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:14.070934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:14.070980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:14.071009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:14.071082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:14.071180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:14.071478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:14.166377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:14.166437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:14.182032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:14.182417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:14.182595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:14.193411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:14.193656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:14.194402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:14.194853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:14.197090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:14.198543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:14.198604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:14.198787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:14.198835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:14.198876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:14.199021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.206314Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:14.409633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:14.409894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.410075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:14.410622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:14.410692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.415328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:14.415487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:14.415697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.415753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:14.415818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:14.415871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:14.418327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.418408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:14.418450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:14.420788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.420843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.420893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:14.420945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.424847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:14.427083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:14.427303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:14.428342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:14.428486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:14.428553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:14.428865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:14.428924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:14.429153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:14.429254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:14.435282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:14.435360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:14.435551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:14.435598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:14.435914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.435964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:14.436068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:14.436103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.436140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:14.436176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-10T11:02:14.436225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:14.436272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.436306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:14.436337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:14.436401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:14.436456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:14.436506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:14.439044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:14.439185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:14.439225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:03:39.836137Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:39.836209Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-10T11:03:39.836333Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:39.836367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-10T11:03:39.836414Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T11:03:39.836508Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:03:39.836585Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:03:39.898527Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:321:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:39.898608Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:39.898729Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-10T11:03:39.898818Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:03:39.898860Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-10T11:03:39.898898Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-10T11:03:39.898933Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 
2025-04-10T11:03:39.899123Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:321:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:03:39.899349Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-10T11:03:39.899948Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:321:2303], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 38 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-10T11:03:39.900014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:03:39.900086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0038 2025-04-10T11:03:39.900237Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:03:39.900282Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:03:39.911886Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:324:2305]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:39.911968Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:03:39.912088Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-04-10T11:03:39.913039Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:03:39.913082Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-04-10T11:03:39.913119Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-04-10T11:03:39.913160Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-04-10T11:03:39.913419Z node 3 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:324:2305]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:03:39.913534Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-04-10T11:03:39.913797Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:324:2305], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 77 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-10T11:03:39.913829Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:03:39.913874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0077 2025-04-10T11:03:39.913951Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:03:39.967703Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:03:39.967795Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:03:39.967830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:03:39.967913Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-10T11:03:39.967967Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-10T11:03:39.968086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-10T11:03:39.968167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-10T11:03:39.968246Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, 
rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-04-10T11:03:39.968333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:03:39.968393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:03:39.968453Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-04-10T11:03:39.968548Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:03:39.979040Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:03:39.979135Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:03:39.979166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:03:40.013167Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1334:3254], Recipient [3:321:2303]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:40.013259Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:40.013337Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1333:3253], serverId# [3:1334:3254], sessionId# [0:0:0] 2025-04-10T11:03:40.013684Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1332:3252], Recipient [3:321:2303]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-04-10T11:03:40.018199Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1337:3257], Recipient [3:324:2305]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:40.018259Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:03:40.018307Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1336:3256], serverId# [3:1337:3257], sessionId# [0:0:0] 2025-04-10T11:03:40.018532Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1335:3255], Recipient [3:324:2305]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TSharedPageCache::ClockPro [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TChargeBTreeIndex::OneNode_Groups 
[GOOD] >> TChargeBTreeIndex::OneNode_History >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> KqpRanges::WhereInSubquery >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> KqpRanges::IsNull >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> TIterator::Mixed [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> TIterator::MixedReverse >> BuildStatsHistogram::Single >> TChargeBTreeIndex::OneNode_History [GOOD] >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> KqpNotNullColumns::UpdateNotNullPk >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::CreateNotNullPgColumn [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink >> TSharedPageCache::MiddleCache_BTreeIndex >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::EquiJoin-useSink >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> KqpPg::CreateTempTable >> KqpPg::CreateSequence >> TSharedPageCache::MiddleCache_FlatIndex >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSwitchableCache::Touch [GOOD] >> TSwitchableCache::Erase [GOOD] >> TSwitchableCache::EvictNext [GOOD] >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_NonExistentKeys >> TSwitchableCache::UpdateLimit [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD] >> TSwitchableCache::Switch_RotatePages_Force [GOOD] >> TSwitchableCache::Switch_RotatePages_Evicts [GOOD] >> TSwitchableCache::Switch_Touch [GOOD] >> TSwitchableCache::Switch_Erase [GOOD] >> TSwitchableCache::Switch_EvictNext [GOOD] >> TSwitchableCache::Switch_UpdateLimit [GOOD] >> TVersions::WreckHead >> BuildStatsHistogram::Single [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> BuildStatsHistogram::Single_Slices >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> 
KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpRanges::WhereInSubquery [GOOD] >> TCompaction::ManyParts >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::CreateSequence [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> KqpPg::AlterSequence >> KqpNotNullColumns::UpdateNotNullPkPg >> TCompaction::ManyParts [GOOD] >> KqpReturning::ReturningTwice >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink >> TCompaction::BootAbort >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |73.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::KIKIMR_15598_Many_MemTables [GOOD] Test command err: 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 2% (actual 0%) key = (10, 11) value = 12 (actual 1 - 2% error) 2% (actual 0%) key = (16, 13) value = 24 (actual 2 - 4% error) 5% (actual 11%) key = (199, 74) value = 49 (actual 61 - -2% error) 4% (actual 2%) key = (253, 92) value = 73 (actual 74 - 0% error) 4% (actual 2%) key = (286, 103) value = 97 (actual 84 - 2% error) 4% (actual 8%) key = (418, 147) value = 120 (actual 125 - -1% error) 4% (actual 5%) key = (514, 179) value = 144 (actual 154 - -2% error) 5% (actual 4%) key = (577, 200) value = 169 (actual 174 - -1% error) 4% (actual 1%) key = (607, 210) value = 192 (actual 183 - 1% error) 4% (actual 8%) key = (742, 255) value = 214 (actual 226 - -2% error) 5% (actual 1%) key = (769, 264) value = 239 (actual 235 - 0% error) 4% (actual 2%) key = (811, 278) value = 262 (actual 248 - 2% error) 4% (actual 9%) key = (958, 327) value = 286 (actual 293 - -1% error) 5% (actual 5%) key = (1054, 359) value = 311 (actual 322 - -2% error) 4% (actual 2%) key = (1087, 370) value = 334 (actual 332 - 0% error) 4% (actual 4%) key = (1156, 393) value = 358 (actual 354 - 0% error) 4% (actual 8%) key = (1291, 438) value = 381 (actual 394 - -2% error) 4% (actual 1%) key = (1315, 446) value = 404 (actual 401 - 0% error) 4% (actual 3%) key = (1375, 466) value = 426 (actual 419 - 1% error) 4% (actual 10%) key = (1540, 521) value = 449 (actual 469 - -4% error) 3% (actual 1%) key = (1555, 526) value = 465 (actual 474 - -1% error) 3% (actual 2%) key = (1594, 539) value = 482 (actual 484 - 0% error) 1% (actual 2%) key = (1636, 553) value = 491 (actual 497 - -1% error) 1% (actual 0%) key = (1639, 554) value = 496 (actual 498 - 0% error) 0% (actual 0%) DataSizeHistogram: 2% (actual 13%) key = (10, 11) value = 950 (actual 5800 - -11% error) 2% (actual 0%) key = 
(16, 13) value = 1933 (actual 5800 - -9% error) 4% (actual 0%) key = (199, 74) value = 3866 (actual 5800 - -4% error) 4% (actual 9%) key = (253, 92) value = 5849 (actual 9821 - -9% error) 4% (actual 4%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 4% (actual 4%) key = (418, 147) value = 9825 (actual 13848 - -9% error) 4% (actual 4%) key = (514, 179) value = 11834 (actual 15888 - -9% error) 4% (actual 4%) key = (577, 200) value = 13865 (actual 17883 - -9% error) 4% (actual 4%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 4% (actual 4%) key = (742, 255) value = 17859 (actual 21881 - -9% error) 4% (actual 4%) key = (769, 264) value = 19882 (actual 23918 - -9% error) 4% (actual 0%) key = (811, 278) value = 21897 (actual 23918 - -4% error) 4% (actual 9%) key = (958, 327) value = 23894 (actual 27913 - -9% error) 4% (actual 4%) key = (1054, 359) value = 25915 (actual 29895 - -9% error) 4% (actual 0%) key = (1087, 370) value = 27901 (actual 29895 - -4% error) 4% (actual 4%) key = (1156, 393) value = 29881 (actual 31850 - -4% error) 4% (actual 4%) key = (1291, 438) value = 31821 (actual 33747 - -4% error) 4% (actual 4%) key = (1315, 446) value = 33794 (actual 35739 - -4% error) 4% (actual 9%) key = (1375, 466) value = 35737 (actual 39763 - -9% error) 4% (actual 4%) key = (1540, 521) value = 37749 (actual 41447 - -8% error) 3% (actual 0%) key = (1555, 526) value = 39198 (actual 41447 - -5% error) 3% (actual 1%) key = (1594, 539) value = 40605 (actual 42020 - -3% error) 1% (actual 0%) key = (1636, 553) value = 41344 (actual 42020 - -1% error) 0% (actual 0%) key = (1639, 554) value = 41733 (actual 42020 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 4% (actual 0%) key = (10, 11) value = 24 (actual 1 - 4% error) 5% (actual 0%) key = (16, 13) value = 49 (actual 2 - 9% error) 5% (actual 11%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 4% (actual 13%) key = (10, 11) value = 1900 (actual 5800 - -9% error) 4% (actual 0%) key = (16, 13) value = 3867 (actual 5800 - -4% error) 4% (actual 0%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key 
= (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) Checking Mixed: Touched 100% bytes, 5 pages RowCountHistogram: 14% (actual 12%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 13% (actual 13%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 
27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 19% (actual 16%) key = (286, 103) value = 97 (actual 84 - 2% error) 19% (actual 19%) key = (607, 210) value = 192 (actual 183 - 1% error) 18% (actual 22%) key = (958, 327) value = 286 (actual 293 - -1% error) 19% (actual 20%) key = (1291, 438) value = 381 (actual 394 - -2% error) 23% (actual 21%) DataSizeHistogram: 18% (actual 28%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 19% (actual 19%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 19% (actual 19%) key = (958, 327) value ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 
0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% 
(actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 
12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> KqpExtractPredicateLookup::SimpleRange ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-04-10T11:02:19.794828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638180625889208:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:19.794966Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dc8/r3tmp/tmpJ159KF/pdisk_1.dat 2025-04-10T11:02:20.412421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:20.412529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:20.436092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:20.458739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18783, node 1 2025-04-10T11:02:20.962942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:20.962973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:20.962986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:20.963141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:21.602945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:25391 2025-04-10T11:02:24.793570Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638180625889208:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:24.793712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:25.007239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638202100726777:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.007370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.298783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:25.649731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638206395694248:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.649854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.650302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638206395694253:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:25.658608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:02:25.702402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638206395694255:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:02:25.783393Z node 1 :TX_PROXY ERROR: Actor# [1:7491638206395694324:2826] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:26.083370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfnv23gbxpqz9mn0kyswy4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.130313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfnv2jcfq187mfstg7e2r2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.174828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfnv2kwfr6c4adp8qvhjf5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.197173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfnv2mj1mpjfgyr8qvhp4y7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.239717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfnv2nvfv3rj35rcm1efejp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.283861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfnv2pwcw4bkkpah33smgxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.334585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfnv2rw3rq6hp0jb7wr3s91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.372356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jrfnv2sm3a8vex7r01x2myne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.403768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jrfnv2v189waph4fhtv4mrdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.435974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jrfnv2w13hph8z7jcvxv5d01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.467054Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jrfnv2x09y0x2ygw5safaqtf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.504581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jrfnv2y3ammm3gv7kmcjd68z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.529100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfnv2yycmyn1rbx13mn5yhs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.555968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfnv2zp2zwq2en85dfgmqny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.582705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnv30k3sp2w8r50xk0axvf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.626811Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jrfnv31qb95tvky2sjsprmts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.667873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfnv32z4typfamd1wsjvk0q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.700685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfnv34aee24sywfwfm3gyc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5OGI0NDItZTg5MWRlMDYtNGFlODA4NjAtZjhmOTI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:02:26.748924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jrfnv35rf1ye4qq36zvx5ka ... n/3?node_id=1&id=NThkZjc3YzMtODcxMGQ3NmQtZTJmNTI5YjYtOGMwYzViMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.576423Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723171. Ctx: { TraceId: 01jrfnxj2f2v0qxpt4z0djkekz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NjNzQzMjktNDE1ZDA4NTYtNmFhMzFlMS0xN2E0ZmRkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:03:47.580166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723172. Ctx: { TraceId: 01jrfnxj370w7rnh19110jdg9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJjMmQyMTktOWMyYzVkMWEtNGFhMGJlZTgtNGY1ZGFhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.583118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723177. Ctx: { TraceId: 01jrfnxj3pf9f8fn1jbsvtpfx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyYmU0MDktZjhiMTNiZjEtODUwOTZhNmQtYTkyNzJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.584248Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723173. Ctx: { TraceId: 01jrfnxj3q2w7f8qd4d5798j59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQwMjZhZGMtOTQyY2NlYTItMTc4MmI3OGEtOTA2YmVhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.584806Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723174. Ctx: { TraceId: 01jrfnxj371s4wfqwhjk0yh4ym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzYxNjBjNjItMzU5MDgzOGMtMTdhOTM2NTEtYmMyMzNkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.585216Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723175. Ctx: { TraceId: 01jrfnxj324ge9k84220xcanq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk3MzFkMjUtZGMwMGQ4MDMtNjY1YzgyZTUtOTAwOGMxMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.588091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723176. Ctx: { TraceId: 01jrfnxj3qeprmy2d6ybnrw1s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlZWFkNDQtOTMxZTZjNjgtNjc1ZjIxMmUtNTZlZmM1YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.600721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723178. Ctx: { TraceId: 01jrfnxj3v57sbds0m9v63wmfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI4ZGY4ZS00Y2ZlNzQyZi05ODg5MjYyZS0xNTc3ZDhiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.602635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723179. Ctx: { TraceId: 01jrfnxj413bftp59embgr8kt1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ5ZTNiYTEtMmQzNzkzZDgtMWIwM2ZlZDMtNDNmY2YyNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.616665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723180. Ctx: { TraceId: 01jrfnxj4607kw62jwz4bjx385, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NjNzQzMjktNDE1ZDA4NTYtNmFhMzFlMS0xN2E0ZmRkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.617263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723181. Ctx: { TraceId: 01jrfnxj4scc772dmgfd74z90x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJjMmQyMTktOWMyYzVkMWEtNGFhMGJlZTgtNGY1ZGFhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.620306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723182. 
Ctx: { TraceId: 01jrfnxj4w4akahbcggyneagpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThkZjc3YzMtODcxMGQ3NmQtZTJmNTI5YjYtOGMwYzViMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.625298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723183. Ctx: { TraceId: 01jrfnxj4xanmn0vpxqkdprcm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk3MzFkMjUtZGMwMGQ4MDMtNjY1YzgyZTUtOTAwOGMxMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.630054Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723184. Ctx: { TraceId: 01jrfnxj532jyfnnfewk2bcd8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQwMjZhZGMtOTQyY2NlYTItMTc4MmI3OGEtOTA2YmVhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.643858Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723185. Ctx: { TraceId: 01jrfnxj592hbe4ams21be2930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzYxNjBjNjItMzU5MDgzOGMtMTdhOTM2NTEtYmMyMzNkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.645025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723186. Ctx: { TraceId: 01jrfnxj5acpttz8y7xk7f10ye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlZWFkNDQtOTMxZTZjNjgtNjc1ZjIxMmUtNTZlZmM1YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.651069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723187. Ctx: { TraceId: 01jrfnxj5m6rs1zan6qtsnx773, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyYmU0MDktZjhiMTNiZjEtODUwOTZhNmQtYTkyNzJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.651598Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723188. Ctx: { TraceId: 01jrfnxj5m9b4gfraq7jy5vzxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ5ZTNiYTEtMmQzNzkzZDgtMWIwM2ZlZDMtNDNmY2YyNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.651957Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723189. Ctx: { TraceId: 01jrfnxj5m1azeh853n6z4kbz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI4ZGY4ZS00Y2ZlNzQyZi05ODg5MjYyZS0xNTc3ZDhiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.662920Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723191. Ctx: { TraceId: 01jrfnxj628qrg4nvvep45atne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk3MzFkMjUtZGMwMGQ4MDMtNjY1YzgyZTUtOTAwOGMxMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-10T11:03:47.664171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723192. Ctx: { TraceId: 01jrfnxj62dd6k63nmahqk82nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NjNzQzMjktNDE1ZDA4NTYtNmFhMzFlMS0xN2E0ZmRkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.664373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723190. 
Ctx: { TraceId: 01jrfnxj628f29hvy5q2dnnnh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJjMmQyMTktOWMyYzVkMWEtNGFhMGJlZTgtNGY1ZGFhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945503 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:03:47.670514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723194. Ctx: { TraceId: 01jrfnxj6b9rryhrc7vee6k7a8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThkZjc3YzMtODcxMGQ3NmQtZTJmNTI5YjYtOGMwYzViMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.670748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723193. Ctx: { TraceId: 01jrfnxj6a37waj9tpd2ypd5av, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQwMjZhZGMtOTQyY2NlYTItMTc4MmI3OGEtOTA2YmVhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.681768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723195. Ctx: { TraceId: 01jrfnxj6j7843grbe8f4bcqab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzYxNjBjNjItMzU5MDgzOGMtMTdhOTM2NTEtYmMyMzNkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.681864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723196. Ctx: { TraceId: 01jrfnxj6j8eqsnr13pfyx4q6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlZWFkNDQtOTMxZTZjNjgtNjc1ZjIxMmUtNTZlZmM1YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.682518Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723197. Ctx: { TraceId: 01jrfnxj6pay74m3h0w151aemz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ5ZTNiYTEtMmQzNzkzZDgtMWIwM2ZlZDMtNDNmY2YyNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.683024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723198. Ctx: { TraceId: 01jrfnxj6pb25zabzpvc6w01xn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI4ZGY4ZS00Y2ZlNzQyZi05ODg5MjYyZS0xNTc3ZDhiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:03:47.686366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723199. Ctx: { TraceId: 01jrfnxj6s3c3afc59vd4aabv0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyYmU0MDktZjhiMTNiZjEtODUwOTZhNmQtYTkyNzJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744282945503 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown 2025-04-10 11:03:30,666 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:03:30,936 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 1259919 46.0M 41.1M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001dbe/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/te 1260286 1.6G 1.6G 1.1G └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/2gt4/001dbe/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 17796, MsgBus: 16059 2025-04-10T11:02:32.998270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638234615208988:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:33.002359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dbe/r3tmp/tmp0Hn8xC/pdisk_1.dat 2025-04-10T11:02:33.580811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:33.587390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:33.587582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:33.592905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17796, node 1 2025-04-10T11:02:33.765448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:33.765489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:33.765499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:33.765688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16059 TClient is connected to server localhost:16059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:34.747328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:34.776416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:37.554759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638256090046118:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:37.554961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.002021Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638234615208988:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:38.002099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:38.003028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:02:38.188474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260385013539:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.188546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.188749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260385013544:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.193199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:02:38.204776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638260385013546:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:02:38.307499Z node 1 :TX_PROXY ERROR: Actor# [1:7491638260385013586:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:39.758187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:02:40.616825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-04-10T11:02:41.641201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.663758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:02:43.464824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:02:44.154944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:02:44.194922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.207700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.246622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.248344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.250760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "c ... MAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-04-10T11:03:20.686054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:03:20.686088Z node 3 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 16949, MsgBus: 64453 2025-04-10T11:03:23.015809Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638454246407805:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:23.016495Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dbe/r3tmp/tmpZf15I0/pdisk_1.dat 2025-04-10T11:03:23.202520Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:23.225331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:23.225546Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:23.229199Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16949, node 4 2025-04-10T11:03:23.300625Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:23.300668Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:23.300680Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:23.300879Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64453 TClient is connected to server localhost:64453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:24.089511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:03:24.109810Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:03:28.022303Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491638454246407805:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:28.022416Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:28.168177Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491638475721244949:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:28.168368Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:28.218945Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:03:28.287750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491638475721245068:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:28.287907Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:28.288404Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491638475721245073:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:28.294560Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:03:28.313020Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491638475721245075:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:03:28.419876Z node 4 :TX_PROXY ERROR: Actor# [4:7491638475721245115:2403] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:29.216249Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:03:29.976403Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001dbe/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001dbe/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk7/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateNotNull >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService |73.5%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> TCompactionMulti::MainPageCollectionOverflow >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> KqpReturning::ReturningWorksIndexedUpsert+QueryService >> KqpKv::ReadRows_NotFullPK [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> KqpPg::AlterSequence [GOOD] >> KqpReturning::ReturningTwice [GOOD] >> KqpPg::PgCreateTable [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> KqpPg::AlterColumnSetDefaultFromSequence |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> TIterator::SerialReverse [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] >> KqpReturning::ReplaceSerial >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpQueryService::ReturnAndCloseSameTime [GOOD] >> KqpPg::PgUpdate+useSink >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> KqpRanges::IsNullInValue >> KqpQueryService::ReplaceIntoWithDefaultValue >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> TLegacy::IndexIter >> TLegacy::IndexIter [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TLegacy::StatsIter [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> NFwd_TFlatIndexCache::Trace [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> TPart::State [GOOD] >> NOther::Blocks [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> KqpPg::CreateTempTableSerial [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> KqpPg::DropSequence >> NPage::ABI_002 [GOOD] >> TPart::BasicColumnGroups [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] 
>> NPageCollection::Meta >> NPageCollection::Meta [GOOD] >> TPart::CellDefaults [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> TPart::Matter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 30080, MsgBus: 22508 2025-04-10T11:02:39.864757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638267078938482:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.865091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c70/r3tmp/tmpCltwYm/pdisk_1.dat 2025-04-10T11:02:40.950818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:40.987837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:41.006001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:41.010514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:41.013871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30080, node 1 2025-04-10T11:02:41.394851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:41.394869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:41.394877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:41.395020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22508 TClient is connected to server localhost:22508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:02:42.767635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:44.836344Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638267078938482:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:44.836446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:46.025221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638297143710092:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.025335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.101713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:46.486557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638297143710202:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.486639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.524496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:02:46.683398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638297143710283:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.683510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.683928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638297143710288:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.688482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:02:46.731328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638297143710290:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:02:46.817206Z node 1 :TX_PROXY ERROR: Actor# [1:7491638297143710343:2459] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24552, MsgBus: 15363 2025-04-10T11:02:48.802685Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638302850798012:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:48.802720Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c70/r3tmp/tmpOG5cXC/pdisk_1.dat 2025-04-10T11:02:49.172389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:49.197777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:49.197890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:49.207610Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24552, node 2 2025-04-10T11:02:49.454786Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:49.454809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:49.454817Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:49.454938Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15363 TClient is connected to server localhost:15363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
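Note on the recurring NOT_FOUND warnings: before the first query runs, the workload service probes /Root/.metadata/workload_manager/pools/default; when the pool does not exist yet, it is created on the fly (the ESchemeOpCreateResourcePool suboperation above) and the fetch is retried, so the later TX_PROXY "path exist, request accepts it" message appears to be the benign tail of that create-and-retry sequence rather than a test failure — the tests around it still report [GOOD]. For reference, a resource pool can also be declared explicitly; the sketch below is illustrative only (the pool name and setting names are assumed from YDB's resource-pool DDL, not taken from this log):

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting: cap on queries running in the pool at once
        QUEUE_SIZE = 100              -- assumed setting: how many queries may wait for a free slot
    );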
2025-04-10T11:02:50.628485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:50.638499Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:53.803479Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638302850798012:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:53.803572Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:53.948679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638324325635139:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:53.948812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:53.949130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638324325635166:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:53.954741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:53.969778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638324325635168:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:54.073722Z node 2 :TX_PROXY ERROR: Actor# [2:7491638328620602517:2342] txid# 281474976710659, issues: { message: "Check failed: path ... 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:43.755133Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:43.756806Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19722, node 10 2025-04-10T11:03:43.849092Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:43.849117Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:43.849132Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:43.849290Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10818 TClient is connected to server localhost:10818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:44.935849Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:48.467750Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491638541952370189:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:48.467835Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:49.161142Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638567722174643:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:49.161238Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638567722174632:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:49.161417Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:49.165938Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:03:49.180072Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491638567722174646:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:03:49.277293Z node 10 :TX_PROXY ERROR: Actor# [10:7491638567722174697:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:49.303040Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491638567722174714:2340], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-10T11:03:49.303279Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZDA3ZDJlYTItMWE3NmMzNzItMjY4Mzk1NjMtNGNiNTg0NjQ=, ActorId: [10:7491638567722174630:2331], ActorState: ExecuteState, TraceId: 01jrfnxfhy2cjw398jv6bs9vg5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 23004, MsgBus: 3320 2025-04-10T11:03:50.495066Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638572045168229:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:50.495123Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c70/r3tmp/tmpejVLux/pdisk_1.dat 2025-04-10T11:03:50.844429Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:50.870794Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:50.870960Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:50.875274Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23004, node 11 2025-04-10T11:03:51.007993Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:51.008033Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:51.008049Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:51.008262Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3320 TClient is connected to server localhost:3320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:52.292342Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
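Note on the GENERIC_ERROR above: KqpPg::InsertValuesFromTableWithDefaultNegativeCase is a negative-case test, so this KQP_COMPILE_ACTOR error is the expected outcome — the pg type layer refuses a DEFAULT expression whose literal cannot be converted to the column's int4 type. The exact statement is not printed in the log; a minimal pg-syntax reconstruction that produces the same "invalid input syntax for type integer" issue would look roughly like this (table and column names are hypothetical):

    --!syntax_pg
    CREATE TABLE t (
        id int4 PRIMARY KEY,
        val int4 DEFAULT 'text'  -- 'text' cannot be parsed as an integer, so KiCreateTable fails with code 1030
    );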
2025-04-10T11:03:52.337020Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:03:55.497581Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638572045168229:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:55.497687Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:57.841621Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638602109939964:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.841699Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638602109939991:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.841762Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.848991Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:03:57.875085Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638602109939993:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:03:57.972085Z node 11 :TX_PROXY ERROR: Actor# [11:7491638602109940044:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:58.076172Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7491638602109940053:2344], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-10T11:03:58.078601Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=NDMxZWI0MGItODJmN2EyOWQtY2RlMTg1MTYtMTA5ZGMyMQ==, ActorId: [11:7491638602109939962:2335], ActorState: ExecuteState, TraceId: 01jrfnxps0afsmzg03tghptk18, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" >> TPart::External [GOOD] >> KqpNotNullColumns::UpdateNotNull [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> TPart::MassCheck [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> TPart::WreckPart >> BuildStatsHistogram::Single_History [GOOD] >> TPart::WreckPart [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> BuildStatsHistogram::Single_History_Slices |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TPart::PageFailEnv >> KqpPg::InsertFromSelect_Serial+useSink >> KqpPg::ExplainColumnsReorder [GOOD] >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpSqlIn::SimpleKey >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> KqpKv::ReadRows_PgValue |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |73.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27031, MsgBus: 11423 2025-04-10T11:02:34.634182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638243492709415:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.634359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc4/r3tmp/tmpuFVcV6/pdisk_1.dat 2025-04-10T11:02:35.589197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:35.662367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:35.662454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:35.665383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:35.666691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27031, node 1 2025-04-10T11:02:35.937916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:35.937943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:35.937951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:35.938092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11423 TClient is connected to server localhost:11423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:36.971817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:37.003735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:39.634544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638243492709415:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.634644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:40.442500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638269262513769:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:40.442676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:40.444840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638269262513781:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:40.449959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:40.479879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638269262513783:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:40.558931Z node 1 :TX_PROXY ERROR: Actor# [1:7491638269262513834:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:40.630856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4600, MsgBus: 22784 2025-04-10T11:02:42.499838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638279168180964:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:42.501767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc4/r3tmp/tmp8Ag5PY/pdisk_1.dat 2025-04-10T11:02:42.702736Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:42.718367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:42.718454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:42.720486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4600, node 2 2025-04-10T11:02:42.805125Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:42.805158Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:42.805169Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:42.805327Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22784 TClient is connected to server localhost:22784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:43.644881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:43.652272Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:02:47.506320Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638279168180964:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:47.506423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:47.757813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638300643018092:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.757893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.758727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638300643018119:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.762456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:47.775664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638300643018121:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:02:47.842614Z node 2 :TX_PROXY ERROR: Actor# [2:7491638300643018172:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:47.871792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7866, MsgBus: 26201 2025-04-10T11:02:49.535927Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638308775072472:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc4/r3tmp/tmpvIjdTt/pdisk_1.dat 2025-04-10T11:02:49.683808Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:02:49.728768Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:49.755188Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState ... 897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:40.940261Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:40.943357Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21245, node 10 2025-04-10T11:03:41.110966Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:41.111002Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:41.111019Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:41.111237Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27172 TClient is connected to server localhost:27172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:42.148226Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:03:45.709102Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491638528040589623:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:45.709193Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:47.175142Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638558105361365:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.175257Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638558105361373:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.175353Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.183258Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:03:47.200017Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491638558105361379:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:03:47.297624Z node 10 :TX_PROXY ERROR: Actor# [10:7491638558105361430:2346] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:47.347318Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.627666Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.783084Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491638558105361672:2365], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-10T11:03:47.783503Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZWRkZmU5ZTUtN2Q1ZjhmNmItNWM5NmQ3ZTItNjczOWJmNjE=, ActorId: [10:7491638558105361670:2364], ActorState: ExecuteState, TraceId: 01jrfnxj85f4westy9abdpzss4, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 26510, MsgBus: 10670 2025-04-10T11:03:49.869727Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638566443364699:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:49.869827Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc4/r3tmp/tmpD12YqN/pdisk_1.dat 2025-04-10T11:03:50.129080Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:50.132510Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:50.132633Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:50.135682Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26510, node 11 2025-04-10T11:03:50.258166Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:50.258195Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:50.258210Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:50.258421Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10670 TClient is connected to server localhost:10670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
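Note on the BAD_REQUEST above: this one comes from KiWriteTable rather than KiCreateTable — the insert omits a NOT NULL column (c), and since the engine will not synthesize a value for it, compilation stops with code 2032 before anything is written. The test's actual DDL is not shown in the log; under the assumption of a pg table whose column c is declared NOT NULL, the failing shape is roughly (names hypothetical):

    --!syntax_pg
    CREATE TABLE t (
        a int4 PRIMARY KEY,
        b int4,
        c int4 NOT NULL
    );
    INSERT INTO t (a, b) VALUES (1, 2);  -- rejected with code 2032: NOT NULL column c is not initialized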
2025-04-10T11:03:51.299734Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:03:51.310683Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:03:54.870374Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638566443364699:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:54.870494Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:56.079036Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638596508136443:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:56.079170Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:56.079302Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638596508136454:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:56.092698Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:03:56.121076Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638596508136457:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:03:56.194195Z node 11 :TX_PROXY ERROR: Actor# [11:7491638596508136508:2346] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:56.264107Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:03:56.458432Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:03:56.606120Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7491638596508136749:2364], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-10T11:03:56.609369Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=NjU0MDAwZDItYTVlZDQ1ZGYtMmJkMjJjOTgtYmY2YWVhYjE=, ActorId: [11:7491638596508136747:2363], ActorState: ExecuteState, TraceId: 01jrfnxtwg5tmd5dgrt2gybw8c, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 
1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 
DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 
1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + 
Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b 
{36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, 
+2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 12939, MsgBus: 9636 2025-04-10T11:02:34.199935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638244216630885:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.205609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ce1/r3tmp/tmp9fQQdl/pdisk_1.dat 2025-04-10T11:02:34.904053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:34.904176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:34.907642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:34.922926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12939, node 1 2025-04-10T11:02:35.075052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:35.075086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:35.075095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:35.075231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:9636 TClient is connected to server localhost:9636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:35.992109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:38.740532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638261396500737:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.740778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.741326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638261396500749:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.746008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:38.763617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:02:38.763935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638261396500751:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:38.831652Z node 1 :TX_PROXY ERROR: Actor# [1:7491638261396500802:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11249, MsgBus: 4310 2025-04-10T11:02:39.918205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638263952991984:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.918259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ce1/r3tmp/tmpVEewb2/pdisk_1.dat 2025-04-10T11:02:40.343393Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:40.364599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:40.364699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:40.371466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11249, node 2 2025-04-10T11:02:40.542809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:40.542842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:40.542853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:40.542988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4310 TClient is connected to server localhost:4310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:41.174188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:41.183719Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:43.679981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638281132861764:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:43.680103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:43.680384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638281132861776:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:43.686130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:43.707857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638281132861778:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:43.767688Z node 2 :TX_PROXY ERROR: Actor# [2:7491638281132861829:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62752, MsgBus: 7432 2025-04-10T11:02:44.596322Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638287590157380:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:44.700852Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ce1/r3tmp/tmpdlLZz7/pdisk_1.dat 2025-04-10T11:02:44.874925Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:44.892460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:44.892559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:44.894228Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62752, node 3 2025-04-10T11:02:45.103950Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:45.103979Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:45.103989Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:45.104139Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7432 TClient is connected to server localhost:7432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 Sub ... n't have access permissions } 2025-04-10T11:03:41.039272Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:03:41.056407Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491638533641815332:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:03:41.139276Z node 10 :TX_PROXY ERROR: Actor# [10:7491638533641815384:2451] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13745, MsgBus: 12921 2025-04-10T11:03:44.511718Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638543970223833:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:44.511820Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ce1/r3tmp/tmpReWaIV/pdisk_1.dat 2025-04-10T11:03:44.892009Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:44.932128Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:44.932287Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:44.935207Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13745, node 11 2025-04-10T11:03:45.043078Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:45.043117Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:45.043136Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:45.043416Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12921 TClient is connected to server localhost:12921 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:46.013589Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:03:46.023651Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:03:49.510313Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638543970223833:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:49.510438Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:50.651435Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638569740028263:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.651545Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.679298Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:03:50.781800Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:03:50.895480Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638569740028442:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.895654Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.896365Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638569740028447:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.904741Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:03:50.927939Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638569740028449:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:03:51.023506Z node 11 :TX_PROXY ERROR: Actor# [11:7491638574034995797:2452] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:00.774437Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:04:00.774768Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:04:00.775039Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ce1/r3tmp/tmpt4qXe9/pdisk_1.dat 2025-04-10T11:04:01.269309Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.326889Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:01.374565Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:01.374810Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:01.390796Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:04:01.505832Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:644:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:01.506039Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:654:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:01.506208Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:01.517321Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:04:01.729655Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:658:2560], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:04:01.803049Z node 12 :TX_PROXY ERROR: Actor# [12:730:2601] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "9c0e1868-ff2c5700-36cab5a3-adf51e9f" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"9c0b3980-cd81c698-983dc589-1461b1de\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence [FAIL] >> KqpPg::CreateTableIfNotExists_GenericQuery >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> TPersQueueTest::DirectReadRestartTablet [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> KqpNewEngine::StreamLookupWithView >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningSerial >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> TSequence::CreateSequenceParallel |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |73.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:13.668585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:13.668732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:13.668803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:13.668861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:13.668921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:13.668959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:13.669038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:13.669157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:13.669638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:13.774524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:13.774582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:13.798090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:13.798480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:13.798661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:13.812207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:13.812574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:13.813498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:13.813973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:13.816716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:13.818440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:13.818519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:13.818744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T11:02:13.818808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:13.818859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:13.819077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:13.826616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:14.023019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:14.023337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.023593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:14.023875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:14.023944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.029219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:14.029382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:14.029635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.029728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:14.029775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:14.029826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:14.034208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.034311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:14.034360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:14.037171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.037240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.037287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:02:14.037412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.042221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:14.044519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:14.044770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:14.045794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:14.045980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:14.046038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:14.046388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:14.046439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:14.046598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:14.046722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:14.049265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:14.049307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:14.049507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:14.049551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:14.049926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:14.049964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:14.050072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:14.050102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.050221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T11:02:14.050252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.050293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:14.050336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:14.050378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:14.050425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:14.050510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:14.050558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:14.050602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:14.054101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:14.054267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:14.054315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 65Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-04-10T11:04:05.383273Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:122:2148], Recipient [3:309:2296]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-10T11:04:05.383490Z node 3 :TX_DATASHARD INFO: Started background compaction# 6 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:122:2148], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-10T11:04:05.384795Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 5, ts 1970-01-01T00:00:18.152000Z 2025-04-10T11:04:05.384884Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 5, front# 6 2025-04-10T11:04:05.398655Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1243:3183], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-10T11:04:05.398803Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-10T11:04:05.400828Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 18 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 30757 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-10T11:04:05.400906Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:04:05.400973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 3.0757 2025-04-10T11:04:05.401132Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 18 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:04:05.401187Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:04:05.403609Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:298:2287], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:04:05.411422Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.152000Z 2025-04-10T11:04:05.411541Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 6 2025-04-10T11:04:05.411602Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:122:2148]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:04:05.412030Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:309:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-10T11:04:05.412094Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-10T11:04:05.412209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-10T11:04:05.412282Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-10T11:04:05.415404Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:298:2287], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:04:05.429358Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.152000Z 2025-04-10T11:04:05.485665Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:05.485772Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:05.485824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-10T11:04:05.486005Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:04:05.486090Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T11:04:05.486325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-10T11:04:05.486439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-10T11:04:05.486547Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-04-10T11:04:05.486704Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:05.497432Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:05.497542Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:05.497594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:04:05.926963Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:04:05.927286Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:05.927366Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:05.927539Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:05.927604Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:05.974321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-10T11:04:05.974492Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-10T11:04:05.974696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-04-10T11:04:05.975064Z node 3 :TX_DATASHARD 
TRACE: StateWork, received event# 269553210, Sender [3:122:2148], Recipient [3:309:2296]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-10T11:04:05.975282Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:122:2148], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-10T11:04:05.976705Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.152000Z 2025-04-10T11:04:05.976810Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-04-10T11:04:05.991357Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1272:3209], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-10T11:04:06.008273Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:298:2287], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:04:06.015460Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.152000Z 2025-04-10T11:04:06.015575Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-04-10T11:04:06.015631Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:122:2148]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:04:06.016448Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:309:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-10T11:04:06.016508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-10T11:04:06.016603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-10T11:04:06.016672Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-10T11:04:06.020590Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:298:2287], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:04:06.034432Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.152000Z >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::UpdateOnNotNull >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> 
test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> TSequence::CreateSequenceSequential [GOOD] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> TPart::PageFailEnvColumnGroups [GOOD] >> BuildStatsHistogram::Single_History_Slices [GOOD] >> TNebiusAccessServiceTest::PassRequestId >> TSequence::CreateSequenceInsideTableThenDropSequence >> BuildStatsHistogram::Ten_Mixed >> TPart::ForwardEnvColumnGroups >> TNebiusAccessServiceTest::PassRequestId [GOOD] |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TPart::ForwardEnvColumnGroups [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TPart::Versions [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TPart::ManyVersions >> TSequence::CreateSequenceInsideTableThenDropTable >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> TPartBtreeIndexIteration::NoNodes [GOOD] Test command err: 2025-04-10T11:04:11.603003Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Connect to grpc://localhost:23608 2025-04-10T11:04:11.616152Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-10T11:04:11.631445Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] |73.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> TPartBtreeIndexIteration::NoNodes_Groups >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] |73.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans >> TSequence::CreateSequencesWithIndexedTable |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |73.7%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |73.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SelectNotAllElements >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert-QueryService |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:11.576221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:11.576317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:11.576351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:11.576389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2025-04-10T11:02:11.576431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:11.576462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:11.576514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:11.576594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:11.576898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:11.723832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:11.723896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:11.739894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:11.740259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:11.740439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:11.755882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:02:11.756185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:11.756970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.757416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:11.759781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.761233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:11.761300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.761505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:11.761557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:11.761599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:11.761733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.769083Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:11.944240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:11.944624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.944832Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:11.945161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:11.945231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.948132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.948299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:11.948580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.948657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:11.948701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:11.948765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:11.951345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.951412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:11.951463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:11.953757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.953812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.953876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.953946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.959957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:11.962678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:11.962896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:11.964013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:11.964160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:11.964222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.964631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:11.964696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:11.964898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:11.965053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:11.967556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:11.967605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:11.967811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:11.967872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:11.968126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:11.968166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:11.968274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:11.968307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.968365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:11.968395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.968457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:11.968507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:11.968543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:11.968574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:11.968651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:11.968693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:11.968768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:11.978942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-10T11:02:11.979121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:11.979161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:13.854549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:14.255188Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:14.255273Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:14.255376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:14.255408Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:14.709376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:14.709480Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:14.710551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:14.710612Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:14.790681Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:04:14.790765Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:04:14.790856Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-10T11:04:14.790929Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:04:14.790969Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-10T11:04:14.791006Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-10T11:04:14.791041Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-10T11:04:14.791273Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:04:14.791590Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-10T11:04:14.793777Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 
Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 203 Memory: 124232 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 263 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 2025-04-10T11:04:14.793834Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:04:14.793889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0203 2025-04-10T11:04:14.794022Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:04:14.794070Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:04:14.796789Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1061:3005], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-10T11:04:14.849542Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:14.849632Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:14.849677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-10T11:04:14.849757Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:04:14.849800Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T11:04:14.849970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-10T11:04:14.850067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 
2025-04-10T11:04:14.851995Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-04-10T11:04:14.852254Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:14.862776Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:14.862860Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:04:14.862896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:04:15.186685Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.186772Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.186878Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:15.186915Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:15.594876Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.594968Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.595069Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:15.595105Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:15.998539Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.998625Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:15.998725Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:15.998760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:16.397853Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:16.397945Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:16.398044Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:16.398078Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:16.781867Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:16.781950Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:16.782051Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:16.782087Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:16.816977Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:04:17.193830Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:17.193916Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:04:17.194025Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:04:17.194067Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TSchemeShardUserAttrsTest::Boot >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> KqpReturning::ReturningSerial [GOOD] >> KqpReturning::ReturningColumnsOrder >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] Test command err: Trying to start YDB, gRPC: 2592, MsgBus: 62583 2025-04-10T10:56:54.276910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636785936601395:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:54.276955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001580/r3tmp/tmpNlcOpP/pdisk_1.dat 2025-04-10T10:56:54.886952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:54.894622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:54.894751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:54.903367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2592, node 1 
2025-04-10T10:56:55.186771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:55.186798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:55.186825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:55.186957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62583 TClient is connected to server localhost:62583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:56.115288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.148067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.345801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.597965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:56.698318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:58.647956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636803116472327:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:58.648065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:59.012193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.063534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.105278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.174139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.212257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.266437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:59.281856Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636785936601395:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:59.281911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:59.315513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636807411440138:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:59.315585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:59.315827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636807411440143:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:59.319264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:59.331847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636807411440145:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:59.395126Z node 1 :TX_PROXY ERROR: Actor# [1:7491636807411440199:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23293, MsgBus: 7421 2025-04-10T10:57:01.191188Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491636812529533312:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:01.191238Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001580/r3tmp/tmpDeIwF4/pdisk_1.dat 2025-04-10T10:57:01.377256Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:57:01.410325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:57:01.410411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:57:01.414981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23293, node 2 2025-04-10T10:57:01.595014Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:57:01.595035Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:57:01.595042Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:57:01.595136Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7421 TClient is connected to server localhost:7421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:57:02.411749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T10:57:02.433875Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T10:57:02.454405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.560811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.749944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:02.850267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:57:04.955279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636825414436954:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:04.955372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.014685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.076205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.146970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.216065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.273576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.360028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T10:57:05.429966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636829709404772:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.430061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.430386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491636829709404777:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:57:05.434522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T10:57:05.459105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491636829709404779:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T10:57:05.526644Z node 2 :TX_PROXY ERROR: Actor# [2:7491636829709404833:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:57:06.192238Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491636812529533312:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:57:06.192298Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:57:16.331752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:57:16.331784Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 20174, MsgBus: 24583 2025-04-10T11:04:00.113749Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638612684349332:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:00.148571Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001580/r3tmp/tmpIDT6Ug/pdisk_1.dat 2025-04-10T11:04:00.245260Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:00.276928Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:00.277045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:00.279878Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20174, node 3 2025-04-10T11:04:00.386890Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:00.386923Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:00.386934Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:00.387113Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24583 TClient is connected to server localhost:24583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:01.146935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:05.022353Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491638612684349332:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:05.022490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:05.230426Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638634159186340:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:05.230507Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638634159186329:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:05.231082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:05.235837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:04:05.250947Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638634159186343:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:04:05.336351Z node 3 :TX_PROXY ERROR: Actor# [3:7491638634159186395:2341] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:08.314812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:04:21.073048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:04:21.073200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:04:21.073268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:04:21.073318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:04:21.073375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:04:21.073418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:04:21.073504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:04:21.073619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:04:21.073997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:04:21.172985Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:04:21.173066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:21.202409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:04:21.202867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:04:21.203089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:04:21.228557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:04:21.228912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:04:21.229780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:21.230342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:04:21.234763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:04:21.236683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:04:21.236761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:04:21.237005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:04:21.237060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:04:21.237106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:04:21.237269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.246079Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:04:21.438770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:04:21.439130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.439379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:04:21.439727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:04:21.439816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.445597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:21.445791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:04:21.446033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.446100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:04:21.446142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:04:21.446215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:04:21.455520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.455625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:04:21.455668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:04:21.458594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.458666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.458733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:21.458811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:04:21.464552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:04:21.470291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:04:21.470593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:04:21.471913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:21.472124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:04:21.472195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:21.472585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:04:21.472652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:21.472887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:04:21.473002Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:04:21.481238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:04:21.481318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:04:21.481623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:04:21.481674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:04:21.482013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.482086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:04:21.482235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:04:21.482282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:21.482338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:04:21.482399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:21.482449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:04:21.482513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:21.482562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:04:21.482599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:04:21.482707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:04:21.482767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:04:21.482809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:04:21.488057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:04:21.488241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:04:21.488288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:04:21.488331Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:04:21.488377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:04:21.488517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, 
txId: 1, subscribers: 0 2025-04-10T11:04:21.504184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:04:21.504986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |73.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:04:07.467188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:04:07.467304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:04:07.467378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:04:07.467422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:04:07.467477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:04:07.467523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:04:07.467633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:04:07.467704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:04:07.468042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:04:07.559849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:04:07.559923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:07.579319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:04:07.579685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:04:07.579876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:04:07.588202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:04:07.588469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-04-10T11:04:07.589213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:07.589643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:04:07.591747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:04:07.593265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:04:07.593329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:04:07.593553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:04:07.593606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:04:07.593651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:04:07.593830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.601488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:04:07.736866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:04:07.737134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.737359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:04:07.737647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:04:07.737707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.743526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:07.743730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:04:07.743932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.743998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:04:07.744037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:04:07.744076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:04:07.751594Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.751693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:04:07.751748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:04:07.754088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.754171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.754249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:07.754303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:04:07.764736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:04:07.767464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:04:07.767667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:04:07.768773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:04:07.768949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:04:07.769008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:07.769404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:04:07.769469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:04:07.769648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:04:07.769754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:04:07.772174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:04:07.772222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:04:07.772447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-10T11:04:07.772495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:04:07.772772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:04:07.772820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:04:07.772924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:04:07.772961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:07.773015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:04:07.773056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:07.773092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:04:07.773130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:04:07.773176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:04:07.773202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:04:07.773271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:04:07.773310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:04:07.773356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:04:07.775889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:04:07.776012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:04:07.776066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
peration in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:04:21.823196Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:04:21.823236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:04:21.823381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-10T11:04:21.823454Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:04:21.825262Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.825316Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-04-10T11:04:21.825462Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:340:2319] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-10T11:04:21.827640Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:04:21.827697Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:04:21.827788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T11:04:21.827868Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:04:21.828329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:04:21.828520Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:04:21.828561Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-10T11:04:21.828613Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-10T11:04:21.828670Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-10T11:04:21.828723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-10T11:04:21.828793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-10T11:04:21.830533Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.830637Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-04-10T11:04:21.830764Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:343:2321] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-10T11:04:21.831271Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.831311Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.831660Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:04:21.831705Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:04:21.831783Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:04:21.831848Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:04:21.832184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:04:21.832315Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:04:21.832353Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-10T11:04:21.832386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:04:21.832427Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-10T11:04:21.832460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:04:21.832500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-10T11:04:21.832595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:415:2373] message: TxId: 102 2025-04-10T11:04:21.832665Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:04:21.832727Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:04:21.832774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:04:21.832923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:04:21.832977Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T11:04:21.833004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T11:04:21.833039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:04:21.833068Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-10T11:04:21.833093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-10T11:04:21.833142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:04:21.833178Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-10T11:04:21.833204Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-10T11:04:21.833279Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:04:21.835216Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.835267Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.835646Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435084, Sender [7:123:2149], Recipient [7:123:2149]: 
NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-10T11:04:21.835699Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-10T11:04:21.835812Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:04:21.835885Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T11:04:21.835993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:04:21.836448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.836492Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.836562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.836618Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.836680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.836707Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.839188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:04:21.839236Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.842210Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.842356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:04:21.842477Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:415:2373] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-04-10T11:04:21.842673Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:04:21.842729Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:523:2474] 2025-04-10T11:04:21.842862Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:04:21.843107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:525:2476], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:04:21.843151Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:04:21.843185Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-10T11:04:21.843742Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:603:2553], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:04:21.843813Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:04:21.843966Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:04:21.844233Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 267us result status StatusPathDoesNotExist 2025-04-10T11:04:21.844436Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> YdbOlapStore::LogTsRangeDescending [GOOD] >> YdbQueryService::TestAttachTwice |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] >> TPersQueueTest::BadSids >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [FAIL] >> KqpPg::CheckPgAutoParams+useSink |73.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 11330, MsgBus: 11275 2025-04-10T11:03:44.119536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638543197600991:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:44.119602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00112c/r3tmp/tmpZ9dPJT/pdisk_1.dat 2025-04-10T11:03:44.786173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:44.786317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:44.787766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:44.795832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11330, node 1 2025-04-10T11:03:44.957036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:44.957059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:44.957089Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:44.957222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11275 TClient is connected to server localhost:11275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:45.589014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:47.811545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638556082503547:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.811749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:48.221622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:03:48.377365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638560377470948:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:48.377490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:48.377779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638560377470953:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:48.382924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:03:48.395564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638560377470955:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:03:48.489734Z node 1 :TX_PROXY ERROR: Actor# [1:7491638560377471007:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:48.886646Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491638560377471075:2363], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-04-10T11:03:48.888165Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDczMWFiMmQtZjlkYjc3OC0zNWRlY2QzYi1lZTBhMzhhNQ==, ActorId: [1:7491638556082503535:2328], ActorState: ExecuteState, TraceId: 01jrfnxkaybdxa7xaa6pkj1tkb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:03:48.917622Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491638560377471084:2367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-04-10T11:03:48.919075Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDczMWFiMmQtZjlkYjc3OC0zNWRlY2QzYi1lZTBhMzhhNQ==, ActorId: [1:7491638556082503535:2328], ActorState: ExecuteState, TraceId: 01jrfnxkd4arntygv2mvabnm96, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:03:49.122333Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638543197600991:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:49.122388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30871, MsgBus: 8798 2025-04-10T11:03:49.782047Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638568791777990:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:49.812122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00112c/r3tmp/tmpr6z3EC/pdisk_1.dat 2025-04-10T11:03:49.905569Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:49.938906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:49.939000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:49.943939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30871, node 2 2025-04-10T11:03:50.054796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:50.054822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:50.054831Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:50.055078Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8798 TClient is connected to server localhost:8798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
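The two GENERIC_ERROR compile failures above (KiUpdateTable!, "Cannot update primary key column: Key") appear to be the negative cases this test exercises: KQP rejects any UPDATE that rewrites a primary-key column at type-annotation time, before execution. A minimal YQL sketch of the rejected pattern — the table name and schema here are assumed for illustration, not taken from the test source:

    CREATE TABLE `/Root/TestTable` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );
    -- Rejected during compilation with "Cannot update primary key column: Key";
    -- primary-key columns are immutable in YDB, so this statement never runs.
    UPDATE `/Root/TestTable` SET Key = 42 WHERE Value = "old";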
2025-04-10T11:03:50.549747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:50.563381Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:03:53.391649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638585971647747:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:53.391741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:53.405912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:03:53.500502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638585971647850:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:53.500610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:53.500842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638585971647855:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:53.505508Z node 2 :F ... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:10.338335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:13.749152Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491638648680062125:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:13.773502Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:14.379219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638674449866408:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:14.379326Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:14.403867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:14.516657Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638674449866511:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:14.516802Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:14.517152Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638674449866516:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:14.524081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:04:14.547885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638674449866518:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:04:14.640075Z node 5 :TX_PROXY ERROR: Actor# [5:7491638674449866569:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:15.060047Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7491638678744833933:2363], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:04:15.062647Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YjlmMzg3ZWMtOWY1ZGNhMTYtNmUwOGI1NDQtNTBjMTA5ZDI=, ActorId: [5:7491638674449866390:2331], ActorState: ExecuteState, TraceId: 01jrfnycxy06tq80e0sq6knfsk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 2413, MsgBus: 20432 2025-04-10T11:04:16.200436Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638683303131773:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:16.200570Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00112c/r3tmp/tmpIBCE6w/pdisk_1.dat 2025-04-10T11:04:16.443570Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:16.470190Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:16.470309Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:16.476188Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2413, node 6 2025-04-10T11:04:16.592975Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:16.592999Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:16.593010Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:16.593158Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20432 TClient is connected to server localhost:20432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:17.865901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
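The code-2031 failure above ("Failed to convert 'Value': Null to String") is what the compiler reports when a literal NULL is written into a non-optional column; the runtime variant of the same check surfaces further below as KIKIMR_BAD_COLUMN_TYPE ("Tried to insert NULL value into NOT NULL column: Value"). A hedged YQL sketch of the failing write — schema assumed for illustration only:

    CREATE TABLE `/Root/TestNotNull` (
        Key Uint64 NOT NULL,
        Value String NOT NULL,
        PRIMARY KEY (Key)
    );
    -- Fails with code 2031: NULL cannot be converted to the non-optional
    -- column type String, so the row is rejected instead of inserted.
    UPSERT INTO `/Root/TestNotNull` (Key, Value) VALUES (1, NULL);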
2025-04-10T11:04:17.880257Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:21.204791Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638683303131773:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:21.204899Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:22.755998Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638709072936229:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:22.756141Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:22.780509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:22.875650Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638709072936330:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:22.875859Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:22.878681Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638709072936335:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:22.884201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:04:22.903885Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638709072936337:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:04:23.003844Z node 6 :TX_PROXY ERROR: Actor# [6:7491638709072936388:2405] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:23.411767Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7491638713367903760:2333], TxId: 281474976710663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=6&id=ZDI5Zjc3MjktYzU3MjMwZDgtOTI5ZTBmNTAtM2ZkYTM4ZjU=. CustomerSuppliedId : . TraceId : 01jrfnyn0bbagt4ayx0mgxnara. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 }. 2025-04-10T11:04:23.412343Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDI5Zjc3MjktYzU3MjMwZDgtOTI5ZTBmNTAtM2ZkYTM4ZjU=, ActorId: [6:7491638709072936226:2333], ActorState: ExecuteState, TraceId: 01jrfnyn0bbagt4ayx0mgxnara, Create QueryResponse for error on request, msg: |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External >> TIterator::External [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpKv::ReadRows_PgKey [GOOD] >> KqpKv::ReadRows_Nulls >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> TBlobStorageBlocksCacheTest::MultipleTables >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO_Immediate >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] >> KqpPg::TableArrayInsert-useSink [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> KqpPg::Returning+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::External [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:33.076046Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: 
Starting storage for BS group 3 00000.022 II| TABLET_FLATBOOT: Leader{1:2:-} booting Deps{0:0 entries 0} {nil} 00000.022 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.022 II| TABLET_FLATBOOT: Leader{1:2:-} loading { Alter 0, Turns 0, Loans 0, GCExt 0 } 00000.022 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 2, has 4 jobs, Boot{ 5 que, 2 refs } 00000.022 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 3, has 1 jobs, Boot{ 2 que, 2 refs } 00000.022 II| TABLET_FLATBOOT: Leader{1:2:-} redo log has 0 records, last before 0:0 00000.023 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 4, has 1 jobs, Boot{ 2 que, 2 refs } 00000.023 II| TABLET_FLATBOOT: Leader{1:2:-} result: db change {1 -> 1} snap on 0 00000.023 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 5, has 0 jobs, Boot{ 1 que, 2 refs } 00000.023 II| TABLET_FLATBOOT: Leader{1:2:-} booting completed, took 0.000s 00000.109 II| TABLET_FLATBOOT: Leader{1:3:-} booting Deps{2:1 entries 252} {nil} 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} snap in deps on 2:1, TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process snap gc entry, + [ ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} Loading TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.109 II| TABLET_FLATBOOT: Leader{1:3:-} snap on 2:1 change 1, 25b, ABI 28 of [1, 28], GC{ +0 -0 } 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process gc snapshot, + [ ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:2:1:8192:209:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:3:1:24576:74:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:4:1:24576:79:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:5:1:24576:81:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:6:1:24576:81:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:7:1:24576:81:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:8:1:24576:79:0] ], - [ ] 00000.109 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:9:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:10:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:11:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:12:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:13:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:14:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:15:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:16:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:17:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:18:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:19:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:20:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log 
gc entry, + [ [1:2:21:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:22:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:23:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:24:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:25:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:26:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:27:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:28:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:29:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:30:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:31:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:32:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:33:1:24576:83:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:34:1:24576:82:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:35:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:36:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:37:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:38:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:39:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:40:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:41:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:42:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:43:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:44:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:45:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:46:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:47:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:48:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:49:1:24576:79:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:50:1:24576:82:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:51:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:52:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:53:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:54:1:24576:81:0] ], - [ ] 00000.110 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:55:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: 
Leader{1:3:-} process log gc entry, + [ [1:2:56:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:57:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:58:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:59:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:60:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:61:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:62:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:63:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:64:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:65:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:66:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:67:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:68:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:69:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:70:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:71:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:72:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:73:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:74:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:75:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:76:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:77:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:78:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:79:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:80:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:81:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:82:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:83:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:84:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:85:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:86:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:87:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:88:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:89:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:90:1:24576:81:0] ], - [ ] 00000.111 DD| 
TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:91:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:92:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:93:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:94:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:95:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:96:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:97:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:98:1:24576:81:0] ], - [ ] 00000.111 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:99:1:24576:81:0] ], - ... 24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:96:0], [1:2:29:1:24576:100:0], [1:2:30:1:24576:97:0], [1:2:31:1:24576:96:0], [1:2:32:1:24576:96:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:97:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], 
[1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.150 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class Online from cache [ ] already requested [ ] to request [ 22 23 24 25 ] 00000.151 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 24 25 ] 00000.151 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.152 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1880b, wait} done, Waste{2:0, 141856b +(140, 14018b), 146 trc} 00000.153 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [35:212:2237] 00000.154 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class AsyncLoad from cache [ ] already requested [ 22 23 24 25 ] to request [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.154 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] async queue pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.154 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.155 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 22 23 24 25 ] 00000.155 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.156 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{26 pages [1:2:143:1:12288:758:0] ok OK}, category 2 00000.156 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] 00000.157 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan 00000.157 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.158 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.158 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} 00000.158 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.158 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141856b +(0, 0b), 1 trc, -14018b acc} 00000.159 DD| TABLET_SAUSAGECACHE: Unregister owner [35:212:2237] 00000.159 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.159 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.160 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.160 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.160 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.160 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.160 II| FAKE_ENV: All BS storage groups are 
stopped 00000.160 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.160 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 782}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:41.909355Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.046 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.047 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 512b} miss {0 0b} 00000.047 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.047 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.047 II| FAKE_ENV: DS.1 gone, left {6814b, 23}, put {6814b, 23} 00000.047 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.047 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.047 II| FAKE_ENV: All BS storage groups are stopped 00000.048 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.048 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:41.964126Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.403 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.403 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.403 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.403 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.403 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.407 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.407 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.407 II| FAKE_ENV: All BS storage groups are stopped 00000.407 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.407 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:42.387754Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00020.145 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00020.145 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00020.146 II| FAKE_ENV: Shut order, stopping 4 BS groups 00020.146 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00020.146 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00020.150 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00020.150 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00020.150 II| FAKE_ENV: All BS storage groups are stopped 00020.150 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00020.150 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 
II| FAKE_ENV: Born at 2025-04-10T11:04:02.568011Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00022.929 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00022.929 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00022.930 II| FAKE_ENV: Shut order, stopping 4 BS groups 00022.931 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00022.931 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00022.942 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00022.942 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00022.942 II| FAKE_ENV: All BS storage groups are stopped 00022.942 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00022.942 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:04:25.530911Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:04:25.578000Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:04:25.644555Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:04:25.700686Z 00000.021 II| FAKE_ENV: Starting storage for BS group 0 00000.021 II| FAKE_ENV: Starting storage for BS group 1 00000.021 II| FAKE_ENV: Starting storage for BS group 2 00000.021 II| FAKE_ENV: Starting storage for BS group 3 |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> KqpPg::DeleteWithQueryService-useSink [GOOD] |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |74.0%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7453, MsgBus: 62648 2025-04-10T11:02:34.964106Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638243444177074:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.964653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca0/r3tmp/tmpxPGbvW/pdisk_1.dat 2025-04-10T11:02:36.055423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:36.158284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:36.207568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:36.207704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:36.210863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7453, node 1 2025-04-10T11:02:36.404366Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:36.404397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:36.404408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:36.404548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62648 TClient is connected to server localhost:62648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:37.520058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
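The InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE failure recorded earlier in this section ("Tried to insert NULL value into NOT NULL column: Value, code: 2031") is reproducible from any client. A minimal sketch with the YDB Python SDK follows; the endpoint, database and table name are placeholders rather than values from this run, and whether the NULL is rejected at query compilation or inside a compute task (as in the log above) depends on how the value reaches the server.

    # Minimal repro sketch for issue code 2031 (NULL into a NOT NULL column).
    # Endpoint, database and table name are illustrative placeholders; the
    # server must allow NOT NULL on non-key columns for the CREATE to succeed.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def repro(session):
        session.execute_scheme(
            "CREATE TABLE demo (Key Uint64 NOT NULL, Value Utf8 NOT NULL, "
            "PRIMARY KEY (Key));"
        )
        # Value is declared NOT NULL, so this UPSERT should be rejected.
        session.transaction().execute(
            "UPSERT INTO demo (Key, Value) VALUES (1u, NULL);", commit_tx=True
        )

    try:
        pool.retry_operation_sync(repro)
    except ydb.Error as err:
        print(err)  # expect the NOT NULL violation, surfaced much like the log above
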
2025-04-10T11:02:37.575404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:02:39.942525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638243444177074:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.984947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:40.864716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638269213981385:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:40.864885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:40.975859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:41.192965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638273508948822:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.193058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.193684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638273508948827:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.198986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:02:41.212636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638273508948829:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:02:41.272035Z node 1 :TX_PROXY ERROR: Actor# [1:7491638273508948882:2428] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 24812, MsgBus: 18138 2025-04-10T11:02:43.103218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638284667301586:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:43.103257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca0/r3tmp/tmpDDruDQ/pdisk_1.dat 2025-04-10T11:02:43.475221Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:43.506668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:43.506822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:43.508374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24812, node 2 2025-04-10T11:02:43.666708Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:43.666732Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:43.666739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:43.666846Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18138 TClient is connected to server localhost:18138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:44.381829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
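The four-step sequence that just played out on node 1 — "Resource pool default not found" warnings, an ESchemeOpCreateResourcePool suboperation, a "Transaction ... completed, doublechecking" retry, and finally the TX_PROXY "path exist, request accepts it" message — repeats for every node in this section and reads as a benign startup race: each node tries to create the default workload-manager pool and treats an already-existing path as success. A pseudocode-style Python reading of that idiom is sketched below; ensure_default_pool, client and AlreadyExists are hypothetical names invented for illustration, not YDB's internal API.

    # Hypothetical reading of the create-then-doublecheck idiom in the log;
    # none of these names exist in YDB's code base.
    class AlreadyExists(Exception):
        """Stand-in for the "path exist, request accepts it" outcome."""

    def ensure_default_pool(client):
        try:
            client.create_resource_pool("default")   # several nodes may race here
        except AlreadyExists:
            pass                                     # losing the race is acceptable
        # "Transaction ... completed, doublechecking": verify the pool really exists.
        if client.describe_resource_pool("default") is None:
            raise RuntimeError("default resource pool missing after create")
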
2025-04-10T11:02:44.393811Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:47.820028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638301847171437:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.820132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.837634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.954370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638301847171572:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.954522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.954974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638301847171577:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.959736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:02:47.979705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638301847171579:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:02:48.073810Z node 2 :TX_PROXY ERROR: Actor# [2:7491638306142138926:2419] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:48. ... 10T11:04:10.672178Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638657519041398:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:10.672263Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca0/r3tmp/tmpM9JNX6/pdisk_1.dat 2025-04-10T11:04:11.070140Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:11.094949Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:11.095098Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:11.104218Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5161, node 11 2025-04-10T11:04:11.262948Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:11.262988Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:11.263002Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:11.263224Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10755 TClient is connected to server localhost:10755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:12.677123Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:04:12.689170Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:15.674779Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638657519041398:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:15.674913Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:17.864547Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638687583813148:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:17.864717Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:17.933678Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:18.154422Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638691878780548:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:18.154681Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:18.158848Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638691878780553:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:18.184143Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:04:18.246503Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638691878780555:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:04:18.336497Z node 11 :TX_PROXY ERROR: Actor# [11:7491638691878780607:2408] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9850, MsgBus: 16306 2025-04-10T11:04:21.156740Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491638702107321726:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:21.156814Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca0/r3tmp/tmpQok8JG/pdisk_1.dat 2025-04-10T11:04:21.428293Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:21.468159Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:21.468312Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:21.470054Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9850, node 12 2025-04-10T11:04:21.643031Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:21.643065Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:21.643079Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:21.643314Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16306 TClient is connected to server localhost:16306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:22.864194Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
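Verdict markers such as ">> TIterator::External [GOOD]" and the "[FAIL]" entries for the test_sql_streaming cases are the only stable structure in this stream, so a small stand-alone filter helps when scanning a capture like this one. The helper below is an assumption of this write-up (plain CPython, standard library only), not part of ya or test_tool.

    # Stand-alone tally of verdict markers in a raw log capture; not part of
    # the CI tooling that produced this output.
    import re
    import sys
    from collections import Counter

    VERDICT = re.compile(r">> (\S+) \[(GOOD|FAIL|TIMEOUT|SKIPPED|CRASHED)\]")

    def summarize(path):
        counts, bad = Counter(), []
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                for name, verdict in VERDICT.findall(line):
                    counts[verdict] += 1
                    if verdict != "GOOD":
                        bad.append((verdict, name))
        return counts, bad

    if __name__ == "__main__":
        counts, bad = summarize(sys.argv[1])
        print(dict(counts))
        for verdict, name in bad:
            print(verdict, name)
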
2025-04-10T11:04:26.161281Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491638702107321726:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:26.161438Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:28.206354Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491638732172093344:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:28.206658Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:28.256520Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:28.533328Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491638732172093451:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:28.533563Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:28.534656Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491638732172093456:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:28.542531Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:04:28.572682Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491638732172093458:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:04:28.640150Z node 12 :TX_PROXY ERROR: Actor# [12:7491638732172093510:2407] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |74.1%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> S3SettingsConversion::Basic >> S3SettingsConversion::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter 2025-04-10 11:04:18,774 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:04:19,313 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1089975 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/00147f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args 1091671 3.5G 3.4G 3.5G └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/2gt4/00147f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/ytest.report.trac Test command err: 2025-04-10T10:54:46.209881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:54:46.210216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:54:46.210275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18745, node 1 TClient is connected to server localhost:6029 2025-04-10T10:56:03.599661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:2901:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.601266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.602261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.607900Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2919:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.608105Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2922:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.608697Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2910:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.608893Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2916:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.609687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1421:2177], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.609979Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.610426Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.610505Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.611169Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.611325Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.611400Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.611447Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.611991Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.612282Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.612509Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.615270Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2907:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.615454Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2913:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.615881Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2904:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:56:03.616819Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.616916Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.617560Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.618184Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:56:03.618284Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:56:03.618978Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:56:04.297318Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:04.639774Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:56:04.745311Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:56:06.033315Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 9641, node 2 TClient is connected to server localhost:27106 2025-04-10T10:56:06.729232Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:06.729325Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:06.729385Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:06.734602Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:58:33.833044Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3121:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.835679Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.836394Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.846770Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3117:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.848314Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1743:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.849361Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.850937Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.851837Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:3133:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.851997Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.852216Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1746:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.852435Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [19:1749:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.852909Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3124:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:58:33.853213Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.854465Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.854564Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existen ... tadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.856414Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.856471Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.857042Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.858424Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.858544Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:58:33.859675Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:58:33.859745Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:58:34.540026Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:58:34.918327Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:58:34.968916Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:58:36.102637Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20965, node 11 TClient is connected to server localhost:8899 2025-04-10T10:58:36.875323Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:36.875433Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:36.875522Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-10T10:58:36.876897Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:28.609897Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:3134:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.613888Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.614338Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.616865Z node 25 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [25:2206:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.617969Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:2203:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.618906Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [26:2209:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.620095Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [21:3130:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.620365Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [22:3137:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.621107Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:2215:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.621758Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.621832Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.621909Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.621968Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.622023Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.623298Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.623417Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.623799Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.624010Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:2212:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.624161Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.625042Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.625247Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [23:3140:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:01:28.625509Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.626295Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.626564Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.627492Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:28.627693Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:01:28.627997Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T11:01:29.489718Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:29.884028Z node 20 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T11:01:29.976437Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T11:01:31.441981Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20298, node 20 TClient is connected to server localhost:12560 2025-04-10T11:01:32.676024Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:01:32.676181Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:01:32.676294Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:01:32.677653Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/00147f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, 
on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/00147f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter 2025-04-10 11:04:22,922 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:04:23,553 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1090604 46.0M 45.9M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001454/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args 1092461 3.4G 3.2G 3.3G └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/2gt4/001454/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/ytest.report.trac Test command err: 2025-04-10T10:55:44.643883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3117:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.645753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.646302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.647608Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:1969:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.647831Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:1972:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.650026Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.650128Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.650569Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.650659Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.650855Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1975:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.651620Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1978:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.652419Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.652499Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.652887Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.652939Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.653927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3113:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.655576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.655868Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3126:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.656258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.657291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3120:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.657475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3123:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:55:44.657722Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.657762Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.658609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.658684Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:55:44.659098Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:55:44.659185Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T10:55:45.170857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:45.430223Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T10:55:45.460655Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T10:55:46.055800Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19835, node 1 TClient is connected to server localhost:6382 2025-04-10T10:55:46.356079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:46.356187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:46.356229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:55:46.356729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T10:57:47.956044Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3133:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.956821Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.957711Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:57:47.962234Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3129:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.962527Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3136:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.962772Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3139:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.964964Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.965405Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1995:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.966264Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.966423Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.966479Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:57:47.966779Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:57:47.966877Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:57:47.967124Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1992:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.967507Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1998:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T10:57:47.968469Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.969155Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:57:47.969246Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T10:57:47.969306Z node 18 :METADATA_PR ... tect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:32.244262Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:32.244399Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:00:32.244647Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:1289:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:00:32.245177Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:00:32.245269Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:00:32.246079Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:32.246832Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T11:00:33.189344Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:33.822445Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T11:00:33.878415Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T11:00:35.275157Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1562, node 19 TClient is connected to server localhost:23107 2025-04-10T11:00:36.885974Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:36.886109Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:36.890950Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:36.903165Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:03:46.773756Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:3129:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.777139Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.779647Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.781896Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.782303Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:2224:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.785181Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:1208:2178], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.785402Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.786030Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:2218:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.910628Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:2221:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.911018Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.911954Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.913116Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.913279Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.913913Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.913980Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.914069Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.915529Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3132:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.915775Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.915835Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.916763Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:2227:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.917509Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.918286Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.918573Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:2230:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:03:46.919250Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.919486Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:03:46.920177Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:03:46.920729Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T11:03:47.794673Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:48.166301Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T11:03:48.214472Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T11:03:49.818916Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17221, node 28 TClient is connected to server localhost:16080 2025-04-10T11:03:50.753686Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:50.753819Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:50.753933Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:50.755188Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001454/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001454/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds 
timeout",), {}) >> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace+QueryService >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |74.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |74.1%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] |74.2%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |74.2%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |74.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |74.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> KqpKv::ReadRows_Nulls [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::UnionAllPure |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> ColumnShardTiers::DSConfigs >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 18013, MsgBus: 18346 2025-04-10T11:03:41.897040Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638533230314213:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:41.905400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpwjpySy/pdisk_1.dat 
2025-04-10T11:03:42.451235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:42.453009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:42.453073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:42.466399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18013, node 1 2025-04-10T11:03:42.630891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:42.630917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:42.630930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:42.631116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18346 TClient is connected to server localhost:18346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:43.317428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:45.334485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638550410183962:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:45.334707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:45.738018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 18812, MsgBus: 29067 2025-04-10T11:03:46.802140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638553890921317:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:46.802327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpQtFlya/pdisk_1.dat 2025-04-10T11:03:46.925456Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:46.960778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:46.960875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:46.962593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18812, node 2 2025-04-10T11:03:47.012428Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:47.012452Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:47.012461Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:47.012596Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29067 TClient is connected to server localhost:29067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:47.554071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:50.477218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638571070791160:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.477380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:50.499818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2025-04-10T11:03:50.690078Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 10702, MsgBus: 19510 2025-04-10T11:03:51.604003Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638575663468917:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:51.604228Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpysuKh3/pdisk_1.dat 2025-04-10T11:03:51.837681Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:51.863182Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:51.863289Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:51.865203Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10702, node 3 2025-04-10T11:03:51.924899Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:51.924930Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:51.924941Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:51.925107Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19510 TClient is connected to server localhost:19510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:52.661104Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:55.935659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638592843338768:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:55.935762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:55.981075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:03:56.074519Z node 3 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Missing key columns: Key Trying to start YDB, gRPC: 5463, MsgBus: 20695 2025-04-10T11:03:57.361379Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638598980521714:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpuRyV0q/pdisk_1.da ... e TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037931 not found 2025-04-10T11:04:15.663002Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2025-04-10T11:04:15.838303Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715748:0, at schemeshard: 72057594046644480 2025-04-10T11:04:15.864497Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037932 not found 2025-04-10T11:04:15.972905Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037933 not found 2025-04-10T11:04:15.986358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2025-04-10T11:04:16.126383Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037934 not found 2025-04-10T11:04:16.130187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2025-04-10T11:04:16.257314Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037935 not found 2025-04-10T11:04:16.260787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2025-04-10T11:04:16.453674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-10T11:04:16.470087Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037936 not found 2025-04-10T11:04:16.635776Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037937 not found 2025-04-10T11:04:16.667846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715758:0, at schemeshard: 72057594046644480 2025-04-10T11:04:16.830911Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037938 not found 2025-04-10T11:04:16.832457Z node 5 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-10T11:04:17.026738Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 28427, MsgBus: 10159 2025-04-10T11:04:18.865078Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638689883702220:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:18.865146Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpukYOnt/pdisk_1.dat 2025-04-10T11:04:19.209425Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:19.231339Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:19.231474Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:19.236208Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28427, node 6 2025-04-10T11:04:19.390904Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:19.390939Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:19.390949Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:19.391143Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10159 TClient is connected to server localhost:10159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:20.445478Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:04:20.462865Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:04:23.866926Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638689883702220:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:23.867045Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:25.333653Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:25.663861Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:04:25.668038Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found 2025-04-10T11:04:25.884636Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037889 not found 2025-04-10T11:04:25.993749Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:26.204330Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037890 not found Trying to start YDB, gRPC: 10611, MsgBus: 19752 2025-04-10T11:04:28.465071Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491638736058483318:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:28.526040Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001155/r3tmp/tmpEXlZl1/pdisk_1.dat 2025-04-10T11:04:28.794856Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:28.914863Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:28.914988Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:28.923531Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10611, node 7 2025-04-10T11:04:29.210124Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:29.210167Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:29.210198Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:29.210397Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19752 TClient is connected to server localhost:19752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:30.947552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:33.274466Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491638736058483318:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:33.274575Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:37.214447Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491638774713189503:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:37.214622Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:37.258868Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> YdbQueryService::TestAttachTwice [GOOD] >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |74.3%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> 
TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |74.4%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] >> KqpReturning::Random [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.513315Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.021 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0} 00000.021 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.021 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction 00000.021 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.021 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.025 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0}) 00000.025 DD| RESOURCE_BROKER: Updated planned 
resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.026 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.026 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.026 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.026 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.026 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.027 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.027 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.027 II| FAKE_ENV: All BS storage groups are stopped 00000.027 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.027 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.564299Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.013 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0} 00000.013 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.014 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:8:2055])) 00000.018 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0} 00000.018 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction 00000.018 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.020 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.020 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.020 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:30:2062]) from queue queue_compaction_gen0 00000.021 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.021 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.026 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0}) 00000.026 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.028 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.028 NN| TABLET_SAUSAGECACHE: Poison 
cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.028 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.029 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.029 II| FAKE_ENV: All BS storage groups are stopped 00000.029 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.029 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:31.600927Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.016 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.017 II| FAKE_ENV: Starting storage for BS group 0 00000.017 II| FAKE_ENV: Starting storage for BS group 1 00000.017 II| FAKE_ENV: Starting storage for BS group 2 00000.018 II| FAKE_ENV: Starting storage for BS group 3 00000.021 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0} 00000.021 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.021 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction 00000.021 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.025 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [3:8:2055])) 00000.028 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0} 00000.028 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction 00000.028 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.030 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.030 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.030 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0 00000.030 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.030 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.036 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0}) 00000.036 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.038 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0} 00000.038 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction 00000.038 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.040 
DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.040 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.040 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0 00000.040 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.040 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.067 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0}) 00000.067 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.068 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0} 00000.068 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction 00000.068 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.070 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.070 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.070 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0 00000.070 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.070 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.074 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0}) 00000.074 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.076 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0} 00000.076 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction 00000.076 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.077 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.077 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.078 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0 00000.078 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.078 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062])) 00000.080 DD| R ... 
eader{1:2:97} starting compaction 00000.094 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}} 00000.094 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15 00000.095 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c} 00000.096 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.93KiB,saved=1.93KiB,usage=1.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.100 II| TABLET_EXECUTOR: Leader{1:2:98} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done 00000.104 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction 00000.108 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}} 00000.108 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17 00000.108 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c} 00000.110 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.21KiB,saved=2.21KiB,usage=2.21KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.112 II| TABLET_EXECUTOR: Leader{1:2:111} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done 00000.117 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction 00000.118 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}} 00000.118 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.118 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.120 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.142 II| TABLET_EXECUTOR: Leader{1:2:122} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.152 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.153 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.153 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.153 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.158 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.160 II| TABLET_EXECUTOR: Leader{1:2:135} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done 00000.162 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.162 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.168 II| TABLET_EXECUTOR: Leader{1:2:146} starting compaction 00000.168 II| TABLET_EXECUTOR: Leader{1:2:147} starting Scan{25 on 101, Compact{1.2.146, eph 12}} 00000.169 II| TABLET_EXECUTOR: Leader{1:2:147} started compaction 25 00000.169 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} begin on TSubset{head 13, 1m 1p 0c} 00000.171 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} end=0, 120r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.178 II| TABLET_EXECUTOR: Leader{1:2:147} Compact 25 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 146, product {1 parts epoch 13} done 00000.182 II| TABLET_EXECUTOR: Leader{1:2:158} starting compaction 00000.183 II| TABLET_EXECUTOR: Leader{1:2:159} starting Scan{27 on 101, Compact{1.2.158, eph 13}} 00000.183 II| TABLET_EXECUTOR: Leader{1:2:159} started compaction 27 00000.183 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} begin on TSubset{head 14, 1m 1p 0c} 00000.191 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} end=0, 130r seen, TFwd{fetch=3.44KiB,saved=3.44KiB,usage=3.44KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.192 II| TABLET_EXECUTOR: Leader{1:2:159} Compact 27 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 158, product {1 parts epoch 14} done 00000.196 II| TABLET_EXECUTOR: Leader{1:2:170} starting compaction 00000.197 II| TABLET_EXECUTOR: Leader{1:2:171} starting Scan{29 on 101, Compact{1.2.170, eph 14}} 00000.197 II| TABLET_EXECUTOR: Leader{1:2:171} started compaction 29 00000.197 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} begin on TSubset{head 15, 1m 1p 0c} 00000.207 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} end=0, 140r seen, TFwd{fetch=3.87KiB,saved=3.87KiB,usage=3.87KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.218 II| TABLET_EXECUTOR: Leader{1:2:171} Compact 29 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 170, product {1 parts epoch 15} done 00000.227 II| TABLET_EXECUTOR: Leader{1:2:182} starting compaction 00000.227 II| TABLET_EXECUTOR: Leader{1:2:183} starting Scan{31 on 101, Compact{1.2.182, eph 15}} 00000.227 II| TABLET_EXECUTOR: Leader{1:2:183} started compaction 31 00000.227 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} begin on TSubset{head 16, 1m 1p 0c} 00000.235 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} end=0, 150r seen, TFwd{fetch=4.3KiB,saved=4.3KiB,usage=4.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.236 II| TABLET_EXECUTOR: Leader{1:2:184} Compact 31 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 182, product {1 parts epoch 16} done 00000.245 II| TABLET_EXECUTOR: Leader{1:2:194} starting compaction 00000.245 II| TABLET_EXECUTOR: Leader{1:2:195} starting Scan{33 on 101, Compact{1.2.194, eph 16}} 00000.245 II| TABLET_EXECUTOR: Leader{1:2:195} started compaction 33 00000.245 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} begin on TSubset{head 17, 1m 1p 0c} 00000.251 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} end=0, 160r seen, TFwd{fetch=4.73KiB,saved=4.73KiB,usage=4.73KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.253 II| TABLET_EXECUTOR: Leader{1:2:195} Compact 33 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 194, product {1 parts epoch 17} done 00000.257 II| TABLET_EXECUTOR: Leader{1:2:206} starting compaction 00000.257 II| TABLET_EXECUTOR: Leader{1:2:207} starting Scan{35 on 101, Compact{1.2.206, eph 17}} 00000.257 II| TABLET_EXECUTOR: Leader{1:2:207} started compaction 35 00000.257 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} begin on TSubset{head 18, 1m 1p 0c} 00000.261 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} end=0, 170r seen, 
TFwd{fetch=5.16KiB,saved=5.16KiB,usage=5.16KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.263 II| TABLET_EXECUTOR: Leader{1:2:208} Compact 35 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 206, product {1 parts epoch 18} done 00000.267 II| TABLET_EXECUTOR: Leader{1:2:218} starting compaction 00000.267 II| TABLET_EXECUTOR: Leader{1:2:219} starting Scan{37 on 101, Compact{1.2.218, eph 18}} 00000.267 II| TABLET_EXECUTOR: Leader{1:2:219} started compaction 37 00000.267 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} begin on TSubset{head 19, 1m 1p 0c} 00000.270 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} end=0, 180r seen, TFwd{fetch=5.59KiB,saved=5.59KiB,usage=5.59KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.278 II| TABLET_EXECUTOR: Leader{1:2:220} Compact 37 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 218, product {1 parts epoch 19} done 00000.282 II| TABLET_EXECUTOR: Leader{1:2:230} starting compaction 00000.282 II| TABLET_EXECUTOR: Leader{1:2:231} starting Scan{39 on 101, Compact{1.2.230, eph 19}} 00000.282 II| TABLET_EXECUTOR: Leader{1:2:231} started compaction 39 00000.282 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} begin on TSubset{head 20, 1m 1p 0c} 00000.285 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} end=0, 190r seen, TFwd{fetch=6.02KiB,saved=6.02KiB,usage=6.02KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.287 II| TABLET_EXECUTOR: Leader{1:2:232} Compact 39 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 230, product {1 parts epoch 20} done 00000.291 II| TABLET_EXECUTOR: Leader{1:2:242} starting compaction 00000.291 II| TABLET_EXECUTOR: Leader{1:2:243} starting Scan{41 on 101, Compact{1.2.242, eph 20}} 00000.291 II| TABLET_EXECUTOR: Leader{1:2:243} started compaction 41 00000.291 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} begin on TSubset{head 21, 1m 1p 0c} 00000.294 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} end=0, 200r seen, TFwd{fetch=6.45KiB,saved=6.45KiB,usage=6.45KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.296 II| TABLET_EXECUTOR: Leader{1:2:244} Compact 41 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 242, product {1 parts epoch 21} done 00000.300 II| TABLET_EXECUTOR: Leader{1:2:254} starting compaction 00000.300 II| TABLET_EXECUTOR: Leader{1:2:255} starting Scan{43 on 101, Compact{1.2.254, eph 21}} 00000.300 II| TABLET_EXECUTOR: Leader{1:2:255} started compaction 43 00000.300 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} begin on TSubset{head 22, 1m 1p 0c} 00000.303 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} end=0, 210r seen, TFwd{fetch=6.88KiB,saved=6.88KiB,usage=6.88KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.305 II| TABLET_EXECUTOR: Leader{1:2:256} Compact 43 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 254, product {1 parts epoch 22} done 00000.309 II| TABLET_EXECUTOR: Leader{1:2:266} starting compaction 00000.309 II| TABLET_EXECUTOR: Leader{1:2:267} starting Scan{45 on 101, Compact{1.2.266, eph 22}} 00000.309 II| TABLET_EXECUTOR: Leader{1:2:267} started compaction 45 00000.309 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} begin on TSubset{head 23, 1m 1p 0c} 00000.320 II| 
TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} end=0, 220r seen, TFwd{fetch=7.43KiB,saved=7.43KiB,usage=7.43KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.340 II| TABLET_EXECUTOR: Leader{1:2:267} Compact 45 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 266, product {1 parts epoch 23} done 00000.361 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.370 II| TABLET_EXECUTOR: Leader{1:2:270} suiciding, Waste{2:0, 8879b +(262, 99771b), 269 trc, -99771b acc} 00000.373 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.373 NN| TABLET_SAUSAGECACHE: Poison cache serviced 23 reqs hit {24 76962b} miss {0 0b} 00000.373 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.374 II| FAKE_ENV: DS.0 gone, left {27015b, 269}, put {27035b, 270} 00000.383 II| FAKE_ENV: DS.1 gone, left {111149b, 290}, put {111149b, 290} 00000.384 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.384 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.384 II| FAKE_ENV: All BS storage groups are stopped 00000.384 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.384 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 153}, stopped |74.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |74.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |74.4%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: Trying to start YDB, gRPC: 27534, MsgBus: 2861 2025-04-10T11:03:42.414300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638536469670632:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:42.414384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001134/r3tmp/tmpvhkrE2/pdisk_1.dat 2025-04-10T11:03:42.922752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:42.933309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:42.935965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:42.958724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27534, node 1 2025-04-10T11:03:43.186682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:43.186704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:43.186710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:43.186819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2861 TClient is connected to server localhost:2861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:43.977947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:03:43.999470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:03:44.018342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.205433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.460193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.564418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:46.532611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638553649541617:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:46.532848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:46.929039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.045510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.083688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.126062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.163785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.244313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.342465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638557944509434:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.342582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.346557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638557944509439:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.351422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:03:47.364714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638557944509441:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:03:47.408884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638536469670632:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:47.408977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:47.426602Z node 1 :TX_PROXY ERROR: Actor# [1:7491638557944509496:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:13: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 2877, MsgBus: 12434 2025-04-10T11:03:50.030298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638571367757037:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:50.030450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001134/r3tmp/tmpjRTEE0/pdisk_1.dat 2025-04-10T11:03:50.165232Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:50.186883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:50.186992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:50.191479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2877, node 2 2025-04-10T11:03:50.267842Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:50.267867Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:50.267876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:50.267989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12434 TClient is connected to server localhost:12434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:50.805866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:50.820240Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:03:50.832945Z node 2 :FLAT_TX_SCHEM ... ons } 2025-04-10T11:04:29.534594Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:29.616368Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:29.727741Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:29.820775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:29.916399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:30.030743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:30.121438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:30.214880Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638742536034570:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:30.215046Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:30.218265Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638742536034575:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:30.232412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:30.256362Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638742536034577:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:30.314388Z node 5 :TX_PROXY ERROR: Actor# [5:7491638742536034632:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:31.956783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 17951, MsgBus: 9327 2025-04-10T11:04:35.540522Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638764757368963:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:35.540582Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001134/r3tmp/tmpfWOYT5/pdisk_1.dat 2025-04-10T11:04:36.082552Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:36.161170Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:36.161287Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:36.163000Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17951, node 6 2025-04-10T11:04:36.415131Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:36.415161Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:36.415172Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:36.415356Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9327 TClient is connected to server localhost:9327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:37.667925Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:04:37.676207Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:37.694504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:37.816693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:38.103755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:38.263785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:40.542380Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638764757368963:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:40.542471Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:42.639956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638794822141684:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:42.640099Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:42.747307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:42.836488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:42.927503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:42.990953Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:43.042107Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:43.135936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:43.267510Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638799117109512:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:43.267668Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:43.267805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638799117109517:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:43.275533Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:43.293479Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638799117109519:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:43.395340Z node 6 :TX_PROXY ERROR: Actor# [6:7491638799117109575:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:45.406582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut >> TPersQueueTest::BadSids [GOOD] >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> KqpRanges::IsNotNullInJsonValue2 [GOOD] |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |74.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: Trying to start YDB, gRPC: 31935, MsgBus: 11428 2025-04-10T11:03:42.526670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638535329821238:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:42.526786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00113d/r3tmp/tmpgRWAt2/pdisk_1.dat 2025-04-10T11:03:43.011633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:43.031962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:43.032076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:43.035815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31935, node 1 2025-04-10T11:03:43.184119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:43.184146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:43.184160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:43.184309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11428 TClient is connected to server localhost:11428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:43.958381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:43.991954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:03:44.007528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.256342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.503012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:44.611118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:03:46.339011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638552509692184:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:46.339173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:46.747428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:03:46.793236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:03:46.840222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:03:46.890310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:03:46.939982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:03:46.998343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:03:47.059433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638556804659994:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.059538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.059687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638556804659999:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:47.063791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:03:47.075095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638556804660001:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:03:47.180215Z node 1 :TX_PROXY ERROR: Actor# [1:7491638556804660057:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:47.527253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638535329821238:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:47.527400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:48.451371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:03:48.779947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:03:48.998329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:03:49.217809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:03:49.559727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22773, MsgBus: 64796 2025-04-10T11:03:50.925927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638570323160625:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:50.933043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00113d/r3tmp/tmpZi9530/pdisk_1.dat 2025-04-10T11:03:51.104764Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:51.124294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:51.124400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:51.126033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22773, node 2 2025-04-10T11:03:51.280626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:51.280652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:51.280663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:51.280827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64796 TClient is connected to 
server localhost:64796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRo ... d: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:31.343396Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:31.343695Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638748726975443:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:31.349840Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:31.367341Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638748726975445:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:31.471493Z node 5 :TX_PROXY ERROR: Actor# [5:7491638748726975501:3469] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:33.428251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:04:33.964037Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:04:34.293853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:04:34.675270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:04:35.499029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10407, MsgBus: 23135 2025-04-10T11:04:37.668050Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638772484121953:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:37.668120Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00113d/r3tmp/tmp0yoEzG/pdisk_1.dat 2025-04-10T11:04:38.007592Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:38.058209Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:38.058330Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:38.063128Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10407, node 6 2025-04-10T11:04:38.166834Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:38.166863Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:38.166874Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:38.167030Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23135 TClient is connected to server localhost:23135 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:39.049282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:39.058948Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:39.081599Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:39.194296Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:39.482926Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:39.623459Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:42.669028Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638772484121953:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:42.669152Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:44.155276Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638802548894802:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:44.155383Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:44.296171Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:44.405202Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:44.593507Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:44.776024Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:44.882589Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:45.010205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:45.154377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638806843862659:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:45.154546Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:45.155133Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638806843862664:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:45.165078Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:45.206413Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638806843862666:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:45.290034Z node 6 :TX_PROXY ERROR: Actor# [6:7491638806843862724:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:47.195516Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:04:47.670236Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:04:47.959396Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:04:48.265568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:04:48.779722Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 |74.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut >> KqpYql::NonStrictDml |74.6%| [LD] {RESULT} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> KqpScripting::EndOfQueryCommit |74.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink |74.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 17997, msgbus: 25857 2025-04-10T11:00:00.366732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637582342052752:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:00.366779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001b24/r3tmp/tmpe7eiCE/pdisk_1.dat 2025-04-10T11:00:00.883177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:00.889707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:00.889812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17997, node 1 2025-04-10T11:00:00.896824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-10T11:00:00.916634Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T11:00:00.959482Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T11:00:00.991013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:00:00.991031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:00:00.991038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:00:00.993101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25857 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:00:01.319329Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Handle TEvNavigate describe path dc-1 2025-04-10T11:00:01.319373Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020763:2438] HANDLE EvNavigateScheme dc-1 2025-04-10T11:00:01.320252Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020763:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:01.396933Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020763:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } TClient::Ls response: 2025-04-10T11:00:01.408119Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020763:2438] Handle TEvDescribeSchemeResult Forward to# [1:7491637586637020762:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 
18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:00:01.440296Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Handle TEvProposeTransaction 2025-04-10T11:00:01.440326Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:00:01.440450Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491637586637020787:2448] 2025-04-10T11:00:01.533430Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:00:01.533540Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:00:01.533566Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:00:01.533657Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:00:01.534100Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:01.534372Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:00:01.534469Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:00:01.534654Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:00:01.535470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:00:01.538785Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:00:01.538841Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020787:2448] txid# 281474976710657 SEND to# [1:7491637586637020786:2447] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-04-10T11:00:01.559727Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Handle TEvProposeTransaction 2025-04-10T11:00:01.559760Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:00:01.559791Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491637586637020831:2485] 2025-04-10T11:00:01.561871Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:00:01.561943Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:00:01.561959Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:00:01.562023Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:00:01.562336Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:00:01.562465Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:00:01.562506Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:00:01.562640Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:00:01.563069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:00:01.566432Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:00:01.566481Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020831:2485] txid# 281474976710658 SEND to# [1:7491637586637020830:2484] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:00:01.613564Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Handle 
TEvProposeTransaction 2025-04-10T11:00:01.613599Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:00:01.613638Z node 1 :TX_PROXY DEBUG: actor# [1:7491637582342052996:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491637586637020849:2495] 2025-04-10T11:00:01.616153Z node 1 :TX_PROXY DEBUG: Actor# [1:7491637586637020849:2495] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known ... che 2025-04-10T11:03:48.524545Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:03:48.524696Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:03:48.524742Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T11:03:48.524888Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T11:03:48.526348Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:03:48.533244Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T11:03:48.533298Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892477:2536] txid# 281474976710660 SEND to# [59:7491638561049892476:2343] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T11:03:48.577226Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7491638561049892476:2343], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:03:48.670274Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Handle TEvProposeTransaction 2025-04-10T11:03:48.670335Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T11:03:48.670415Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7491638561049892541:2580] 2025-04-10T11:03:48.673857Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T11:03:48.673955Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:03:48.673990Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T11:03:48.683086Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:03:48.683273Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:03:48.683531Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:03:48.683754Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:03:48.683826Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T11:03:48.684007Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T11:03:48.693289Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 Status StatusAlreadyExists HANDLE 
{TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:03:48.693662Z node 59 :TX_PROXY ERROR: Actor# [59:7491638561049892541:2580] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:48.693712Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892541:2580] txid# 281474976710661 SEND to# [59:7491638561049892476:2343] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T11:03:48.724837Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Handle TEvProposeTransaction 2025-04-10T11:03:48.724878Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T11:03:48.724930Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7491638561049892564:2591] 2025-04-10T11:03:48.728226Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51316" 2025-04-10T11:03:48.728314Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:03:48.728342Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:03:48.728407Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:03:48.728817Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:03:48.728953Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:03:48.729020Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-10T11:03:48.729185Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 HANDLE EvClientConnected 2025-04-10T11:03:48.738012Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 
2025-04-10T11:03:48.738096Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892564:2591] txid# 281474976710662 SEND to# [59:7491638561049892563:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-10T11:03:48.827720Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Handle TEvProposeTransaction 2025-04-10T11:03:48.827758Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] TxId# 281474976710663 ProcessProposeTransaction 2025-04-10T11:03:48.827812Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7491638561049892597:2605] 2025-04-10T11:03:48.830865Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51348" 2025-04-10T11:03:48.830953Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:03:48.830982Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-04-10T11:03:48.831041Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:03:48.831434Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:03:48.831483Z node 59 :TX_PROXY ERROR: Actor# [59:7491638561049892597:2605] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-04-10T11:03:48.831600Z node 59 :TX_PROXY ERROR: Actor# [59:7491638561049892597:2605] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-10T11:03:48.831634Z node 59 :TX_PROXY DEBUG: Actor# [59:7491638561049892597:2605] txid# 281474976710663 SEND to# [59:7491638561049892596:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:03:48.832522Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NTg2ZjVkMTMtOWQ4NDI1ZTctZGEwNDhjNGItMmQzYTBkNTI=, ActorId: [59:7491638561049892582:2353], ActorState: ExecuteState, TraceId: 01jrfnxka57m9ry5twwn374tbb, Create QueryResponse for error on request, msg: 2025-04-10T11:03:48.833313Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:03:48.833337Z node 59 :TX_PROXY DEBUG: actor# [59:7491638535280087985:2113] TxId# 281474976710664 ProcessProposeKqpTransaction |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |74.6%| [LD] {RESULT} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] |74.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |74.6%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestAttachTwice [GOOD] Test command err: 2025-04-10T10:59:27.966092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637441830478281:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:59:27.966219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d27/r3tmp/tmp7pU2eo/pdisk_1.dat 2025-04-10T10:59:29.001991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T10:59:29.109463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:59:29.119494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:59:29.119587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:59:29.144321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13851, node 1 2025-04-10T10:59:29.488507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:59:29.488532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:59:29.488540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:59:29.488642Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:59:30.509551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:26337 2025-04-10T10:59:31.265111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T10:59:31.265704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:31.266292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T10:59:31.266340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T10:59:31.266432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T10:59:31.266491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T10:59:31.266543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T10:59:31.266578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-10T10:59:31.266855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-10T10:59:31.280299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-10T10:59:31.280658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T10:59:31.280680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:31.280823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T10:59:31.280864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-10T10:59:31.293850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: 
Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T10:59:31.294043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore waiting... 2025-04-10T10:59:31.294321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T10:59:31.294345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T10:59:31.294507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T10:59:31.294673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T10:59:31.294701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637450420413359:2399], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-10T10:59:31.294733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637450420413359:2399], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-10T10:59:31.294806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T10:59:31.294851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-10T10:59:31.295675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: 
"hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T10:59:31.296355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 7 ... ks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2025-04-10T11:04:20.980797Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2025-04-10T11:04:20.980821Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2025-04-10T11:04:20.980842Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2025-04-10T11:04:20.983223Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2025-04-10T11:04:20.983275Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2025-04-10T11:04:20.983299Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2025-04-10T11:04:20.983320Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2025-04-10T11:04:20.983345Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-04-10T11:04:20.983375Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished 2025-04-10T11:04:20.983427Z node 22 :KQP_COMPUTE DEBUG: SelfId: [22:7491638699508536709:3218], TxId: 281474976715670, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=. 
TraceId : 01jrfnyh9tahxez35qra41243b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:04:20.983715Z node 22 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. pass away 2025-04-10T11:04:20.984012Z node 22 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:04:20.984322Z node 22 :KQP_COMPUTE DEBUG: SelfId: [22:7491638699508536709:3218], TxId: 281474976715670, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=. TraceId : 01jrfnyh9tahxez35qra41243b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [22:7491638699508536630:3144] TaskId: 65 Stats: CpuTimeUs: 17539 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 657 FinishTimeMs: 1744283060979 ComputeCpuTimeUs: 219 BuildCpuTimeUs: 438 HostName: "ghrun-x5vqckr7qy" NodeId: 22 CreateTimeMs: 1744283060735 } MaxMemoryUsage: 1048576 2025-04-10T11:04:20.985355Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7491638699508536630:3144] TxId: 281474976715670. Ctx: { TraceId: 01jrfnyh9tahxez35qra41243b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [22:7491638699508536709:3218], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 17539 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 657 FinishTimeMs: 1744283060979 ComputeCpuTimeUs: 219 BuildCpuTimeUs: 438 HostName: "ghrun-x5vqckr7qy" NodeId: 22 CreateTimeMs: 1744283060735 } MaxMemoryUsage: 1048576 } 2025-04-10T11:04:20.985479Z node 22 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jrfnyh9tahxez35qra41243b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [22:7491638699508536709:3218] 2025-04-10T11:04:20.985700Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7491638699508536630:3144] TxId: 281474976715670. Ctx: { TraceId: 01jrfnyh9tahxez35qra41243b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:04:20.985812Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7491638699508536630:3144] TxId: 281474976715670. Ctx: { TraceId: 01jrfnyh9tahxez35qra41243b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.004777s ReadRows: 0 ReadBytes: 0 ru: 3 rate limiter was not found force flag: 1 2025-04-10T11:04:20.985978Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T11:04:20.987123Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 400.446 QueriesCount: 1 2025-04-10T11:04:20.987228Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T11:04:20.987426Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T11:04:20.987498Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, EndCleanup, isFinal: 1 2025-04-10T11:04:20.987621Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: ExecuteState, TraceId: 01jrfnyh9tahxez35qra41243b, Sent query response back to proxy, proxyRequestId: 5, proxyId: [22:7491638609314219160:2183] 2025-04-10T11:04:20.987694Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: unknown state, TraceId: 01jrfnyh9tahxez35qra41243b, Cleanup temp tables: 0 2025-04-10T11:04:20.993618Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283060037, txId: 18446744073709551615] shutting down 2025-04-10T11:04:20.993846Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=OGJjZmJkZmItY2M0Y2EzYmMtZjNlZDk5NWUtNWYwZjI2Njc=, ActorId: [22:7491638695213569271:3144], ActorState: unknown state, TraceId: 01jrfnyh9tahxez35qra41243b, Session actor destroyed RESULT: [] --------------------- STATS: total CPU: 3565 duration: 1803 usec cpu: 1803 usec duration: 371460 usec cpu: 425830 usec { name: "/Root/OlapStore/log1" } 2025-04-10T11:04:21.002929Z node 22 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[22:7491638622199122045:2327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-10T11:04:21.069668Z node 22 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[22:7491638622199122048:2328];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:04:21.246873Z node 22 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;parent=[22:7491638622199122042:2326];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:04:26.077225Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7491638727571757406:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:26.077335Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d27/r3tmp/tmph00Za2/pdisk_1.dat 2025-04-10T11:04:27.162248Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:04:27.240712Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:27.418553Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:27.418736Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:27.426604Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28292, node 25 2025-04-10T11:04:27.844136Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:27.844161Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:27.844178Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:27.844440Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:29.857983Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:04:31.078375Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7491638727571757406:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:31.078512Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] |74.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |74.7%| [LD] {RESULT} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [GOOD] Test command err: 2025-04-10T11:00:58.688842Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:00:58.688921Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:00:59.386406Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:00:59.386480Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info === Server->StartServer(false); 2025-04-10T11:01:00.195280Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491637840864649671:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.195322Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:00.321349Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491637839135955370:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.605251Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:01:00.630571Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a95/r3tmp/tmp7OANqX/pdisk_1.dat 2025-04-10T11:01:00.810713Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:01:01.293211Z node 3 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:01:01.495950Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:01.606086Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:01.606213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:01.609563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:01:01.609641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:01:01.623599Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:01:01.623719Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:01:01.626494Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21740, node 3 2025-04-10T11:01:01.978897Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a95/r3tmp/yandex4gG0Am.tmp 2025-04-10T11:01:01.978919Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a95/r3tmp/yandex4gG0Am.tmp 2025-04-10T11:01:01.979070Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a95/r3tmp/yandex4gG0Am.tmp 2025-04-10T11:01:01.979187Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:01:02.220335Z INFO: TTestServer started on Port 20501 GrpcPort 21740 TClient is connected to server localhost:20501 PQClient connected to localhost:21740 === TenantModeEnabled() = 0 === Init PQ - start server on port 21740 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:01:03.376159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:01:03.376349Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.376554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:01:03.376760Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:01:03.376789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:01:03.379230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:01:03.379332Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:01:03.379473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.379513Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:01:03.379527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-10T11:01:03.379540Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-10T11:01:03.382677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.382718Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:01:03.382735Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-10T11:01:03.384687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.384719Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.384892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:01:03.384935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:01:03.389553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-04-10T11:01:03.389871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:01:03.389887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-10T11:01:03.389910Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:01:03.394673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-10T11:01:03.394878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:01:03.399526Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282863442, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:01:03.399680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282863442 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:01:03.399710Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:01:03.400019Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-10T11:01:03.400046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:01:03.400205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:01:03.400267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:01:03.407712Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:01:03.407735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:01:03.407930Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:01:03.407944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7491637845159617549:2391], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-10T11:01:03.407983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:01:03.408011Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-10T11:01:03.408112Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T11:01:03.408122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:01:03.408140Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T11:01:03.408153Z node 3 
:FLAT_TX_SCHEMESHARD ... rtitionChooser [21:7491638809630322709:2559] (SourceId=base64:aa, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T11:04:46.914568Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7491638809630322709:2559] (SourceId=base64:aa, PreferedPartition=(NULL)) ReplyResult: Partition=3, SeqNo=(NULL) 2025-04-10T11:04:46.914607Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7491638809630322709:2559] (SourceId=base64:aa, PreferedPartition=(NULL)) Start idle 2025-04-10T11:04:46.914665Z node 21 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 3 expectedGeneration: (NULL) 2025-04-10T11:04:46.916499Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [21:7491638809630322740:2559], now have 1 active actors on pipe 2025-04-10T11:04:46.916628Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=3) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 22, Generation: 1 2025-04-10T11:04:46.916795Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:04:46.916844Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 3 2025-04-10T11:04:46.916991Z node 22 :PERSQUEUE INFO: new Cookie base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 generated for partition 3 topic 'rt3.dc1--topic1' owner base64:aa 2025-04-10T11:04:46.917133Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 3 2025-04-10T11:04:46.917198Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 3 messageNo: 0 requestId: cookie: 0 2025-04-10T11:04:46.918621Z node 21 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 3 MaxSeqNo: 0 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 2025-04-10T11:04:46.919593Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744283086919 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:04:46.919785Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Write session established. Init response: session_id: "base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0" topic: "topic1" cluster: "dc1" partition_id: 3 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T11:04:46.920070Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write 1 messages with Id from 1 to 1 2025-04-10T11:04:46.920191Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-04-10T11:04:46.920669Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: try to update token 2025-04-10T11:04:46.920768Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-10T11:04:46.921409Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:04:46.921754Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=3) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T11:04:46.917892Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:04:46.917938Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 3 2025-04-10T11:04:46.918082Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 3 messageNo: 0 requestId: cookie: 0 2025-04-10T11:04:46.922164Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:04:46.922193Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 3 2025-04-10T11:04:46.922325Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 3 messageNo: 0 requestId: cookie: 1 2025-04-10T11:04:46.923144Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=3) Received event: NActors::IEventHandle 2025-04-10T11:04:46.924442Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:04:46.924489Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 3 2025-04-10T11:04:46.925015Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--topic1 partition: 3 SourceId: '\0base64:aa' SeqNo: 1 partNo : 0 messageNo: 1 size 92 offset: -1 2025-04-10T11:04:46.925392Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Topic 'rt3.dc1--topic1' partition 3 part blob processing sourceId '\0base64:aa' seqNo 1 partNo 0 2025-04-10T11:04:46.992979Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Topic 'rt3.dc1--topic1' partition 3 part blob complete sourceId '\0base64:aa' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 169 count 1 nextOffset 1 batches 1 2025-04-10T11:04:46.994127Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Add new write blob: topic 'rt3.dc1--topic1' partition 3 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000003_00000000000000000000_00000_0000000001_00000| size 157 WTime 1744283086993 2025-04-10T11:04:46.995420Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:04:46.995502Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] --- delete ---------------- 2025-04-10T11:04:46.995563Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] [x0000000003, x0000000004) 2025-04-10T11:04:46.995623Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] --- write ----------------- 2025-04-10T11:04:46.995687Z node 22 :PERSQUEUE DEBUG: [PQ: 
72075186224037892, Partition: 3, State: StateIdle] m0000000003pbase64:aa 2025-04-10T11:04:46.995704Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] d0000000003_00000000000000000000_00000_0000000001_00000| 2025-04-10T11:04:46.995722Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] i0000000003 2025-04-10T11:04:46.995779Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] --- rename ---------------- 2025-04-10T11:04:46.995837Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] =========================== 2025-04-10T11:04:46.995935Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:04:46.996070Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 3 offset 0 partNo 0 count 1 size 157 2025-04-10T11:04:47.003712Z node 22 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 3 offset 0 count 1 size 157 actorID [22:7491638803868334999:2422] 2025-04-10T11:04:47.003887Z node 22 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 3 offset 0 partno 0 count 1 parts 0 size 157 2025-04-10T11:04:47.003933Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:04:47.004012Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::ReplyWrite. Partition: 3 2025-04-10T11:04:47.004129Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Answering for message sourceid: '\0base64:aa', Topic: 'rt3.dc1--topic1', Partition: 3, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T11:04:47.004143Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 3 messageNo: 1 requestId: cookie: 1 2025-04-10T11:04:47.004429Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Topic 'rt3.dc1--topic1' partition 3 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:04:47.004473Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Topic 'rt3.dc1--topic1' partition 3 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:04:47.004665Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] read cookie 0 Topic 'rt3.dc1--topic1' partition 3 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T11:04:47.004714Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T11:04:47.004722Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=3) Received event: NActors::IEventHandle 2025-04-10T11:04:47.004783Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-10T11:04:47.004815Z node 22 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:04:47.004916Z node 22 :PERSQUEUE DEBUG: Topic 'rt3.dc1--topic1' partition 3 user user readTimeStamp done, result 1744283086924 queuesize 0 startOffset 0 2025-04-10T11:04:47.005582Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 3 write_statistics { persist_duration_ms: 10 queued_in_partition_duration_ms: 67 } 2025-04-10T11:04:47.005646Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: acknoledged message 1 2025-04-10T11:04:47.020365Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session will now close 2025-04-10T11:04:47.020481Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: aborting 2025-04-10T11:04:47.021352Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:04:47.021463Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0] Write session: destroy 2025-04-10T11:04:47.023071Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 grpc read done: success: 0 data: 2025-04-10T11:04:47.023121Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 grpc read failed 2025-04-10T11:04:47.023205Z node 21 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 2025-04-10T11:04:47.023246Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|a28b9486-7ddd159e-57b60007-9153d1a3_0 is DEAD 2025-04-10T11:04:47.023750Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=3) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:04:47.024271Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [21:7491638809630322740:2559] destroyed 2025-04-10T11:04:47.024359Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::DropOwner. 
>> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/login/ut/ydb-library-login-ut |74.9%| [LD] {RESULT} $(B)/ydb/library/login/ut/ydb-library-login-ut |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |74.9%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |75.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink |75.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/login/ut/ydb-library-login-ut >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TKeyValueTest::TestCleanUpDataOnEmptyTablet >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::Offset |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> DataStreams::TestStreamStorageRetention >> DataStreams::TestControlPlaneAndMeteringData |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> DataStreams::TestReservedResourcesMetering >> TNetClassifierTest::TestInitFromRemoteSource >> DataStreams::TestUpdateStream |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |75.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> 
KqpExtractPredicateLookup::PointJoin >> DataStreams::TestGetShardIterator |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 120.516927 None domains 1 old (ns): 66.82464156 None domains 9 new (ns): 119.0430724 None domains 9 old (ns): 65.69940077 Mirror3 domains 4 new (ns): 127.6178747 Mirror3 domains 4 old (ns): 65.87673699 Mirror3 domains 9 new (ns): 129.5051369 Mirror3 domains 9 old (ns): 64.49657965 4Plus2Block domains 8 new (ns): 126.0659374 4Plus2Block domains 8 old (ns): 73.20346037 4Plus2Block domains 9 new (ns): 130.755522 4Plus2Block domains 9 old (ns): 76.07216079 ErasureMirror3of4 domains 8 new (ns): 149.4871722 ErasureMirror3of4 domains 8 old (ns): 82.85040862 ErasureMirror3of4 domains 9 new (ns): 121.272809 ErasureMirror3of4 domains 9 old (ns): 67.01252953 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 4511, MsgBus: 27898 2025-04-10T10:58:04.051133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637080520679618:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:04.051218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cac/r3tmp/tmpZjcIhH/pdisk_1.dat 2025-04-10T10:58:04.716275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:58:04.716401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:58:04.726307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:58:04.758514Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4511, node 1 2025-04-10T10:58:04.951741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:58:04.951767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:58:04.951775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:58:04.951894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27898 TClient is connected to server localhost:27898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:58:05.929929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:05.978578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:58:05.992667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:06.210743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:06.427743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:06.539352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:58:09.058272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637080520679618:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:58:09.058334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:58:09.994244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637106290485179:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:09.994336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:10.616344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:58:10.702596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:58:10.770460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:58:10.867227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:58:10.920934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:58:11.079843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:58:11.179079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637114880420302:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:11.179161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:11.179531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637114880420307:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:58:11.183832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:58:11.213046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491637114880420309:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:58:11.293906Z node 1 :TX_PROXY ERROR: Actor# [1:7491637114880420366:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:58:14.293642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T10:58:15.644281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jrfnkdxsf4frcq2kamk5ffy4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.655119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfnkdxsf4frcq2kamk5ffy4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.761659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfnke227m8d27remhdgz6b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzNGViZWEtNDhhYzA2MDYtODZkODQ0OTYtZjVlNDViYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.775183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfnke227m8d27remhdgz6b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzNGViZWEtNDhhYzA2MDYtODZkODQ0OTYtZjVlNDViYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.858370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jrfnke53bp9255x8tm76cf9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.871786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfnke53bp9255x8tm76cf9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.977056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfnke8y4r55zh31y13hpg63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzNGViZWEtNDhhYzA2MDYtODZkODQ0OTYtZjVlNDViYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:15.994850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jrfnke8y4r55zh31y13hpg63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzNGViZWEtNDhhYzA2MDYtODZkODQ0OTYtZjVlNDViYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:16.124043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. 
Ctx: { TraceId: 01jrfnkedg8zf38ge9vhpwyy9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:16.175003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jrfnkedg8zf38ge9vhpwyy9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIxZjBkMjMtMTgxZGVkNDUtMjEwNDM3MC1jMWYwYWUxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T10:58:16.257289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jrfnkehr49244d9vnphczjh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UzNGViZWEtNDhhYzA2MDYtODZkODQ0OTYtZjVlNDViYjk=, CurrentExecutionId: , CustomerSupplie ... : TxId: 281474976720629. Ctx: { TraceId: 01jrfnzkgafke4rr30wrtb7fh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.549943Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720630. Ctx: { TraceId: 01jrfnzkgafke4rr30wrtb7fh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.587516Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720631. Ctx: { TraceId: 01jrfnzkhnfgys4tajexsarfwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.595829Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720632. Ctx: { TraceId: 01jrfnzkhnfgys4tajexsarfwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.633605Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720633. Ctx: { TraceId: 01jrfnzkk13nttdf6tf5rce9m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.639988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720634. Ctx: { TraceId: 01jrfnzkk13nttdf6tf5rce9m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.679869Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720635. Ctx: { TraceId: 01jrfnzkmjedv4etxm6c0zjcwk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.691233Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720636. Ctx: { TraceId: 01jrfnzkmjedv4etxm6c0zjcwk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:04:54.737286Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720637. Ctx: { TraceId: 01jrfnzkp5cqmqg6q978nfmk5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.777690Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720638. Ctx: { TraceId: 01jrfnzkp5cqmqg6q978nfmk5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.825273Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720639. Ctx: { TraceId: 01jrfnzks0ee08p68cpgtt5y7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.836258Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720640. Ctx: { TraceId: 01jrfnzks0ee08p68cpgtt5y7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.890189Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720641. Ctx: { TraceId: 01jrfnzkv00g9vc3n0yqqn4yrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.904532Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720642. Ctx: { TraceId: 01jrfnzkv00g9vc3n0yqqn4yrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.953423Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720643. Ctx: { TraceId: 01jrfnzkx2cyme1rvt3zsxswbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:54.963720Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720644. Ctx: { TraceId: 01jrfnzkx2cyme1rvt3zsxswbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.020830Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720645. Ctx: { TraceId: 01jrfnzkz5f8x6v7bf25a8ymkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.030621Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720646. Ctx: { TraceId: 01jrfnzkz5f8x6v7bf25a8ymkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.073440Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720647. 
Ctx: { TraceId: 01jrfnzm0w8c6rjhvb1d5y4yqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.083566Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720648. Ctx: { TraceId: 01jrfnzm0w8c6rjhvb1d5y4yqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.127825Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720649. Ctx: { TraceId: 01jrfnzm2j1xzvtgjnwxdpn0c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.135011Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720650. Ctx: { TraceId: 01jrfnzm2j1xzvtgjnwxdpn0c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.183220Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720651. Ctx: { TraceId: 01jrfnzm4aa7pyawvyxpxv4qn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.191253Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720652. Ctx: { TraceId: 01jrfnzm4aa7pyawvyxpxv4qn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.230066Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720653. Ctx: { TraceId: 01jrfnzm5s15bw2he4e3zr6m50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.238724Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720654. Ctx: { TraceId: 01jrfnzm5s15bw2he4e3zr6m50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.277916Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720655. Ctx: { TraceId: 01jrfnzm7740ewvk4t49mtbd6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.297860Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720656. Ctx: { TraceId: 01jrfnzm7740ewvk4t49mtbd6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.347002Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720657. 
Ctx: { TraceId: 01jrfnzm9d797dwxvxf7mkd18j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.355663Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720658. Ctx: { TraceId: 01jrfnzm9d797dwxvxf7mkd18j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.403326Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720659. Ctx: { TraceId: 01jrfnzmb42ymadaee8jzgyekp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.409454Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720660. Ctx: { TraceId: 01jrfnzmb42ymadaee8jzgyekp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.471592Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720661. Ctx: { TraceId: 01jrfnzmd8dbv1pfwajg0vpsen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.480404Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720662. Ctx: { TraceId: 01jrfnzmd8dbv1pfwajg0vpsen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTI1OTFmNTYtMWQyYWQ1ZjMtODZjZTVkNWMtMzE4Zjg1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.541283Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720663. Ctx: { TraceId: 01jrfnzmfedpfrnwhtyqj85ys7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:04:55.563098Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jrfnzmfedpfrnwhtyqj85ys7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTgwN2IzNGYtNTg5ZmE0MmEtZjUzNjEyZGYtNTRhMzA3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-04-10T11:05:03.082239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638884772769799:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.083138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001889/r3tmp/tmp2uUGSo/pdisk_1.dat 2025-04-10T11:05:03.785067Z node 1 :HTTP ERROR: (#26,[::1]:11821) connection closed with error: Connection refused 2025-04-10T11:05:03.785494Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-10T11:05:03.787700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:03.793615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:03.816978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:03.828530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:03.899465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:03.899485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:03.899494Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:03.904748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TNetClassifierTest::TestInitFromFile [GOOD] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |75.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> 
KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-04-10T11:05:04.498840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638889512552161:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:04.505071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001883/r3tmp/tmpjF02zv/pdisk_1.dat 2025-04-10T11:05:05.291943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:05.299398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:05.299533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:05.305787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:05.344587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001883/r3tmp/yandex6vxIb0.tmp 2025-04-10T11:05:05.344614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001883/r3tmp/yandex6vxIb0.tmp 2025-04-10T11:05:05.344996Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001883/r3tmp/yandex6vxIb0.tmp 2025-04-10T11:05:05.345175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> KqpSort::Offset [GOOD] >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 
ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 2), [2, 4), [4, 6), [6, 8), [8, 9), [9, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 
DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 
RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | P ... 
{Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 
6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + 
BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: 
{4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 24205, MsgBus: 18044 2025-04-10T11:04:06.752752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638640498913571:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:06.753913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010fe/r3tmp/tmpx28Uz4/pdisk_1.dat 2025-04-10T11:04:07.557599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:07.598169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:07.598276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:07.604526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24205, node 1 2025-04-10T11:04:07.740448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:07.740471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:07.740478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:07.740599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18044 TClient is connected to server localhost:18044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:04:09.008795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:09.023039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:11.753011Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638640498913571:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:11.753070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:11.879251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638661973750630:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:11.879359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:12.518237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:04:12.674024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:04:12.748687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:04:12.822363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2025-04-10T11:04:12.880637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-10T11:04:12.909625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:04:12.956921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2025-04-10T11:04:13.005119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.041691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2025-04-10T11:04:13.082416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.117999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2025-04-10T11:04:13.205026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.254359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.301589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2025-04-10T11:04:13.358753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.405346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2025-04-10T11:04:13.455229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-04-10T11:04:13.530498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638670563686578:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:13.530560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:13.800402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638670563686888:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:13.800523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:13.800878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638670563686893:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:13.805265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-10T11:04:13.816428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638670563686895:2457], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-10T11:04:13.916480Z node 1 :TX_PROXY ERROR: Actor# [1:7491638670563686953:3541] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 23], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [] Trying to start YDB, gRPC: 4405, MsgBus: 18712 2025-04-10T11:04:20.899110Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638699378147833:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:20.899201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010fe/r3tmp/tmpez2IW7/pdisk_1.dat 2025-04-10T11:04:21.240447Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:21.280970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:21.281058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:21.287141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4405, node 2 2025-04-10T11:04:21.500196Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:21.500218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:21.500231Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:21.500381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable con ... : [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638844919446593:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:54.956162Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.019696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.096233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.168221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.205224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.243060Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491638827739575674:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:55.243157Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:55.280312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.346421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.427352Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638849214414413:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.427467Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.427870Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638849214414418:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.433029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:55.455681Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638849214414420:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:55.509835Z node 5 :TX_PROXY ERROR: Actor# [5:7491638849214414475:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26231, MsgBus: 4076 2025-04-10T11:04:59.159031Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638865238197094:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:59.159385Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010fe/r3tmp/tmpR5KmhF/pdisk_1.dat 2025-04-10T11:04:59.334829Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:59.372958Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:59.373087Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:59.379720Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26231, node 6 2025-04-10T11:04:59.450857Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:59.450887Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:59.450898Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:59.451048Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4076 TClient is connected to server localhost:4076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:00.334874Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:00.350331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:05:00.431456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:05:00.633574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:00.725274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:04.162363Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638865238197094:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:04.162526Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:04.743215Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638886713035350:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:04.743355Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:04.779274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:04.835139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:04.921846Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.015125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.068203Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.226085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.358622Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638891008003170:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.358756Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.359061Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638891008003175:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.363619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:05.381481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638891008003177:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:05:05.469103Z node 6 :TX_PROXY ERROR: Actor# [6:7491638891008003236:3467] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: Trying to start YDB, gRPC: 15945, MsgBus: 1360 2025-04-10T11:03:56.817061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638594783799278:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:56.817114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001110/r3tmp/tmpyLmzZj/pdisk_1.dat 2025-04-10T11:03:57.467244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:57.467634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:57.467728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:57.476994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15945, node 1 2025-04-10T11:03:57.709532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:57.709560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:57.709570Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:57.709747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1360 TClient is connected to server localhost:1360 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:58.474885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:58.532789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:58.799522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:59.049839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:59.145625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:01.260094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638616258637533:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:01.260426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:01.744647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.820725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638594783799278:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:01.820865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:01.852783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.890430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.934809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.979574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:02.040136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:02.131795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638620553605347:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:02.131913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:02.132772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638620553605352:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:02.137767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:02.161115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638620553605354:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:02.220122Z node 1 :TX_PROXY ERROR: Actor# [1:7491638620553605411:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:03.319297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:04:03.372949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:04:03.449943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20620, MsgBus: 24683 2025-04-10T11:04:08.712140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638647070272554:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:08.721153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001110/r3tmp/tmpz011bm/pdisk_1.dat 2025-04-10T11:04:08.964222Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20620, node 2 2025-04-10T11:04:09.059190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:09.067697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:09.072762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:04:09.105335Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:09.105367Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:09.105378Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:09.105539Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24683 TClient is connected to server localhost:24683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:09.941574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:09.957634Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:09.969560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, a ... 28632Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:55.547683Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638848255897684:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.547790Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.618912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.663621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.720960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.775324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.826766Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.877207Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.971612Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638848255898199:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.971770Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.971845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638848255898204:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.976586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:55.989678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638848255898206:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:56.064845Z node 5 :TX_PROXY ERROR: Actor# [5:7491638852550865558:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10087, MsgBus: 16860 2025-04-10T11:05:00.304199Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638871433002352:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:00.304328Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001110/r3tmp/tmpN1pucg/pdisk_1.dat 2025-04-10T11:05:00.505609Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:00.508131Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:00.508274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:00.514051Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10087, node 6 2025-04-10T11:05:00.574564Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:00.574605Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:00.574634Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:00.574804Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16860 TClient is connected to server localhost:16860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:01.268018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:01.277941Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:01.292868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:01.438895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:01.793513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:02.130581Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:05.320877Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638871433002352:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:05.321022Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:05.389473Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638892907840612:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.389807Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.416148Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.470028Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.550449Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.598740Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.647112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.710975Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:06.015796Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638897202808428:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:06.015964Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:06.016363Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638897202808433:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:06.023895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:06.052347Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638897202808435:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:06.123609Z node 6 :TX_PROXY ERROR: Actor# [6:7491638897202808494:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13726, MsgBus: 14929 2025-04-10T11:03:55.445267Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638594361004656:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:55.446404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001119/r3tmp/tmpGESbAN/pdisk_1.dat 2025-04-10T11:03:55.975825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:55.975926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:55.992195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:56.018466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13726, node 1 2025-04-10T11:03:56.135872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:56.135891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:56.135899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:56.136000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14929 TClient is connected to server localhost:14929 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:56.778807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:56.816594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:57.055539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:57.291527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:03:57.414555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.590282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638611540875604:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:59.590455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:59.979478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.021832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.084361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.168400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.204906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.243833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:00.308115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638615835843413:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:00.308212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:00.308610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638615835843418:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:00.315716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:00.332116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638615835843420:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:00.401282Z node 1 :TX_PROXY ERROR: Actor# [1:7491638615835843474:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:00.449682Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638594361004656:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:00.449759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:01.778346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.874998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:04:01.979695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5843, MsgBus: 65509 2025-04-10T11:04:06.566756Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638639827128310:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:06.566813Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001119/r3tmp/tmppURmcS/pdisk_1.dat 2025-04-10T11:04:06.821396Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:06.849207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:06.849303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:06.852903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5843, node 2 2025-04-10T11:04:06.970731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:06.970760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:06.970768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:06.970907Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65509 TClient is connected to server localhost:65509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:07.608217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:07.617317Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:04:07.629528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, ... not found or you don't have access permissions } 2025-04-10T11:04:54.904928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:54.969293Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.028893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.080429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.141037Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.203219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:55.208164Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491638827435601869:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:55.208242Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:55.298642Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638848910440636:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.298788Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.299150Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491638848910440641:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:55.305769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:55.327604Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638848910440643:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:55.402898Z node 5 :TX_PROXY ERROR: Actor# [5:7491638848910440698:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:56.679390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26945, MsgBus: 65056 2025-04-10T11:04:59.256572Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638867015231401:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001119/r3tmp/tmp0R7pTk/pdisk_1.dat 2025-04-10T11:04:59.311572Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:04:59.411166Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:59.461692Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:59.461851Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:59.463888Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26945, node 6 2025-04-10T11:04:59.534855Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:59.534887Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:59.534899Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:59.535082Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65056 TClient is connected to server localhost:65056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:00.298381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:00.325106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:00.417790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:00.674548Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:00.774385Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:04.226270Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638867015231401:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:04.226370Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:04.845352Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638888490069500:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:04.845460Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:04.920352Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.006566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.124433Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.183714Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.255798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.341414Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:05.443366Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638892785037323:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.443503Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.444097Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638892785037328:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.451909Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:05.468121Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638892785037330:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:05:05.543016Z node 6 :TX_PROXY ERROR: Actor# [6:7491638892785037386:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:07.188629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> ColumnShardTiers::TieringUsage >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-04-10T11:05:07.726304Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638902020869443:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:07.726419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001862/r3tmp/tmp6JD2wA/pdisk_1.dat 2025-04-10T11:05:08.305922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:08.306078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:08.312017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:08.317610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:08.368912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001862/r3tmp/yandexDB2kGm.tmp 2025-04-10T11:05:08.368948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001862/r3tmp/yandexDB2kGm.tmp 2025-04-10T11:05:08.369071Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-04-10T11:05:08.369114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/2gt4/001862/r3tmp/yandexDB2kGm.tmp 2025-04-10T11:05:08.369222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.3%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> S3SettingsConversion::Port [GOOD] >> KqpYql::JsonNumberPrecision [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> KqpScripting::ExecuteYqlScriptPg [GOOD] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 24452, MsgBus: 28958 2025-04-10T11:04:53.864327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638842287845702:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:53.864475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d44/r3tmp/tmpVovZld/pdisk_1.dat 2025-04-10T11:04:54.858448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:54.895163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:54.895288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:54.931861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:04:54.932694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:04:55.141197Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.110623s 2025-04-10T11:04:55.141335Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.110781s TServer::EnableGrpc on GrpcPort 24452, node 1 2025-04-10T11:04:56.136982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:56.137013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:56.137021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:56.137180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28958 TClient is connected to server localhost:28958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:58.295750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.404656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.734910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.866020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638842287845702:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:58.866100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:58.876481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.950996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:59.270232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638868057651078:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:59.270452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.425026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.465301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.498142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.531078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.578175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.661452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.770468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638876647586288:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.770563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.770876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638876647586293:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.774855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:01.794884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638876647586295:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:01.911994Z node 1 :TX_PROXY ERROR: Actor# [1:7491638876647586353:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18358, MsgBus: 20500 2025-04-10T11:05:06.500744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638898997022455:2237];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d44/r3tmp/tmppKEPW7/pdisk_1.dat 2025-04-10T11:05:06.632450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:05:06.693284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:06.693403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:06.698088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:06.706777Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18358, node 2 2025-04-10T11:05:06.793674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:06.793694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:06.793706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:06.793824Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20500 TClient is connected to server localhost:20500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:07.485287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:07.492377Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:07.501239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:07.597203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:05:07.859536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:07.995140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:10.676899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638916176893216:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:10.677036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:10.744035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.794753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.843100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.883125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.924577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.967779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.065062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638920471861029:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.065136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.065424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638920471861034:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.069842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:11.084145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638920471861036:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:11.175337Z node 2 :TX_PROXY ERROR: Actor# [2:7491638920471861091:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:11.478384Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638898997022455:2237];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:11.478484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 3856, MsgBus: 28789 2025-04-10T11:04:53.858651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638840524827068:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:53.858744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d3c/r3tmp/tmp6wAzb2/pdisk_1.dat 2025-04-10T11:04:54.846850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:54.874791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:54.874920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:54.923622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:04:54.925008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:04:55.142130Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.111217s 2025-04-10T11:04:55.142246Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.111340s TServer::EnableGrpc on GrpcPort 3856, node 1 2025-04-10T11:04:56.134107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:56.134142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:56.134177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:56.134352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28789 TClient is connected to server localhost:28789 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:58.295580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.404687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:04:58.736598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:04:58.848962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:58.866550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638840524827068:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:58.868813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:58.893179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:59.268978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638866294632553:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:59.269165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.427938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.465868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.508113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.546533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.595804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.637840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.766859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638874884567753:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.766976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.767327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638874884567758:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.775475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:01.797499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638874884567760:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:02.024120Z node 1 :TX_PROXY ERROR: Actor# [1:7491638879179535112:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:03.939364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:06.027781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283105754, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 65275, MsgBus: 3426 2025-04-10T11:05:07.110639Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638900768081201:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:07.110721Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d3c/r3tmp/tmpV4HbpP/pdisk_1.dat 2025-04-10T11:05:07.245353Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:07.273059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:07.273149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:07.274865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65275, node 2 2025-04-10T11:05:07.366800Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:07.366825Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:07.366832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:07.366960Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3426 TClient is connected to server localhost:3426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:08.019552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.039452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.158027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.369722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.469266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:10.920329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638913652984781:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:10.920508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:10.981340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.054541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.096192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.143902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.190667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.244799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.332300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638917947952596:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.332396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.332833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638917947952601:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:11.337035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:11.352782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638917947952603:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:05:11.433633Z node 2 :TX_PROXY ERROR: Actor# [2:7491638917947952659:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:12.111050Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638900768081201:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:12.111129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |75.4%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::CreateExtSubdomain |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> ColumnShardTiers::DSConfigsStub >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage |75.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> KqpPg::LongDomainName [GOOD] >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> S3SettingsConversion::FoldersStrictStyle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 61201, MsgBus: 14491 2025-04-10T11:02:34.873781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638243273505688:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.891827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cd4/r3tmp/tmpE4wKHm/pdisk_1.dat 2025-04-10T11:02:35.871746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:35.925683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:35.927674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:35.930580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:35.949059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61201, node 1 2025-04-10T11:02:36.265409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:36.265435Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:36.265445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:36.265583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14491 TClient is connected to server localhost:14491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:37.298803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:37.316026Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:39.874034Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638243273505688:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.874106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:41.026789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638273338277416:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.026937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.027479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638273338277428:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:41.032025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:41.054914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638273338277430:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:41.135467Z node 1 :TX_PROXY ERROR: Actor# [1:7491638273338277481:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24710, MsgBus: 18955 2025-04-10T11:02:42.794447Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638278630100998:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cd4/r3tmp/tmpU0caZb/pdisk_1.dat 2025-04-10T11:02:42.939644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:02:43.063185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:43.155638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:43.155728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:43.159583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24710, node 2 2025-04-10T11:02:43.315413Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:43.315437Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:43.315445Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:43.315576Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18955 TClient is connected to server localhost:18955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:44.393505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:02:44.400638Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:47.710311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638300104937977:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.710421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.710732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491638300104937989:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:47.715485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:02:47.732241Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:02:47.733156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491638300104937991:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:02:47.738357Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491638278630100998:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:47.738441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:47.821827Z node 2 :TX_PROXY ERROR: Actor# [2:7491638300104938043:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3538, MsgBus: 29016 2025-04-10T11:02:49.391229Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638307848611587:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:49.391273Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cd4/r3tmp/tmpmXGtre/pdisk_1.dat 2025-04-10T11:02:49.894910Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:49.933754Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:49.942458Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:49.945247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServ ... se } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:59.790251Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:03.498449Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491638863210039711:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.498582Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:05.010800Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638893274811464:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.010894Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491638893274811444:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.014570Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:05.042513Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:05:05.066408Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491638893274811468:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:05:05.182896Z node 10 :TX_PROXY ERROR: Actor# [10:7491638893274811519:2347] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:05.246097Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 15054, MsgBus: 25539 2025-04-10T11:05:09.967986Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638909053044967:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:09.968074Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cd4/r3tmp/tmpqhvc0T/pdisk_1.dat 2025-04-10T11:05:10.362128Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:10.382571Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:10.382744Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:10.387784Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15054, node 11 2025-04-10T11:05:10.583044Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:10.583076Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:10.583090Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:10.583285Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25539 TClient is connected to server localhost:25539 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-04-10T11:05:11.741490Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:14.969353Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638909053044967:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:14.969496Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:17.202270Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638943412783927:2339], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:17.202420Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638943412783919:2336], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:17.202564Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:17.209847Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:05:17.236119Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638943412783944:2340], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:05:17.320364Z node 11 :TX_PROXY ERROR: Actor# [11:7491638943412783995:2350] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:17.359047Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:19.249133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:19.249247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.249318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:19.249359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:19.249420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:19.249453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:19.249542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.249640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:19.250057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:19.395959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:19.396028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:19.426282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:05:19.427088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:19.427568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:19.450782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:19.451547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:19.490123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.490848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:19.550388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.601891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.601976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.618330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:19.618434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.618518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:19.618787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.631916Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:19.846458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:19.855175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.870055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:19.879983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:19.880101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.889173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.889332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:19.889551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:05:19.889683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:19.889748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:19.889786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:19.891846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.891911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:19.891957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:19.893735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.893785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.893823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.893893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.907208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:19.913093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:19.920750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:19.922192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.924062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:19.924167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.924414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:19.924528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-10T11:05:19.935000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.935384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:19.935751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:19.935932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.935965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.936054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:19.936151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:19.936219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:19.936295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:19.936337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:19.936378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:19.939124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.438839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.438872Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-10T11:05:20.438920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:20.439803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.439890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.439916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.439945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-10T11:05:20.439973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:05:20.440044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-10T11:05:20.442543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-10T11:05:20.442711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-10T11:05:20.443477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:20.443582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:20.443628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-10T11:05:20.443712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:20.443752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-10T11:05:20.443788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-04-10T11:05:20.445864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.446003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.447462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-10T11:05:20.447515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:20.447667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-04-10T11:05:20.447852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:20.447949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-04-10T11:05:20.450109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:20.450175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:20.450312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-10T11:05:20.450425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:20.450471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-10T11:05:20.450511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-10T11:05:20.450800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-10T11:05:20.450844Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-10T11:05:20.450875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-04-10T11:05:20.451723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.451823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.451860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.451900Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-10T11:05:20.451936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:20.453500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.453593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.453619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.453648Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-10T11:05:20.453688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:05:20.453766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-10T11:05:20.455546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-10T11:05:20.455588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-04-10T11:05:20.455654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-10T11:05:20.455694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.455729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-10T11:05:20.455762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.455785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-04-10T11:05:20.455820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.455845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-10T11:05:20.455862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-10T11:05:20.455915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-10T11:05:20.456216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:20.456247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T11:05:20.456290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-10T11:05:20.456681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:20.456728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T11:05:20.456782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:20.457534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 
2025-04-10T11:05:20.459276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.461477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:05:20.461570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-10T11:05:20.461981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-10T11:05:20.462020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-10T11:05:20.462624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-10T11:05:20.462711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-10T11:05:20.462740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:659:2650] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:19.235048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:19.235218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.235299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:19.235347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:19.237329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:19.237425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:19.237595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.237720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:19.242952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:19.394572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:19.394649Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-10T11:05:19.417906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:19.418274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:19.418489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:19.448909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:19.449332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:19.490040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.490839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:19.546572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.601727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.601820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.617836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:19.617980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.618129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:19.618462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.647277Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:19.845748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:19.854539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.870290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:19.881638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:19.881810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.891478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.891649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:19.891843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.891924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:19.891982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:19.892026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:19.899305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.899405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:19.899455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:19.903119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.903207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.903272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.903339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.907621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:19.915363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:19.920803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:19.922213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.923930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:19.923995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.924281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:19.924417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:19.935016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.935393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:19.935750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:19.935931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.935976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.936085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:19.936195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:19.936274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:19.936359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:19.936406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:19.936443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:19.939351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.416414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.416442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.416475Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-10T11:05:20.416506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:20.417257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.417357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.417411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.417439Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-10T11:05:20.417469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:05:20.417542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-10T11:05:20.421624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-10T11:05:20.421803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-10T11:05:20.423019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:20.423166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:20.423223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-10T11:05:20.423265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:20.423293Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-10T11:05:20.423529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-04-10T11:05:20.423698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:20.423754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-10T11:05:20.424918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.425004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-04-10T11:05:20.426708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:20.426806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:20.426926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-10T11:05:20.427036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:20.427068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-10T11:05:20.427101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-10T11:05:20.427335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-10T11:05:20.427366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-10T11:05:20.427419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-10T11:05:20.427452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.427479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-10T11:05:20.427520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.427547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-10T11:05:20.427568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:20.427587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-10T11:05:20.427623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-10T11:05:20.427695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:05:20.427719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-04-10T11:05:20.427756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2025-04-10T11:05:20.427778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-04-10T11:05:20.428539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.428603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.428624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.428655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-10T11:05:20.428685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:20.430852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.430962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:20.430995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:20.431048Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-10T11:05:20.431085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-10T11:05:20.431209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-10T11:05:20.431634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:20.431673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T11:05:20.431757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-10T11:05:20.432274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:20.432327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-10T11:05:20.432399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:20.434853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.437562Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:20.437678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:05:20.437772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-10T11:05:20.438141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-10T11:05:20.438202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-10T11:05:20.438797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-10T11:05:20.438910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-10T11:05:20.438958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:649:2640] TestWaitNotification: OK eventTxId 112 |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |76.2%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect 
[GOOD] Test command err: Trying to start YDB, gRPC: 3354, MsgBus: 22826 2025-04-10T11:04:03.702590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638628187827508:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:03.702717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001107/r3tmp/tmpOoH6NZ/pdisk_1.dat 2025-04-10T11:04:04.473989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:04.479909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:04.482107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:04:04.506413Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3354, node 1 2025-04-10T11:04:04.807930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:04.807952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:04.807963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:04.808068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22826 TClient is connected to server localhost:22826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:04:05.748706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:05.792174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:04:05.808880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:06.077789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:04:06.349567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:06.502874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:04:08.702442Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638628187827508:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:08.702523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:04:08.883480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638649662665763:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:08.883662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:09.414747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.513778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.564840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.649750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.717117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.796011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:04:09.874504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638653957633581:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:09.874604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:09.874818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638653957633586:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:09.883363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:04:09.904661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638653957633588:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:04:09.969676Z node 1 :TX_PROXY ERROR: Actor# [1:7491638653957633643:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:11.474809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:04:11.563821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:04:11.651817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
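(A note on the repeated pattern above: the KQP workload service first fails to fetch the default resource pool with NOT_FOUND, schedules its creation, and the subsequent create reports "path exist, request accepts it". This looks like the ordinary first-use bootstrap of /Root/.metadata/workload_manager/pools/default racing with concurrent queries, not a test failure; this interpretation is inferred from the log alone.)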
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 22331, MsgBus: 14057 2025-04-10T11:04:17.697451Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638687948500866:2252];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001107/r3tmp/tmpErWUle/pdisk_1.dat 2025-04-10T11:04:17.830372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:04:17.934510Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:18.019570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:18.019708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:18.027456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22331, node 2 2025-04-10T11:04:18.230396Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: ( ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.355356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:01.369548Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491638876734436568:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:05:01.467895Z node 5 :TX_PROXY ERROR: Actor# [5:7491638876734436624:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:03.060693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:03.149840Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:05:03.249320Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 17696, MsgBus: 1788 2025-04-10T11:05:09.493281Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491638911076879258:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:09.493447Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001107/r3tmp/tmpXsh1rr/pdisk_1.dat 2025-04-10T11:05:09.893555Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:09.924943Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:09.925064Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:09.931987Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17696, node 6 2025-04-10T11:05:10.100321Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:10.100349Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:10.100366Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:10.100544Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1788 TClient is connected to server localhost:1788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:10.984754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:11.017708Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:05:11.109992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.419726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:11.560988Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:14.494056Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491638911076879258:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:14.494190Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:15.313539Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638936846684671:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:15.313674Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:15.427218Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.518439Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.628668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.697566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.779994Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.861361Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.936025Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638936846685203:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:15.936139Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:15.936202Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491638936846685208:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:15.941777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:15.956520Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491638936846685210:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:16.046092Z node 6 :TX_PROXY ERROR: Actor# [6:7491638941141652563:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:17.772074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.835141Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.926025Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
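The code-1108 warnings above all repeat the same suggestion. A minimal sketch of applying it, assuming a hypothetical table t with a nullable column key (the queries actually run by the test are not shown in this log):

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- With the pragma enabled, IN over a collection that may contain NULL
-- follows ANSI three-valued logic (no match against a NULL item yields
-- NULL rather than false), and YQL should stop emitting warning 1108.
SELECT * FROM t WHERE key IN (1, 2, NULL);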
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-04-10T11:05:03.407709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638884902259255:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.407770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e2e/r3tmp/tmp7kFubA/pdisk_1.dat 2025-04-10T11:05:04.142463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:04.149232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:04.162692Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:04.179720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16593, node 1 2025-04-10T11:05:05.761495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:05.761531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:05.761551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:05.761717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:08.294167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.414655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638884902259255:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:08.414761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:08.826935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5349 2025-04-10T11:05:09.093591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:10.155939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.542126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:12.544258Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638921574579895:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:12.544355Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e2e/r3tmp/tmp1YywfH/pdisk_1.dat 2025-04-10T11:05:12.778057Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:12.800535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:12.800651Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:12.806117Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25281, node 4 2025-04-10T11:05:12.959016Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:12.959039Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:12.959051Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:12.959197Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13015 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:13.247394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:13.437394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13015 2025-04-10T11:05:13.746538Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-04-10T11:05:15.138286Z node 4 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480 2025-04-10T11:05:16.603901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:16.753287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.016929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.303261Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.544571Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491638921574579895:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:17.544687Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:19.410787Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491638954298649980:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:19.410969Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e2e/r3tmp/tmprzDjO9/pdisk_1.dat 2025-04-10T11:05:19.708679Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:19.761714Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:19.761821Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:19.768776Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26633, node 7 2025-04-10T11:05:19.969518Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:19.969552Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:19.969564Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:19.969724Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4770 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:20.363099Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:20.461163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4770 2025-04-10T11:05:20.727858Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> DataStreams::TestShardPagination [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |76.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TSchemeShardUserAttrsTest::MkDir >> TDataShardLocksTest::Points_OneTx >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TSchemeShardUserAttrsTest::SpecialAttributes >> TSchemeShardUserAttrsTest::SetAttrs |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> 
TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-04-10T11:05:03.382837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638886448951838:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.382919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e27/r3tmp/tmpyP9y92/pdisk_1.dat 2025-04-10T11:05:04.168974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:04.173204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:04.173307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:04.186112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61771, node 1 2025-04-10T11:05:05.766967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:05.766991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:05.767000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:05.767151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:08.293984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:08.385344Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638886448951838:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:08.385452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:08.827027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5362 2025-04-10T11:05:09.102921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:09.126819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-04-10T11:05:10.124719Z node 1 :TX_PROXY ERROR: Actor# [1:7491638916513725164:3408] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:12.178083Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638922093918913:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:12.178138Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e27/r3tmp/tmpUf7VAG/pdisk_1.dat 2025-04-10T11:05:12.467779Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7878, node 4 2025-04-10T11:05:12.560114Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:12.560212Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:12.581486Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:12.665349Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:12.665387Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:12.665397Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:12.665556Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:12.970609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
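To unpack the retention error near the start of this block: a (retention hours, reserved storage MiB) pair is accepted only if it fits one of the two advertised combinations, hours in [0, 24] with storage 0, or hours in [0, 168] with storage in [51200, 1048576]. The rejected request asked for hours = 168 with storage = 40960 MiB, and 40960 < 51200, so a 7-day retention needs at least 51200 MiB (50 GiB) reserved. This reading is taken directly from the error text; the server-side rule may have further conditions not printed here.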
2025-04-10T11:05:13.119592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2450 2025-04-10T11:05:13.387518Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:17.180621Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491638922093918913:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:17.180781Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:20.799188Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491638956845741487:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:20.799602Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e27/r3tmp/tmpZW6L9U/pdisk_1.dat 2025-04-10T11:05:21.147186Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:21.214929Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:21.215958Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:21.235347Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61970, node 7 2025-04-10T11:05:21.487141Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:21.487171Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:21.487182Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:21.487341Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:21.968894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:22.060350Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15089 2025-04-10T11:05:22.298776Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.493095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.493232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.493284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.493330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.493388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.493416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.493475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.493559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.493818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-10T11:05:27.570345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:27.570408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:27.587123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:27.587483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:27.587671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:27.599241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:27.599572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:27.600260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.600686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:27.604336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.605792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.605848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.606065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:27.606115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.606178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:27.606348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.614476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:27.737339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:27.737642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.737851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:27.738125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:27.738212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.741041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.741194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:27.741433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.741492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:27.741528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:27.741562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:27.743929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.743989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:27.744027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:27.746199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.746248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.746307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.746357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.761974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:27.766064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:27.766348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:27.767423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.767608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.767679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.768003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:27.768057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.768252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-10T11:05:27.768357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:27.771231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.771282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.771496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.771540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:27.771787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.771835Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:27.771931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:27.771966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.772005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:27.772070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.772113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:27.772158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.772215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:27.772245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:27.772343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:27.772387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:27.772418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:27.775034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:27.775159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:27.775198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
.908080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:05:27.908502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:05:27.908606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:05:27.908645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.908678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:378:2369] 2025-04-10T11:05:27.908860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:05:27.908902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.908919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:378:2369] 2025-04-10T11:05:27.909005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:05:27.909033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.909061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:378:2369] 2025-04-10T11:05:27.909174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.909191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2369] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:05:27.909559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.909706Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 180us result status StatusSuccess 2025-04-10T11:05:27.910043Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.918663Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.918978Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 289us result status StatusSuccess 2025-04-10T11:05:27.919398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.928942Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.929220Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 325us result status StatusSuccess 2025-04-10T11:05:27.929646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.930358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.930544Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 181us result status StatusSuccess 2025-04-10T11:05:27.930865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.931436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.931643Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 159us result status StatusSuccess 
2025-04-10T11:05:27.931941Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |76.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:19.235581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:19.235688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.235739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:19.235778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:19.238262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:19.238317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:19.238421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:19.238525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:19.238968Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:19.397069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:19.397119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:19.424325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:19.424701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:19.424897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:19.454110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:19.454609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:19.490129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.490891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:19.546558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.599985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.600075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.614524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:19.614655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.614793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:19.615122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.632150Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:19.842143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:19.854445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.870018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:19.879976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:19.880099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.896725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.896966Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:19.897230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.897361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:19.897459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:19.897510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:19.901677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.901797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:19.901840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:19.906403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.906492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.906543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.906625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.913005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:19.917510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:19.921203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:19.922579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:19.922861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.923984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:19.924072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.924276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:19.924397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:19.935116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.935434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:19.935756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.935810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:19.935926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.935962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.936075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:19.936158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.936193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:19.936223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:19.936299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:19.936334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:19.936368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:19.939121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:19.939289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
233409618 TxId: 175 } 2025-04-10T11:05:27.526084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-04-10T11:05:27.526194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-04-10T11:05:27.526233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-04-10T11:05:27.526319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-10T11:05:27.526448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:27.526502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-10T11:05:27.527810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.527955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.527977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.528060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-10T11:05:27.528133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.528153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-10T11:05:27.528176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-10T11:05:27.528386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.528414Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-10T11:05:27.528437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-10T11:05:27.529018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:05:27.529073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:05:27.529092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:05:27.529111Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-10T11:05:27.529132Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:27.529695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:05:27.529747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:05:27.529764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:05:27.529783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-10T11:05:27.529802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-04-10T11:05:27.529850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-10T11:05:27.531462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:05:27.531494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:05:27.531510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:05:27.531649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.531678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-10T11:05:27.531728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T11:05:27.531753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:05:27.531783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T11:05:27.531804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:05:27.531824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-10T11:05:27.531847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:05:27.531866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-10T11:05:27.531885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-10T11:05:27.531993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-10T11:05:27.532765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:05:27.533449Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 2025-04-10T11:05:27.533994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-04-10T11:05:27.534222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 2025-04-10T11:05:27.534507Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2025-04-10T11:05:27.535527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:27.538340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-04-10T11:05:27.538599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 Forgetting tablet 72075186233409619 Forgetting tablet 72075186233409618 2025-04-10T11:05:27.540099Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409620 2025-04-10T11:05:27.541260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-04-10T11:05:27.541516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-10T11:05:27.542065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:05:27.543420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:27.543466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T11:05:27.543576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-10T11:05:27.543835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:27.543871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T11:05:27.543925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:27.546766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-04-10T11:05:27.546820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-04-10T11:05:27.546957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-04-10T11:05:27.546988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-04-10T11:05:27.547223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2025-04-10T11:05:27.547270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:75 tabletId 72075186233409620 2025-04-10T11:05:27.549141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:05:27.549236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-10T11:05:27.550387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-10T11:05:27.550415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-10T11:05:27.551364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-10T11:05:27.551473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.551496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6761:7736] TestWaitNotification: OK eventTxId 175 |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.867301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.867429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.867496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.867546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.867599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.867629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.867707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.867826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.868175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:27.956565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:27.956628Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-10T11:05:27.975517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:27.975905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:27.976087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:27.986233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:27.986536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:27.987288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.987710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:27.990118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.991675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.991832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.992096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:27.992159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.992205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:27.992411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.001153Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:28.141445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.141728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.141968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:28.142274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:28.142341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.144856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.145010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:28.145214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.145271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:28.145331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:28.145399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:28.147814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.147881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:28.147922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:28.149667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.149721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.149786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.149844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.159490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.163458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:28.163724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:28.164806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.164963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.165025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.165343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:28.165417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.165646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:28.165746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:28.168158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.168201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.168439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.168478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:28.168667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.168711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:28.168798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.168837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.168889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.168933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.168968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:28.169120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.169155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:28.169180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:28.169239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.169275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:28.169302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:28.171626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.171767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.171816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4-10T11:05:28.225797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:05:28.225862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:05:28.226257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.226318Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:28.226381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T11:05:28.226521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.227282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.227429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.227489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:05:28.227558Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:05:28.227602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.228254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.228343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.228373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:05:28.228422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:05:28.228459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:05:28.228527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:05:28.231960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:05:28.232100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: 
Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-04-10T11:05:28.232792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.232920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.232991Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-04-10T11:05:28.233173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:05:28.233345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:28.233432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:05:28.235582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:05:28.236684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:05:28.237442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.237495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.237616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:05:28.237695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.237724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:05:28.237752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:05:28.238029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.238066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:05:28.238184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:05:28.238220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:05:28.238277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:05:28.238317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:05:28.238354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:05:28.238404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:05:28.238461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:05:28.238512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:05:28.238581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:05:28.238630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:05:28.238671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:05:28.238702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:05:28.239433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.239541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.239581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:05:28.239632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:05:28.239687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.240418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.240478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:05:28.240505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:05:28.240526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:05:28.240547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:05:28.240611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:05:28.246235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:05:28.246576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-10T11:05:28.249459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { 
Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.249807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.249945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-04-10T11:05:28.252361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.252539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |76.5%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.561676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.561827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.561879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.561926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.561974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.562007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.562070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.562192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.562556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:27.640009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:27.640080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:27.655564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:27.655925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:27.656110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:27.665436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:27.665742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:27.666484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.666906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:27.669221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.670706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.670773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.670993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:27.671039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.671081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:27.671235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.681917Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:27.835231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:27.835449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.835600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:27.835811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:27.835855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.838299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.838442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:27.838583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.838632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:27.838664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:27.838725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:27.842062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.842113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:27.842163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:27.843775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.843817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.843871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.843919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.847589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:27.853175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:27.853404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:27.854564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.854725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.854791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.855175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:27.855254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:27.855476Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:27.855582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:27.857932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.857982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.858172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.858224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:27.858446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.858507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:27.858621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:27.858661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.858705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:27.858758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.858803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:27.858862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:27.858910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:27.858943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:27.859014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:27.859081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:27.859122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:27.861562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:27.861683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:27.861727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:05:27.972228Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:05:27.972357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:05:27.972398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2321] TestWaitNotification: OK eventTxId 102 2025-04-10T11:05:27.973019Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:27.973215Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 223us result status StatusSuccess 2025-04-10T11:05:27.973597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-10T11:05:27.978584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:27.978854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.978987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:05:27.979182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:27.979235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.982920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-10T11:05:27.983139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-04-10T11:05:27.983381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.983429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.983500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-10T11:05:27.983627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:27.986038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-10T11:05:27.987281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-10T11:05:27.987739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.987917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:27.987992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-10T11:05:27.988219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:05:27.988261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:27.988303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:05:27.988334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:27.988409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:05:27.988474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T11:05:27.988534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:05:27.988575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:27.988613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:05:27.988645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:05:27.988719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:05:27.988767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-10T11:05:27.988832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:05:27.994811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.994958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:05:27.995162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.995203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-04-10T11:05:27.995741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:05:27.995870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:05:27.995922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:05:27.995989Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:05:27.996048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:05:27.996181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:05:28.002777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:05:28.003198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:05:28.003272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:05:28.003838Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:05:28.003947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:05:28.003990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:347:2338] TestWaitNotification: OK eventTxId 103 2025-04-10T11:05:28.004648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.004925Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/DirA" took 240us result status StatusSuccess 2025-04-10T11:05:28.005313Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.748481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.748593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.748658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.748729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.748778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.748811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.748875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.748962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.749316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:27.826317Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:27.826381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:27.841939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:27.842233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:27.842424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:27.849278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:27.849565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:27.850126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:27.850575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:27.852637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.853791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:27.853844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:27.853999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:27.854041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:27.854073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:27.854251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.861024Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:27.995473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:27.995749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:27.995993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:27.996285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:27.996354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.002704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.002891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:28.003109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.003177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:28.003222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:28.003262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:28.006852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.006932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:28.006976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:28.009525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.009596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.009667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.009743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.014176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.016908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:28.017163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:28.018347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.018535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.018601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.018915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:28.018991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.019226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:28.019396Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:28.022070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.022123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.022345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.022396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:28.022688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.022749Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:28.022860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.022901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.022966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.023026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.023070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:28.023120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.023178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:28.023220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:28.023307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.023364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:28.023400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:28.025960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.026109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.026170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1/1 2025-04-10T11:05:28.290908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:28.290955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.291002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:05:28.291031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:05:28.291073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-10T11:05:28.291122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-10T11:05:28.291159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-10T11:05:28.291187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-10T11:05:28.291236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:05:28.291305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0 2025-04-10T11:05:28.291338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T11:05:28.291372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-04-10T11:05:28.291403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-10T11:05:28.291903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:28.292157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-04-10T11:05:28.293665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.293700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.293837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:05:28.293886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:05:28.293989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.294031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-10T11:05:28.294074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-04-10T11:05:28.294095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 
72057594046678944, txId: 112, path id: 4 2025-04-10T11:05:28.294709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.294786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.294819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:28.294890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T11:05:28.294956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:05:28.295556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.295621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.295653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:28.295679Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-10T11:05:28.295707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:05:28.296313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.296386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-10T11:05:28.296438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-10T11:05:28.296470Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:05:28.296502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:05:28.296598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-10T11:05:28.300274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:28.300343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 
2025-04-10T11:05:28.300419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:05:28.302656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:28.303080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:28.305114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-10T11:05:28.305219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-10T11:05:28.305734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-10T11:05:28.305782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-10T11:05:28.306555Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-10T11:05:28.306722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-10T11:05:28.306763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:495:2486] TestWaitNotification: OK eventTxId 112 2025-04-10T11:05:28.307584Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.307808Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 251us result status StatusSuccess 2025-04-10T11:05:28.308194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-04-10T11:05:28.311581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.311742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.311859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:05:28.314817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.315025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.956244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.956365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.956421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.956466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.956518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.956548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.956626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.956802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.957200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:28.035223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:28.035273Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-10T11:05:28.049987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:28.050305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:28.050447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:28.058903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:28.059187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:28.059955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.060396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:28.064206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.065608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.065667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.065869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:28.065915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.065954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:28.066119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.074086Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:28.251640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.251945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.252210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:28.252516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:28.252585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.255514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.255683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:28.255920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.255988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:28.256031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:28.256076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:28.258737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.258813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:28.258861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:28.261275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.261340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.261430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.261497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.266028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.268821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:28.269105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:28.270375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.270547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.270625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.270977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:28.271046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.271285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:28.271407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:28.274431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.274494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.274715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.274771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:28.275065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.275137Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:28.275258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.275303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.275351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.275413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.275462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:28.275531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.275581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:28.275624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:28.275721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.275774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:28.275816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:28.278654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.278801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.278847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ationSubscriber for txId 101: satisfy waiter [1:320:2311] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 101 2025-04-10T11:05:28.371092Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.371310Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 264us result status StatusSuccess 2025-04-10T11:05:28.371750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-10T11:05:28.383170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.383341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.383420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:28.383557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:28.383596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.385761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.385879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-04-10T11:05:28.386023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.386056Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.386098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-10T11:05:28.386244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.387895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-10T11:05:28.388056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-10T11:05:28.388438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.388550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.388602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-10T11:05:28.388854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:05:28.388904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:28.388949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:05:28.388994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:28.389055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.389161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T11:05:28.389246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:28.389289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:05:28.389327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:05:28.389356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:05:28.389417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:05:28.389451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-10T11:05:28.389479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 FAKE_COORDINATOR: Erasing txId 103 
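Note on the ApplyIf rejection logged above for txId 113: each ApplyIf { PathId, PathVersion } pair is checked against the current path table, and the whole proposal fails with StatusPreconditionFailed if any pair does not match — here LocalPathId 4 had already been dropped. A minimal Python sketch of that style of check, with invented names (PathState, check_apply_if) and an assumed wording for the version-mismatch message; this is an illustration of the semantics visible in the log, not YDB's actual schemeshard code:

```python
# Toy model of the ApplyIf precondition semantics seen in the txId 113
# rejection above. Names and the version-mismatch message are assumptions;
# the real check happens inside schemeshard's C++ propose path.

from dataclasses import dataclass

@dataclass
class PathState:
    path_id: int
    version: int

def check_apply_if(known_paths: dict[int, PathState],
                   conditions: list[tuple[int, int]]) -> tuple[bool, str]:
    """Every (path_id, expected_version) pair must match current state."""
    for path_id, expected_version in conditions:
        state = known_paths.get(path_id)
        if state is None:
            return False, (f"fail user constraint: ApplyIf section: "
                           f"no path with id LocalPathId: {path_id}")
        if state.version != expected_version:
            # Assumed message; the excerpt only shows the missing-path case.
            return False, (f"fail user constraint: ApplyIf section: "
                           f"version mismatch for LocalPathId: {path_id}")
    return True, "StatusAccepted"

# Mirrors the log: paths 2 and 3 exist at the expected versions, path 4
# was already dropped, so the RmDir proposal is rejected as a whole.
paths = {2: PathState(2, 8), 3: PathState(3, 7)}
ok, reason = check_apply_if(paths, [(2, 8), (3, 7), (4, 3)])
assert not ok
print(reason)
```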
2025-04-10T11:05:28.392003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.392077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.392305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.392376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T11:05:28.392970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:05:28.393091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:05:28.393141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:05:28.393186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-10T11:05:28.393231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.393344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:05:28.395371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:05:28.395694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:05:28.395740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:05:28.396218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:05:28.396315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:05:28.396356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 103 2025-04-10T11:05:28.396942Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.397188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-04-10T11:05:28.397723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:27.997238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:27.997361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.997430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:27.997484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:27.997540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:27.997573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:27.997640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:27.997727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:27.998115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-10T11:05:28.084975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:05:28.085041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:28.104415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:28.104754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:28.104940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:28.119394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:28.119711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:28.120493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.120906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:28.123402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.124811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.124878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.125093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:28.125146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.125183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:28.125325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.137339Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:28.268454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:28.268721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.268926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:28.269211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:28.269283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.275530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.275666Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:28.275871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.275936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:28.275972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:28.276019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:28.278618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.278682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:28.278732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:28.280675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.280735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.280797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.280873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.284850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:28.288441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:28.288665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:28.289798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.289943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.290035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.290378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:28.290446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:28.290648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-10T11:05:28.290743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:28.295092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.295146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.295329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.295382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:28.295653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.295715Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:28.295855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.295904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.295950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:28.296003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.296064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:28.296116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:28.296162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:28.296196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:28.296302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:28.296347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:28.296384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:28.298841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.298960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:28.299019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
5 msg type: 269090816 2025-04-10T11:05:28.479443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-04-10T11:05:28.480677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:28.480808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:28.480867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:05:28.480991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-04-10T11:05:28.481058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:05:28.481100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:05:28.481160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:05:28.481197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:05:28.481264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:05:28.481336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:05:28.481419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-10T11:05:28.481477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:05:28.481520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:05:28.481556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T11:05:28.481634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:05:28.481679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-10T11:05:28.481714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:05:28.481751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-10T11:05:28.483017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:05:28.485743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:05:28.489871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:28.489938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:28.490127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:05:28.490332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:28.490376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-10T11:05:28.490422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-04-10T11:05:28.491156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:05:28.491258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:05:28.491306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:05:28.491351Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:05:28.491406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:05:28.491822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:05:28.491889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:05:28.491912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:05:28.491934Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:05:28.491962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:05:28.492016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T11:05:28.492328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:05:28.492363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:05:28.492434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:05:28.495718Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:05:28.496659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:05:28.497065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T11:05:28.497406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T11:05:28.497455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T11:05:28.498071Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T11:05:28.498208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:05:28.498264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:397:2388] TestWaitNotification: OK eventTxId 105 2025-04-10T11:05:28.498937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.499188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 248us result status StatusPathDoesNotExist 2025-04-10T11:05:28.499386Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:05:28.499952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:05:28.500148Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 201us result status StatusSuccess 2025-04-10T11:05:28.500597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 
1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |76.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff |76.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-04-10T11:05:03.415520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638884178081246:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.415581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e11/r3tmp/tmpwiqxIf/pdisk_1.dat 2025-04-10T11:05:04.101736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:04.174711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:04.174824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:04.203955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12058, node 1 2025-04-10T11:05:05.763187Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:05.763220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:05.763227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:05.763403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
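The interleaved result markers in this output (">> TestName [GOOD]", ">> TestName [FAIL]", the |NN.N%| progress prefixes) are plain text, so a quick pass/fail tally can be scraped with a stdlib regex. A rough sketch, assuming only the ">> NAME [STATUS]" convention visible in this log and nothing about ya's real reporting format — the junit.xml and report.json files named on the command line are the authoritative sources:

```python
# Ad-hoc tally of test statuses over a ya make log fed on stdin.
# Assumes the ">> NAME [GOOD|FAIL]" text convention seen in this output.

import re
import sys
from collections import Counter

STATUS_RE = re.compile(r">> (\S+) \[(GOOD|FAIL)\]")

def tally(log_text: str) -> Counter:
    # findall yields (name, status) tuples; count by status only.
    return Counter(status for _, status in STATUS_RE.findall(log_text))

if __name__ == "__main__":
    for status, n in tally(sys.stdin.read()).most_common():
        print(f"{status}: {n}")
```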
2025-04-10T11:05:08.292077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.416379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638884178081246:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:08.416480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:08.826958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10949 2025-04-10T11:05:09.094531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:09.110728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:05:10.507282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.975708Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037890:1][1:7491638914242854099:2381] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T11:05:11.326702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.735754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.778074Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-10T11:05:11.778125Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-10T11:05:11.778140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-10T11:05:11.778171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-10T11:05:11.778186Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-10T11:05:11.778199Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-10T11:05:11.778213Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found
2025-04-10T11:05:11.778243Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-10T11:05:11.778261Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-10T11:05:11.778274Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-10T11:05:11.778304Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-10T11:05:11.778319Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-10T11:05:11.778331Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-10T11:05:11.778348Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-10T11:05:11.791748Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-10T11:05:11.791783Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-10T11:05:11.813444Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2025-04-10T11:05:11.813507Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-04-10T11:05:11.813558Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2025-04-10T11:05:11.813612Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-04-10T11:05:11.813652Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2025-04-10T11:05:11.813684Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2025-04-10T11:05:11.813718Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 2025-04-10T11:05:11.813762Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2025-04-10T11:05:11.813808Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2025-04-10T11:05:11.813850Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-10T11:05:11.813877Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2025-04-10T11:05:11.813910Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2025-04-10T11:05:11.813939Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2025-04-10T11:05:11.813979Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2025-04-10T11:05:13.810435Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638927122003075:2071];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:05:13.810535Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e11/r3tmp/tmppM99tw/pdisk_1.dat 2025-04-10T11:05:14.042845Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:14.135230Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:14.135338Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19274, node 4 2025-04-10T11:05:14.229634Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:14.339228Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:14.339284Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:14.339295Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:14.339457Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:14.697858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:14.827247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:61304 2025-04-10T11:05:15.124709Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:05:15.137694Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-10T11:05:15.543500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.766946Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:15.958460Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:19.693448Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491638952827718826:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:19.693518Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e11/r3tmp/tmpBu3U4J/pdisk_1.dat 2025-04-10T11:05:19.902802Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:19.928118Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:19.928201Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:19.933812Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13991, node 7 2025-04-10T11:05:20.039054Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:20.039081Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:20.039093Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:20.039267Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:20.346723Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:20.444455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23294 2025-04-10T11:05:20.713270Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:20.729752Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:05:21.066845Z node 7 :TX_PROXY ERROR: Actor# [7:7491638961417655558:3425] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:25.443619Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491638978551676698:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:25.446885Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e11/r3tmp/tmpDmVrzA/pdisk_1.dat 2025-04-10T11:05:25.673568Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:25.749397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:25.749544Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:25.753535Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3963, node 10 2025-04-10T11:05:25.871201Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:25.871231Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:25.871242Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:25.871432Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:26.209828Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:26.311524Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27693 2025-04-10T11:05:26.560147Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|76.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23373, MsgBus: 22907 2025-04-10T11:02:33.816485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638239147327320:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:33.816546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cf8/r3tmp/tmp0ABUfM/pdisk_1.dat 2025-04-10T11:02:34.349932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:34.350047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:34.356075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:34.391462Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23373, node 1 2025-04-10T11:02:34.514821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:34.514854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:34.514864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:34.515009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22907 TClient is connected to server localhost:22907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-10T11:02:35.192480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:35.218875Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-04-10T11:02:37.813677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:02:38.172312Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:38.183633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:02:38.255706Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:38.324761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260622164508:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.324889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.329631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260622164520:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.339095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:02:38.357841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638260622164522:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:02:38.425239Z node 1 :TX_PROXY ERROR: Actor# [1:7491638260622164574:2455] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2025-04-10T11:02:38.815402Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638239147327320:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:38.815529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:38.819967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:02:38.923136Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:38.934709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:02:38.994553Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-04-10T11:02:39.456355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:02:39.521987Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:39.526929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-04-10T11:02:40.100409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:02:40.242430Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:40.254873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:02:40.361255Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-04-10T11:02:40.816887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:02:40.954674Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:40.959711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:02:41.066392Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700
2025-04-10T11:02:41.604861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:02:41.718388Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:41.728831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-10T11:02:41.804602Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-04-10T11:02:42.197233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.278188Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:02:42.284664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.356689Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-04-10T11:02:42.717361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.789126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.854931Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-04-10T11:02:43.363058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-10T11:02:43.487583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-04-10T11:02:44.258616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-10T11:02:44.357118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-10T11:02:44.423215Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill varchar 0 varchar 0 varcha ... e] [TPoolCreatorActor] ActorId: [10:7491638922389226049:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:05:12.661871Z node 10 :TX_PROXY ERROR: Actor# [10:7491638922389226100:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:12.708889Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:05:13.169330Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14569, MsgBus: 1653 2025-04-10T11:05:15.117490Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491638934013075019:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:15.117706Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cf8/r3tmp/tmpZsSIAv/pdisk_1.dat 2025-04-10T11:05:15.317179Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:15.376362Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:15.376533Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:15.379515Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14569, node 11 2025-04-10T11:05:15.481595Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:15.481631Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:15.481650Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:15.481865Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1653 TClient is connected to server localhost:1653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:16.537151Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:20.113892Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491638934013075019:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:20.114028Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:21.407412Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638959782879455:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:21.407861Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:21.408477Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491638959782879482:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:21.417348Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:05:21.449650Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491638959782879484:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:05:21.529013Z node 11 :TX_PROXY ERROR: Actor# [11:7491638959782879535:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:21.598793Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:05:22.598359Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13662, MsgBus: 7675 2025-04-10T11:05:25.058482Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491638979420841553:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:25.059201Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cf8/r3tmp/tmpZMNIan/pdisk_1.dat 2025-04-10T11:05:25.363568Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:25.416645Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:25.416814Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:25.421971Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13662, node 12 2025-04-10T11:05:25.577269Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:25.577303Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:25.577321Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:25.577558Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7675 TClient is connected to server localhost:7675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:26.644629Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:30.042255Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7491638979420841553:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:30.042377Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:31.624605Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491639005190645991:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.624779Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.625357Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7491639005190646017:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.634008Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:05:31.651701Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7491639005190646019:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:05:31.739039Z node 12 :TX_PROXY ERROR: Actor# [12:7491639005190646070:2348] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:31.800798Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::DisableWrongSettings >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:88:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:91:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:92:2057] recipient: [19:90:2119] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:94:2057] recipient: [19:90:2119] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:93:2120] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:147:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:88:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:91:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:92:2057] recipient: [20:90:2119] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:94:2057] recipient: [20:90:2119] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:93:2120] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:147:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:89:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:92:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:93:2057] recipient: [21:91:2119] Leader for TabletID 72057594037927937 is [21:94:2120] sender: [21:95:2057] recipient: [21:91:2119] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> DataStreams::TestReservedConsumersMetering [GOOD] |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} 
{Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b {0, 1} | 7 39 1036b {5, 7} + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 
1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 
8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_2 ... 6:30:2062]) to queue queue_background_compaction 00000.571 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.571 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.582 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (56 by [16:30:2062]) (release resources {1, 0}) 00000.582 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.582 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [16:8:2055]) from queue queue_background_compaction 00000.582 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [16:8:2055]) to queue queue_background_compaction 00000.582 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [16:8:2055])) 00000.582 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.586 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [16:30:2062]) priority=200 resources={1, 0} 00000.586 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_background_compaction 00000.586 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.588 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [16:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.588 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.588 
DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.588 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [16:30:2062]) from queue queue_compaction_gen0 00000.588 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.588 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.597 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by [16:30:2062]) (release resources {1, 0}) 00000.598 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.609 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.620 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.632 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.644 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.644 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [16:30:2062]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.644 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [16:30:2062]) to queue queue_background_compaction 00000.644 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.644 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.647 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [16:30:2062]) 00000.648 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.648 NN| TABLET_SAUSAGECACHE: Poison cache serviced 55 reqs hit {55 29100b} miss {0 0b} 00000.648 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.648 II| FAKE_ENV: DS.0 gone, left {9708b, 90}, put {69361b, 689} 00000.649 II| FAKE_ENV: DS.1 gone, left {49683b, 125}, put {120829b, 750} 00000.649 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.649 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.650 II| FAKE_ENV: All BS storage groups are stopped 00000.650 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.650 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 659}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:20.700412Z 00000.015 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.015 II| FAKE_ENV: Starting storage for BS group 0 00000.016 II| FAKE_ENV: Starting storage for BS group 1 00000.016 II| FAKE_ENV: Starting storage for BS group 2 00000.016 II| FAKE_ENV: Starting storage for BS group 3 00002.149 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.150 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00002.151 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.151 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199545b, 2023} 00002.151 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00002.152 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.152 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.152 II| FAKE_ENV: All BS storage groups are stopped 00002.152 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.152 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:22.868930Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: 
Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00002.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.005 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00002.006 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.006 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199557b, 2023} 00002.006 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00002.006 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.006 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.006 II| FAKE_ENV: All BS storage groups are stopped 00002.006 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.007 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:24.925434Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00001.914 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.915 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.916 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.916 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.916 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00001.916 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.916 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.916 II| FAKE_ENV: All BS storage groups are stopped 00001.916 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.917 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:26.856809Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00001.834 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.836 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00001.836 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.836 II| FAKE_ENV: DS.1 gone, left {2013604b, 4}, put {7237874b, 2026} 00001.837 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.837 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.837 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.837 II| FAKE_ENV: All BS storage groups are stopped 00001.837 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.837 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:28.708643Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| 
FAKE_ENV: Starting storage for BS group 3 00001.578 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.579 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.579 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.580 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.580 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211300b, 2026} 00001.580 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.580 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.580 II| FAKE_ENV: All BS storage groups are stopped 00001.580 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.580 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:30.322252Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00001.799 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.800 NN| TABLET_SAUSAGECACHE: Poison cache serviced 309 reqs hit {1118 6955338b} miss {2 9773b} 00001.801 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.801 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.801 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00001.802 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.802 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.802 II| FAKE_ENV: All BS storage groups are stopped 00001.802 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.802 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:05:32.138936Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00003.108 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00003.109 NN| TABLET_SAUSAGECACHE: Poison cache serviced 651 reqs hit {780 4008302b} miss {1002 6916040b} 00003.110 II| FAKE_ENV: Shut order, stopping 4 BS groups 00003.110 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {200298b, 2033} 00003.110 II| FAKE_ENV: DS.1 gone, left {2023570b, 4}, put {9254542b, 2039} 00003.111 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00003.111 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00003.111 II| FAKE_ENV: All BS storage groups are stopped 00003.111 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00003.111 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |77.0%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> TKesusTest::TestSessionTimeoutAfterDetach ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-04-10T11:05:03.426332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638886568531694:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:03.426441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1c/r3tmp/tmpyIDpY6/pdisk_1.dat 2025-04-10T11:05:04.151030Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:04.173348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:04.173473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:04.189596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:04.368719Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.103830s 2025-04-10T11:05:04.368806Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.103938s TServer::EnableGrpc on GrpcPort 31064, node 1 2025-04-10T11:05:05.762745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:05.762773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:05.762781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:05.762918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:08.294091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:08.411521Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638886568531694:2243];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:08.411651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:08.826937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5640 2025-04-10T11:05:09.103237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:10.157202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: 
"shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: 
"shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "20" shard_id: "s ... lder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283129658-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1744283129,"finish":1744283129},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283129}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283129658-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1744283129,"finish":1744283129},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283129}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283129658-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1744283129,"finish":1744283129},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283129}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744283129710-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744283129,"finish":1744283130},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283130}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283129710-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744283129,"finish":1744283130},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283130}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283129710-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744283129,"finish":1744283130},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283130}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283129710-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744283129,"finish":1744283130},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283130}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744283130752-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744283130,"finish":1744283131},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283131}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283130752-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744283130,"finish":1744283131},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283131}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283130752-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744283130,"finish":1744283131},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283131}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283130752-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744283130,"finish":1744283131},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283131}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744283131769-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744283131,"finish":1744283132},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283132}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283131769-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744283131,"finish":1744283132},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283132}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283131769-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744283131,"finish":1744283132},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283132}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283131769-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744283131,"finish":1744283132},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283132}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744283132802-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744283132,"finish":1744283133},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283133}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283132802-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1744283132,"finish":1744283133},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283133}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283132802-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1744283132,"finish":1744283133},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283133}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283132802-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1744283132,"finish":1744283133},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283133}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1744283133821-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1744283133,"finish":1744283135},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283135}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283133821-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":2,"unit":"second","start":1744283133,"finish":1744283135},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283135}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1744283133821-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":112640,"unit":"mbyte*second","start":1744283133,"finish":1744283135},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1744283135}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1744283133821-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":2,"unit":"byte*second","start":1744283133,"finish":1744283135},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1744283135}' >> TKesusTest::TestUnregisterProxy >> TKesusTest::TestAttachOutOfSequence >> THDRRQuoterResourceTreeRuntimeTest::TestWeights >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestQuoterHDRRParametersValidation >> TKesusTest::TestAcquireLocks >> TKesusTest::TestReleaseLockFailure >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestSessionDetach >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestRegisterProxy >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestKesusConfig >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestAcquireUpgrade >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |77.0%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests >> TKesusTest::TestQuoterAccountResourcesBurst |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TKesusTest::TestAttachNewSessions [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestUnregisterProxyBadGeneration >> TKesusTest::TestAcquireUpgrade [GOOD] >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> TKesusTest::TestAcquireTimeout >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |77.0%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-04-10T11:05:41.288253Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.303485Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.410473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.410629Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.489560Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=1442414472627977190, session=0, seqNo=0) 2025-04-10T11:05:41.510385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532705Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=1442414472627977190, session=1) 2025-04-10T11:05:41.533244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=15971302879689097307, session=0, seqNo=0) 2025-04-10T11:05:41.533416Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.546218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=15971302879689097307, session=2) 2025-04-10T11:05:41.547589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.547798Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.547932Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.563401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.563818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-10T11:05:41.563983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-10T11:05:41.564129Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-10T11:05:41.576272Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=112) 2025-04-10T11:05:41.576678Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-10T11:05:41.576782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-10T11:05:41.576973Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:41.577083Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-04-10T11:05:41.577178Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-10T11:05:41.577318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-10T11:05:41.590434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-10T11:05:41.590531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-04-10T11:05:41.590576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=223) 2025-04-10T11:05:41.590960Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=334, name="Lock2") 2025-04-10T11:05:41.591059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-10T11:05:41.591118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:05:41.608330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=334) 2025-04-10T11:05:41.609080Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:161:2185], cookie=8569589154879463144, name="Lock1") 2025-04-10T11:05:41.609227Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:161:2185], cookie=8569589154879463144) 2025-04-10T11:05:41.609795Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:164:2188], cookie=2579867024025267861, name="Lock2") 2025-04-10T11:05:41.609887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:164:2188], cookie=2579867024025267861) 2025-04-10T11:05:41.789969Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.790090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.792425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.793146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.830806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.831005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:05:41.831076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-04-10T11:05:41.831528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:204:2218], cookie=2256016528613276816, name="Lock1") 2025-04-10T11:05:41.831636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:204:2218], cookie=2256016528613276816) 
2025-04-10T11:05:41.832407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:212:2225], cookie=12127262904201199198, name="Lock2") 2025-04-10T11:05:41.832516Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:212:2225], cookie=12127262904201199198) 2025-04-10T11:05:42.438137Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.438246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.452089Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.452217Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.482020Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.482979Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=17280866280110693006, session=0, seqNo=0) 2025-04-10T11:05:42.483150Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.496175Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=17280866280110693006, session=1) 2025-04-10T11:05:42.496506Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=11907434543616534526, session=0, seqNo=0) 2025-04-10T11:05:42.496661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.508742Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=11907434543616534526, session=2) 2025-04-10T11:05:42.509966Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.510141Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:42.510259Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.530847Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-04-10T11:05:42.531224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-10T11:05:42.531393Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-10T11:05:42.531498Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-10T11:05:42.544856Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2025-04-10T11:05:42.545285Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=1, semaphore="Lock1" count=1) 2025-04-10T11:05:42.545617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.545725Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-10T11:05:42.545830Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 
2025-04-10T11:05:42.558752Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2025-04-10T11:05:42.558846Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2025-04-10T11:05:42.558879Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2025-04-10T11:05:42.559466Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2181], cookie=9536222123779707897, name="Lock1") 2025-04-10T11:05:42.559560Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2181], cookie=9536222123779707897) 2025-04-10T11:05:42.560038Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=190186072337809372, name="Lock2") 2025-04-10T11:05:42.560105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=190186072337809372) 2025-04-10T11:05:42.560547Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2187], cookie=18404387813686641756, name="Lock1") 2025-04-10T11:05:42.560613Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2187], cookie=18404387813686641756) 2025-04-10T11:05:42.561132Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2190], cookie=665096208907436091, name="Lock2") 2025-04-10T11:05:42.561207Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2190], cookie=665096208907436091) 2025-04-10T11:05:42.561498Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=444, session=2, semaphore="Lock2" count=1) 2025-04-10T11:05:42.561670Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:05:42.574000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=444) 2025-04-10T11:05:42.574686Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2195], cookie=16350833634276638246, name="Lock2") 2025-04-10T11:05:42.574782Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2195], cookie=16350833634276638246) 2025-04-10T11:05:42.575309Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2198], cookie=13588504043089105541, name="Lock2") 2025-04-10T11:05:42.575388Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2198], cookie=13588504043089105541) 2025-04-10T11:05:42.594591Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.594691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.595330Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.595979Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.631820Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.631985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.632051Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 
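The trace above exercises Kesus's counting-semaphore queue: every acquire carries a count, an acquire with count=18446744073709551615 (uint64 max) acts as an exclusive lock, and waiters are admitted strictly in arrival order ("next order #N session M"). A minimal toy model of those semantics, assuming per-semaphore FIFO admission (illustration only, not YDB's implementation; the trace numbers orders across all semaphores of the tablet, while this toy keeps a per-semaphore counter for brevity):

#include <cstdint>
#include <deque>
#include <iostream>
#include <map>

// Toy counting semaphore: owners hold capacity, waiters queue FIFO.
struct Waiter { uint64_t Session; uint64_t Count; uint64_t Order; };

class ToySemaphore {
public:
    explicit ToySemaphore(uint64_t limit) : Limit(limit) {}

    void Acquire(uint64_t session, uint64_t count) {
        Queue.push_back({session, count, NextOrder++});
        Grant();
    }

    void Release(uint64_t session) {
        auto it = Owners.find(session);
        if (it == Owners.end())
            return;
        Used -= it->second;
        Owners.erase(it);
        Grant();  // freed capacity may admit queued waiters
    }

private:
    void Grant() {
        // Strict FIFO: a large waiter at the head blocks smaller ones behind it.
        while (!Queue.empty() && Used + Queue.front().Count <= Limit) {
            Waiter w = Queue.front();
            Queue.pop_front();
            Used += w.Count;
            Owners[w.Session] += w.Count;
            std::cout << "queue: next order #" << w.Order
                      << " session " << w.Session << "\n";
        }
    }

    uint64_t Limit;
    uint64_t Used = 0;
    uint64_t NextOrder = 1;
    std::deque<Waiter> Queue;
    std::map<uint64_t, uint64_t> Owners;
};

int main() {
    ToySemaphore lock1(/*limit=*/10);
    lock1.Acquire(1, 10);  // exclusive: count == limit -> "order #1 session 1"
    lock1.Acquire(2, 1);   // queued behind session 1
    lock1.Release(1);      // prints "order #2 session 2"
}

As in the trace, releasing session 1's exclusive hold is what admits the queued waiter, and the order counter only grows, matching the replayed "Processing semaphore ... next order #N" lines that appear after each tablet restart above.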
"Lock1" queue: next order #3 session 2 2025-04-10T11:05:42.632081Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-10T11:05:42.632109Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:05:42.632436Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:214:2228], cookie=4423971881673420802, name="Lock1") 2025-04-10T11:05:42.632522Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:214:2228], cookie=4423971881673420802) 2025-04-10T11:05:42.633195Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:222:2235], cookie=7749349281648795468, name="Lock2") 2025-04-10T11:05:42.633278Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:222:2235], cookie=7749349281648795468) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-04-10T11:02:49.152586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:02:49.153045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:49.153132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001163/r3tmp/tmpTrEH4z/pdisk_1.dat 2025-04-10T11:02:50.024373Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15008, node 1 2025-04-10T11:02:50.563746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:50.563807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:50.563856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:50.564406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:02:50.571017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:02:50.671742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:50.671891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:50.693832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29430 2025-04-10T11:02:51.545144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:55.833025Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:02:55.903483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:55.903746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:55.934843Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:02:55.940437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:56.343937Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.344513Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345045Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345177Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345517Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345600Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345697Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345762Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.345874Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:56.629963Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:56.630088Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:56.654605Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:56.988297Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:57.053030Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:02:57.053136Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:02:57.087774Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:02:57.087982Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:02:57.088233Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:02:57.088297Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:02:57.088350Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:02:57.088407Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:02:57.088465Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:02:57.088515Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:02:57.088973Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:02:57.147187Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:02:57.147295Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1873:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:02:57.154336Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2606] 2025-04-10T11:02:57.166691Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1926:2623] 2025-04-10T11:02:57.167285Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1926:2623], schemeshard id = 72075186224037897 2025-04-10T11:02:57.168885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:02:57.196421Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:02:57.196497Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:02:57.196575Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:02:57.215935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:02:57.224149Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:02:57.224331Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:02:57.419935Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:02:57.734512Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:02:57.823778Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:02:59.119745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:59.119929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:59.146987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:02:59.296817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:02:59.297065Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:02:59.297363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:02:59.297546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:02:59.297668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:02:59.297816Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:02:59.297976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:02:59.298115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:02:59.298287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:02:59.298428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:02:59.298580Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:02:59.298700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:02:59.331376Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:02:59.331480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ble, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-10T11:03:01.872322Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000026s ... waiting for TEvAnalyzeTableResponse 2025-04-10T11:03:03.941114Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2805:3164] 2025-04-10T11:03:03.945809Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2803:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:03:03.945892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=operationId 2025-04-10T11:03:03.945938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=operationId , PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:03:03.984357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:03:12.982743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:03:12.982830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:14.197712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:03:14.197785Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:28.263046Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:05:28.263158Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:05:28.263203Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:05:28.263247Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:05:29.903406Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:05:29.903494Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 207.000000s, at schemeshard: 72075186224037897 2025-04-10T11:05:29.903861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-10T11:05:29.918799Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:05:31.091259Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:05:31.091382Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:05:31.091436Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:05:31.091503Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 
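The analyze operation above requests TYPE_COUNT_MIN_SKETCH column statistics. As a rough sketch of what that data structure does (a generic toy, not YDB's implementation): a count-min sketch keeps D rows of W counters, an update increments one counter per row, and the estimate is the row-wise minimum, so it can only overestimate a key's true frequency.

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

// Toy count-min sketch with D salted hash rows of W counters each.
template <std::size_t D = 4, std::size_t W = 1024>
class CountMinSketch {
public:
    void Add(const std::string& key, uint64_t count = 1) {
        for (std::size_t d = 0; d < D; ++d)
            Rows[d][Index(key, d)] += count;
    }

    uint64_t Estimate(const std::string& key) const {
        uint64_t est = UINT64_MAX;
        for (std::size_t d = 0; d < D; ++d)
            est = std::min(est, Rows[d][Index(key, d)]);
        return est;  // never less than the true count
    }

private:
    static std::size_t Index(const std::string& key, std::size_t d) {
        // Derive D "independent" hash functions by salting one hash.
        return std::hash<std::string>{}(key + static_cast<char>('A' + d)) % W;
    }

    std::array<std::array<uint64_t, W>, D> Rows{};
};

int main() {
    CountMinSketch<> cms;
    for (int i = 0; i < 42; ++i)
        cms.Add("some_column_value");
    std::cout << cms.Estimate("some_column_value") << "\n";  // >= 42
}

The appeal for a statistics aggregator is that sketches built with the same hash functions merge by element-wise addition of the counter arrays, which fits the per-shard traversal the log describes.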
2025-04-10T11:05:31.091556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:05:31.092095Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:05:31.103436Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:05:31.120876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6981:5161], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.121054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6992:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.121184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:31.164960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:05:31.243082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6995:5169], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:05:31.506987Z node 2 :TX_PROXY ERROR: Actor# [2:7088:5215] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:31.929048Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7117:5230]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:05:31.929249Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:05:31.929348Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7119:5232] 2025-04-10T11:05:31.929443Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7119:5232] 2025-04-10T11:05:31.929706Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7120:5233] 2025-04-10T11:05:31.929819Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7120:5233], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:05:31.929890Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:05:31.930046Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7119:5232], server id = [2:7120:5233], tablet id = 72075186224037894, status = OK 2025-04-10T11:05:31.930097Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:05:31.930210Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7117:5230], StatRequests.size() = 1 2025-04-10T11:05:33.726674Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmQ4NjRkMWItNDA1M2FhYjEtMTdhYzI4ZWItYzY5YjQ5NDI=, TxId: 2025-04-10T11:05:33.726817Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmQ4NjRkMWItNDA1M2FhYjEtMTdhYzI4ZWItYzY5YjQ5NDI=, TxId: 2025-04-10T11:05:33.745039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:05:33.773272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:05:33.773331Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:05:33.805968Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:05:33.806077Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:05:33.895634Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7119:5232], schemeshard count = 1 2025-04-10T11:05:34.955774Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:05:34.955899Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:05:34.959182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:05:34.976507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:05:34.977297Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:05:34.977398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:05:34.992164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:05:35.014653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-10T11:05:35.531337Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:05:35.531457Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:05:35.531510Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:05:35.531577Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:05:35.531637Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:05:35.540403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:05:35.555528Z node 2 :STATISTICS ERROR: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-04-10T11:05:35.870813Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:05:35.871111Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:05:37.855849Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:05:37.855950Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:05:37.856002Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:05:37.856051Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:05:40.248276Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:05:40.248413Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 200.000000s, at schemeshard: 72075186224037897 2025-04-10T11:05:40.248650Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-10T11:05:40.412083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:05:40.412311Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeDeadline::Complete. Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2803:3224] 2025-04-10T11:05:40.412365Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:05:40.412645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:05:40.412689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:05:40.413354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TNebiusAccessServiceTest::Authenticate >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TSequence::CreateSequence >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |77.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> TNebiusAccessServiceTest::Authorize >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> TNebiusAccessServiceTest::Authorize [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-04-10T11:05:41.288478Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.412467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.412646Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.511024Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=7266242493233534865, session=0, seqNo=0) 2025-04-10T11:05:41.511252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.531981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=7266242493233534865, session=1) 2025-04-10T11:05:41.542215Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6039197371994995611, session=0, seqNo=0) 2025-04-10T11:05:41.542410Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.554868Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6039197371994995611, session=2) 2025-04-10T11:05:41.987815Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.987949Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.005317Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.005477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.031341Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2025-04-10T11:05:42.031934Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=18162148891215411273, session=1, seqNo=0) 2025-04-10T11:05:42.048898Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=18162148891215411273, session=1) 2025-04-10T11:05:42.493096Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.493201Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.522027Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.522192Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.549892Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.550795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=2086005100311550196, session=0, seqNo=0) 2025-04-10T11:05:42.550935Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.564233Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=2086005100311550196, session=1) 2025-04-10T11:05:42.987085Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.987203Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.013719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.013857Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.042269Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.042698Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=16670357135275395334, path="") 2025-04-10T11:05:43.056024Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=16670357135275395334, status=SUCCESS) 2025-04-10T11:05:43.057472Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=13582148520599368388, session=0, seqNo=0) 2025-04-10T11:05:43.057662Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.078142Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=13582148520599368388, session=1) 2025-04-10T11:05:43.079441Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2025-04-10T11:05:43.079586Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:43.079797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2025-04-10T11:05:43.092549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=111, session=2) 2025-04-10T11:05:43.516165Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.516302Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.543338Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.543502Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.579267Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.579765Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:132:2158], cookie=18327122119607384784, path="") 2025-04-10T11:05:43.594214Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:132:2158], cookie=18327122119607384784, status=SUCCESS) 2025-04-10T11:05:43.595524Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=2811587072893802503, session=0, seqNo=0) 2025-04-10T11:05:43.595705Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.608487Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=2811587072893802503, session=1) 2025-04-10T11:05:43.609365Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:141:2165], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:43.611503Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:43.611667Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:43.612166Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=111, session=0, seqNo=0) 2025-04-10T11:05:43.612278Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:43.612441Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=222, session=1, seqNo=0) 2025-04-10T11:05:43.626830Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:141:2165], cookie=123) 2025-04-10T11:05:43.626943Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=111, session=2) 2025-04-10T11:05:43.627007Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=222, session=1) ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-10T11:05:43.601769Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Connect to grpc://localhost:8619 2025-04-10T11:05:43.616404Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-04-10T11:05:43.640106Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Status 7 Permission Denied 2025-04-10T11:05:43.640774Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-04-10T11:05:43.644087Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-04-10T11:05:41.288497Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.303359Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.436358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.436535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510239Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], 
cookie=16127144259749163643, session=0, seqNo=0) 2025-04-10T11:05:41.510561Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.536182Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=16127144259749163643, session=1) 2025-04-10T11:05:41.543384Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=12041458182803078594, session=2) 2025-04-10T11:05:41.543473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=12041458182803078594) 2025-04-10T11:05:41.544194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=3268630889367635175 2025-04-10T11:05:41.545031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=11102516013712817825, session=1, seqNo=0) 2025-04-10T11:05:41.558895Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=11102516013712817825, session=1) 2025-04-10T11:05:41.559432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.559649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.559805Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.559996Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:132:2158], cookie=7947696837163441127, session=1) 2025-04-10T11:05:41.570415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:05:41.597809Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:05:41.597924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-10T11:05:41.611837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.611971Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:132:2158], cookie=7947696837163441127) 2025-04-10T11:05:41.612078Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:05:42.304694Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.304825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.323876Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.324049Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.349909Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.350381Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:130:2156], cookie=9249550669529582374, path="") 2025-04-10T11:05:42.365004Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:130:2156], cookie=9249550669529582374, status=SUCCESS) 2025-04-10T11:05:42.365887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:139:2163], cookie=111, session=0, seqNo=0) 2025-04-10T11:05:42.366052Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 
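Across these attach traces a session ends up owned by whichever proxy attached it last (including the "Fast-path attach session=1 to sender=..." case), which is what the session-stealing tests rely on. A hypothetical sketch of that ownership rule; the seqNo gating below is an assumption inferred from the trace, not taken from YDB source:

#include <cstdint>
#include <map>
#include <optional>

// Toy session table: Attach() creates or takes over a session; an attach
// carrying a stale sequence number is assumed to be ignored, so an old
// in-flight attach cannot preempt the current owner.
struct Session {
    uint64_t Owner = 0;  // attaching sender, reduced to an integer here
    uint64_t SeqNo = 0;
};

class SessionTable {
public:
    // id == 0 asks for a new session, mirroring "Created new session N".
    uint64_t Attach(uint64_t id, uint64_t sender, uint64_t seqNo) {
        if (id == 0)
            id = NextId++;
        Session& s = Sessions[id];
        if (seqNo >= s.SeqNo) {  // assumed: stale attaches are dropped
            s.Owner = sender;    // a later attach "steals" the session
            s.SeqNo = seqNo;
        }
        return id;
    }

    std::optional<uint64_t> OwnerOf(uint64_t id) const {
        auto it = Sessions.find(id);
        if (it == Sessions.end())
            return std::nullopt;
        return it->second.Owner;
    }

private:
    uint64_t NextId = 1;
    std::map<uint64_t, Session> Sessions;
};

int main() {
    SessionTable t;
    uint64_t id = t.Attach(0, /*sender=*/132, /*seqNo=*/0);  // new session 1
    t.Attach(id, /*sender=*/139, /*seqNo=*/1);               // takeover
    // OwnerOf(id) == 139; replaying the first attach is now ignored.
}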
2025-04-10T11:05:42.366298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:139:2163], cookie=13850019696395734478, session=1) 2025-04-10T11:05:42.376768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:05:42.376883Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:05:42.390428Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:139:2163], cookie=111, session=1) 2025-04-10T11:05:42.390529Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:139:2163], cookie=13850019696395734478) 2025-04-10T11:05:42.390593Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:05:42.775185Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.775300Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.795099Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.795240Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.820169Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.820654Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=12087133134546405946, session=0, seqNo=0) 2025-04-10T11:05:42.820798Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.834788Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=12087133134546405946, session=1) 2025-04-10T11:05:42.835586Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=6335050869545068014, session=1) 2025-04-10T11:05:42.835694Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:05:42.848263Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=6335050869545068014) 2025-04-10T11:05:42.849222Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=4240566355051119741) 2025-04-10T11:05:42.849299Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=4240566355051119741) 2025-04-10T11:05:42.850000Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:152:2176], cookie=14410902682052255443, session=0, seqNo=0) 2025-04-10T11:05:42.850142Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.862501Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:152:2176], cookie=14410902682052255443, session=2) 2025-04-10T11:05:42.863814Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=14570215495374733476, session=2) 2025-04-10T11:05:42.863895Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-04-10T11:05:42.875792Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=14570215495374733476) 2025-04-10T11:05:43.295918Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.296042Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.313039Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-04-10T11:05:43.313142Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.339688Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.340757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-10T11:05:43.340942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.356491Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-10T11:05:43.357570Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=23456, session=1, seqNo=0) 2025-04-10T11:05:43.375222Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=23456, session=1) 2025-04-10T11:05:43.850355Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.850501Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.883175Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.883400Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.914918Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.916122Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=12345, session=0, seqNo=0) 2025-04-10T11:05:43.916314Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.929328Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=12345, session=1) 2025-04-10T11:05:43.930346Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=23456, session=1, seqNo=0) 2025-04-10T11:05:43.943375Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=23456, session=1) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-04-10T11:05:41.288432Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300760Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.425609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.425754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.489964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510196Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=14157773823993120669, session=0, seqNo=222) 2025-04-10T11:05:41.510440Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532199Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=14157773823993120669, session=1) 2025-04-10T11:05:41.532650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=8106818552770223698, session=1, seqNo=111) 2025-04-10T11:05:41.546007Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=8106818552770223698, session=1) 2025-04-10T11:05:41.979949Z node 2 
:KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.980043Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.999702Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.999809Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.025880Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.026523Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=111, session=0, seqNo=42) 2025-04-10T11:05:42.026696Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.026862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=222, session=1, seqNo=41) 2025-04-10T11:05:42.047031Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=111, session=1) 2025-04-10T11:05:42.047147Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=222, session=1) 2025-04-10T11:05:42.520604Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.520722Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.547820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.547996Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.572370Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.572965Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=18357656096365505973, session=0, seqNo=0) 2025-04-10T11:05:42.573136Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.585249Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=18357656096365505973, session=1) 2025-04-10T11:05:42.604055Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=11228554875640863000) 2025-04-10T11:05:42.604195Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=11228554875640863000) 2025-04-10T11:05:43.013345Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.013485Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.026251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.026396Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.054793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.550918Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.551050Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.582482Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.582637Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.610713Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.611339Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=12057069487180571984, session=0, seqNo=0) 2025-04-10T11:05:43.611519Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.625931Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=12057069487180571984, session=1) 2025-04-10T11:05:43.626349Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:43.626550Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:43.626663Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:43.640031Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-10T11:05:43.640844Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=18133242633052105748, name="Sem1", limit=42) 2025-04-10T11:05:43.640959Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-10T11:05:43.655001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=18133242633052105748) 2025-04-10T11:05:43.655543Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2175], cookie=17051146599414071544, name="Sem1", limit=42) 2025-04-10T11:05:43.674090Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2175], cookie=17051146599414071544) 2025-04-10T11:05:43.674817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2180], cookie=15149980807637555562, name="Sem1", limit=51) 2025-04-10T11:05:43.687196Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2180], cookie=15149980807637555562) 2025-04-10T11:05:43.687897Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2185], cookie=14901528911895857352, name="Lock1", limit=42) 2025-04-10T11:05:43.700023Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2185], cookie=14901528911895857352) 2025-04-10T11:05:43.700739Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2190], cookie=447056390847052533, name="Lock1", limit=18446744073709551615) 2025-04-10T11:05:43.714960Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2190], cookie=447056390847052533) 2025-04-10T11:05:43.715680Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2195], cookie=5871705759016966602, name="Sem1") 2025-04-10T11:05:43.715809Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2195], cookie=5871705759016966602) 2025-04-10T11:05:43.716417Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2198], cookie=15908957377218701149, name="Sem2") 2025-04-10T11:05:43.716476Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2198], cookie=15908957377218701149) 2025-04-10T11:05:43.732184Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.732301Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.733000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-04-10T11:05:43.733448Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.781898Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.782110Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:43.782581Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2228], cookie=502965119081695299, name="Sem1") 2025-04-10T11:05:43.782698Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2228], cookie=502965119081695299) 2025-04-10T11:05:43.783428Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:221:2234], cookie=17369086449770046622, name="Sem2") 2025-04-10T11:05:43.783508Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:221:2234], cookie=17369086449770046622) >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |77.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-04-10T11:05:44.261834Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Connect to grpc://localhost:5139 2025-04-10T11:05:44.279488Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-10T11:05:44.298466Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-04-10T11:05:44.299121Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-04-10T11:05:44.307748Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-04-10T11:05:44.309146Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-10T11:05:44.311571Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-04-10T11:05:44.312256Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-04-10T11:05:44.314187Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |77.1%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-04-10T11:02:55.162797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:02:55.163275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:55.163359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00111c/r3tmp/tmpFGEpu5/pdisk_1.dat 2025-04-10T11:02:55.679780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25291, node 1 2025-04-10T11:02:56.225375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:02:56.225905Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:56.225952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:56.225988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:56.230959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:02:56.340369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:56.340519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:56.371490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1805 2025-04-10T11:02:57.274332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:03:01.093456Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:03:01.166962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:01.167215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:01.218832Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:03:01.221404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:01.498900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.499637Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500185Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500669Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500819Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500904Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.500997Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.501085Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:03:01.691933Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:01.692046Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:01.708702Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:01.887536Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:01.952578Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:03:01.952700Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:03:02.020248Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:03:02.020422Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:03:02.020715Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:03:02.020799Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:03:02.020883Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:03:02.020961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:03:02.021108Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:03:02.021179Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:03:02.071271Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:03:02.102195Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:03:02.102353Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:03:02.112761Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2607] 2025-04-10T11:03:02.143443Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1914:2622] 2025-04-10T11:03:02.144528Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1914:2622], schemeshard id = 72075186224037897 2025-04-10T11:03:02.152955Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:03:02.182797Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:03:02.182870Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:03:02.182953Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:03:02.203549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:03:02.211369Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:03:02.211533Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:03:02.487713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:03:02.726529Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:03:02.795969Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:03:03.871210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:03.871417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:03.940087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:03:04.102912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:03:04.103208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:03:04.103619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:03:04.103840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:03:04.104022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:03:04.104183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:03:04.104341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:03:04.104515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:03:04.104760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:03:04.104909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:03:04.105064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:03:04.105206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2321:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:03:04.148921Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:03:04.149073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 11] 2025-04-10T11:05:36.176240Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7245:5311], schemeshard id = 72075186224037897 2025-04-10T11:05:36.176375Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7201:5284], server id = [2:7246:5312], tablet id = 72075186224037894, status = OK 2025-04-10T11:05:36.176422Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7246:5312] 2025-04-10T11:05:36.176529Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7246:5312], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:05:36.247142Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7257:5315] 2025-04-10T11:05:36.248434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2796:3219] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:05:36.248517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2796:3219] 2025-04-10T11:05:36.248613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:05:37.319046Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:05:37.319171Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:05:37.319246Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:05:37.319325Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:05:37.319390Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:05:37.320723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:05:37.337253Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:05:37.337775Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:05:37.337880Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:05:37.346352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:05:37.363143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:05:37.369359Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:05:37.370087Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7296:5340], server id = [2:7297:5341], tablet id = 72075186224037899, status = OK 2025-04-10T11:05:37.370254Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7296:5340], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:37.388580Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:05:37.388831Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:05:37.389163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:05:37.389545Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:05:37.390020Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:05:37.394665Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7296:5340], server id = [2:7297:5341], tablet id = 72075186224037899 2025-04-10T11:05:37.394747Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:37.396586Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:05:37.474050Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7317:5360]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:05:37.474418Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:05:37.474499Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7317:5360], StatRequests.size() = 1 2025-04-10T11:05:37.770179Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzEwMWFkZDQtNGI5MTUyNGUtYjIwNTFkY2ItZmI1OGVjYTQ=, TxId: 2025-04-10T11:05:37.770307Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzEwMWFkZDQtNGI5MTUyNGUtYjIwNTFkY2ItZmI1OGVjYTQ=, TxId: 2025-04-10T11:05:37.771176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:05:37.788071Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:05:37.788237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:05:38.199552Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:05:38.199687Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:05:38.907187Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:05:38.907316Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table.
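
Aside: the RunDataQuery entries above show how the statistics aggregator persists per-column count-min sketches: a single parameterized UPSERT into `.metadata/_statistics`, with one VALUES tuple per analyzed column indexing into the $column_tags and $data lists. The C++ sketch below reconstructs that query shape for illustration only; the query text mirrors the log, but BuildStatsUpsertQuery is a hypothetical helper, not part of YDB.

// Illustrative only: rebuilds the statistics UPSERT seen in the log above.
// BuildStatsUpsertQuery is a hypothetical name, not YDB code.
#include <cstdio>
#include <string>

// Emits one VALUES tuple per analyzed column, indexing into $column_tags/$data.
std::string BuildStatsUpsertQuery(size_t columnCount) {
    std::string query =
        "DECLARE $owner_id AS Uint64;\n"
        "DECLARE $local_path_id AS Uint64;\n"
        "DECLARE $stat_type AS Uint32;\n"
        "DECLARE $column_tags AS List<Uint32>;\n"
        "DECLARE $data AS List<String>;\n"
        "UPSERT INTO `.metadata/_statistics`\n"
        "    (owner_id, local_path_id, stat_type, column_tag, data)\n"
        "VALUES";
    for (size_t i = 0; i < columnCount; ++i) {
        query += (i ? ",\n" : "\n");
        query += "    ($owner_id, $local_path_id, $stat_type, "
                 "$column_tags[" + std::to_string(i) + "], "
                 "$data[" + std::to_string(i) + "])";
    }
    query += ";";
    return query;
}

int main() {
    // The traversal in this log covers two columns, hence two tuples.
    std::printf("%s\n", BuildStatsUpsertQuery(2).c_str());
    return 0;
}

Because the UPSERT is keyed by (owner_id, local_path_id, stat_type, column_tag), re-running the same traversal, as the retries in this log do, overwrites the previous sketch instead of accumulating duplicate rows.
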
2025-04-10T11:05:38.908128Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:05:38.926014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:05:38.926549Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:05:38.926679Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:05:38.958284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:05:40.010323Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:05:40.010432Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:05:40.010474Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:05:40.010742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:05:40.011299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:05:40.011446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:05:40.034444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:05:41.012766Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:05:41.012880Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:05:41.012956Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:05:41.993790Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:05:41.994022Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:05:42.006126Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:05:42.006328Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:05:42.006378Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:05:42.007208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:05:42.026202Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:05:42.026751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:05:42.026868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:05:42.027442Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:05:42.060090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:05:42.060362Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:05:42.060998Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7485:5450], server id = [2:7486:5451], tablet id = 72075186224037899, status = OK 2025-04-10T11:05:42.061120Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7485:5450], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:42.068996Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:05:42.069150Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:05:42.069424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:05:42.069661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:05:42.069978Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:05:42.072870Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7485:5450], server id = [2:7486:5451], tablet id = 72075186224037899 2025-04-10T11:05:42.072944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:42.073948Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:05:42.109155Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTRjZGZiNDUtOWMxZWIxZmEtYmRhMDFjYTMtZDQ3NTY3MTA=, TxId: 2025-04-10T11:05:42.109251Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTRjZGZiNDUtOWMxZWIxZmEtYmRhMDFjYTMtZDQ3NTY3MTA=, TxId: 2025-04-10T11:05:42.110017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:05:42.132025Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:05:42.132132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete.
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2796:3219] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 2025-04-10T11:05:23.974915Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] 2025-04-10T11:05:35.538128Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:449:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:452:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:453:2057] recipient: [4:451:2377] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:455:2057] recipient: [4:451:2377] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:454:2378] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:508:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
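
Aside: the KV90/KV73 messages above come from request validation in the KeyValue tablet: a single request may not delete more than 100000 keys across its DeleteRange operations, and PrepareExecuteTransactionRequest rejects the transaction once that budget is exceeded. A minimal standalone sketch of such a cap check follows; TDeleteRangeOp and ValidateDeleteRequest are hypothetical names, not the tablet's actual interfaces.

// Illustrative sketch of a per-request deletion cap, assuming the 100000
// limit quoted in the KV90 message above. Not the real tablet code.
#include <cstdio>
#include <cstdint>
#include <vector>

constexpr uint64_t MaxDeletionsPerRequest = 100000;  // limit from the log

struct TDeleteRangeOp {
    uint64_t affectedKeys;  // keys this range is expected to remove
};

// Returns false once the running total of deletions across all ops in one
// request exceeds the cap, mirroring the rejection logged with Marker# KV90.
bool ValidateDeleteRequest(const std::vector<TDeleteRangeOp>& ops) {
    uint64_t total = 0;
    for (const auto& op : ops) {
        total += op.affectedKeys;
        if (total > MaxDeletionsPerRequest) {
            std::fprintf(stderr,
                "Can't delete Range: total limit of deletions per request "
                "(%llu) reached\n",
                static_cast<unsigned long long>(MaxDeletionsPerRequest));
            return false;  // whole request is rejected, nothing is applied
        }
    }
    return true;
}

int main() {
    std::vector<TDeleteRangeOp> ops = {{60000}, {50000}};  // 110000 > cap
    std::printf("request accepted: %s\n",
                ValidateDeleteRequest(ops) ? "yes" : "no");
    return 0;
}

Validating the whole batch before applying any of it is what lets the tablet fail the request cleanly up front, which is exactly the behavior the test exercises across reboots.
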
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:449:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:451:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:453:2057] recipient: [5:452:2377] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:455:2057] recipient: [5:452:2377] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:454:2378] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:508:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:450:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:453:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:454:2057] recipient: [6:452:2377] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:456:2057] recipient: [6:452:2377] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:455:2378] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:509:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-04-10T11:05:41.289255Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.302360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.435258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.435406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.491475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510641Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=14934704340486892466, session=0, seqNo=0) 2025-04-10T11:05:41.510884Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=14934704340486892466, session=1) 2025-04-10T11:05:41.532375Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=17946208106269102824, session=0, seqNo=0) 2025-04-10T11:05:41.532505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.545548Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=17946208106269102824, session=2) 2025-04-10T11:05:41.546035Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-10T11:05:41.546308Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.546472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.561857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.571199Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.571672Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:41.571785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-04-10T11:05:41.584156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-10T11:05:41.584230Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-10T11:05:41.584845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=129330074792074186, name="Lock1") 2025-04-10T11:05:41.584943Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=129330074792074186) 2025-04-10T11:05:42.251796Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.251913Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.271555Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.271659Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.302981Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.303597Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=10667470645719864189, session=0, seqNo=0) 2025-04-10T11:05:42.303785Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.318518Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=10667470645719864189, session=1) 2025-04-10T11:05:42.318947Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=7480105595407132686, session=0, seqNo=0) 2025-04-10T11:05:42.319102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.335201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=7480105595407132686, session=2) 2025-04-10T11:05:42.335615Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.335788Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:42.335891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing 
semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.350952Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-04-10T11:05:42.351358Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.351734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.365859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=222) 2025-04-10T11:05:42.365939Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2025-04-10T11:05:42.366589Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:146:2170], cookie=6430546241601166255, name="Lock1") 2025-04-10T11:05:42.366681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:146:2170], cookie=6430546241601166255) 2025-04-10T11:05:42.367149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=17306447896028342558, name="Lock1") 2025-04-10T11:05:42.367220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=17306447896028342558) 2025-04-10T11:05:42.802869Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.803027Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.829718Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.829904Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.862508Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.863328Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=10510334044123610084, session=0, seqNo=0) 2025-04-10T11:05:42.863578Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.875985Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=10510334044123610084, session=1) 2025-04-10T11:05:42.876310Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=8015452079239276724, session=0, seqNo=0) 2025-04-10T11:05:42.876452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.888716Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=8015452079239276724, session=2) 2025-04-10T11:05:42.889524Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.889701Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:42.889807Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.902320Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111) 2025-04-10T11:05:42.902749Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute 
(sender=[3:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.903396Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.903506Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-10T11:05:42.915788Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=222) 2025-04-10T11:05:42.915874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=333) 2025-04-10T11:05:42.916526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2175], cookie=14280598279217676856, name="Lock1") 2025-04-10T11:05:42.916635Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2175], cookie=14280598279217676856) 2025-04-10T11:05:42.917117Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2178], cookie=7899152034732962977, name="Lock1") 2025-04-10T11:05:42.917189Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2178], cookie=7899152034732962977) 2025-04-10T11:05:42.933128Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.933267Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.934058Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.934821Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.975025Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.975190Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.975673Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:194:2208], cookie=2855578182758841271, name="Lock1") 2025-04-10T11:05:42.975764Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:194:2208], cookie=2855578182758841271) 2025-04-10T11:05:42.976400Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:202:2215], cookie=17388737304256177856, name="Lock1") 2025-04-10T11:05:42.976512Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:202:2215], cookie=17388737304256177856) 2025-04-10T11:05:43.520324Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.520464Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.540173Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.540314Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.569150Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.569832Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=17447543481906476625, session=0, seqNo=0) 2025-04-10T11:05:43.570027Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:43.582650Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=17447543481906476625, session=1) 2025-04-10T11:05:43.583033Z node 4 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=6127900292912018575, session=0, seqNo=0) 2025-04-10T11:05:43.583202Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:43.596456Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=6127900292912018575, session=2) 2025-04-10T11:05:43.596891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:43.597118Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:43.597251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:43.610718Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-10T11:05:43.611154Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:43.611574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Lock1") 2025-04-10T11:05:43.611688Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-10T11:05:43.624296Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-10T11:05:43.624409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-10T11:05:44.122388Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:44.122534Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:44.151055Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:44.151227Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.177915Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.191301Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=11314836754291803279, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-10T11:05:44.191670Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:44.207701Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=11314836754291803279) 2025-04-10T11:05:44.208480Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=5659619293967716011, path="/Root/Res", config={ }) 2025-04-10T11:05:44.208780Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-10T11:05:44.223306Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=5659619293967716011) 2025-04-10T11:05:44.225805Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 17796536876793044738. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:44.225900Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=17796536876793044738) 2025-04-10T11:05:44.226662Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 7849818790598938836. Data: { } 2025-04-10T11:05:44.226731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=7849818790598938836) 2025-04-10T11:05:44.269399Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:44.314077Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:44.350701Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:44.386733Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:44.430129Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TKesusTest::TestSemaphoreSessionFailures [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2025-04-10T11:05:41.289096Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301112Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.418194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.418369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.493258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.960044Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.960162Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.979186Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.979332Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.005944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.490005Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.490182Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.520447Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.520623Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.550787Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.968988Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.969109Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.992590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.992737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.024881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.035970Z node 4 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 5 2025-04-10T11:05:43.036615Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:187:2159]) 2025-04-10T11:05:43.730093Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.730276Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.754691Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.754852Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute ... waiting for register request 2025-04-10T11:05:43.771961Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR ... waiting for register request (done) ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-04-10T11:05:43.772931Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7 2025-04-10T11:05:43.773418Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:187:2159]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-04-10T11:00:49.073823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:49.073918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:49.144315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:50.810468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:50.810543Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:50.855848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:52.566106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:52.566243Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:52.615453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:56.304164Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:56.304244Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:56.379709Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:00:59.605745Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:00:59.605821Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:59.683520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:01.337020Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:01.337099Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:01.399607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:02.475616Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-04-10T11:01:02.475736Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-04-10T11:01:02.475871Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-04-10T11:01:02.476432Z node 7 :TX_PROXY WARN: actor# [7:343:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-10T11:01:03.663744Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:03.663819Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:03.735004Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:04.498556Z node 8 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? true selector_config: [] 2025-04-10T11:01:06.304372Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:01:06.304466Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:01:06.431842Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:01:08.176665Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-10T11:01:08.415259Z node 16 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:08.415927Z node 16 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:08.416752Z node 16 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14567882574262458494 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:08.481552Z node 15 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:08.482080Z node 15 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:08.490645Z node 15 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4749811417568932648 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:08.541394Z node 17 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:08.542031Z node 17 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:08.547720Z node 17 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6093151336135620360 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:01:08.625157Z node 13 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:01:08.625747Z node 13 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:01:08.625959Z node 13 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001ccd/r3tmp/tmpJWgrvu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13535921510487569892 PDiskId# 1000 PDiskC ... 
00-s[16/16]o)]} 2025-04-10T11:04:36.265219Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:36.265672Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:41.810297Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:41.810598Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:47.261332Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:47.261640Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:52.628073Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:52.628514Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:52.766895Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool 
'/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:52.767497Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:57.732768Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:04:57.733304Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:03.147692Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:03.148116Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:08.798774Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:08.799159Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:14.304044Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options 
PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:14.304412Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:19.817003Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:19.817396Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:25.362342Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:25.362785Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:30.524743Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:30.525109Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:35.838960Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:35.839278Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:41.344305Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-10T11:05:41.344707Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> KqpNotNullColumns::ReplaceNotNullPk |77.1%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpKv::ReadRows_UnknownTable >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-04-10T11:05:41.288701Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.426869Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.427038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.511044Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=17896925408090694903, session=0, seqNo=0) 2025-04-10T11:05:41.511223Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.538059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=17896925408090694903, session=1) 2025-04-10T11:05:41.538419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=16222180077623601544, session=0, seqNo=0) 2025-04-10T11:05:41.538558Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.550743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=16222180077623601544, session=2) 2025-04-10T11:05:41.551042Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=111, name="Lock1") 
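[editor's note] For readers following the TTxSemaphoreAcquire / TTxSemaphoreRelease entries in this test's output: below is a minimal, self-contained model of the semantics those entries suggest — a counting semaphore with a FIFO waiter queue, where an exclusive lock is expressed as an acquire for the full uint64 range (count=18446744073709551615) and a shared lock as count=1. This is an illustrative sketch only; the Semaphore class and its methods are invented for the example and are not the YDB Kesus implementation.

    from collections import OrderedDict

    UINT64_MAX = 18446744073709551615  # count used by exclusive lock acquires in the log

    class Semaphore:
        """Toy counting semaphore with a FIFO waiter queue (illustrative only)."""
        def __init__(self, name, limit=UINT64_MAX):
            self.name = name
            self.limit = limit
            self.owners = OrderedDict()   # session id -> granted count
            self.waiters = OrderedDict()  # session id -> (order id, requested count)
            self.next_order = 1

        def _used(self):
            return sum(self.owners.values())

        def acquire(self, session, count):
            # Every acquire gets an order id, whether granted or queued; this
            # mirrors the "next order #N session S" numbering in the log.
            order = self.next_order
            self.next_order += 1
            if not self.waiters and self._used() + count <= self.limit:
                self.owners[session] = count
                print(f'Processing semaphore "{self.name}" queue: next order #{order} session {session}')
            else:
                self.waiters[session] = (order, count)

        def release(self, session):
            # Drop whichever link the session holds, then re-run the queue.
            if session in self.waiters:
                del self.waiters[session]
                print(f'Deleting session {session} / semaphore "{self.name}" waiter link')
            elif session in self.owners:
                del self.owners[session]
                print(f'Deleting session {session} / semaphore "{self.name}" owner link')
            while self.waiters:
                sess, (order, count) = next(iter(self.waiters.items()))
                if self._used() + count > self.limit:
                    break  # head of the queue still does not fit
                del self.waiters[sess]
                self.owners[sess] = count
                print(f'Processing semaphore "{self.name}" queue: next order #{order} session {sess}')

    # Replaying the Lock1 scenario from the log: session 1 takes an exclusive
    # lock (full uint64 count), session 2 queues a shared acquire (count=1),
    # then releases it again while still waiting.
    lock1 = Semaphore("Lock1")
    lock1.acquire(session=1, count=UINT64_MAX)  # granted: order #1 session 1
    lock1.acquire(session=2, count=1)           # queued behind the exclusive owner
    lock1.release(session=2)                    # deletes the waiter link, as in the log

Under this model, entries like "next order #3 session 2" later in the output fall out naturally from assigning an order id to every acquire, including acquires that are queued, deleted, and then re-issued. [end editor's note]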
2025-04-10T11:05:41.566918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.567231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.567386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.567511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.585309Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-10T11:05:41.585587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:132:2158], cookie=333, name="Lock1") 2025-04-10T11:05:41.600482Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:132:2158], cookie=333) 2025-04-10T11:05:42.229331Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.229506Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.250047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.250139Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.282834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.283383Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=15300185402217622124, session=0, seqNo=0) 2025-04-10T11:05:42.283529Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.301789Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=15300185402217622124, session=1) 2025-04-10T11:05:42.302047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=13906417776864863512, session=0, seqNo=0) 2025-04-10T11:05:42.302173Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.314660Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=13906417776864863512, session=2) 2025-04-10T11:05:42.315308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:141:2165], cookie=4834504915151493865, name="Sem1", limit=1) 2025-04-10T11:05:42.315481Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:05:42.328220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:141:2165], cookie=4834504915151493865) 2025-04-10T11:05:42.328690Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:42.328855Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:05:42.329035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-10T11:05:42.341942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-04-10T11:05:42.342030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete 
(sender=[2:130:2156], cookie=222) 2025-04-10T11:05:42.342705Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=18037671011751299553, name="Sem1") 2025-04-10T11:05:42.342838Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=18037671011751299553) 2025-04-10T11:05:42.343380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2176], cookie=9159819098828768157, name="Sem1") 2025-04-10T11:05:42.343482Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2176], cookie=9159819098828768157) 2025-04-10T11:05:42.343795Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:130:2156], cookie=333, name="Sem1") 2025-04-10T11:05:42.343929Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-04-10T11:05:42.362993Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:130:2156], cookie=333) 2025-04-10T11:05:42.363808Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2181], cookie=10339144642887292001, name="Sem1") 2025-04-10T11:05:42.363906Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2181], cookie=10339144642887292001) 2025-04-10T11:05:42.364407Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=6610279447918376107, name="Sem1") 2025-04-10T11:05:42.364470Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=6610279447918376107) 2025-04-10T11:05:42.364710Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:130:2156], cookie=444, name="Sem1") 2025-04-10T11:05:42.364801Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-10T11:05:42.380283Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:130:2156], cookie=444) 2025-04-10T11:05:42.381220Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2189], cookie=3765810058570708560, name="Sem1") 2025-04-10T11:05:42.381336Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2189], cookie=3765810058570708560) 2025-04-10T11:05:42.382016Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=15949995365668306224, name="Sem1") 2025-04-10T11:05:42.382111Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=15949995365668306224) 2025-04-10T11:05:42.893434Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.893518Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.912888Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.913039Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.937282Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.937689Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:132:2158], cookie=15726670295191708074, name="Sem1", limit=1) 2025-04-10T11:05:42.937848Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:05:42.950199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:132:2158], cookie=15726670295191708074) 2025-04-10T11:05:42.950867Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:141:2165], cookie=8870199495020946868, name="Sem2", limit=1) 2025-04-10T11:05:42.951030Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-10T11:05:42.967732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:141:2165], cookie=8870199495020946868) 2025-04-10T11:05:42.968387Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:146:2170], cookie=16195890092975239767, name="Sem1") 2025-04-10T11:05:42.968490Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:146:2170], cookie=16195890092975239767) 2025-04-10T11:05:42.968923Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=900410543402509915, name="Sem2") 2025-04-10T11:05:42.968980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=900410543402509915) 2025-04-10T11:05:42.984079Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.984202Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.984902Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.985552Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.026826Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.027133Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:189:2203], cookie=9348019919776459690, name="Sem1") 2025-04-10T11:05:43.027196Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:189:2203], cookie=9348019919776459690) 2025-04-10T11:05:43.027718Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:195:2208], cookie=14957879432688027140, name="Sem2") 2025-04-10T11:05:43.027799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:195:2208], cookie=14957879432688027140) 2025-04-10T11:05:43.028650Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:198:2211], cookie=5642743786836758237, name="Sem1", limit=1) 2025-04-10T11:05:43.051491Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:198:2211], cookie=5642743786836758237) 2025-04-10T11:05:43.052268Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:203:2216], cookie=5666616120346809847, name="Sem2", limit=1) 2025-04-10T11:05:43.065247Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:203:2216], cookie=5666616120346809847) 2025-04-10T11:05:43.066044Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:208:2221], cookie=8305459705818894754, name="Sem1") 2025-04-10T11:05:43.066183Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:208:2221], cookie=830 ... 
:KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-04-10T11:05:44.007268Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:222:2245], cookie=3573357421805433980) 2025-04-10T11:05:44.008237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:227:2250], cookie=4174687403162472511, name="Sem1", force=0) 2025-04-10T11:05:44.008361Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-04-10T11:05:44.021657Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:227:2250], cookie=4174687403162472511) 2025-04-10T11:05:44.022830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:232:2255], cookie=6024305120369229835, name="Sem1", limit=1) 2025-04-10T11:05:44.023035Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-04-10T11:05:44.044705Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:232:2255], cookie=6024305120369229835) 2025-04-10T11:05:44.045799Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:237:2260], cookie=15404778524332110219, name="Sem1", force=0) 2025-04-10T11:05:44.045972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-04-10T11:05:44.065478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:237:2260], cookie=15404778524332110219) 2025-04-10T11:05:44.066883Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:242:2265], cookie=5608636370004770577, name="Sem1", limit=1) 2025-04-10T11:05:44.067129Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-04-10T11:05:44.080719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:242:2265], cookie=5608636370004770577) 2025-04-10T11:05:44.081548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:44.081820Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-04-10T11:05:44.100929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2025-04-10T11:05:44.101889Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-10T11:05:44.116279Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2025-04-10T11:05:44.117120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Sem1") 2025-04-10T11:05:44.117295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-04-10T11:05:44.147290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2025-04-10T11:05:44.148208Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=444, session=2, semaphore="Sem1" count=1) 2025-04-10T11:05:44.164558Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=444) 2025-04-10T11:05:44.165347Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=555, name="Sem1") 2025-04-10T11:05:44.165521Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-04-10T11:05:44.165603Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-04-10T11:05:44.179435Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=555) 2025-04-10T11:05:44.843450Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:44.843617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:44.886426Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:44.886625Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.927631Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.928370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=17786709216690494304, session=0, seqNo=0) 2025-04-10T11:05:44.928580Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:44.950448Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=17786709216690494304, session=1) 2025-04-10T11:05:44.950933Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=112, name="Sem1", limit=5) 2025-04-10T11:05:44.951116Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:05:44.965757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=112) 2025-04-10T11:05:44.966218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=113, name="Sem1") 2025-04-10T11:05:44.982878Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=113) 2025-04-10T11:05:44.983436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=114, name="Sem1", force=0) 2025-04-10T11:05:44.983589Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-10T11:05:45.002006Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=114) 2025-04-10T11:05:45.002391Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:132:2158], cookie=17364938145232513603 2025-04-10T11:05:45.002720Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=115, name="Sem1", limit=5) 2025-04-10T11:05:45.015996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=115) 2025-04-10T11:05:45.016339Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=116, name="Sem1") 2025-04-10T11:05:45.029564Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=116) 2025-04-10T11:05:45.029923Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=117, name="Sem1", force=0) 2025-04-10T11:05:45.043077Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], 
cookie=117) 2025-04-10T11:05:45.043498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=118, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:45.057985Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=118) 2025-04-10T11:05:45.058527Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=119, name="Sem1") 2025-04-10T11:05:45.079476Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=119) 2025-04-10T11:05:45.080074Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=120, name="Sem1") 2025-04-10T11:05:45.080198Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=120) 2025-04-10T11:05:45.080522Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:132:2158], cookie=12568636497006572547, session=1) 2025-04-10T11:05:45.080661Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:05:45.099193Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:132:2158], cookie=12568636497006572547) 2025-04-10T11:05:45.099799Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=121, name="Sem1", limit=5) 2025-04-10T11:05:45.126029Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=121) 2025-04-10T11:05:45.126567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=122, name="Sem1") 2025-04-10T11:05:45.141629Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=122) 2025-04-10T11:05:45.142134Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=123, name="Sem1", force=0) 2025-04-10T11:05:45.159309Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=123) 2025-04-10T11:05:45.159748Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=124, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:45.175413Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=124) 2025-04-10T11:05:45.175941Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=125, name="Sem1") 2025-04-10T11:05:45.215629Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=125) 2025-04-10T11:05:45.216341Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=126, name="Sem1") 2025-04-10T11:05:45.216523Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=126) 2025-04-10T11:05:45.217686Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=127, name="Sem1", limit=5) 2025-04-10T11:05:45.217826Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=127) 2025-04-10T11:05:45.218231Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], 
cookie=128, name="Sem1") 2025-04-10T11:05:45.218333Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=128) 2025-04-10T11:05:45.218671Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=129, name="Sem1", force=0) 2025-04-10T11:05:45.218761Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=129) 2025-04-10T11:05:45.219243Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=130, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:45.219359Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=130) 2025-04-10T11:05:45.219695Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=131, name="Sem1") 2025-04-10T11:05:45.219780Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=131) 2025-04-10T11:05:45.220094Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=132, name="Sem1") 2025-04-10T11:05:45.220185Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=132) >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |77.1%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |77.2%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4581, MsgBus: 11360 2025-04-10T11:02:34.238500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638244439419470:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.238621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cf0/r3tmp/tmpPe1FD2/pdisk_1.dat 2025-04-10T11:02:35.013637Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:35.031669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:35.031785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:35.050488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4581, node 1 2025-04-10T11:02:35.357426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:35.357457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:35.357478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:35.357603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11360 TClient is 
connected to server localhost:11360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:36.516715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:36.569523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:02:39.237287Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638244439419470:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.237414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:39.781935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-10T11:02:40.386402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:02:40.488519Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-10T11:02:40.624789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} 
{"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-10T11:02:41.025657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-10T11:02:41.337491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:02:41.482935Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-04-10T11:02:41.600691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 f f t t 2025-04-10T11:02:41.769366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 {f,f} {f,f} {t,t} {t,t} 2025-04-10T11:02:41.968754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.055393Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-10T11:02:42.112631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:42.239507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:42.412668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-10T11:02:42.488038Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:42.553349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:42.739421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at 
schemeshard: 72057594046644480 2025-04-10T11:02:42.875529Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:42.955173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:43.160523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-10T11:02:43.245343Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:43.342079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:43.576292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:43.750534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-10T11:02:43.858272Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:43.954621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:44.157127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-10T11:02:44.326068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710725:0, at schemeshard: 72057594046644480 2025-04-10T11:02:44.452081Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-10T11:02:44.525056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710729:0, at schemeshard: 720575940466444 ... 
ent=undelivered;self_id=[14:7491638875835020675:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:01.015215Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cf0/r3tmp/tmp4kRl9f/pdisk_1.dat 2025-04-10T11:05:01.257845Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:01.262462Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:01.262699Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:01.269516Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27351, node 14 2025-04-10T11:05:01.374477Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:01.374507Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:01.374524Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:01.374703Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7067 TClient is connected to server localhost:7067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:03.120055Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:03.135550Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:06.018341Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7491638875835020675:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:06.018489Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:08.965405Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7491638905899792415:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:08.965626Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:08.968060Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7491638905899792427:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:08.982656Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:05:09.006839Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7491638905899792429:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:05:09.112113Z node 14 :TX_PROXY ERROR: Actor# [14:7491638910194759778:2351] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:09.324225Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:05:09.838114Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:10.782278Z node 14 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [14:7491638914489727429:2404], owner: [14:7491638905899792378:2326], statement id: 0 2025-04-10T11:05:10.782894Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NmIwOGIwY2MtMTk5MmEzYjMtZjZlNzdmY2EtYjRhZDVlYzQ=, ActorId: [14:7491638914489727427:2403], ActorState: ExecuteState, TraceId: 01jrfp03bn68gc5ykm4gsqd4p5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:05:11.119101Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7491638918784694760:2416], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-10T11:05:11.119431Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MjQwNzQwM2ItOTkzMzI4YTEtOTFjYTViZmEtMzcyMDA1M2E=, ActorId: [14:7491638918784694757:2414], ActorState: ExecuteState, TraceId: 01jrfp03nad7t21w6nqsdjhjep, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:05:11.169830Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7491638918784694774:2422], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-10T11:05:11.173241Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZTE5ZTAyMmEtZWI1ZWQzZmQtNzAyNjdkOGQtYzUzYWQ0ZWY=, ActorId: [14:7491638918784694771:2420], ActorState: ExecuteState, TraceId: 01jrfp03py6mb8sjdzmtvyjpap, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:05:11.196179Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfp03rkf34y2e93a99z68cc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=OGIxNjJhY2UtODljYjdjNTktNDExOTBhMWYtOTM1NWNiNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-10T11:05:11.196553Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OGIxNjJhY2UtODljYjdjNTktNDExOTBhMWYtOTM1NWNiNzQ=, ActorId: [14:7491638918784694785:2427], ActorState: ExecuteState, TraceId: 01jrfp03rkf34y2e93a99z68cc, Create QueryResponse for error on request, msg: 2025-04-10T11:05:11.261397Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.406907Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:05:11.522372Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7491638918784694956:2452], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-04-10T11:05:11.525701Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OGE4MWY4MS1kNzM0OTFmOC1hNWY0Nzg1OS00YmU3MjEy, ActorId: [14:7491638918784694953:2450], ActorState: ExecuteState, TraceId: 01jrfp04221rv1jmf5e8s6d8cf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:05:11.571736Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7491638918784694969:2458], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-04-10T11:05:11.572092Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZmIwYTE4YTQtMTc1YzVmMWEtZjczYTQyYTAtOTBkYTFhMTI=, ActorId: [14:7491638918784694966:2456], ActorState: ExecuteState, TraceId: 01jrfp043j67mwdg68xc1q4gqr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:05:12.177810Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfp045392rv47q1css9p6rq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=Mjc5ZjRhYTAtMTczMDYxZDQtMzY5YjhmNC00NzhjZjZmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-10T11:05:12.179883Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=Mjc5ZjRhYTAtMTczMDYxZDQtMzY5YjhmNC00NzhjZjZmZg==, ActorId: [14:7491638918784694979:2462], ActorState: ExecuteState, TraceId: 01jrfp045392rv47q1css9p6rq, Create QueryResponse for error on request, msg: 2025-04-10T11:05:12.240988Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:05:13.036207Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-04-10T11:05:13.089797Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-04-10T11:05:41.288660Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301707Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.436527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.436727Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.491162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.512225Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=2396810145568703677, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-10T11:05:41.512524Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:41.525646Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=2396810145568703677) 2025-04-10T11:05:41.526617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2165], cookie=631141416249431587, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-10T11:05:41.526890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-04-10T11:05:41.539601Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2165], cookie=631141416249431587) 2025-04-10T11:05:41.540357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute 
(sender=[1:146:2170], cookie=12489482348203040893, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:41.540643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-04-10T11:05:41.553155Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=12489482348203040893) 2025-04-10T11:05:41.553911Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2175], cookie=14456209233600390966, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:41.554220Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-10T11:05:41.567465Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2175], cookie=14456209233600390966) 2025-04-10T11:05:41.568272Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2180], cookie=7664693798880784699, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:41.568503Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-04-10T11:05:41.581480Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2180], cookie=7664693798880784699) 2025-04-10T11:05:41.582364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2185], cookie=15961908102669247889, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:41.582677Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-04-10T11:05:41.601012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2185], cookie=15961908102669247889) 2025-04-10T11:05:41.601755Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2190], cookie=9227729886608903513, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-10T11:05:41.601920Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-04-10T11:05:41.614735Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2190], cookie=9227729886608903513) 2025-04-10T11:05:41.615767Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2195], cookie=6314095674679858935, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:41.616061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-04-10T11:05:41.635500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2195], cookie=6314095674679858935) 2025-04-10T11:05:41.645515Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=15976169500771298158, ids=[100], paths=[], recursive=0) 2025-04-10T11:05:41.645699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=15976169500771298158) 2025-04-10T11:05:41.646568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:179:2203], cookie=15931266450764638448, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-10T11:05:41.646668Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:179:2203], 
cookie=15931266450764638448) 2025-04-10T11:05:41.647144Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2206], cookie=4497477566953541103, ids=[], paths=[/Root, ], recursive=0) 2025-04-10T11:05:41.647234Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2206], cookie=4497477566953541103) 2025-04-10T11:05:41.647759Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2209], cookie=2381399949680871663, ids=[1, 1], paths=[], recursive=0) 2025-04-10T11:05:41.647817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2209], cookie=2381399949680871663) 2025-04-10T11:05:41.648324Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2212], cookie=12778444805177121269, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-10T11:05:41.648396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2212], cookie=12778444805177121269) 2025-04-10T11:05:41.648857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2215], cookie=8091600645639106616, ids=[], paths=[], recursive=1) 2025-04-10T11:05:41.648919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2215], cookie=8091600645639106616) 2025-04-10T11:05:41.660901Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2218], cookie=13241382484501301953, ids=[], paths=[], recursive=0) 2025-04-10T11:05:41.661021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2218], cookie=13241382484501301953) 2025-04-10T11:05:41.661642Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2221], cookie=8619535179943395911, ids=[3, 2], paths=[], recursive=1) 2025-04-10T11:05:41.661716Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2221], cookie=8619535179943395911) 2025-04-10T11:05:41.672458Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2224], cookie=6109143419944742561, ids=[3, 2], paths=[], recursive=0) 2025-04-10T11:05:41.672635Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2224], cookie=6109143419944742561) 2025-04-10T11:05:41.673495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2227], cookie=10939307258242888393, ids=[], paths=[Root2/], recursive=1) 2025-04-10T11:05:41.673610Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2227], cookie=10939307258242888393) 2025-04-10T11:05:41.674242Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2230], cookie=17060448256922242069, ids=[], paths=[Root2/], recursive=0) 2025-04-10T11:05:41.674312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2230], cookie=17060448256922242069) 2025-04-10T11:05:41.790096Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.790232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.791003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-04-10T11:05:41.791942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.828818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.829313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:246:2260], cookie=6904776603900052457, ids=[100], paths=[], recursive=0) 2025-04-10T11:05:41.829448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:246:2260], cookie=6904776603900052457) 2025-04-10T11:05:41.830404Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:252:2265], cookie=9088732425582847334, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-10T11:05:41.830530Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:252:2265], cookie=9088732425582847334) 2025-04-10T11:05:41.831358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:255:2268], cookie=8414895550634297715, ids=[], paths=[/Root, ], recursive=0) 2025-04-10T11:05:41.831491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:255:2268], cookie=8414895550634297715) 2025-04-10T11:05:41.832248Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:258:2271], cookie=10663659268793877093, ids=[1, 1], paths=[], recursive=0) 2025-04-10T11:05:41.832326Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:258:2271], cookie=10663659268793877093) 2025-04-10T11:05:41.833134Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:261:2274], cookie=14369447459578779995, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-10T11:05:41.833245Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:261:2274], cookie=14369447459578779995) 2025-04-10T11:05:41.833941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:264:2277], cookie=9905589976803637600, ids=[], paths=[], recursive=1) 2025-04-10T11:05:41.834019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:264:2277], cookie=9905589976803637600) 2025-04-10T11:05:41.834976Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:267:2280], cookie=16063951581890967607, ids=[], paths=[], recursive=0) 2025-04-10T11:05:41.835052Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:267:2280], cookie=16063951581890967607) 2025-04-10T11:05:41.835810Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:270:2283], cookie=3393914930250929298, ids=[3, 2], paths=[], recursive=1) 2025-04-10T11:05:41.835885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:270:2283], cookie=3393914930250929298) 2025-04-10T11:05:41.836694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:273:2286], cookie=10463535130907155003, ids=[3, 2], paths=[], recursive=0 ... 
_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.507934Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.508483Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=8134162746396970616, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-04-10T11:05:44.508771Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:44.522598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=8134162746396970616) 2025-04-10T11:05:44.523409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:139:2163], cookie=11246636104987455105, path="/Root/Q", config={ }) 2025-04-10T11:05:44.523685Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-04-10T11:05:44.542824Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:139:2163], cookie=11246636104987455105) 2025-04-10T11:05:44.543610Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:144:2168], cookie=6159729544053196708, path="/Root/Folder", config={ }) 2025-04-10T11:05:44.543902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-04-10T11:05:44.557971Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:144:2168], cookie=6159729544053196708) 2025-04-10T11:05:44.558866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:149:2173], cookie=11579736888991026596, path="/Root/Folder/Q1", config={ }) 2025-04-10T11:05:44.559204Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-10T11:05:44.572421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:149:2173], cookie=11579736888991026596) 2025-04-10T11:05:44.573223Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:154:2178], cookie=747959888002300606, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.573355Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:154:2178], cookie=747959888002300606) 2025-04-10T11:05:44.585065Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:160:2184], cookie=237122720498511024, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.585209Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:160:2184], cookie=237122720498511024) 2025-04-10T11:05:44.586226Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:166:2190], cookie=15024658115163896466, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.586322Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:166:2190], cookie=15024658115163896466) 2025-04-10T11:05:44.586852Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:169:2193], cookie=7199963659306669557, id=0, path="/Root/Folder/NonexistingRes") 2025-04-10T11:05:44.586952Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:169:2193], cookie=7199963659306669557) 2025-04-10T11:05:44.587474Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute 
(sender=[4:172:2196], cookie=7880149082686244479, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.587546Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:172:2196], cookie=7880149082686244479) 2025-04-10T11:05:44.588042Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:175:2199], cookie=17431220833472727972, id=100, path="") 2025-04-10T11:05:44.588103Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:175:2199], cookie=17431220833472727972) 2025-04-10T11:05:44.588576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:178:2202], cookie=15892411347777190191, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.588646Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:178:2202], cookie=15892411347777190191) 2025-04-10T11:05:44.600730Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:181:2205], cookie=2005367816505879460, id=3, path="") 2025-04-10T11:05:44.600849Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:181:2205], cookie=2005367816505879460) 2025-04-10T11:05:44.601453Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:184:2208], cookie=15534826476789927592, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.601538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:184:2208], cookie=15534826476789927592) 2025-04-10T11:05:44.602190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:187:2211], cookie=9306930083936168239, id=0, path="/Root/Folder/Q1") 2025-04-10T11:05:44.602412Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-04-10T11:05:44.616348Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:187:2211], cookie=9306930083936168239) 2025-04-10T11:05:44.617292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2216], cookie=13723920118609512177, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.617427Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2216], cookie=13723920118609512177) 2025-04-10T11:05:44.632117Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:44.632236Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:44.632909Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:44.633574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.685531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.686000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:232:2246], cookie=8759746472042646478, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.686107Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:232:2246], cookie=8759746472042646478) 2025-04-10T11:05:44.686914Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:238:2251], cookie=14901537684306079440, id=3, path="") 2025-04-10T11:05:44.687088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter 
resource 3 "Root/Folder" 2025-04-10T11:05:44.711658Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:238:2251], cookie=14901537684306079440) 2025-04-10T11:05:44.712471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:243:2256], cookie=1998558609240868489, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.712557Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:243:2256], cookie=1998558609240868489) 2025-04-10T11:05:44.741147Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:44.741286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:44.742087Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:44.742871Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.783373Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.783854Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:283:2286], cookie=6353216785615563444, ids=[], paths=[], recursive=1) 2025-04-10T11:05:44.783970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:283:2286], cookie=6353216785615563444) 2025-04-10T11:05:45.423426Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:45.423576Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:45.454112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:45.454291Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:45.479487Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:45.480082Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=10499643741048723850, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:45.480352Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-04-10T11:05:45.498314Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=10499643741048723850) 2025-04-10T11:05:45.499164Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=17821654622122953513, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T11:05:45.499409Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-04-10T11:05:45.513251Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=17821654622122953513) 2025-04-10T11:05:45.521410Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 4070219378389714730. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:45.521508Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=4070219378389714730) 2025-04-10T11:05:45.522601Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 8606897831364449669. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2025-04-10T11:05:45.522660Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=8606897831364449669) >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> KqpNewEngine::Update+UseSink >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> KqpRanges::IsNotNullInJsonValue >> KqpNewEngine::LocksSingleShard >> TDataShardLocksTest::UseLocksCache [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-04-10T11:05:41.288205Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.430028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.430167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.498113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:132:2158], cookie=9691845761795453873, path="/foo/bar/baz") 2025-04-10T11:05:41.519516Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:132:2158], cookie=9691845761795453873, status=SUCCESS) 2025-04-10T11:05:41.528900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:141:2165], cookie=9968864278736120633) 2025-04-10T11:05:41.541096Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxConfigGet::Complete (sender=[1:141:2165], cookie=9968864278736120633) 2025-04-10T11:05:41.542481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:146:2170], cookie=7252185783555285297, path="/foo/bar/baz") 2025-04-10T11:05:41.556007Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:146:2170], cookie=7252185783555285297, status=SUCCESS) 2025-04-10T11:05:41.556598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:151:2175], cookie=9371747313503945044) 2025-04-10T11:05:41.569203Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:151:2175], cookie=9371747313503945044) 2025-04-10T11:05:41.789966Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.790085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.790828Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.791119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.826691Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.827046Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:193:2207], cookie=3049879926433446320) 2025-04-10T11:05:41.839928Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:193:2207], cookie=3049879926433446320) 2025-04-10T11:05:41.840762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:201:2214], cookie=2518640888214438042, path="/foo/bar/baz") 2025-04-10T11:05:41.853301Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:201:2214], cookie=2518640888214438042, status=SUCCESS) 2025-04-10T11:05:41.854004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2219], cookie=6246409354969557082, path="/foo/bar/baz") 2025-04-10T11:05:41.854084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2219], cookie=6246409354969557082, status=PRECONDITION_FAILED) 2025-04-10T11:05:42.447112Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.447232Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.467455Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.467617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.493030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.493425Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:130:2156], cookie=3263228097556293585, name="Lock1") 2025-04-10T11:05:42.493521Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:130:2156], cookie=3263228097556293585) 2025-04-10T11:05:42.899110Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.899226Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.920754Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.920897Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.945340Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.946034Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Execute (sender=[3:132:2158], cookie=13155877169125829067, session=0, seqNo=0) 2025-04-10T11:05:42.946292Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.959028Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=13155877169125829067, session=1) 2025-04-10T11:05:42.959425Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.959616Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:42.959725Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.972962Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111) 2025-04-10T11:05:42.973577Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:143:2167], cookie=321757065329539407, name="Lock1", force=0) 2025-04-10T11:05:42.986402Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:143:2167], cookie=321757065329539407) 2025-04-10T11:05:42.987014Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2172], cookie=8912899106811350084, name="Sem1", force=0) 2025-04-10T11:05:42.999305Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2172], cookie=8912899106811350084) 2025-04-10T11:05:42.999716Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:153:2177], cookie=4929533231050826520, name="Sem1", limit=42) 2025-04-10T11:05:42.999824Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-10T11:05:43.011852Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:153:2177], cookie=4929533231050826520) 2025-04-10T11:05:43.012397Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:158:2182], cookie=15807261328613426519, name="Sem1", force=0) 2025-04-10T11:05:43.012505Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-04-10T11:05:43.025274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:158:2182], cookie=15807261328613426519) 2025-04-10T11:05:43.025817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2187], cookie=17978985087832460976, name="Sem1", force=0) 2025-04-10T11:05:43.038347Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2187], cookie=17978985087832460976) 2025-04-10T11:05:43.639790Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.639913Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.660899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.660997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.686269Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.686725Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=6444263580909915583, session=0, seqNo=0) 2025-04-10T11:05:43.686900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new session 1 2025-04-10T11:05:43.700493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=6444263580909915583, session=1) 2025-04-10T11:05:43.700835Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=4334039970044268547, session=0, seqNo=0) 2025-04-10T11:05:43.700977Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:43.712724Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=4334039970044268547, session=2) 2025-04-10T11:05:43.713066Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=13119345403804331616 2025-04-10T11:05:43.713662Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=2400457553580512556, name="Sem1", limit=3) 2025-04-10T11:05:43.713854Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:05:43.725593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=2400457553580512556) 2025-04-10T11:05:43.725885Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2025-04-10T11:05:43.725964Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2025-04-10T11:05:43.726166Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2025-04-10T11:05:43.726262Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2025-04-10T11:05:43.726453Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=52931544265996198, session=2, seqNo=0) 2025-04-10T11:05:43.739497Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=52931544265996198, session=2) 2025-04-10T11:05:43.739873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2025-04-10T11:05:43.739980Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 2025-04-10T11:05:43.740237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2025-04-10T11:05:43.740308Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2025-04-10T11:05:43.740814Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=11522823423035018750, name="Sem1") 2025-04-10T11:05:43.753525Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=11522823423035018750) 2025-04-10T11:05:43.753981Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:43.754172Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:05:43.769968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2025-04-10T11:05:43.770414Z node 4 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2025-04-10T11:05:43.770618Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-10T11:05:43.784558Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxS ... .788145Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-10T11:05:44.802247Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=9922510909504062236) 2025-04-10T11:05:44.802725Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2025-04-10T11:05:44.802870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-04-10T11:05:44.816571Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2025-04-10T11:05:44.817024Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2025-04-10T11:05:44.817128Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2025-04-10T11:05:44.817476Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2025-04-10T11:05:44.830494Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2025-04-10T11:05:45.194631Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.215636Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.226828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2025-04-10T11:05:45.244770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2025-04-10T11:05:45.245410Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2025-04-10T11:05:45.245551Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=130) 2025-04-10T11:05:45.246058Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2025-04-10T11:05:45.260950Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2025-04-10T11:05:45.261583Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2025-04-10T11:05:45.261702Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2025-04-10T11:05:45.262012Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2025-04-10T11:05:45.262093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2025-04-10T11:05:45.881757Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:45.881898Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Execute 2025-04-10T11:05:45.910025Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:45.910244Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:45.942782Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:45.954478Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=3731696027791302139, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-04-10T11:05:45.954863Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-04-10T11:05:45.971065Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=3731696027791302139) 2025-04-10T11:05:45.971876Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=15473327994446633227, path="/Root1/Res", config={ }) 2025-04-10T11:05:45.972187Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-04-10T11:05:45.984800Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=15473327994446633227) 2025-04-10T11:05:45.985623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2170], cookie=17899761590025122563, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-04-10T11:05:45.985847Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-04-10T11:05:45.998564Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2170], cookie=17899761590025122563) 2025-04-10T11:05:45.999300Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2175], cookie=9115351211193914199, path="/Root2/Res", config={ }) 2025-04-10T11:05:45.999598Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-04-10T11:05:46.015000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2175], cookie=9115351211193914199) 2025-04-10T11:05:46.015862Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2180], cookie=3685641030641146666, path="/Root2/Res/Subres", config={ }) 2025-04-10T11:05:46.020350Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-04-10T11:05:46.034659Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2180], cookie=3685641030641146666) 2025-04-10T11:05:46.036607Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 2912518050563172981. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.036719Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=2912518050563172981) 2025-04-10T11:05:46.081130Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.129163Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.161266Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.162231Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2189]. Cookie: 16022304488484277804. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-10T11:05:46.163294Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 2356824110141440914. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.163362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=2356824110141440914) 2025-04-10T11:05:46.195860Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.238265Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.239225Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:176:2196]. Cookie: 8589844416212390645. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-10T11:05:46.240230Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 8716289208179017285. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.240307Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=8716289208179017285) 2025-04-10T11:05:46.241160Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 6699839691631834730. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.241224Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=6699839691631834730) 2025-04-10T11:05:46.283119Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.283275Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:46.284278Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2203]. Cookie: 13109178045747508435. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> KqpNewEngine::PureExpr >> KqpNewEngine::ContainerRegistryCombiner >> KqpRanges::UpdateMulti >> KqpNewEngine::KeyColumnOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-04-10T11:02:50.855740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:02:50.856346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:02:50.856451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001137/r3tmp/tmpKCV16x/pdisk_1.dat 2025-04-10T11:02:51.609320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1889, node 1 2025-04-10T11:02:52.044613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:52.044690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:52.044726Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:52.045273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:02:52.048843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:02:52.156001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:52.156179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:52.172082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27942 2025-04-10T11:02:52.946373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:02:57.535985Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:02:57.577669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:57.577796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:57.619892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:02:57.627254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:57.978700Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.979297Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.982998Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983196Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983329Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983639Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983723Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:57.983878Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:02:58.174754Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:58.174886Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:58.191927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:02:58.389548Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:58.500274Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:02:58.500398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:02:58.577027Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:02:58.589354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:02:58.589711Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:02:58.589787Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:02:58.589859Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:02:58.589937Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:02:58.589996Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:02:58.590081Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:02:58.590779Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:02:58.625700Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:02:58.625845Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:02:58.648087Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:02:58.653074Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:02:58.653453Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:02:58.664715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:02:58.684655Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:02:58.684720Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:02:58.684797Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:02:58.711975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:02:58.720202Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:02:58.720386Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:02:58.968735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:02:59.167480Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:02:59.274853Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:03:00.487348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:00.487744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:00.514353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:03:01.008005Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:03:01.008282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:03:01.008592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:03:01.008733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:03:01.008853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:03:01.008996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:03:01.009210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:03:01.009348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:03:01.009613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:03:01.009743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:03:01.009888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:03:01.010041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:03:01.045582Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:03:01.045672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=T ... server id = [2:8562:6483], tablet id = 72075186224037901, status = OK 2025-04-10T11:05:40.505870Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8557:6478], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.506877Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8558:6479], server id = [2:8563:6484], tablet id = 72075186224037902, status = OK 2025-04-10T11:05:40.506938Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8558:6479], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.508597Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8559:6480], server id = [2:8564:6485], tablet id = 72075186224037903, status = OK 2025-04-10T11:05:40.508662Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8559:6480], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.516206Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:05:40.517026Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8555:6476], server id = [2:8560:6481], tablet id = 72075186224037899 2025-04-10T11:05:40.517084Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.518630Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8578:6496], server id = [2:8581:6497], tablet id = 72075186224037904, status = OK 2025-04-10T11:05:40.518755Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8578:6496], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.519387Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:05:40.520853Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8556:6477], server id = [2:8561:6482], tablet id = 72075186224037900 2025-04-10T11:05:40.520907Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.522072Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:05:40.522840Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8557:6478], server id = [2:8562:6483], tablet id = 72075186224037901 2025-04-10T11:05:40.522880Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.523754Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:05:40.524467Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8582:6498], server id = [2:8585:6501], tablet id = 72075186224037905, status = OK 2025-04-10T11:05:40.524592Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8582:6498], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.525059Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8558:6479], server id = [2:8563:6484], tablet id = 72075186224037902 2025-04-10T11:05:40.525109Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.526215Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:05:40.527403Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, 
client id = [2:8584:6500], server id = [2:8588:6504], tablet id = 72075186224037906, status = OK 2025-04-10T11:05:40.527522Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8584:6500], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.528253Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8559:6480], server id = [2:8564:6485], tablet id = 72075186224037903 2025-04-10T11:05:40.528295Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.529480Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6503], server id = [2:8593:6508], tablet id = 72075186224037907, status = OK 2025-04-10T11:05:40.529555Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6503], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.530061Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8590:6506], server id = [2:8594:6509], tablet id = 72075186224037908, status = OK 2025-04-10T11:05:40.530117Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8590:6506], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:40.533145Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:05:40.534935Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8578:6496], server id = [2:8581:6497], tablet id = 72075186224037904 2025-04-10T11:05:40.534969Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.535345Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:05:40.535979Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8582:6498], server id = [2:8585:6501], tablet id = 72075186224037905 2025-04-10T11:05:40.536008Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.536725Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:05:40.537157Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8584:6500], server id = [2:8588:6504], tablet id = 72075186224037906 2025-04-10T11:05:40.537180Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.538043Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:05:40.538241Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6503], server id = [2:8593:6508], tablet id = 72075186224037907 2025-04-10T11:05:40.538267Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.538844Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:05:40.538898Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:05:40.539135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:05:40.539543Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8590:6506], server id = [2:8594:6509], tablet id = 72075186224037908 2025-04-10T11:05:40.539566Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:40.554197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:05:40.554531Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:05:41.100973Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 
2025-04-10T11:05:41.101096Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:05:44.257748Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:05:44.257994Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:05:44.490205Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-04-10T11:05:44.490310Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:05:44.490526Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:05:44.490571Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:05:44.490691Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:05:44.490795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:05:44.491373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:05:44.508284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:05:44.508493Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-10T11:05:44.509165Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8719:6573], server id = [2:8720:6574], tablet id = 72075186224037900, status = OK 2025-04-10T11:05:44.509264Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8719:6573], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:05:44.510523Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:05:44.510625Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:05:44.510797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:05:44.510974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:05:44.511381Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:05:44.513690Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8719:6573], server id = [2:8720:6574], tablet id = 72075186224037900 2025-04-10T11:05:44.513726Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:05:44.514974Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:05:44.567086Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8738:6592]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:05:44.567414Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:05:44.567475Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8738:6592], StatRequests.size() = 1 2025-04-10T11:05:44.714029Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-10T11:05:42.000000Z 2025-04-10T11:05:44.714797Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTBlOTQyYmEtYzBlYjM5NTEtYTZjNTlmOGYtYzkzOWQwNTQ=, TxId: 2025-04-10T11:05:44.714845Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTBlOTQyYmEtYzBlYjM5NTEtYTZjNTlmOGYtYzkzOWQwNTQ=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:05:44.715485Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8752:6598]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:05:44.715698Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:05:44.716202Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:05:44.716260Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:05:44.719464Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:05:44.719542Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:05:44.719593Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:05:44.759448Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> KqpSqlIn::KeySuffix >> KqpNewEngine::DuplicatedResults >> KqpSqlIn::TableSource |77.2%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.2%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> KqpNotNullColumns::UpsertNotNullPk >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TSequence::CopyTableWithSequence |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> BuildStatsHistogram::Many_Mixed [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> BSCReadOnlyPDisk::ReadOnlySlay |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TSequence::CopyTableWithSequence [GOOD] |77.2%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> TSequence::AlterSequence >> BuildStatsHistogram::Many_Serial >> TKesusTest::TestQuoterAccountLabels [GOOD] >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::Update+UseSink [GOOD] >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> TKesusTest::TestQuoterAccountResourcesForgetClient >> KqpRanges::UpdateMulti [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpNewEngine::LocksMultiShard >> KqpNewEngine::Update-UseSink >> TKesusTest::TestPassesUpdatedPropsToSession >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpNewEngine::PureTxMixedWithDeferred >> KqpNewEngine::EmptyMapWithBroadcast >> KqpNewEngine::KeyColumnOrder2 >> KqpRanges::UpdateWhereInBigLiteralList >> KqpRanges::DuplicateKeyPredicateLiteral >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-04-10T11:05:41.288662Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301088Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.418821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.418944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.488517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.503493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=10643304750314699404, path="/Root", config={ 
MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:41.506866Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:41.521977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=10643304750314699404) 2025-04-10T11:05:41.522673Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=5057602728810926046, path="/Root/Res", config={ }) 2025-04-10T11:05:41.522953Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-10T11:05:41.535202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=5057602728810926046) 2025-04-10T11:05:41.544500Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:147:2171]. Cookie: 5560763872156463972. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:41.544588Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:147:2171], cookie=5560763872156463972) 2025-04-10T11:05:41.548615Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:147:2171]. Cookie: 15013067314502104250. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-10T11:05:41.548703Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:147:2171], cookie=15013067314502104250) 2025-04-10T11:05:43.915187Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:43.915299Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:43.941838Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:43.942054Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:43.967985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:43.968511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=15875835714838780186, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:43.968843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:43.982178Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=15875835714838780186) 2025-04-10T11:05:43.983196Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 6162658649373584651. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:43.983259Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=6162658649373584651) 2025-04-10T11:05:43.983903Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 14649611269182083492. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:43.983960Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=14649611269182083492) 2025-04-10T11:05:43.984448Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 4968142932853109498. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-10T11:05:43.984511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=4968142932853109498) 2025-04-10T11:05:43.984933Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 3579205486159795523. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-10T11:05:43.984985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=3579205486159795523) 2025-04-10T11:05:46.440503Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:46.440632Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:46.462303Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:46.462478Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:46.487803Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:46.488298Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=9182560568740756908, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:46.488705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:46.502387Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=9182560568740756908) 2025-04-10T11:05:46.503077Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=2079388746277202349, path="/Root/Res1", config={ }) 2025-04-10T11:05:46.503342Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-04-10T11:05:46.515557Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=2079388746277202349) 2025-04-10T11:05:46.516189Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2171], cookie=12485622304073066225, path="/Root/Res2", config={ }) 2025-04-10T11:05:46.516422Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-04-10T11:05:46.533756Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2171], cookie=12485622304073066225) 2025-04-10T11:05:46.538452Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 1409116676645100109. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.538553Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=1409116676645100109) 2025-04-10T11:05:46.539397Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 2378718851152783873. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:46.539456Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=2378718851152783873) 2025-04-10T11:05:46.540053Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2176]. Cookie: 8468271108898421424. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-04-10T11:05:46.540114Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:152:2176], cookie=8468271108898421424) 2025-04-10T11:05:48.893031Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:48.893161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:48.916793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:48.916968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:48.947183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:48.947739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=17779987581481414586, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-10T11:05:48.948160Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:48.961741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=17779987581481414586) 2025-04-10T11:05:48.963195Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 18156839481321145682. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:48.963284Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=18156839481321145682) 2025-04-10T11:05:48.963907Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 8773789868033894945. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 26500 } } 2025-04-10T11:05:48.963968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=8773789868033894945) 2025-04-10T11:05:51.249841Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:51.249970Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:51.271749Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:51.271889Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:51.296440Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:51.296922Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=17456317753433971874, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-10T11:05:51.297210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:51.309817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=17456317753433971874) 2025-04-10T11:05:51.310572Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=6649795534395603148, path="/Root/Res", config={ }) 2025-04-10T11:05:51.310837Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-10T11:05:51.323374Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=6649795534395603148) 2025-04-10T11:05:51.324394Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 6021643980164102219. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:51.324463Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=6021643980164102219) 2025-04-10T11:05:51.325040Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:150:2174], cookie=11568318204125074254, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-04-10T11:05:51.325225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-04-10T11:05:51.325444Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-04-10T11:05:51.337969Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:150:2174], cookie=11568318204125074254) 2025-04-10T11:05:51.338737Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 7909106629776460915. Data: { } 2025-04-10T11:05:51.338799Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=7909106629776460915) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-04-10T11:05:31.919870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:05:31.920326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:05:31.920628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157b/r3tmp/tmp2bJmuK/pdisk_1.dat 2025-04-10T11:05:33.583440Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.202052s 2025-04-10T11:05:33.583582Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.202231s 2025-04-10T11:05:33.657435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:05:33.805966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:33.948359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:33.948583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:34.017551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:34.303376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:05:34.490343Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:05:34.492246Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:05:34.501041Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2574] 2025-04-10T11:05:34.501471Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:34.602362Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:05:34.602485Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:05:34.604016Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:05:34.604468Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-10T11:05:34.604802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:34.615007Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:05:34.615568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:34.615725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:05:34.660661Z 
node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:05:34.662353Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:05:34.662505Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:05:34.673292Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:05:34.673645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:05:34.673767Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2574] in generation 1 2025-04-10T11:05:34.674429Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:34.674547Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:05:34.676533Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:05:34.676602Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:05:34.676644Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:05:34.676986Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:05:34.677142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:05:34.677217Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-10T11:05:34.688313Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:05:34.720344Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:05:34.745486Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:05:34.745736Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-10T11:05:34.745791Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:05:34.745859Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:05:34.745903Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:05:34.746318Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:673:2574], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:05:34.746399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:05:34.746525Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:05:34.746568Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:05:34.746649Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:05:34.746721Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-10T11:05:34.746748Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:05:34.746776Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:05:34.746820Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-04-10T11:05:34.747323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:05:34.747361Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:05:34.747531Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:05:34.747651Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:05:34.747821Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:05:34.747873Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:05:34.747923Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:05:34.747976Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:05:34.748028Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:05:34.748071Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:05:34.748130Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:05:34.748188Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:05:34.748253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:05:34.748790Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:05:34.748830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:05:34.748860Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-04-10T11:05:34.748902Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:05:34.748932Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:05:34.748957Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:05:34.749000Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:05:34.749128Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:689:2583], Recipient [1:673:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:05:34.749226Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:05:34.749281Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2569], serverId# [1:689:2583], sessionId# [0:0:0] 2025-04-10T11:05:34.749410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:689:2583] 2025-04-10T11:05:34.749453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:05:34.749594Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:05:34.749808Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:05:34.749867Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 
2025-04-10T11:05:34.749971Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:05:34.750063Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:05:34.750112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:05:34.750185Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:05:34.750233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:05:34.758049Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:05:34.758172Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:05:34.758249Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:05:34.758298Z node 1 :TX_DATASHARD TRA ... xecution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:05:46.527442Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:05:46.527470Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:05:46.527497Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:05:46.527527Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:05:46.527574Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:05:46.527617Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:05:46.527793Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:984:2785]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-10T11:05:46.543495Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfp161v29x8ms97ckxvefn7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2MxYTAyMzEtMWY5MDJjYmUtZDM3ZTEzYWUtMWE0MjRjNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:05:46.547014Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1012:2800], Recipient [2:984:2785]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:05:46.547209Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:05:46.547305Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-10T11:05:46.547421Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:05:46.547466Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:05:46.547508Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:05:46.547549Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:05:46.547604Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-10T11:05:46.547670Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:05:46.547700Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:05:46.547726Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:05:46.547753Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:05:46.547897Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:05:46.548204Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-10T11:05:46.548283Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is DelayComplete 2025-04-10T11:05:46.548334Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:05:46.548395Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:05:46.548443Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:05:46.548508Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:05:46.548559Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:05:46.548603Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-10T11:05:46.548650Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:05:46.559745Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:05:46.559840Z node 2 :TX_DATASHARD TRACE: 
Complete execution for [2500:281474976715661] at 72075186224037888 on unit CompleteWrite 2025-04-10T11:05:46.559931Z node 2 :TX_DATASHARD DEBUG: Complete write [2500 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [2:941:2726] 2025-04-10T11:05:46.560043Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-10T11:05:46.560093Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:05:46.560205Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:05:46.560270Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:05:46.560352Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:05:46.560474Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:05:46.560604Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:05:46.560655Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:05:46.560715Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1012:2800], 0} after executionsCount# 1 2025-04-10T11:05:46.560776Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1012:2800], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:05:46.560897Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1012:2800], 0} finished in read 2025-04-10T11:05:46.561198Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:984:2785], Recipient [2:673:2575]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-10T11:05:46.561253Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:05:46.561310Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-10T11:05:46.563260Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1012:2800], Recipient [2:984:2785]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:05:46.563352Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:05:46.563643Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1012:2800], Recipient [2:673:2575]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-10T11:05:46.563836Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-10T11:05:46.563927Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-04-10T11:05:46.564010Z node 2 
:TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:05:46.564046Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-04-10T11:05:46.564080Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:05:46.564115Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:05:46.564178Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-04-10T11:05:46.564220Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:05:46.564248Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:05:46.564276Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-04-10T11:05:46.564302Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-04-10T11:05:46.564443Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-10T11:05:46.564694Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-10T11:05:46.564757Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1012:2800], 1} after executionsCount# 1 2025-04-10T11:05:46.564822Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1012:2800], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:05:46.564914Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1012:2800], 1} finished in read 2025-04-10T11:05:46.564994Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:05:46.565037Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:05:46.565068Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:05:46.565099Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:05:46.565149Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:05:46.565173Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:05:46.565200Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-10T11:05:46.565233Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:05:46.565361Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:05:46.566232Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1012:2800], Recipient [2:673:2575]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-10T11:05:46.566291Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } >> 
KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> TPersQueueTest::DisableWrongSettings [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> TPersQueueTest::DisableDeduplication >> KqpNewEngine::InShardsWrite >> KqpNotNullColumns::SelectNotNullColumns >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::SimpleKey_Negated >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> KqpSqlIn::KeySuffix [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::DeferredEffects >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::LocksMultiShard [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> KqpNewEngine::ReadAfterWrite >> KqpSqlIn::KeySuffix_OnlyTail >> KqpNewEngine::LocksMultiShardOk |77.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |77.3%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |77.3%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |77.3%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |77.3%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |77.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |77.3%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::Update-UseSink [GOOD] >> KqpNewEngine::UpdateFromParams >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::LocksEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-04-10T11:05:41.288171Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.436379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.436533Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.489942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.504501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:132:2158], cookie=6377043283268060535, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-04-10T11:05:41.504729Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:132:2158], cookie=6377043283268060535) 2025-04-10T11:05:41.505346Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=1772374615441998092, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-04-10T11:05:41.505481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=1772374615441998092) 2025-04-10T11:05:41.506006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=16549300732662838953, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-04-10T11:05:41.506579Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-04-10T11:05:41.521050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=16549300732662838953) 2025-04-10T11:05:41.521897Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2171], cookie=1597839658630495845, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-04-10T11:05:41.522176Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-04-10T11:05:41.537101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2171], cookie=1597839658630495845) 2025-04-10T11:05:42.021893Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.022024Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.050677Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.050836Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.099212Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.099821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=6187435810062188728, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-10T11:05:42.100397Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:42.120101Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], 
cookie=6187435810062188728) 2025-04-10T11:05:42.120865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:140:2164], cookie=5495237327457625183, path="/Root/Res", config={ }) 2025-04-10T11:05:42.121158Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-10T11:05:42.140192Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:140:2164], cookie=5495237327457625183) 2025-04-10T11:05:42.154027Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:145:2169]. Cookie: 12156672417460749683. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:42.158250Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:145:2169], cookie=12156672417460749683) 2025-04-10T11:05:42.159205Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:145:2169]. Cookie: 656079500334256173. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-04-10T11:05:42.159279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:145:2169], cookie=656079500334256173) 2025-04-10T11:05:44.600531Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:44.600692Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:44.626584Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:44.626762Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:44.664318Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:44.664980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=12037750768892258111, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:44.665505Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:44.679463Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=12037750768892258111) 2025-04-10T11:05:44.680261Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=4131611657293294189, path="/Root/Res", config={ }) 2025-04-10T11:05:44.680592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-10T11:05:44.695065Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=4131611657293294189) 2025-04-10T11:05:44.696239Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:147:2171]. Cookie: 12377669843817021844. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:44.696331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:147:2171], cookie=12377669843817021844) 2025-04-10T11:05:44.697017Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:147:2171]. Cookie: 1503449326982404720. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-04-10T11:05:44.697080Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:147:2171], cookie=1503449326982404720) 2025-04-10T11:05:47.128678Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:47.128795Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:47.149499Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:47.149656Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:47.184689Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:47.185190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=15499958334879870597, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:47.185576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:47.198656Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=15499958334879870597) 2025-04-10T11:05:47.199805Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 3743017596662539937. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:47.199895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=3743017596662539937) 2025-04-10T11:05:47.200485Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 13250777600334821640. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-10T11:05:47.200533Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=13250777600334821640) 2025-04-10T11:05:47.200898Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 4326071762430943597. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-04-10T11:05:47.200937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=4326071762430943597) 2025-04-10T11:05:49.631558Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:49.631730Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:49.658599Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:49.658744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:49.684054Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:49.684744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=3725966380280548547, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-10T11:05:49.685152Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:05:49.702803Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=3725966380280548547) 2025-04-10T11:05:49.704022Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:142:2166]. Cookie: 14887243875797560689. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:49.704101Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:142:2166], cookie=14887243875797560689) 2025-04-10T11:05:49.704912Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:142:2166]. Cookie: 1362506359875265681. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-04-10T11:05:49.704973Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:142:2166], cookie=1362506359875265681) 2025-04-10T11:05:52.136675Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:173:2190]. Cookie: 5051181313636477971. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:52.136754Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:173:2190], cookie=5051181313636477971) 2025-04-10T11:05:52.137203Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:173:2190]. Cookie: 7156467762105801154. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-04-10T11:05:52.137238Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:173:2190], cookie=7156467762105801154) 2025-04-10T11:05:54.189638Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:200:2216]. Cookie: 1708887133677389158. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-10T11:05:54.189721Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:200:2216], cookie=1708887133677389158) 2025-04-10T11:05:54.190249Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:200:2216]. Cookie: 3641559291166775185. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-04-10T11:05:54.190294Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:200:2216], cookie=3641559291166775185) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:05:44.415145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:44.415292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:44.415421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:44.415476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:44.415535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:44.415573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:44.415648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:44.415737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:44.416130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:44.516585Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-10T11:05:44.516669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:44.543082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:44.543456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:44.543656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:44.553158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:44.553441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:44.554287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:44.554837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:44.557356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:44.558998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:44.559067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:44.559322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:44.559379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:44.559425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:44.559611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.567893Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:05:44.736707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:44.736999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.737270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:44.737566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:44.737628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.740616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:44.740798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-04-10T11:05:44.741053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.741133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:44.741173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:44.741206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:44.743900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.743974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:44.744014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:44.747086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.747144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.747251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:44.747308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:44.751438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:44.755520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:44.755799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:44.757018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:44.757274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:44.757353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:44.757708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:44.757811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:44.758002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:44.758110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:44.761885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:44.761951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:44.762237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:44.762287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:44.762595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:44.762645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:44.762753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:44.762808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:44.762867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:44.762923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:44.762972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:05:44.763017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:44.763063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:05:44.763095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:05:44.763188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:05:44.763226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:05:44.763259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:05:44.766173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:44.766330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:05:44.766369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-10T11:05:54.488854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-04-10T11:05:54.488901Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:05:54.489641Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:986:2932], Recipient [7:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1395 } } 2025-04-10T11:05:54.489678Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-10T11:05:54.489758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1395 } } 2025-04-10T11:05:54.489796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-10T11:05:54.489943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1395 } } 2025-04-10T11:05:54.490046Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1395 } } 2025-04-10T11:05:54.490086Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:05:54.491235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1050:2987], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:05:54.491279Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:05:54.491324Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:05:54.491563Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:986:2932], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 986 RawX2: 30064774004 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-10T11:05:54.491625Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-10T11:05:54.491785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 986 RawX2: 30064774004 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-10T11:05:54.491841Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-10T11:05:54.492011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 986 RawX2: 30064774004 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-10T11:05:54.492081Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:05:54.492219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 986 RawX2: 30064774004 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-10T11:05:54.492318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:54.492385Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-10T11:05:54.492439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:05:54.492487Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-04-10T11:05:54.492697Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:05:54.495628Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.495831Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-10T11:05:54.495914Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.498142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-10T11:05:54.498200Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.498321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-10T11:05:54.498350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.498531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-10T11:05:54.498579Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.498634Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-04-10T11:05:54.498779Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:986:2932] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-04-10T11:05:54.499209Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:05:54.499261Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:05:54.499368Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-10T11:05:54.499443Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-04-10T11:05:54.499588Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:05:54.499622Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-10T11:05:54.499668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-10T11:05:54.499735Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-10T11:05:54.499779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-10T11:05:54.499830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-04-10T11:05:54.499912Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:389:2357] message: TxId: 114 2025-04-10T11:05:54.499993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-10T11:05:54.500067Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-04-10T11:05:54.500143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-04-10T11:05:54.500304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T11:05:54.503555Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:05:54.503680Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:389:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-04-10T11:05:54.503901Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-04-10T11:05:54.503962Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1016:2954] 2025-04-10T11:05:54.504206Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1018:2956], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:05:54.504270Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:05:54.504316Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-04-10T11:05:54.505724Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1059:2996], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-04-10T11:05:54.505795Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:05:54.507974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:54.508252Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at 
schemeshard: 72057594046678944 2025-04-10T11:05:54.508776Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-04-10T11:05:54.509071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:05:54.513140Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:54.513408Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-04-10T11:05:54.513501Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |77.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink 2025-04-10 11:05:44,255 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:05:44,517 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1113283 46.0M 45.8M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/0017b7/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args 1113827 1.9G 1.9G 1.8G └─ ydb-core-kqp-ut-query --trace-path-append /home/runner/.ya/build/build_root/2gt4/0017b7/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/ytest.repo Test command err: Trying to start YDB, gRPC: 3867, MsgBus: 29375 2025-04-10T10:55:47.145807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636496711413113:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:47.154970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017b7/r3tmp/tmp7M2Kqn/pdisk_1.dat 2025-04-10T10:55:48.050981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:55:48.076484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:55:48.076588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:55:48.095979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3867, node 1 2025-04-10T10:55:48.510929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:55:48.510967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:55:48.510976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
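The wrapper timeout report above is worth unpacking: the test harness waits on the test binary for 600 seconds, and when the deadline expires it first prints the child process tree (pid, rss) — which is how the 1.9G ydb-core-kqp-ut-query process shows up — and only then terminates it. A minimal sketch of that watchdog pattern, assuming psutil is available; the function names (run_with_timeout, dump_process_tree) are hypothetical and this is not the actual yatest implementation:

import subprocess
import psutil  # assumed available; the real harness uses its own process helpers

def dump_process_tree(pid: int) -> None:
    # Print pid and RSS for a process and its children, like the table in the log.
    root = psutil.Process(pid)
    for proc in [root] + root.children(recursive=True):
        rss_mb = proc.memory_info().rss / (1024 * 1024)
        print(f"{proc.pid}  {rss_mb:.1f}M  {' '.join(proc.cmdline())[:80]}")

def run_with_timeout(cmd: list[str], timeout_sec: int = 600) -> int:
    proc = subprocess.Popen(cmd)
    try:
        return proc.wait(timeout=timeout_sec)
    except subprocess.TimeoutExpired:
        # Mirror the harness: report the tree first, then kill and re-raise,
        # which is what surfaces later as ExecutionTimeoutError in the traceback.
        print(f"Wrapper has overrun {timeout_sec} secs timeout. "
              f"Process tree before termination:")
        dump_process_tree(proc.pid)
        proc.kill()
        raise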
2025-04-10T10:55:48.511112Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29375 TClient is connected to server localhost:29375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:55:49.658660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:49.736941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.059386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.486445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:50.698868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:55:52.122399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636496711413113:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:55:52.122556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:55:52.969791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636518186252229:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:52.969920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:53.311745Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.129416s 2025-04-10T10:55:53.311795Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.129520s 2025-04-10T10:55:53.381335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.528401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.626827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.774660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:55:53.882764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:55:54.004662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:55:54.133138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636526776187737:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:54.133232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:54.133620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636526776187742:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:55:54.137978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:55:54.177558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636526776187744:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:55:54.246522Z node 1 :TX_PROXY ERROR: Actor# [1:7491636526776187817:4782] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:55:55.654086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:03.050304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T10:56:03.050354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T10:56:10.705428Z node 1 :TX_DATASHARD ERROR: CPU usage 70.6365 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-04-10T10:57:11.208028Z node 1 :TX_DATASHARD ERROR: CPU usage 104.448 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-04-10T10:57:48.529268Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.307, eph 77} end=2, 17 blobs 0r (max 150), put Spent{time=3.035s,wait=0.445s,interrupts=5} 2025-04-10T10:57:48.536566Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:330} Compact 193 on TGenCompactionParams{1001: gen 1 epoch 0, 5 parts} step 307, product {0 parts epoch 0} thrown 2025-04-10T10:57:59.235639Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037921.1.33, eph 79} end=2, 0 blobs 0r (max 1860), put Spent{time=2.051s,wait=0.000s,interrupts=0} 2025-04-10T10:57:59.236069Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037921:1:42} Compact 17 on TGenCompactionParams{1001: gen 255 epoch 0, 12 parts} step 33, product {0 parts epoch 0} thrown 2025-04-10T10:58:16.990490Z node 1 :TX_DATASHARD ERROR: CPU usage 98.4933 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037921 table: [/Root/LargeTable] 2025-04-10T10:58:47.254980Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxReassignGroups(72075186224037920)::Execute - tablet is not ready for group reassignment 2025-04-10T10:58:47.255200Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923013665696}: tablet 72075186224037920 could not find a group for channel 0 pool /Root:test 2025-04-10T10:58:47.255236Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923013665696}: tablet 72075186224037920 wasn't changed 2025-04-10T10:58:47.255261Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923013665696}: tablet 72075186224037920 skipped channel 0 2025-04-10T10:58:47.658347Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923015442688}: tablet 72075186224037921 could not find a group for channel 0 pool /Root:test 2025-04-10T10:58:47.658398Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923015442688}: tablet 72075186224037921 could not find a group for channel 1 pool /Root:test 2025-04-10T10:58:47.658418Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923015442688}: tablet 72075186224037921 wasn't changed 2025-04-10T10:58:47.658434Z node 1 :HIVE WARN: 
HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923015442688}: tablet 72075186224037921 skipped channel 0 2025-04-10T10:58:47.658493Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923015442688}: tablet 72075186224037921 skipped channel 1 2025-04-10T10:58:48.597856Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-10T10:58:49.146665Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: YELL ... ATASHARD ERROR: Prepare transaction failed. txid 24 at tablet 72075186224037890 errors: Status: STATUS_OUT_OF_SPACE Issues: { message: "Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 24" issue_code: 2006 severity: 1 } 2025-04-10T11:03:00.531741Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 24 at tablet 72075186224037890 Status: STATUS_OUT_OF_SPACE Issues: { message: "Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 24" issue_code: 2006 severity: 1 } 2025-04-10T11:03:00.559255Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491638355339296776:2339], Table: `/Root/LargeTable` ([72057594046644480:2:1]), SessionActorId: [2:7491637354611902096:2339]Got OUT_OF_SPACE for table `/Root/LargeTable`. ShardID=72075186224037890, Sink=[2:7491638355339296776:2339]. Ignored this error.{
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 24, code: 2006 } 2025-04-10T11:03:00.614341Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491638351044329418:2339], SessionActorId: [2:7491637354611902096:2339], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037890 is out of space. Table `/Root/LargeTable`., code: 2006
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 24, code: 2006 . sessionActorId=[2:7491637354611902096:2339]. isRollback=0 2025-04-10T11:03:00.674331Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjRlOTUwMTUtYzQ2OGRlMTktZTYyZjViY2ItMzA1MDdhYTI=, ActorId: [2:7491637354611902096:2339], ActorState: ExecuteState, TraceId: 01jrfnw2r48xbkegrecxf7xzky, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [2:7491638351044329419:2339] from: [2:7491638351044329418:2339] 2025-04-10T11:03:00.674592Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491638351044329419:2339] TxId: 281474976710761. Ctx: { TraceId: 01jrfnw2r48xbkegrecxf7xzky, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjRlOTUwMTUtYzQ2OGRlMTktZTYyZjViY2ItMzA1MDdhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037890 is out of space. Table `/Root/LargeTable`., code: 2006 subissue: {
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 24, code: 2006 } } 2025-04-10T11:03:00.674960Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjRlOTUwMTUtYzQ2OGRlMTktZTYyZjViY2ItMzA1MDdhYTI=, ActorId: [2:7491637354611902096:2339], ActorState: ExecuteState, TraceId: 01jrfnw2r48xbkegrecxf7xzky, Create QueryResponse for error on request, msg: 2025-04-10T11:03:00.757568Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 could not find a group for channel 0 pool /Root:test 2025-04-10T11:03:00.757645Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 could not find a group for channel 1 pool /Root:test 2025-04-10T11:03:00.757684Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 could not find a group for channel 2 pool /Root:test 2025-04-10T11:03:00.757708Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 wasn't changed 2025-04-10T11:03:00.757731Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 skipped channel 0 2025-04-10T11:03:00.757772Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 skipped channel 1 2025-04-10T11:03:00.757795Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923039269792}: tablet 72075186224037889 skipped channel 2 Got out of space. Successfully inserted 30 x 0 lines, each of size 1048576bytes2025-04-10T11:03:00.927512Z node 2 :BS_SKELETON WARN: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-10T11:03:00.927586Z node 2 :BS_SKELETON WARN: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 Trying to start YDB, gRPC: 18383, MsgBus: 23719 2025-04-10T11:03:10.443784Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491638398888080792:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:10.469063Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017b7/r3tmp/tmpZ5ljLV/pdisk_1.dat 2025-04-10T11:03:11.013903Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:11.014059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:11.022293Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:03:11.059112Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18383, node 3 2025-04-10T11:03:11.076651Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:03:11.095368Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:03:11.250976Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:03:11.251010Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-10T11:03:11.251022Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:11.251256Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23719 TClient is connected to server localhost:23719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:12.408605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:12.474282Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:03:12.511205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:15.448432Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491638398888080792:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:15.448554Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:18.674376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638433247820158:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:18.674496Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491638433247820166:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:18.674569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:18.681359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:03:18.727447Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491638433247820172:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:03:18.796147Z node 3 :TX_PROXY ERROR: Actor# [3:7491638433247820251:2715] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:26.053584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:03:26.053608Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:28.191605Z node 3 :TX_DATASHARD ERROR: CPU usage 72.4086 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] 2025-04-10T11:05:28.520545Z node 3 :TX_DATASHARD ERROR: CPU usage 67.4605 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/0017b7/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/0017b7/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |77.4%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.4%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn |77.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne >> KqpAgg::AggWithLookup |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |78.2%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> KqpSort::ReverseFirstKeyOptimized >> KqpRanges::UpdateWhereInNoFullScan+UseSink >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 1829446878673341787 2025-04-10T11:05:51.316964Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.163586s 2025-04-10T11:05:51.323958Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.170566s >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpRanges::DateKeyPredicate >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 1590620647874836437 2025-04-10T11:05:51.316968Z 1 00h00m00.002560s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.142514s 2025-04-10T11:05:51.323935Z 1 00h00m00.002560s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.149486s 2025-04-10T11:05:54.002121Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T11:05:54.003846Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8813589087447989041] 2025-04-10T11:05:54.050724Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |78.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> KqpSort::TopSortParameter >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> 
TKesusTest::TestSessionTimeoutAfterReboot >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |78.5%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> KqpNotNullColumns::InsertNotNullPk |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |78.5%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-04-10T11:06:05.811870Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadDifferentColumns >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksNoMutations >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::JoinWithParams >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::Delete+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-04-10T11:05:41.288895Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301351Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.412858Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.413013Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.489607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.020106Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.020237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.033721Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
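The long run of TTxSelfCheck Execute/Complete pairs that follows is the kesus tablet's periodic liveness tick: each tick advances the clock against session leases, and once the detached session's lease lapses a TTxSessionTimeout transaction deletes it ("Deleting session 1" further down). A toy model of that sweep under assumed names (Session, SessionTable, self_check) — the real logic lives in C++ tablet transactions, not in code like this:

from dataclasses import dataclass

@dataclass
class Session:
    session_id: int
    deadline: float  # lease expiry, refreshed while the client stays attached

class SessionTable:
    def __init__(self) -> None:
        self.sessions: dict[int, Session] = {}

    def attach(self, session_id: int, now: float, timeout_sec: float) -> None:
        self.sessions[session_id] = Session(session_id, now + timeout_sec)

    def self_check(self, now: float) -> None:
        # One TTxSelfCheck-style tick: drop every session whose lease lapsed.
        for sid in [s for s, v in self.sessions.items() if now >= v.deadline]:
            print(f"TTxSessionTimeout (session={sid}) -> Deleting session {sid}")
            del self.sessions[sid]

# Usage: after the client detaches, ticks keep firing until the lease runs out.
table = SessionTable()
table.attach(1, now=0.0, timeout_sec=5.0)
for tick in range(1, 8):
    table.self_check(now=float(tick))  # prints the deletion on the fifth tick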
2025-04-10T11:05:42.033856Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.067008Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.514748Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.514861Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.530140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.530309Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.556589Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.557239Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=14739481975108177605, session=0, seqNo=0) 2025-04-10T11:05:42.557458Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.572732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=14739481975108177605, session=1) 2025-04-10T11:05:42.573395Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:141:2165], cookie=11642349959493109530) 2025-04-10T11:05:42.573488Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:141:2165], cookie=11642349959493109530) 2025-04-10T11:05:42.993523Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.011092Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.386453Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.399314Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.756795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.770857Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.135852Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.151440Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.528819Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.542775Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.912499Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.929426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.290489Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.304941Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.676962Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.699443Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.072832Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.085409Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.501090Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.515534Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.887826Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.901118Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.271514Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.292232Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.668888Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.683093Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.082487Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.105114Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.553147Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.567790Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.950475Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.973351Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.382463Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.396897Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.763668Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.777357Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.140444Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.155017Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.524576Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.536950Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.907279Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.923173Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.280506Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.293110Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.657446Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.672356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.043914Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.059087Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.410500Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.423206Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.798797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.811400Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.171011Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.183397Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.538954Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.551511Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.909660Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.923064Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.322783Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.341864Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.708429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.726482Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.081467Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.094177Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.454333Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.468773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.831583Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.844412Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.202758Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.218176Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.601316Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.615005Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.975218Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.991359Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.351315Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.364389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.723516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.742337Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.129219Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.143137Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.524967Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.542035Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.911055Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.923904Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.289705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.302469Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.655651Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.668920Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:00.063471Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:00.076447Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:00.446531Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:00.459726Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:00.839564Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:00.855178Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:01.247752Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:01.267252Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:01.692270Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:01.707339Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:02.086945Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:02.099715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:02.466660Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:02.491595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:02.885458Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:02.903360Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:03.275965Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:03.292298Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:03.682543Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:03.695814Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:04.100743Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:04.120411Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:04.490615Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:04.510911Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:04.872157Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:04.894926Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:05.294639Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:05.320300Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:05.697932Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:05.710874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:06.145600Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:06:06.145761Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:06:06.164452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:06:06.178432Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:472:2479], cookie=15039845003743038576) 2025-04-10T11:06:06.178618Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:472:2479], cookie=15039845003743038576) 2025-04-10T11:06:06.718035Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:06.718213Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:06.738871Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:06.739026Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:06.774887Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:06.788561Z 
node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=4215951605899638172, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-10T11:06:06.788909Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-10T11:06:06.809050Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=4215951605899638172) 2025-04-10T11:06:06.813398Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:06:06.813536Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2025-04-10T11:06:06.814486Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T11:06:06.814542Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-04-10T11:06:06.860391Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-10T11:06:06.860493Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-10T11:06:06.860738Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2025-04-10T11:06:06.860880Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-04-10T11:06:06.907368Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> KqpNewEngine::OnlineRO_Consistent >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseLimitOptimized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:02:15.183533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:02:15.183637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:15.183696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:02:15.183736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:02:15.183784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:02:15.183817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:02:15.183888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:02:15.183992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:02:15.184384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:02:15.282879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:02:15.282955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:15.304135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:02:15.304519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:02:15.304698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:02:15.314408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
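The quoter exchange just above shows the fair-share behavior: two subscribers on "Root" (MaxUnitsPerSecond: 100) each receive TEvResourcesAllocated with Amount: 5 per tick, and after one pipe disconnects (SESSION_EXPIRED) the survivor's next allocation doubles to Amount: 10. A toy per-tick splitter consistent with those numbers, assuming a 100 ms accounting tick (an assumption — the log only shows the amounts) and ignoring the weights and burst handling of the real DRR scheduler:

def tick_allocation(max_units_per_second: float,
                    tick_ms: int,
                    subscribers: list[str]) -> dict[str, float]:
    # Split one tick's budget evenly across live subscribers. With 100 units/s,
    # a 100 ms tick and two subscribers this yields 5 each; drop one subscriber
    # and the survivor gets 10, matching the allocations in the log above.
    if not subscribers:
        return {}
    budget = max_units_per_second * tick_ms / 1000.0
    share = budget / len(subscribers)
    return {s: share for s in subscribers}

assert tick_allocation(100, 100, ["a", "b"]) == {"a": 5.0, "b": 5.0}
assert tick_allocation(100, 100, ["a"]) == {"a": 10.0}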
2025-04-10T11:02:15.314677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:02:15.315451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.315927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:02:15.318263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.319792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:15.319870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.320100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:02:15.320150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:02:15.320197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:02:15.320353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.327943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:02:15.507595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:02:15.507902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.508099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:02:15.508429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:02:15.508495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.512780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.512961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:02:15.513246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.513315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:02:15.513353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:02:15.513437Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:02:15.516064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.516133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:02:15.516175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:02:15.519334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.519404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.519492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.519551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.523797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:02:15.526360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:02:15.526598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:02:15.527741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:02:15.527901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:02:15.527955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.528274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:02:15.528342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:02:15.528534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:02:15.528655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:02:15.533288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:02:15.533347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T11:02:15.533575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:02:15.533630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:02:15.534413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:02:15.534472Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:02:15.534601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:15.534642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.534682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:02:15.534714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.534763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:02:15.534822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:02:15.534964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:02:15.535004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:02:15.535092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:02:15.535157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:02:15.535214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:02:15.537968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:15.538096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:02:15.538137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
imr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:04.506999Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:04.902180Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:04.902268Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:04.902386Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:04.902429Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:05.326893Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:06:05.327216Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-10T11:06:05.327414Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:05.327467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:05.327870Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 437 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-10T11:06:05.327913Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:06:05.327966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0437 2025-04-10T11:06:05.328086Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true 
Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:06:05.328133Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:06:05.328280Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:05.328321Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:05.386554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.386651Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.386696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-10T11:06:05.386786Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:06:05.386865Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T11:06:05.387005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-10T11:06:05.387098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-04-10T11:06:05.387253Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:06:05.398551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.398636Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.398673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:06:05.434542Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:712:2679]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:06:05.434829Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-04-10T11:06:05.435272Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:712:2679], Recipient [3:122:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 80 Memory: 124232 } ShardState: 2 UserTablePartOwners: 
72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 212 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-10T11:06:05.435333Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:06:05.435404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.008 2025-04-10T11:06:05.435562Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:06:05.435623Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:06:05.467081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-10T11:06:05.467194Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-10T11:06:05.467250Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-10T11:06:05.467343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-04-10T11:06:05.467389Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-10T11:06:05.467528Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.467595Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.467630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-10T11:06:05.467703Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:06:05.467741Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T11:06:05.467930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-04-10T11:06:05.468019Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2025-04-10T11:06:05.468251Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction 
enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-10T11:06:05.468347Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:06:05.482543Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.482632Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:06:05.482681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:06:05.750555Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:05.750647Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:06:05.750759Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:122:2148], Recipient [3:122:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:06:05.750807Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
>> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD]
>> KqpNewEngine::DqSourceLiteralRange
>> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD]
>> KqpRanges::UpdateWhereInNoFullScan-UseSink
>> TKesusTest::TestAcquireDowngrade [GOOD]
>> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD]
Test command err:
2025-04-10T11:05:04.415108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638889589020694:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:04.415163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0d/r3tmp/tmp1fudZo/pdisk_1.dat 2025-04-10T11:05:05.367286Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:05.444257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:05.444332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:05.458819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:05.475199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 11802, node 1 2025-04-10T11:05:05.763830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:05.763862Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:05.763890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:05.764011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22945
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:08.294031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:08.827080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22945 2025-04-10T11:05:09.094704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:09.418346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638889589020694:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:09.418440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:12.136280Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491638922152209846:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:12.136339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0d/r3tmp/tmppFiyAe/pdisk_1.dat 2025-04-10T11:05:12.347566Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4535, node 4 2025-04-10T11:05:12.482906Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:12.483069Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:12.525310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:12.543269Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:12.543293Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:12.543300Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:12.543433Z node 4 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:1523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:12.798626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:12.871785Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1523 2025-04-10T11:05:13.176870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:13.198289Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-10T11:05:13.395975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:13.422027Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7491638926447178404:2818], for# user2@builtin, access# DescribeSchema 2025-04-10T11:05:13.435953Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7491638926447178407:2819], for# user2@builtin, access# DescribeSchema 2025-04-10T11:05:13.447125Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:17.651696Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491638944113276815:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:17.651756Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0d/r3tmp/tmpg12BCQ/pdisk_1.dat 2025-04-10T11:05:17.859667Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15755, node 7 2025-04-10T11:05:17.970410Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:17.970486Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:17.972626Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:17.979405Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:17.979433Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:17.979439Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:17.979558Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:18.303745Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:18.383436Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5399 2025-04-10T11:05:18.707344Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:18.724002Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:05:22.652420Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491638944113276815:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:22.652575Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:32.839783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:05:32.839842Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:03.126509Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491639143425326023:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:03.132111Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0d/r3tmp/tmp0X2gOT/pdisk_1.dat 2025-04-10T11:06:03.641288Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:03.688227Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:03.688365Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:03.704078Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21737, node 10 2025-04-10T11:06:03.821477Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:03.821510Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:03.821519Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:03.821731Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:04.232694Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:04.410994Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20488 2025-04-10T11:06:04.807820Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
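These ": Error:" records come from DataStreams::TestInvalidRetentionCombinations, which passes ([GOOD]) precisely because the out-of-range settings are rejected with code 500080. As a reading aid, here is a minimal C++ sketch of the rules exactly as the messages state them, covering the retention profiles above and the write_speed tiers rejected in the two messages that follow; the function names and structure are illustrative only, not YDB's actual implementation:

#include <cstdint>

// Retention must match one of the two profiles quoted in the errors above:
// purely time-based (hours in [0, 24] with storage limit 0), or time plus
// storage based (hours in [0, 168] with storage in [51200, 1048576] MB).
bool RetentionCombinationValid(int64_t hours, int64_t storageMb) {
    const bool timeOnly = hours >= 0 && hours <= 24 && storageMb == 0;
    const bool withStorage = hours >= 0 && hours <= 168 &&
                             storageMb >= 51200 && storageMb <= 1048576;
    return timeOnly || withStorage;
}

// write_speed per partition accepts only the exact tiers from the set
// {131072, 524288, 1048576}, i.e. 128 KiB/s, 512 KiB/s, or 1 MiB/s.
bool WriteSpeedValid(int64_t bytesPerSecond) {
    return bytesPerSecond == 131072 ||
           bytesPerSecond == 524288 ||
           bytesPerSecond == 1048576;
}

Against the rejected inputs in this log: (hours 168, storage 10) and (hours 144, storage 0) fit neither retention profile, while 130048 and 1049600 are each exactly 1 KiB off the nearest allowed write_speed tier.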
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080
>> KqpNewEngine::Join [GOOD]
>> KqpNewEngine::ItemsLimit
>> BuildStatsHistogram::Many_Serial [GOOD]
>> KqpNotNullColumns::InsertNotNullPk [GOOD]
>> KqpNotNullColumns::InsertNotNull
>> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly
>> KqpRanges::DateKeyPredicate [GOOD]
>> KqpRanges::DeleteNotFullScan+UseSink
|78.6%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|78.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD]
>> KqpNewEngine::Aggregate
>> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD]
>> KqpRanges::UpdateWhereInFullScan+UseSink
|78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|78.6%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|78.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log}
|78.6%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|78.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
>> KqpSort::TopSortParameter [GOOD]
>> KqpSort::TopSortExpr
>> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD]
|78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest
>> TVersions::Wreck1Reverse [GOOD]
>> TVersions::Wreck0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD]
Test command err:
RandomSeed# 14819362319361800466 2025-04-10T11:06:14.584130Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584361Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584481Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584602Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584693Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584769Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080)
CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584851Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.584935Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.586372Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586517Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586594Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586662Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586730Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586793Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586904Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.586969Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.587074Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587178Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587241Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587287Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587334Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable 
error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587383Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587437Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.587486Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:14.590285Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590412Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590481Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590543Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590636Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590711Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590777Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.590843Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:14.828040Z 1 00h01m30.011024s :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 
HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "Some error reason" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
>> KqpNewEngine::ReadDifferentColumns [GOOD]
>> KqpNewEngine::ReadDifferentColumnsPk
|78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot
|78.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot
|78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD]
Test command err:
Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000
3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = 
(400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) 
key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg >> TPersQueueTest::DisableDeduplication [GOOD] >> TPersQueueTest::InflightLimit >> KqpNewEngine::Delete-UseSink >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::LeftSemiJoin >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpNewEngine::DecimalColumn >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD] >> KqpRanges::UpdateWhereInWithNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader 
for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:89:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:91:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:90:2118] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:144:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:89:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:91:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:93:2057] recipient: [9:92:2121] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:95:2057] recipient: [9:92:2121] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:94:2122] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:92:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:93:2057] recipient: [10:91:2121] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:95:2057] recipient: [10:91:2121] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:94:2122] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:148:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:94:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:95:2057] recipient: [11:93:2123] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:97:2057] recipient: [11:93:2123] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:96:2124] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:150:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:1 ... 57594037927937 is [35:99:2125] sender: [35:153:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:95:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:98:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:99:2057] recipient: [36:97:2124] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:101:2057] recipient: [36:97:2124] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:100:2125] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:154:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:98:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:101:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:102:2057] recipient: [37:100:2127] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:104:2057] recipient: [37:100:2127] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:103:2128] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:157:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:98:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:101:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:102:2057] recipient: [38:100:2127] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:104:2057] recipient: [38:100:2127] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:103:2128] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:157:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:99:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:102:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:103:2057] recipient: [39:101:2127] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:105:2057] recipient: [39:101:2127] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:104:2128] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:158:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:100:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:103:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:104:2057] recipient: [40:102:2128] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:106:2057] recipient: [40:102:2128] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:105:2129] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:125:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:101:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:104:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:105:2057] recipient: [41:103:2129] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:107:2057] recipient: [41:103:2129] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:106:2130] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:126:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:104:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:107:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:108:2057] recipient: [42:106:2132] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:110:2057] recipient: [42:106:2132] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:109:2133] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:163:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:104:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:107:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:108:2057] recipient: [43:106:2132] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:110:2057] recipient: [43:106:2132] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! 
new actor is[43:109:2133] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:163:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:104:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:107:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:108:2057] recipient: [44:106:2132] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:110:2057] recipient: [44:106:2132] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:109:2133] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:163:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:109:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:112:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:113:2057] recipient: [45:111:2136] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:115:2057] recipient: [45:111:2136] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! 
new actor is[45:114:2137] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:168:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:17:2064] >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple >> KqpRanges::DeleteNotFullScan+UseSink [GOOD] >> KqpRanges::CastKeyBounds |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |78.7%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> TGroupMapperTest::MonteCarlo [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopSortExprPk |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |78.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |78.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-04-10T11:05:41.287919Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.426046Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.426206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=14800328352266700400, session=0, seqNo=0) 2025-04-10T11:05:41.510821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=14800328352266700400, session=1) 2025-04-10T11:05:41.532513Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=8007836925379113545, session=0, seqNo=0) 
2025-04-10T11:05:41.532642Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.545922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=8007836925379113545, session=2) 2025-04-10T11:05:41.548991Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:143:2167], cookie=14861210523604487269, name="Sem1", limit=1) 2025-04-10T11:05:41.549145Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:05:41.561224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:143:2167], cookie=14861210523604487269) 2025-04-10T11:05:41.561628Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-10T11:05:41.561795Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:05:41.562043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-10T11:05:41.574345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.574443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-10T11:05:41.575079Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=17542090786766669056, name="Sem1") 2025-04-10T11:05:41.575183Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=17542090786766669056) 2025-04-10T11:05:41.575674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2178], cookie=4034340355987434442, name="Sem1") 2025-04-10T11:05:41.575764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2178], cookie=4034340355987434442) 2025-04-10T11:05:42.031651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.046999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:42.422401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.444330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:42.822343Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.839249Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.188320Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.207416Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.588877Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.605345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.963974Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.978028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.310720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.327332Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.705364Z node 1 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.719638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.089986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.123171Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.551293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.567168Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.957714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.974262Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.368958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.384490Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.735165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.752197Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.136910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.158726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.593456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.606828Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.030509Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.046931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.437746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.450934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.851477Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.867089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.274548Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.296252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.712837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.725311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.098295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.112776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.479810Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.492659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.860024Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.873617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.234565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.247680Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.606486Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.620699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.000167Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.012965Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.382802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.396589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.758912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.771926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.130861Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.143754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.539154Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.552065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.914547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.927906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.301424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.314004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.669571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.681851Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.040762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.054727Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.413534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.427422Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.783873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.804226Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.174499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.199683Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.588935Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.602134Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.962716Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.975677Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.364143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.376836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.742561Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.762056Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.140863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.153194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.504301Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.520411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.881909Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.894401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.289606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.30 ... 2057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:14.622715Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:15.032543Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:15.054897Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:15.441903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:15.459589Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:15.890627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:15.910639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:16.314715Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:16.331568Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:16.726626Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:16.742741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.117953Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.135475Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.526549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.542662Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.960328Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.982555Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.378754Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.400386Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.798639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.819089Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.214670Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.235087Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.646486Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.667324Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.140500Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.163133Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.562563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.579408Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.986619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.000538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:21.396566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.409740Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-10T11:06:21.790635Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.819026Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.262722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.279293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.715726Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.742511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.159891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.178444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.566493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.587361Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.975396Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.996068Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.390017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:06:24.390137Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:06:24.390239Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-10T11:06:24.407187Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:06:24.419240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:409:2409], cookie=1563825031848862469, name="Sem1") 2025-04-10T11:06:24.419424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:409:2409], cookie=1563825031848862469) 2025-04-10T11:06:24.963322Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:24.963447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:24.995085Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:24.995318Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:25.025058Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:25.025843Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=2821756915655452590, session=0, seqNo=0) 2025-04-10T11:06:25.026043Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:06:25.039194Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=2821756915655452590, session=1) 2025-04-10T11:06:25.039633Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=5392952458725971414, session=0, seqNo=0) 2025-04-10T11:06:25.039813Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:06:25.052209Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=5392952458725971414, session=2) 2025-04-10T11:06:25.052708Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=4348214203851325351, session=0, seqNo=0) 2025-04-10T11:06:25.052904Z node 5 :KESUS_TABLET 
DEBUG: [72057594037927937] Created new session 3 2025-04-10T11:06:25.065857Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=4348214203851325351, session=3) 2025-04-10T11:06:25.066898Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=7750383125958849594, name="Sem1", limit=3) 2025-04-10T11:06:25.067128Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:06:25.083570Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=7750383125958849594) 2025-04-10T11:06:25.084144Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-10T11:06:25.084367Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:06:25.084697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-10T11:06:25.084823Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-10T11:06:25.084969Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-10T11:06:25.103005Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-10T11:06:25.103139Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-10T11:06:25.103178Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-10T11:06:25.104029Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=13179294916431593995, name="Sem1") 2025-04-10T11:06:25.104190Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=13179294916431593995) 2025-04-10T11:06:25.104776Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=16507064522236117943, name="Sem1") 2025-04-10T11:06:25.104880Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=16507064522236117943) 2025-04-10T11:06:25.105185Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=444, session=1, semaphore="Sem1" count=1) 2025-04-10T11:06:25.105346Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-10T11:06:25.120642Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=444) 2025-04-10T11:06:25.121590Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=13959337602482713883, name="Sem1") 2025-04-10T11:06:25.121743Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=13959337602482713883) 2025-04-10T11:06:25.122425Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=4255656977438829573, name="Sem1") 2025-04-10T11:06:25.122526Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=4255656977438829573) 2025-04-10T11:06:25.146703Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:25.146856Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:25.147707Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:25.148509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:25.207225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:25.207513Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:06:25.207577Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-10T11:06:25.207611Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-10T11:06:25.208112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:204:2218], cookie=13696896669603414558, name="Sem1") 2025-04-10T11:06:25.208228Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:204:2218], cookie=13696896669603414558) 2025-04-10T11:06:25.208967Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=1486518856213638999, name="Sem1") 2025-04-10T11:06:25.209072Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=1486518856213638999) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:78:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TKesusTest::TestAcquireSemaphore [GOOD] >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-04-10T11:05:41.288904Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.412568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.412701Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.490981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=6060286113883784332, session=0, seqNo=0) 2025-04-10T11:05:41.510526Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532069Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=6060286113883784332, session=1) 2025-04-10T11:05:41.532364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=10637912800155273029, session=0, seqNo=0) 2025-04-10T11:05:41.532496Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:41.547929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=10637912800155273029, session=2) 2025-04-10T11:05:41.548797Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.548966Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.549098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.549305Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock2" count=1) 2025-04-10T11:05:41.549401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-10T11:05:41.549456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-10T11:05:41.549544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=333, session=1, semaphore="Lock2" count=1) 2025-04-10T11:05:41.549603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-10T11:05:41.561909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.562008Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-10T11:05:41.562045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=333) 2025-04-10T11:05:41.562734Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=12668325581142670468, name="Lock1") 2025-04-10T11:05:41.562856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=12668325581142670468) 2025-04-10T11:05:41.572481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=10887027935054157344, name="Lock2") 2025-04-10T11:05:41.572606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=10887027935054157344) 2025-04-10T11:05:41.790815Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.790903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.791410Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.792012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.830879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.831097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.831152Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-10T11:05:41.831195Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-10T11:05:41.831631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:191:2205], cookie=14632492514269340797, name="Lock1") 2025-04-10T11:05:41.831733Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:191:2205], cookie=14632492514269340797) 2025-04-10T11:05:41.832318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:199:2212], cookie=6520829089161405191, name="Lock2") 2025-04-10T11:05:41.832385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:199:2212], cookie=6520829089161405191) 
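Editor's note on the semaphore entries above: sessions attach, a semaphore is created (explicitly with a limit, or implicitly as an "ephemeral" one on first acquire), and each granted acquire gets a monotonically increasing order number ("next order #N session S"); acquires that do not fit under the limit wait in FIFO order. The following is a minimal in-memory sketch of those semantics as inferred from this log. All class and function names here are invented for illustration; this is not the Kesus tablet implementation, which persists every step through local-db transactions such as TTxSemaphoreAcquire.

// Illustrative model only: invented names, inferred semantics.
#include <cstdint>
#include <deque>
#include <iostream>
#include <map>
#include <string>
#include <utility>

struct TGrant {
    uint64_t Order = 0;
    uint64_t Session = 0;
    uint64_t Count = 0;
};

class TSemaphoreModel {
public:
    TSemaphoreModel(std::string name, uint64_t limit)
        : Name(std::move(name)), Limit(limit) {}

    // Mirrors TTxSemaphoreAcquire: queue the request, then grant in FIFO order.
    void Acquire(uint64_t session, uint64_t count) {
        Waiters.push_back(TGrant{0, session, count});
        Process();
    }

    // Mirrors the owner-link cleanup done on session death: drop the session's
    // grants and queued requests, then let the remaining waiters advance.
    void ReleaseSession(uint64_t session) {
        for (auto it = Owners.begin(); it != Owners.end();) {
            if (it->second.Session == session) {
                Used -= it->second.Count;
                it = Owners.erase(it);
            } else {
                ++it;
            }
        }
        std::deque<TGrant> kept;
        for (const TGrant& w : Waiters) {
            if (w.Session != session) kept.push_back(w);
        }
        Waiters.swap(kept);
        Process();
    }

private:
    // Prints the same shape as "Processing semaphore queue: next order #N session S".
    // Note: the order counter is per-semaphore here; in the log it appears to be
    // tablet-global, another simplification of this model.
    void Process() {
        while (!Waiters.empty() && Used + Waiters.front().Count <= Limit) {
            TGrant g = Waiters.front();
            Waiters.pop_front();
            g.Order = ++NextOrder;
            Used += g.Count;
            std::cout << "Processing semaphore \"" << Name << "\" queue: next order #"
                      << g.Order << " session " << g.Session << "\n";
            Owners.emplace(g.Order, g);
        }
    }

    std::string Name;
    uint64_t Limit = 0;
    uint64_t Used = 0;
    uint64_t NextOrder = 0;
    std::deque<TGrant> Waiters;
    std::map<uint64_t, TGrant> Owners;
};

int main() {
    // Analogous to Lock2 above: limit 1, one owner, one queued session that is
    // promoted once the owner's link is released.
    TSemaphoreModel lock2("Lock2", 1);
    lock2.Acquire(/*session=*/2, /*count=*/1);
    lock2.Acquire(/*session=*/1, /*count=*/1); // exceeds the limit, waits
    lock2.ReleaseSession(2);                   // queue advances to session 1
    return 0;
}

The "ephemeral" variant seen above (Lock1, Lock2) differs only in lifecycle: it is created implicitly by the first acquire and deleted once no owners or waiters remain, which this sketch omits.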
2025-04-10T11:05:42.327839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.346320Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:42.699048Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.712535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.067212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.081479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.448444Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.460487Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.827299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.843091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.198130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.211552Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.565212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.578677Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.942128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.960335Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.340913Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.354598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.786429Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.801757Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.165559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.182751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.547315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.559869Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.921695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.937642Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.317134Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.330364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.796811Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.812771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.194714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.212942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.578931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.601913Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.010100Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.038879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-10T11:05:49.444919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.458951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.853423Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.866007Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.224897Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.238091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.609151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.623199Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.984836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.997280Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.352832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.365493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.743177Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.756475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.126478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.146531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.502453Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.519049Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.880839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.895206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.248631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.261403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.628692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:365:2367], cookie=17399544819472978858, name="Lock1") 2025-04-10T11:05:53.628892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:365:2367], cookie=17399544819472978858) 2025-04-10T11:05:53.629989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:368:2370], cookie=1243804978194300273, name="Lock2") 2025-04-10T11:05:53.630192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:368:2370], cookie=1243804978194300273) 2025-04-10T11:05:53.674217Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.690178Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.078629Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.091313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.457780Z node 1 :KESUS_TABLET DEBUG: [7205 ... 
10T11:06:16.806967Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.238539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.264797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.701628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.722923Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.154487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.171105Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.586617Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.602349Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.053389Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.073426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.490967Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.511956Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.920003Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.941101Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.354588Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.371133Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.790496Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.803134Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:21.226554Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.239080Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:21.692242Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.720230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.146471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.160147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.566490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.586884Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.990797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.007253Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.437308Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.462006Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.858598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.876774Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.279042Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:24.297651Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.695201Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
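Editor's note ahead of the TTxSessionTimeout entries below: when a detached session outlives its timeout, the tablet deletes it and releases each semaphore owner link it held, which is what lets queued sessions advance (as in the "Deleting session 1 / semaphore 1 ... owner link" entries followed by "next order #3 session 2"). A toy deadline reaper under the same assumptions as the previous sketch (invented names, no persistence):

#include <chrono>
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>

using Clock = std::chrono::steady_clock;

struct TSessionReaper {
    // session id -> deadline after which the session is garbage-collected
    std::map<uint64_t, Clock::time_point> Deadlines;
    // releases the session's semaphore owner links (see the TSemaphoreModel
    // sketch earlier); supplied by the surrounding code
    std::function<void(uint64_t)> ReleaseLinks;

    void OnDetach(uint64_t session, std::chrono::milliseconds timeout) {
        Deadlines[session] = Clock::now() + timeout;
    }

    // A successful reattach cancels the pending timeout.
    void OnAttach(uint64_t session) {
        Deadlines.erase(session);
    }

    // Driven periodically, much like the repeated TTxSelfCheck ticks in this log.
    void Tick() {
        const auto now = Clock::now();
        for (auto it = Deadlines.begin(); it != Deadlines.end();) {
            if (it->second <= now) {
                std::cout << "Deleting session " << it->first << "\n";
                if (ReleaseLinks) ReleaseLinks(it->first);
                it = Deadlines.erase(it);
            } else {
                ++it;
            }
        }
    }
};

int main() {
    TSessionReaper reaper;
    reaper.ReleaseLinks = [](uint64_t session) {
        std::cout << "Deleting session " << session
                  << " / releasing its semaphore owner links\n";
    };
    reaper.OnDetach(/*session=*/1, std::chrono::milliseconds(0));
    reaper.Tick(); // deadline already reached, so session 1 is reaped
    return 0;
}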
2025-04-10T11:06:24.710846Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.118580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.132736Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.553426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:06:25.553573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:06:25.553637Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-10T11:06:25.554081Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-10T11:06:25.554190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-10T11:06:25.554239Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:06:25.575973Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:06:25.576977Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:329:2342], cookie=12880181697183870783, name="Lock1") 2025-04-10T11:06:25.577105Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:329:2342], cookie=12880181697183870783) 2025-04-10T11:06:25.577752Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=5350022889212363048, name="Lock2") 2025-04-10T11:06:25.577851Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=5350022889212363048) 2025-04-10T11:06:25.578447Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:335:2348], cookie=15266675177557179908) 2025-04-10T11:06:25.578530Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:335:2348], cookie=15266675177557179908) 2025-04-10T11:06:25.616798Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:25.616941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:25.617672Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:25.618497Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:25.675121Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:25.675353Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-10T11:06:25.675428Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-10T11:06:25.675863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:375:2378], cookie=15189176711103655955) 2025-04-10T11:06:25.675963Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:375:2378], cookie=15189176711103655955) 2025-04-10T11:06:25.676732Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:382:2384], cookie=17713983350969300256, name="Lock1") 2025-04-10T11:06:25.676825Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:382:2384], cookie=17713983350969300256) 2025-04-10T11:06:25.677592Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=3349789678271548736, name="Lock2") 2025-04-10T11:06:25.677669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=3349789678271548736) 2025-04-10T11:06:26.222891Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:26.223027Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:26.246110Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:26.246318Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:26.273081Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:26.273870Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8956462739446923005, session=0, seqNo=0) 2025-04-10T11:06:26.274088Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:06:26.287178Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8956462739446923005, session=1) 2025-04-10T11:06:26.287595Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=17362637176115294621, session=0, seqNo=0) 2025-04-10T11:06:26.287775Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:06:26.300519Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=17362637176115294621, session=2) 2025-04-10T11:06:26.300966Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-10T11:06:26.315333Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-10T11:06:26.316150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=13561024159790154280, name="Sem1", limit=1) 2025-04-10T11:06:26.316338Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:06:26.329634Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=13561024159790154280) 2025-04-10T11:06:26.330287Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-04-10T11:06:26.344118Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-10T11:06:26.344640Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=1, semaphore="Sem1" count=1) 2025-04-10T11:06:26.344861Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:06:26.345143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=2, semaphore="Sem1" count=1) 2025-04-10T11:06:26.365981Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-10T11:06:26.366109Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-10T11:06:26.366912Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=2050873966339714258, name="Sem1") 2025-04-10T11:06:26.367045Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=2050873966339714258) 2025-04-10T11:06:26.367623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2182], cookie=4964875292193530736, name="Sem1") 2025-04-10T11:06:26.367720Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2182], cookie=4964875292193530736) 2025-04-10T11:06:26.368291Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:161:2185], cookie=15495360558965813220, name="Sem1", force=0) 2025-04-10T11:06:26.382238Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:161:2185], cookie=15495360558965813220) 2025-04-10T11:06:26.383076Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2190], cookie=9985157467711437943, name="Sem1", force=1) 2025-04-10T11:06:26.383200Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-10T11:06:26.396214Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2190], cookie=9985157467711437943) >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DeleteOn+UseSink >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::LocksInRoTx |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |78.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut >> KqpExtractPredicateLookup::PointJoin [GOOD] >> KqpExtractPredicateLookup::SqlInJoin >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::JoinWithPrecompute >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 11638880921582236448 2025-04-10T11:06:29.891153Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.891582Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.891699Z 3 
00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.891822Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.891890Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.891959Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.892038Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.893321Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893463Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893519Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893574Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893630Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893695Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893750Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.893854Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.893929Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.893964Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.894050Z 2 
00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.894101Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.920099Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.920256Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:06:29.936313Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936452Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936518Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936654Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936730Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936784Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:06:29.936839Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] 2025-04-10 11:06:32,702 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:06:32,767 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid     rss     ref     pdirt
1125912 46.2M   46.2M   23.1M   test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/000cef/ydb/core/mind/bscontroller/ut/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args
Test command err:
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/000cef/ydb/core/mind/bscontroller/ut/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/000cef/ydb/core/mind/bscontroller/ut/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
2025-04-10 11:06:33,434 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps
2025-04-10 11:06:33,434 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores
>> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |78.8%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> KqpNewEngine::DqSourceLimit [GOOD] >> KqpNewEngine::DqSourceSequentialLimit |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |78.8%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 5368, MsgBus: 4154 2025-04-10T11:05:46.047584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639067330422119:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:46.056013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010f5/r3tmp/tmp45oIuM/pdisk_1.dat 2025-04-10T11:05:46.481696Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:05:46.490724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:46.490861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:46.517418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5368, node 1 2025-04-10T11:05:46.570170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:46.570199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:46.570228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:46.570378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4154 TClient is connected to server localhost:4154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:47.213671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:47.262741Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:49.895250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639080215324585:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:49.895396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.206803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:50.331121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639084510291987:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.331223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.331554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639084510291992:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.339536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:05:50.354874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639084510291994:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:05:50.448752Z node 1 :TX_PROXY ERROR: Actor# [1:7491639084510292045:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:50.642134Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639084510292088:2356], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-04-10T11:05:50.642410Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVlNTM4MGYtMmRmNTk4NTYtYjAzYmQ0N2EtOWQ3MWRlYTE=, ActorId: [1:7491639080215324568:2329], ActorState: ExecuteState, TraceId: 01jrfp1a8fbqfchbzzg5d7tp6g, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-10T11:05:50.668502Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639084510292098:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:05:50.668735Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVlNTM4MGYtMmRmNTk4NTYtYjAzYmQ0N2EtOWQ3MWRlYTE=, ActorId: [1:7491639080215324568:2329], ActorState: ExecuteState, TraceId: 01jrfp1a9vene5crvp9zs7vms0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 9546, MsgBus: 5836 2025-04-10T11:05:51.408024Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639090406224297:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:51.410258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010f5/r3tmp/tmp7S8yqX/pdisk_1.dat 2025-04-10T11:05:51.521867Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:51.560758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:51.560872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9546, node 2 2025-04-10T11:05:51.562028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:51.616937Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:51.616968Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:51.616975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:51.617107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5836 TClient is connected to server localhost:5836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:52.199082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.626404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639103291126773:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.626490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.642691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.728744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639103291126875:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.728839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.729209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639103291126880:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.733621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateRes ... e 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:18.983835Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31503, node 6 2025-04-10T11:06:19.254944Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:19.254970Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:19.254979Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:19.255148Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21324 TClient is connected to server localhost:21324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:20.627159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:23.217427Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639207835804704:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:23.217528Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:25.999728Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639237900576389:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.999857Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:26.021384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:06:26.151426Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639242195543835:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:26.151565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:26.151787Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639242195543840:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:26.157331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:06:26.171967Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639242195543842:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:06:26.239932Z node 6 :TX_PROXY ERROR: Actor# [6:7491639242195543895:2445] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:27.959783Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7491639246490511365:2391], TxId: 281474976710663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfp2dws7f62yy8pqnbxk3r8. SessionId : ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-04-10T11:06:27.960712Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7491639246490511366:2392], TxId: 281474976710663, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=. TraceId : 01jrfp2dws7f62yy8pqnbxk3r8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7491639246490511361:2335], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-10T11:06:27.961102Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7491639246490511367:2393], TxId: 281474976710663, task: 3. Ctx: { TraceId : 01jrfp2dws7f62yy8pqnbxk3r8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7491639246490511361:2335], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-10T11:06:27.961283Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7491639246490511368:2394], TxId: 281474976710663, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=. TraceId : 01jrfp2dws7f62yy8pqnbxk3r8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7491639246490511361:2335], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-10T11:06:27.961854Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2dws7f62yy8pqnbxk3r8, Create QueryResponse for error on request, msg: 2025-04-10T11:06:28.034750Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:06:28.075607Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639250785478691:2398], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-10T11:06:28.079830Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2et9d6aeecfmntcfe2qg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:28.130719Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639250785478711:2406], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-10T11:06:28.133301Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2ew2abpkv2nfbdsa30g5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:28.189392Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639250785478730:2414], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-10T11:06:28.192034Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2exmdr5n35e88nteyrjg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:28.807700Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:06:28.823623Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639250785478751:2422], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-10T11:06:28.826591Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2ezecw6n6hnc8ea9vh7g, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:29.721726Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:06:29.730972Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639250785478786:2433], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-10T11:06:29.733739Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2fkba2243pqm51z44v1a, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:30.711501Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:06:30.730257Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639255080446115:2445], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-10T11:06:30.734600Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTdiY2ZkYmItM2ZlZjMyNmYtZjNjMzdmZmUtYTEzYjAyNjY=, ActorId: [6:7491639237900576370:2335], ActorState: ExecuteState, TraceId: 01jrfp2gfyb6g8hwr8jrzsbvsq, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:06:19.821311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:06:19.821429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:06:19.821483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:06:19.821524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:06:19.821569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:06:19.821615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:06:19.821689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:06:19.821774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:06:19.822166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:06:19.911429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:06:19.911496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:19.930606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:06:19.931025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:06:19.931218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:06:19.943710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:06:19.944066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:06:19.944835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:06:19.945247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:06:19.947562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-04-10T11:06:19.948975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:06:19.949036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:06:19.949262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:06:19.949363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:06:19.949425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:06:19.949600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:06:19.957120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:06:20.175942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:06:20.176303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.176604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:06:20.178589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:06:20.178678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.195271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:06:20.195438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:06:20.195660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.195737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:06:20.195797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:06:20.195837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:06:20.205931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.206016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:06:20.206059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:06:20.215113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.215182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.215230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:06:20.215297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:06:20.225543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:06:20.232239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:06:20.232480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:06:20.233558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:06:20.233730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:06:20.233793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:06:20.234080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:06:20.234137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:06:20.234363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:06:20.234451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:06:20.248624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:06:20.248693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:06:20.248907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:06:20.248956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:06:20.249216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:06:20.249271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-04-10T11:06:20.249413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:06:20.249464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:06:20.249512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:06:20.249558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:06:20.249610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:06:20.249656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:06:20.249694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:06:20.249731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:06:20.249809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:06:20.249850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:06:20.249884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:06:20.256164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:06:20.256325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:06:20.256370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.250612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:06:34.250661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-10T11:06:34.250725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:06:34.252427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.252554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.252583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:06:34.252622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-10T11:06:34.252662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-10T11:06:34.252760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-10T11:06:34.260505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-10T11:06:34.260675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-10T11:06:34.271309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:06:34.271533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:06:34.271619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-10T11:06:34.271809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:06:34.271851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-10T11:06:34.271919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-04-10T11:06:34.282544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:06:34.292678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:06:34.300009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:06:34.300133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:06:34.300398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-10T11:06:34.300683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:06:34.300807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-04-10T11:06:34.311900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:06:34.312000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:06:34.312215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-10T11:06:34.312387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:06:34.312440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-10T11:06:34.312490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-10T11:06:34.312963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:06:34.313027Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-10T11:06:34.313073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-10T11:06:34.314411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.314534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.350319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:06:34.350432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-10T11:06:34.350481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:06:34.352124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.352261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-10T11:06:34.352310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-10T11:06:34.352370Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-10T11:06:34.352409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-10T11:06:34.352510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-10T11:06:34.379774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-10T11:06:34.379869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-10T11:06:34.379970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T11:06:34.380004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:06:34.380048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-10T11:06:34.380079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:06:34.380118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-10T11:06:34.380180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-10T11:06:34.380218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-10T11:06:34.380253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-10T11:06:34.380359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-10T11:06:34.381471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:06:34.381522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T11:06:34.381590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-10T11:06:34.390652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:06:34.390726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-10T11:06:34.390805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:06:34.391453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:06:34.391797Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-10T11:06:34.400233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:06:34.400417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-10T11:06:34.402015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-10T11:06:34.402071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-10T11:06:34.403960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-10T11:06:34.404156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-10T11:06:34.404202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2608:4599] TestWaitNotification: OK eventTxId 175 >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-04-10T11:05:41.288799Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.301242Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.412296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.412457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.488542Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510191Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=13740778086379941689, session=0, seqNo=0) 2025-04-10T11:05:41.510467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532015Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=13740778086379941689, session=1) 2025-04-10T11:05:41.542011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-10T11:05:41.542303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:41.542479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:41.557922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=111) 2025-04-10T11:05:41.558455Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:41.571632Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-04-10T11:05:41.572227Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=16791308717735936468, name="Lock1") 2025-04-10T11:05:41.572344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=16791308717735936468) 2025-04-10T11:05:42.066024Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:42.066493Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:42.085287Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:42.085467Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:42.126874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:42.128093Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=3760606993451158686, session=0, seqNo=0) 2025-04-10T11:05:42.128314Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:42.144827Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=3760606993451158686, session=1) 2025-04-10T11:05:42.145201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=12488698998472174983, session=0, seqNo=0) 2025-04-10T11:05:42.145407Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:05:42.161924Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=12488698998472174983, session=2) 2025-04-10T11:05:42.163362Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-10T11:05:42.163580Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-10T11:05:42.163694Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-10T11:05:42.176133Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-04-10T11:05:42.176517Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-10T11:05:42.176725Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-10T11:05:42.176816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-10T11:05:42.192711Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2025-04-10T11:05:42.193230Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.193525Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-10T11:05:42.208803Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2025-04-10T11:05:42.208905Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2025-04-10T11:05:42.209423Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-10T11:05:42.209810Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-10T11:05:42.222764Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333) 2025-04-10T11:05:42.222868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=334) 2025-04-10T11:05:42.637759Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.658235Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.038921Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.051475Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.414735Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.432978Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.780805Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.793829Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.171946Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.186240Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.575170Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.589836Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.951967Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.964501Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.346578Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.359763Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.712392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.728740Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.120932Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.139151Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.512134Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.530252Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.908855Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.921385Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.272798Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.287430Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.659194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.680131Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.137707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.158372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-04-10T11:05:48.553110Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.567422Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.956745Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.975353Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.351871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.380154Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.756825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.770884Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.157748Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.171356Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.550254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.564545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.922525Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.939104Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.312444Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.324764Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.701641Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.717495Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.086564Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.099336Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.469278Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.483129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.843355Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.859086Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.222505Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.235274Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.587354Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.605243Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.969076Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-04-10T11:05:53.969193Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-04-10T11:05:53.981883Z node 2 :KESUS_TABLET DEBUG: ... 
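
The long runs of TTxSelfCheck::Execute/Complete records above are the Kesus tablet's periodic self-check transactions; they tick away while a blocked acquire waits, until TTxSemaphoreTimeout fires and the pending waiter is dropped (the "Deleting session 2 / semaphore 2 "Lock2" waiter link" record). The bookkeeping these records trace is FIFO accounting over a counted semaphore: the log shows semaphores created ephemerally on first acquire, count=18446744073709551615 (2^64-1) acquires that saturate any limit and are therefore effectively exclusive, and count=1 acquires that behave as shared grabs. Below is a minimal self-contained model of that accounting — a sketch, not the tablet code — whose demo replays the Sem1 limit=3 scenario from the TestAcquireSemaphoreViaRelease block further down (session 1 holds 2 of 3; sessions 2 and 3 queue; one release grants both waiters in order).

#include <cstdint>
#include <deque>
#include <iostream>
#include <vector>

// Toy model of counted-semaphore accounting: a limit, current usage,
// owners, and a FIFO waiter queue re-processed on every release/timeout.
struct Waiter { uint64_t session; uint64_t count; };

struct Semaphore {
    uint64_t limit;
    uint64_t used = 0;
    std::deque<Waiter> queue;
    std::vector<Waiter> owners;

    void Acquire(uint64_t session, uint64_t count) {
        queue.push_back({session, count});
        Process();
    }
    void Release(uint64_t session) {
        for (auto it = owners.begin(); it != owners.end(); ++it)
            if (it->session == session) { used -= it->count; owners.erase(it); break; }
        Process();  // one release may admit several queued waiters
    }
    void TimeoutWaiter(uint64_t session) {
        // Corresponds to "Deleting session ... waiter link" in the log.
        for (auto it = queue.begin(); it != queue.end(); ++it)
            if (it->session == session) { queue.erase(it); break; }
    }
    void Process() {
        // Strict FIFO: the head blocks everyone behind it, matching the log,
        // where session 3 (count=1) waits behind session 2 (count=2).
        while (!queue.empty() && queue.front().count <= limit - used) {
            used += queue.front().count;
            owners.push_back(queue.front());
            std::cout << "next order: session " << queue.front().session << "\n";
            queue.pop_front();
        }
    }
};

int main() {
    Semaphore sem{3};   // Sem1, limit=3
    sem.Acquire(1, 2);  // granted: order #1 session 1
    sem.Acquire(2, 2);  // queued (only 1 of 3 free)
    sem.Acquire(3, 1);  // queued behind session 2 (FIFO)
    sem.Release(1);     // grants order #2 session 2, then order #3 session 3
}
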
2025-04-10T11:06:22.478976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.922593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.947119Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.346526Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.363119Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.749907Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.767238Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.174592Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:24.194933Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.589167Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:24.606036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.038758Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.063050Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.483787Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.497675Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.904726Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.927667Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:26.345831Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:26.371007Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:26.775982Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:26.805000Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:27.266751Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:27.281017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:27.669112Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:27.687240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:28.075111Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:28.097090Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:28.490540Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:28.511080Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:28.902071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:28.923352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:29.376737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:29.400132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:29.834739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:29.848616Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:30.266515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-10T11:06:30.280329Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:30.689599Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:30.708951Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:31.114563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:31.137410Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:31.587470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:31.600921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:32.026680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:32.043461Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:32.461323Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:32.478092Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:32.886753Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:32.913041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:33.331157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:33.346956Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:33.854673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-04-10T11:06:33.854819Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-10T11:06:33.878494Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-04-10T11:06:33.905797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:534:2530], cookie=17874396946046330675) 2025-04-10T11:06:33.905965Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:534:2530], cookie=17874396946046330675) 2025-04-10T11:06:33.906724Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=7142740976508786138) 2025-04-10T11:06:33.906824Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=7142740976508786138) 2025-04-10T11:06:33.907430Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:540:2536], cookie=2303201298161466554, name="Lock1") 2025-04-10T11:06:33.907561Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:540:2536], cookie=2303201298161466554) 2025-04-10T11:06:33.908224Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=9865398569372278291, name="Lock1") 2025-04-10T11:06:33.908310Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=9865398569372278291) 2025-04-10T11:06:34.696855Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:34.696995Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:34.723612Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:34.723792Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-04-10T11:06:34.751163Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:34.751819Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=14673420215524250872, session=0, seqNo=0) 2025-04-10T11:06:34.751997Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:06:34.764830Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=14673420215524250872, session=1) 2025-04-10T11:06:34.765388Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=3251067769904061248, session=0, seqNo=0) 2025-04-10T11:06:34.765560Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-10T11:06:34.786222Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=3251067769904061248, session=2) 2025-04-10T11:06:34.786672Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=5095407468446867159, session=0, seqNo=0) 2025-04-10T11:06:34.786827Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-10T11:06:34.803073Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=5095407468446867159, session=3) 2025-04-10T11:06:34.803929Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=17806223303663609465, name="Sem1", limit=3) 2025-04-10T11:06:34.804121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-10T11:06:34.818143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=17806223303663609465) 2025-04-10T11:06:34.818664Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-10T11:06:34.818857Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-10T11:06:34.819105Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=2) 2025-04-10T11:06:34.819347Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-10T11:06:34.833819Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-04-10T11:06:34.833933Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-04-10T11:06:34.833969Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-04-10T11:06:34.834797Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=6540726290816580805, name="Sem1") 2025-04-10T11:06:34.834926Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=6540726290816580805) 2025-04-10T11:06:34.835515Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=1990902921778110633, name="Sem1") 2025-04-10T11:06:34.835617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], 
cookie=1990902921778110633) 2025-04-10T11:06:34.836596Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=444, name="Sem1") 2025-04-10T11:06:34.836732Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-10T11:06:34.836810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-10T11:06:34.836881Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-10T11:06:34.863160Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=444) 2025-04-10T11:06:34.864084Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=14235300118934492863, name="Sem1") 2025-04-10T11:06:34.864207Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=14235300118934492863) 2025-04-10T11:06:34.864779Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=1806882906735710783, name="Sem1") 2025-04-10T11:06:34.864869Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=1806882906735710783) >> KqpRanges::CastKeyBounds [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey |78.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr >> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPg-useSink |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey |78.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.9%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 30594, MsgBus: 28229 2025-04-10T11:05:48.587905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639077385741694:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:48.587957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eac/r3tmp/tmpT6wR63/pdisk_1.dat 2025-04-10T11:05:49.400351Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:49.401456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:49.401546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:49.406334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30594, node 1 2025-04-10T11:05:49.650925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:49.650990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:49.651008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:49.651177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28229 TClient is connected to server localhost:28229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:50.225077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:52.228697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639094565611471:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.228849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.547054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.667986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639094565611576:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.668107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.668186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639094565611581:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.671908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.681210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639094565611583:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:05:52.750193Z node 1 :TX_PROXY ERROR: Actor# [1:7491639094565611634:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.889130Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639094565611677:2356], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-04-10T11:05:52.889564Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODMxNWY2Y2EtZWM4Y2E2ZWUtYzY2MzZjMjItODI2YjhlYWQ=, ActorId: [1:7491639094565611468:2329], ActorState: ExecuteState, TraceId: 01jrfp1cf54tzh2gdz7v1154qp, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-10T11:05:52.913093Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639094565611686:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:05:52.913319Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODMxNWY2Y2EtZWM4Y2E2ZWUtYzY2MzZjMjItODI2YjhlYWQ=, ActorId: [1:7491639094565611468:2329], ActorState: ExecuteState, TraceId: 01jrfp1cg1416ksgsvc5x1kn3d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 10758, MsgBus: 18185 2025-04-10T11:05:53.668567Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639098435161947:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:53.668684Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eac/r3tmp/tmpsVWCrM/pdisk_1.dat 2025-04-10T11:05:53.809758Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:53.828963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:53.829051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:53.830519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10758, node 2 2025-04-10T11:05:53.878268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:53.878316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:53.878326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:53.878491Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18185 TClient is connected to server localhost:18185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:54.321703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:57.221406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639115615031792:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:57.221497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:57.231723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.279034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639115615031892:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:57.279147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:57.279253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639115615031897:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:57.283559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:05:57.295392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... e: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:20.435454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639214965347670:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:20.435593Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:20.435957Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639214965347675:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:20.442218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:20.459257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639214965347677:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:20.554057Z node 5 :TX_PROXY ERROR: Actor# [5:7491639214965347733:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink","Stats":{"ComputeNodes":[{"Tasks":[{"EgressRows":3,"NodeId":5,"FinishTimeMs":1744283183375,"EgressBytes":39,"TaskId":1,"Host":"ghrun-x5vqckr7qy","ComputeTimeUs":444}],"PeakMemoryUsageBytes":196608,"CpuTimeUs":3540}],"UseLlvm":"undefined","MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[1,1048576]},"Tasks":1,"FinishedTasks":1,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}},"Name":"KqpTableSink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Push":{"Chunks":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}}}],"PhysicalStageId":0,"StageDurationUs":0,"EgressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"BaseTimeMs":1744283183374,"EgressBytes":{"Count":1,"Sum":39,"Max":39,"Min":39},"CpuTimeUs":{"Count":1,"Sum":2878,"Max":2878,"Min":2878,"History":[1,2878]}}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":453597,"CpuTimeUs":449306},"ProcessCpuTimeUs":576,"TotalDurationUs":494765,"ResourcePoolId":"default","QueuedTimeUs":852},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":2.878,"A-Cpu":2.878,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Delete"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 23412, MsgBus: 15035 2025-04-10T11:06:24.704260Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639232449512759:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:24.704451Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eac/r3tmp/tmpTyCosi/pdisk_1.dat 2025-04-10T11:06:25.064822Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:25.072967Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:25.073106Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:25.076599Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23412, node 6 
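
The two KQP_COMPILE_ACTOR failures earlier in this test's output (codes 2029 and 2031) are the non-null primary-key checks: /Root/TestUpsertNotNullPk declares Key as a non-optional Uint64 key column, so an upsert that omits Key is rejected at compile time with PRECONDITION_FAILED ("Missing key column in input: Key"), and an explicit NULL key fails conversion to Uint64 with GENERIC_ERROR. A minimal sketch reproducing both through the YDB C++ SDK — the endpoint, database, and in-tree include path are assumptions, error handling is omitted, and the table is presumed created as in the comment:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    // Illustrative connection; the test itself runs an in-process server.
    TDriver driver(TDriverConfig()
        .SetEndpoint("localhost:2136")
        .SetDatabase("/Root"));
    TTableClient client(driver);
    auto session = client.CreateSession().GetValueSync().GetSession();

    // Presumed schema, matching the errors in the log:
    //   CREATE TABLE TestUpsertNotNullPk (Key Uint64 NOT NULL, Value String, PRIMARY KEY (Key));

    // 1) Key column omitted -> PRECONDITION_FAILED,
    //    ":1:13: Error: Missing key column in input: Key ... code: 2029"
    auto r1 = session.ExecuteDataQuery(
        "UPSERT INTO TestUpsertNotNullPk (Value) VALUES (\"foo\");",
        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()).GetValueSync();
    Cerr << r1.GetIssues().ToString();

    // 2) Explicit NULL key -> GENERIC_ERROR,
    //    "Failed to convert 'Key': Null to Uint64 ... code: 2031"
    auto r2 = session.ExecuteDataQuery(
        "UPSERT INTO TestUpsertNotNullPk (Key, Value) VALUES (NULL, \"foo\");",
        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()).GetValueSync();
    Cerr << r2.GetIssues().ToString();

    driver.Stop(true);
}
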
2025-04-10T11:06:25.194997Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:25.195027Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:25.195040Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:25.195208Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15035 TClient is connected to server localhost:15035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:26.025255Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:26.032708Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:26.050792Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:26.154032Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:26.426898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:26.546281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:29.706360Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639232449512759:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:29.706468Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:30.329988Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639258219318319:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:30.330169Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:30.470112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.568210Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.685024Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.767335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.849535Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.943194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:31.082771Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639262514286138:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:31.082945Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:31.088497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639262514286143:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:31.095135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:31.119065Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639262514286145:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:31.210759Z node 6 :TX_PROXY ERROR: Actor# [6:7491639262514286205:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSqlIn::TupleLiteral [GOOD] >> KqpSqlIn::TupleSelect |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |79.1%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |79.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-04-10T11:05:41.287897Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:05:41.300435Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:05:41.435860Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:05:41.436043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:05:41.496108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:05:41.510671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=3089726684376881440, session=0, seqNo=0) 2025-04-10T11:05:41.510873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:05:41.532715Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=3089726684376881440, session=1) 2025-04-10T11:05:41.542257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:132:2158], cookie=15428337706184605093 2025-04-10T11:05:41.547739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:145:2169], cookie=11874163232913359194) 2025-04-10T11:05:41.547821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:145:2169], cookie=11874163232913359194) 2025-04-10T11:05:42.017513Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.039120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:42.413974Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.426912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:42.777679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:42.792472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.146598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.159828Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.524832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.538851Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:43.909049Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:43.926915Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.276938Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.292554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:44.672005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:44.691002Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.036250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.052658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.510621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.528608Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:45.925609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:45.943802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.320567Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.335528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:46.713454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:46.727325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.087890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.102308Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.555362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.572737Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:47.940950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:47.954300Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.333041Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.351805Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:48.813210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:48.829961Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.211865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.239101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:49.655061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:49.668545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.067360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.081199Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.432753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.445686Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:50.789971Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:50.804951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.181194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.197388Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.549271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.567136Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:51.946253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:51.959941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.315495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.329506Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:52.684992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:52.703406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.060633Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.073450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.463355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.476133Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:53.850557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:53.868763Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.243311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.256095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.597883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.612720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:54.963205Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:54.975927Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.356534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.371118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:55.756316Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:55.773009Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.132654Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.148420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.535599Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.551342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:56.918258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:56.933014Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.330926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.343894Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:57.695862Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:57.712585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.087695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.100439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.462549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.477714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:58.837032Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:58.851234Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.226844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.239609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.598857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.612330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:05:59.978601Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:05:59.995274Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:00.373098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:00.391981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:00.755168Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:00.770239Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:01.168174Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:01.185043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:01.539345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:01.557458Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:01.929584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:01.942880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:02.319604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:02.334292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:02.732541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:02.748541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TT ... 
k::Execute 2025-04-10T11:06:16.388875Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:16.814530Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:16.834864Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.251724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.273488Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:17.685340Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:17.705693Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.114456Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.132473Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:18.529162Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:18.548100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.034549Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.055047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.451231Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.471203Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:19.877944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:19.895012Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.274794Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.288277Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:20.702182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:20.723045Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:21.166504Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.183078Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:21.598577Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:21.614874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.030478Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.050878Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.470955Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.487758Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:22.896616Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:22.915512Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.375352Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.394380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:23.813849Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:23.834362Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.244660Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-10T11:06:24.263844Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:24.694549Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:24.714412Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.160308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.184029Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:25.659254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:25.675745Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:26.078557Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:26.098920Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:26.490554Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:26.510852Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:26.929319Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:26.951237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:27.358581Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:27.378987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:27.810573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:27.830825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:28.234661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:28.256366Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:28.624149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:28.647125Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:29.069776Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:29.099111Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:29.533047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:29.553346Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:29.978683Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:29.999106Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:30.386469Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:30.402063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:30.822708Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:30.835650Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:31.230002Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:31.251198Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:31.680535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:31.707363Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:32.282617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-10T11:06:32.306955Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:32.710439Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:32.725409Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:33.145588Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:33.159058Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:33.594105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:33.613824Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:34.019510Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:34.044645Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:34.490726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:34.511207Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:34.886767Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:34.904405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:35.334799Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:35.355155Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:35.771142Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:35.787136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:36.206628Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-10T11:06:36.227181Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-10T11:06:36.600392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-10T11:06:36.600470Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-10T11:06:36.618842Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-10T11:06:36.631546Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:571:2565], cookie=15803591838142554615) 2025-04-10T11:06:36.631728Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:571:2565], cookie=15803591838142554615) 2025-04-10T11:06:37.529319Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:37.529483Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:37.558028Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:37.558267Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:37.590621Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:37.591802Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=12345, session=0, seqNo=0) 2025-04-10T11:06:37.592011Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:06:37.606927Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=12345, session=1) 2025-04-10T11:06:37.607936Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:139:2163], 
cookie=23456, session=1, seqNo=0) 2025-04-10T11:06:37.623200Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:139:2163], cookie=23456, session=1) 2025-04-10T11:06:38.199648Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-10T11:06:38.199783Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-10T11:06:38.217882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-10T11:06:38.218073Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-10T11:06:38.247768Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-10T11:06:38.248948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2025-04-10T11:06:38.249152Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-10T11:06:38.265589Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2025-04-10T11:06:38.266734Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=23456, session=1, seqNo=0) 2025-04-10T11:06:38.281516Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=23456, session=1) >> KqpSqlIn::KeyTypeMissmatch_Str >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 786209847927457540 |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |79.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpNewEngine::DeleteOn-UseSink >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpRanges::ScanKeyPrefix |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |79.2%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |79.2%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-10T11:05:21.903358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:05:21.903649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:05:21.903880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8a/r3tmp/tmpsOlyPH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22344, node 1 TClient is connected to server localhost:15644 2025-04-10T11:05:22.616205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:05:22.685745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:22.693425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:22.693527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:22.693584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:22.694109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:05:22.744972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:22.745165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:22.758339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:22.890550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-10T11:05:23.015609Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:05:23.017410Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:05:23.017754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:05:23.044836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:05:23.045288Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-10T11:05:23.055665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:05:23.055986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:05:23.056339Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:05:23.056476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:05:23.056593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:05:23.056773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:05:23.056920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:05:23.057074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:05:23.057231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:05:23.057406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:05:23.057560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:05:23.057699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:05:23.081935Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:05:23.084042Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-10T11:05:23.084379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:05:23.084451Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:05:23.084664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:05:23.084843Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:05:23.084950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:05:23.085006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:05:23.085113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:05:23.085188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:05:23.085241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:05:23.085281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:05:23.085536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:05:23.085622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:05:23.085682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:05:23.085733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:05:23.085851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:05:23.085950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:05:23.086015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:05:23.086071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:05:23.090351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:05:23.090468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:05:23.090533Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:05:23.090609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:05:23.090687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:05:23.090740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:05:23.091352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=75; 2025-04-10T11:05:23.091488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-10T11:05:23.091610Z node 1 :TX_COLUMNSHARD INFO: tablet_id ... 45528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-10T11:05:56.974500Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1282:3083], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-04-10T11:05:56.975154Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1283:3084], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-10T11:05:56.975202Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-10T11:05:56.975469Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[29] (CS::GENERAL) apply at tablet 72075186224037888 2025-04-10T11:05:56.979680Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1 2025-04-10T11:05:56.979841Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911848;raw_bytes=96858239;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=16012912;raw_bytes=539964581;count=8;records=453277} inactive {blob_bytes=24530552;raw_bytes=822698724;count=16;records=696683} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:75/0:size=4378;count=18;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:75/0:size=4447;count=19;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-10T11:05:56.994959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-10T11:05:56.995074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=with_appended.cpp:65;portions=27,;task_id=c6223804-15fb11f0-bdac9333-33ad054e; 2025-04-10T11:05:56.995613Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:27;path_id:3;records_count:100082;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3527144;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-10T11:05:56.995964Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/491609.000000s;; 2025-04-10T11:05:56.996152Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::c6223804-15fb11f0-bdac9333-33ad054e; 2025-04-10T11:05:56.996337Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:18931160;portions_count:27;); 2025-04-10T11:05:56.996413Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:05:56.996508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:05:56.996630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-10T11:05:56.996729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-10T11:05:56.996790Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:05:56.996862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:05:56.996913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:05:56.997023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s; 2025-04-10T11:05:56.997087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:05:56.997395Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1 VERIFY failed (2025-04-10T11:05:56.997627Z): 
tablet_id=72075186224037888;task_id=c6223804-15fb11f0-bdac9333-33ad054e;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18D9C6A9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18D8A93B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A0A28D6) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48A2F961) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x3079BF8D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EAB4133) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E997B10) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E7DDF04) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E77AC85) NActors::IActor::Receive(TAutoPtr&)+237 (0x19FD40AD) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35AD47C5) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35ACD03A) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35AC9DE1) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36452850) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x1897A088) std::__y1::__function::__func, void ()>::operator()()+280 (0x1898C378) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192493C6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19218EF9) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1898B324) NUnitTest::TTestFactory::Execute()+2438 (0x1921A7C6) NUnitTest::RunMain(int, char**)+5213 (0x1924393D) ??+0 (0x7F9CAAE80D90) __libc_start_main+128 (0x7F9CAAE80E40) _start+41 (0x162EC029) >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> KqpSort::PassLimit [GOOD] >> KqpSort::OffsetPk |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |79.3%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 2025-04-10 11:06:27,198 WARNING 
devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:06:27,633 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1124653 46.6M 45.6M 23.6M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/00116c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk31/testing_out_stuff/test_tool.args 1124908 1.7G 1.7G 1.7G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2gt4/00116c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/ch Test command err: 2025-04-10T10:56:29.313414Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T10:56:29.643621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T10:56:29.680804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T10:56:29.681119Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T10:56:29.696202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T10:56:29.696466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T10:56:29.696752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T10:56:29.696900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T10:56:29.697066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T10:56:29.697226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T10:56:29.697345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T10:56:29.697481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T10:56:29.697592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T10:56:29.697738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-10T10:56:29.697882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T10:56:29.698002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T10:56:29.762698Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T10:56:29.763004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T10:56:29.763067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T10:56:29.763276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:29.763521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T10:56:29.763624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T10:56:29.763671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T10:56:29.763767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T10:56:29.763840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T10:56:29.763881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T10:56:29.763920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T10:56:29.764113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T10:56:29.764200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T10:56:29.764248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T10:56:29.764278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T10:56:29.764380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T10:56:29.764463Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T10:56:29.764530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T10:56:29.764565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T10:56:29.764664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T10:56:29.764705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T10:56:29.764742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T10:56:29.764793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T10:56:29.764838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T10:56:29.764878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T10:56:29.765402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=89; 2025-04-10T10:56:29.765545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-10T10:56:29.765634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-10T10:56:29.765719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-10T10:56:29.765913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T10:56:29.765994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T10:56:29.766036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T10:56:29.766274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T10:56:29.766328Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T10:56:29.766361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T10:56:29.766554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T10:56:29.766597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T10:56:29.766631Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T10:56:29.766831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T10:56:29.766876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T10:56:29.766912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T10:56:29.767051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T10:56:29.767105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T10:56:29.767155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
s=0-2855;object_exists=1; 2025-04-10T11:06:27.334095Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:117:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.334217Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:88:3024:0];range=bytes=0-3023;object_exists=1; 2025-04-10T11:06:27.334301Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:115:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.334401Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:101:2912:0];range=bytes=0-2911;object_exists=1; 2025-04-10T11:06:27.334505Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:83:2592:0];range=bytes=0-2591;object_exists=1; 2025-04-10T11:06:27.334587Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:142:2864:0];range=bytes=0-2863;object_exists=1; 2025-04-10T11:06:27.334687Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:99:2928:0];range=bytes=0-2927;object_exists=1; 2025-04-10T11:06:27.334791Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:89:2968:0];range=bytes=0-2967;object_exists=1; 2025-04-10T11:06:27.334879Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:104:2896:0];range=bytes=0-2895;object_exists=1; 2025-04-10T11:06:27.334975Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:94:3008:0];range=bytes=0-3007;object_exists=1; 2025-04-10T11:06:27.335058Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:103:2904:0];range=bytes=0-2903;object_exists=1; 2025-04-10T11:06:27.335151Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:84:2712:0];range=bytes=0-2711;object_exists=1; 2025-04-10T11:06:27.335240Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:161:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.335339Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:150:2800:0];range=bytes=0-2799;object_exists=1; 2025-04-10T11:06:27.335440Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:154:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-10T11:06:27.335551Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:159:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.335659Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:151:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-10T11:06:27.335767Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:98:2976:0];range=bytes=0-2975;object_exists=1; 2025-04-10T11:06:27.335872Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:118:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.335975Z node 1 :S3_WRAPPER DEBUG: 
external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:85:2760:0];range=bytes=0-2759;object_exists=1; 2025-04-10T11:06:27.336085Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:129:2872:0];range=bytes=0-2871;object_exists=1; 2025-04-10T11:06:27.336203Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:90:3016:0];range=bytes=0-3015;object_exists=1; 2025-04-10T11:06:27.336310Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:122:9640:0];range=bytes=0-9639;object_exists=1; 2025-04-10T11:06:27.336419Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:149:2800:0];range=bytes=0-2799;object_exists=1; 2025-04-10T11:06:27.336505Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:112:2816:0];range=bytes=0-2815;object_exists=1; 2025-04-10T11:06:27.336602Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:93:3008:0];range=bytes=0-3007;object_exists=1; 2025-04-10T11:06:27.336705Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:108:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-10T11:06:27.336789Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:145:2856:0];range=bytes=0-2855;object_exists=1; 2025-04-10T11:06:27.336889Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:116:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-10T11:06:27.336996Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:158:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-10T11:06:27.337094Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:107:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-10T11:06:27.337206Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:120:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-10T11:06:27.337293Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:91:3016:0];range=bytes=0-3015;object_exists=1; 2025-04-10T11:06:27.337391Z node 1 :S3_WRAPPER DEBUG: external_task_id=;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:4:255:143:2864:0];range=bytes=0-2863;object_exists=1; 2025-04-10T11:06:27.337557Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-10T11:06:27.341292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:06:27.341530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-10T11:06:27.341602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:06:27.341662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:06:27.344262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:06:27.344441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-10T11:06:27.344493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:06:27.344543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:06:27.435651Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-10T11:06:27.440640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=8; 2025-04-10T11:06:27.441169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=9; 2025-04-10T11:06:27.446135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:06:27.446260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-10T11:06:27.446333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-10T11:06:27.446412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=1272;memory=10905841;count=1; 2025-04-10T11:06:27.456454Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:14934:16920];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/00116c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk31/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/00116c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk31/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] >> KqpNotNullColumns::AlterAddIndex [GOOD] >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |79.7%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpNewEngine::PkSelect1 |79.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage >> KqpRanges::IsNullPartial >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::LiteralKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 18751, MsgBus: 9118 2025-04-10T11:05:46.971847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639068654135257:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:46.972009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010e1/r3tmp/tmp90NB0m/pdisk_1.dat 2025-04-10T11:05:47.574105Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:47.582255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:47.582377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:47.584534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18751, node 1 2025-04-10T11:05:47.698760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:47.698798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:47.698811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:47.698948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9118 TClient is connected to server localhost:9118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:48.539197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.574040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.945953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.158132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:05:49.242570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:05:50.962042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639085834006199:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.962175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.316472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.350534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.379117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.406445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.436606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.489176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.561213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090128974011:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.561312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.561573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090128974016:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.566798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:51.580668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639090128974018:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:51.650826Z node 1 :TX_PROXY ERROR: Actor# [1:7491639090128974071:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.009900Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639068654135257:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.010013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4198, MsgBus: 11238 2025-04-10T11:05:53.706883Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639099109255630:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:53.706956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010e1/r3tmp/tmpNNTmHu/pdisk_1.dat 2025-04-10T11:05:53.832951Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:53.862139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:53.862270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:53.864477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4198, node 2 2025-04-10T11:05:53.924972Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:53.924991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:53.925003Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:53.925100Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11238 TClient is connected to server localhost:11238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:54.410189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.417088Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:05:54.428692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.503608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.662311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:54.736864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchem ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.424430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.494103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.588625Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.668676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.788644Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.935427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639238158974795:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.935529Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.935869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639238158974800:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.941557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:25.981499Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639238158974802:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:26.073972Z node 5 :TX_PROXY ERROR: Actor# [5:7491639242453942157:3471] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:27.827448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26203, MsgBus: 10094 2025-04-10T11:06:29.774696Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639252000796065:2078];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010e1/r3tmp/tmpeY3e05/pdisk_1.dat 2025-04-10T11:06:30.070339Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:06:30.371970Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:30.442374Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:30.442500Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:30.451821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26203, node 6 2025-04-10T11:06:30.714555Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:30.714581Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:30.714594Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:30.714766Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10094 TClient is connected to server localhost:10094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:33.354125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:33.382570Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:33.902508Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:34.582019Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:34.776767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:06:34.778659Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639252000796065:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:34.778718Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-04-10T11:06:38.270332Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639290655503476:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.270453Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.349805Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.399008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.465687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.519379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.608076Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.812308Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.915345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639290655504011:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.915502Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.919324Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639290655504016:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.929675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:38.974056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639290655504018:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:39.034551Z node 6 :TX_PROXY ERROR: Actor# [6:7491639294950471370:3471] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:40.676291Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:40.928153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:06:41.021971Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LookupColumns |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |80.0%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |80.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpNewEngine::PrunePartitionsByLiteral >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortResults |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> 
KeyValueReadStorage::ReadRangeNoData |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |80.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-04-10T11:06:47.385577Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:06:47.388808Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-10T11:06:47.399663Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:06:47.399763Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-10T11:06:47.409430Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-04-10T11:06:47.409497Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-04-10T11:06:47.409560Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24810, MsgBus: 24781 2025-04-10T11:05:47.078607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639072993705489:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.078683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010cf/r3tmp/tmpyiNrai/pdisk_1.dat 2025-04-10T11:05:47.552945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:47.556202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:47.556340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:47.561848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24810, node 1 2025-04-10T11:05:47.694638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:47.694665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:47.694673Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:05:47.694816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24781 TClient is connected to server localhost:24781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:48.612329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.663321Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:48.675434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.073356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.289173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.410954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.081027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090173576367:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.081154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.441798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.484711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.520643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.567157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.603280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.644032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.697140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090173576877:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.697255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.697524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090173576882:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.701654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:51.714202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639090173576884:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:51.806734Z node 1 :TX_PROXY ERROR: Actor# [1:7491639090173576937:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.076268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639072993705489:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.076368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:52.881710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.094718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.261621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.391536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.665719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7575, MsgBus: 23022 2025-04-10T11:05:54.974676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639101777075983:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:54.974769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010cf/r3tmp/tmpvvMZXA/pdisk_1.dat 2025-04-10T11:05:55.075169Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7575, node 2 2025-04-10T11:05:55.116191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:55.116304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:55.120508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:55.154771Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:55.154804Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:55.154813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:55.154949Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23022 TClient is connected to 
server localhost:23022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRoot ... 4:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:27.604256Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:27.604781Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639246558238720:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:27.615177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:27.672409Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639246558238722:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:27.743641Z node 5 :TX_PROXY ERROR: Actor# [5:7491639246558238779:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:30.010792Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.494924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:06:30.813205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:06:31.094358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:06:32.124349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27120, MsgBus: 16866 2025-04-10T11:06:37.046751Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639288830763699:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:37.047875Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010cf/r3tmp/tmpT6W2vW/pdisk_1.dat 2025-04-10T11:06:37.552801Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:37.584751Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:37.584861Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:37.588163Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27120, node 6 2025-04-10T11:06:37.672150Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:37.672180Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:37.672199Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:37.672372Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16866 TClient is connected to server localhost:16866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:06:38.522393Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.547289Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:38.640278Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:38.942115Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:39.083344Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:42.046988Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639288830763699:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:42.047115Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:42.378420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639310305601946:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.378601Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.447129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.504075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.601214Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.672364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.780140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.830628Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.915470Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639310305602459:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.915604Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.915860Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639310305602464:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.921121Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:42.935238Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639310305602466:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:43.024723Z node 6 :TX_PROXY ERROR: Actor# [6:7491639314600569819:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":6,"FinishTimeMs":1744283205154,"TaskId":1,"Host":"ghrun-x5vqckr7qy","ComputeTimeUs":168}],"CpuTimeUs":1051}],"UseLlvm":"undefined","Tasks":1,"FinishedTasks":0,"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1744283205154,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":1051,"Max":1051,"Min":1051}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":459509,"CpuTimeUs":439299},"ProcessCpuTimeUs":2517,"TotalDurationUs":479826,"ResourcePoolId":"default","QueuedTimeUs":3345},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":1.051,"A-Cpu":1.051,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpNewEngine::DeleteOn-UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin+UseSink >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::ValidatePredicates |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |81.1%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::InWithCast >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::OrderedScalarContext >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpRanges::ScanKeyPrefix [GOOD] >> KqpNotNullColumns::FailedMultiEffects [GOOD] >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |81.5%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.6%| [LD] 
{RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql >> ColumnShardTiers::DSConfigs [GOOD] >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 27988, MsgBus: 18533 2025-04-10T11:06:06.675873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639156698288489:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:06.678705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008de/r3tmp/tmpr2g3wL/pdisk_1.dat 2025-04-10T11:06:07.270424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:07.287493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:07.287630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:07.290901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27988, node 1 2025-04-10T11:06:07.414884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:07.414909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:07.414921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:07.415043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18533 TClient is connected to server localhost:18533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:08.137278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
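Note on the recurring KQP_WORKLOAD_SERVICE warnings and the TX_PROXY "path exist, request accepts it" message above: this is the expected bootstrap race for the default workload-manager pool. The first query finds no pool, TPoolCreatorActor creates it (the ESchemeOpCreateResourcePool suboperation in the schemeshard warnings), and any concurrent creator then sees the already-existing path at /Root/.metadata/workload_manager/pools/default and accepts it. As a rough YQL sketch of what gets created, with the WITH options assumed from current YDB resource-pool DDL rather than taken from this log:

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- assumption: -1 means no concurrency cap
        QUEUE_SIZE = -1               -- assumption: unbounded queue
    );

Once the pool path exists, the scheduled retry succeeds and the warnings stop, which is why the tests that log this pattern still finish [GOOD].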
2025-04-10T11:06:08.153768Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:10.605858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639173878158185:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:10.605979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.227816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:06:11.386064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639178173125586:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.386129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.386744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639178173125591:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.393879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:06:11.412295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639178173125593:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:06:11.482748Z node 1 :TX_PROXY ERROR: Actor# [1:7491639178173125644:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:11.646259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639156698288489:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:11.646390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:11.787661Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639178173125692:2358], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-04-10T11:06:11.789271Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzc5NDk1ZS01YjRhMzhhNi04NWJkODE5NS0yN2RhZWYxZA==, ActorId: [1:7491639173878158182:2329], ActorState: ExecuteState, TraceId: 01jrfp1ywjdkfppq47b8gwsszc, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-10T11:06:11.835744Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639178173125701:2362], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:06:11.837172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzc5NDk1ZS01YjRhMzhhNi04NWJkODE5NS0yN2RhZWYxZA==, ActorId: [1:7491639173878158182:2329], ActorState: ExecuteState, TraceId: 01jrfp1yyz5wpc1m8265f24a95, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 20381, MsgBus: 20325 2025-04-10T11:06:13.331280Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639183753773168:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:13.331331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008de/r3tmp/tmpk6meWf/pdisk_1.dat 2025-04-10T11:06:13.759364Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:13.795636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:13.795725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:13.798436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20381, node 2 2025-04-10T11:06:14.007003Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:14.007040Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:14.007056Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:14.007212Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20325 TClient is connected to server localhost:20325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:15.107286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:18.081760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639205228610311:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:18.081884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:18.112045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:06:18.242575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639205228610413:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:18.242677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:18.242959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [ ... Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:38.675234Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:38.686333Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:42.463861Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639289024659057:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:42.463973Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:42.560193Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639310499496210:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.560360Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.588687Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:06:42.659915Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639310499496312:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.660075Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.660433Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639310499496317:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.667674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:06:42.705090Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639310499496319:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:06:42.799188Z node 5 :TX_PROXY ERROR: Actor# [5:7491639310499496370:2400] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:43.242459Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7491639314794463734:2364], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-10T11:06:43.243703Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YzNlMGI3M2QtNGVjMjExMS1hZDdiMzcxMi0xMDIzYTVmOA==, ActorId: [5:7491639310499496183:2330], ActorState: ExecuteState, TraceId: 01jrfp2xmn9z83atgk76fk0ecv, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:06:43.521404Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:06:43.534777Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7491639314794463743:2368], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-10T11:06:43.538244Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YzNlMGI3M2QtNGVjMjExMS1hZDdiMzcxMi0xMDIzYTVmOA==, ActorId: [5:7491639310499496183:2330], ActorState: ExecuteState, TraceId: 01jrfp2xnk8hjwb1r08sm3abre, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 24013, MsgBus: 10242 2025-04-10T11:06:44.559631Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639317799054022:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:44.560786Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008de/r3tmp/tmpATaafm/pdisk_1.dat 2025-04-10T11:06:44.685602Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:44.706475Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.706570Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.715730Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24013, node 6 2025-04-10T11:06:44.894825Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.894847Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.894863Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.895007Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10242 TClient is connected to server localhost:10242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:45.847468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
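The compile and execution errors above cover the three constraint violations this suite exercises: code 2029 (primary key column missing from the input), code 2032 (a NOT NULL column not initialized), and code 2031 (an explicit NULL written into a NOT NULL column, caught either at type annotation or by TKqpEnsure at execution). A minimal YQL sketch expected to hit each code in turn, assuming a table shaped like the test's TestInsertNotNullPk (column types here are illustrative, not read from the test source):

    CREATE TABLE TestInsertNotNullPk (
        Key Uint64 NOT NULL,
        Value String NOT NULL,
        PRIMARY KEY (Key)
    );

    UPSERT INTO TestInsertNotNullPk (Value) VALUES ("a");           -- 2029: key column Key missing from input
    UPSERT INTO TestInsertNotNullPk (Key) VALUES (1);               -- 2032: NOT NULL column Value not initialized
    UPSERT INTO TestInsertNotNullPk (Key, Value) VALUES (1, NULL);  -- 2031: NULL cannot convert to non-optional String

The "Failed to convert type: Struct<...> to Struct<...>" lines earlier in the output are the same code-2031 family surfacing at type annotation, when the literal row struct (with a Null member) cannot be converted to the table's scheme types.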
2025-04-10T11:06:45.868500Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:49.277580Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639339273891008:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.277697Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.319783Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.438837Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639339273891111:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.438982Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.439326Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639339273891116:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.445896Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:06:49.474573Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639339273891118:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:06:49.520941Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639317799054022:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.521034Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:49.540996Z node 6 :TX_PROXY ERROR: Actor# [6:7491639339273891169:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:49.744419Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7491639339273891215:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:06:49.744693Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Y2ZhMjRmYWUtNGIxZTFjZjMtMTZlNWRhZjgtYjk5MGRhOGI=, ActorId: [6:7491639339273890995:2328], ActorState: ExecuteState, TraceId: 01jrfp33yscnhd1nqc3me4198f, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:06:49.787302Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |81.7%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |81.8%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 2330, MsgBus: 16663 2025-04-10T11:05:47.787666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639072732421511:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.787720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ab/r3tmp/tmpM6ECZK/pdisk_1.dat 2025-04-10T11:05:48.481248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.481420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.484333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:48.491093Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2330, node 1 2025-04-10T11:05:48.666951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.666974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.666989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.667180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16663 TClient is connected to server localhost:16663 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:49.593742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.611097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:49.627115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.802513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:49.979396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:05:50.056884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.753053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639089912292330:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.753177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.148485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.183362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.223203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.293679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.323427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.398949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.451180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639094207260142:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.451284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.451593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639094207260147:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.456474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.469628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639094207260149:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.531977Z node 1 :TX_PROXY ERROR: Actor# [1:7491639094207260203:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.787703Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639072732421511:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.788061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2950, MsgBus: 17198 2025-04-10T11:05:54.820424Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639104901995166:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:54.820519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ab/r3tmp/tmpfDHxV5/pdisk_1.dat 2025-04-10T11:05:54.918484Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2950, node 2 2025-04-10T11:05:54.954795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:54.954897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:54.956709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:54.988921Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:54.988953Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:54.988961Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:54.989080Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17198 TClient is connected to server localhost:17198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:55.472487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.492496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.563074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.754284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.840268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateT ... se itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:33.456001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:33.565240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:33.627970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:33.727472Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639270343790931:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:33.727608Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:33.727916Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639270343790936:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:33.734137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:33.759654Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639270343790938:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:33.837959Z node 5 :TX_PROXY ERROR: Actor# [5:7491639270343790995:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:35.705413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:36.209667Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:06:36.576467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:06:36.913179Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.648923Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 9887, MsgBus: 10358 2025-04-10T11:06:40.102663Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639300952312067:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:40.102910Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ab/r3tmp/tmp9rQPu2/pdisk_1.dat 2025-04-10T11:06:40.555957Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:40.579804Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:40.579939Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:40.587588Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9887, node 6 2025-04-10T11:06:40.830944Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:40.830976Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:40.830991Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:40.831178Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10358 TClient is connected to server localhost:10358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:42.124005Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:42.170092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:42.320527Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
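The code-1108 warning above flags legacy IN semantics: when the collection on the right of IN is empty or contains nullable items, YQL's default behavior predates ANSI three-valued logic, so the compiler suggests opting in via the pragma it names. A sketch of the query shape involved, with the suggested pragma applied; table and column names are assumptions for illustration, and only the pragma itself is taken verbatim from the warning (without it, the same SELECT would reproduce warning 1108):

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key
    FROM SomeTable
    WHERE Value IN ("a", "b", NULL);  -- with ANSI semantics, a NULL item makes non-matches compare as NULL rather than FALSE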
2025-04-10T11:06:42.650750Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:42.789116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.106475Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639300952312067:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:45.106576Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:46.195763Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639326722117604:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.195884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.320676Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.395014Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.450937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.512062Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.605798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.695075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.820705Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639326722118126:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.820805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.820897Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639326722118131:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.825965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:46.844400Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639326722118133:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:46.919610Z node 6 :TX_PROXY ERROR: Actor# [6:7491639326722118188:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::PatternCache 2025-04-10 11:06:36,794 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:06:37,539 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1127118 46.2M 45.1M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001599/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/chunk46/testing_out_stuff/test_tool.args 1127654 3.1G 3.1G 3.0G └─ ydb-core-kqp-ut-service --trace-path-append /home/runner/.ya/build/build_root/2gt4/001599/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/chunk46/ytest. Test command err: Trying to start YDB, gRPC: 5748, MsgBus: 18051 2025-04-10T10:56:39.430791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491636721517139815:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:39.431340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001599/r3tmp/tmpwOxMZD/pdisk_1.dat 2025-04-10T10:56:40.084252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T10:56:40.084373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T10:56:40.091559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T10:56:40.094513Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5748, node 1 2025-04-10T10:56:40.201247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T10:56:40.201277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T10:56:40.201284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T10:56:40.201408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18051 TClient is connected to server localhost:18051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T10:56:40.908915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:40.923985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T10:56:40.930513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.116556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.316812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:41.390400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T10:56:43.600592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636738697010641:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.600736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:43.893316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T10:56:43.976925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.014040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.091445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.164323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.212267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T10:56:44.311559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636742991978465:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.311641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.312002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491636742991978470:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T10:56:44.319593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T10:56:44.337083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491636742991978472:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T10:56:44.415833Z node 1 :TX_PROXY ERROR: Actor# [1:7491636742991978527:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T10:56:44.428031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491636721517139815:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T10:56:44.428117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T10:56:45.499758Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmVkM2M5MzgtZjFjZDNjMi1iYjRmMjliZi1lOGM1NDBkYQ==, ActorId: [1:7491636738697010614:2404], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T10:56:45.499817Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmVkM2M5MzgtZjFjZDNjMi1iYjRmMjliZi1lOGM1NDBkYQ==, ActorId: [1:7491636738697010614:2404], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T10:56:45.499850Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmVkM2M5MzgtZjFjZDNjMi1iYjRmMjliZi1lOGM1NDBkYQ==, ActorId: [1:7491636738697010614:2404], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T10:56:45.499874Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmVkM2M5MzgtZjFjZDNjMi1iYjRmMjliZi1lOGM1NDBkYQ==, ActorId: [1:7491636738697010614:2404], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T10:56:45.499983Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmVkM2M5MzgtZjFjZDNjMi1iYjRmMjliZi1lOGM1NDBkYQ==, ActorId: [1:7491636738697010614:2404], ActorState: unknown state, Session actor destroyed 2025-04-10T10:56:45.507572Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDFhOGIwMWMtMmVmZDc4ZGYtYTQ1ZjAwOWEtNDRkNThlMjI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDFhOGIwMWMtMmVmZDc4ZGYtYTQ1ZjAwOWEtNDRkNThlMjI= 2025-04-10T10:56:45.507806Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDFhOGIwMWMtMmVmZDc4ZGYtYTQ1ZjAwOWEtNDRkNThlMjI=, ActorId: [1:7491636747286946122:2499], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:56:45.521679Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjJlNzE1YzMtNGNmZmYyMjQtZTg0NDU5ZDEtNzhkYTM0MmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjJlNzE1YzMtNGNmZmYyMjQtZTg0NDU5ZDEtNzhkYTM0MmM= 2025-04-10T10:56:45.521909Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjJlNzE1YzMtNGNmZmYyMjQtZTg0NDU5ZDEtNzhkYTM0MmM=, ActorId: [1:7491636747286946124:2501], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:56:45.536501Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjhkZjc4MTMtYWNkMzU5NjYtNzg0ODBiNDctODg1MGUxYTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjhkZjc4MTMtYWNkMzU5NjYtNzg0ODBiNDctODg1MGUxYTA= 2025-04-10T10:56:45.537194Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjhkZjc4MTMtYWNkMzU5NjYtNzg0ODBiNDctODg1MGUxYTA=, ActorId: 
[1:7491636747286946126:2503], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:56:45.547945Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjAxZjc4NDctMWE4NWQxOWYtYTE0MzExYjMtNGFlZjQzYmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjAxZjc4NDctMWE4NWQxOWYtYTE0MzExYjMtNGFlZjQzYmM= 2025-04-10T10:56:45.548531Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjAxZjc4NDctMWE4NWQxOWYtYTE0MzExYjMtNGFlZjQzYmM=, ActorId: [1:7491636747286946128:2505], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:56:45.563670Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODVkZDk0NWItZmE4OWIzMDQtNzBkOWZhOS04Njc2Y2QyNw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODVkZDk0NWItZmE4OWIzMDQtNzBkOWZhOS04Njc2Y2QyNw== 2025-04-10T10:56:45.563883Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODVkZDk0NWItZmE4OWIzMDQtNzBkOWZhOS04Njc2Y2QyNw==, ActorId: [1:7491636747286946130:2507], ActorState: unknown state, session actor bootstrapped 2025-04-10T10:56:45.574301Z node 1 :KQP_SE ... LOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491638876326306736:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.562822Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:01.565934Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-10T11:05:01.579397Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306679:2332] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.579620Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306680:2333] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.579903Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306678:2331] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.580075Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306677:2330] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.580271Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306676:2329] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.580501Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306722:2345] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.580656Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306723:2346] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.580809Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306737:2355] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.584968Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306754:2365] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: 
EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:05:01.604694Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306731:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.604787Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306672:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.604857Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306716:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.604928Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306673:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.604997Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306717:2390], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.605064Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306674:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.605126Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306675:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.605344Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306749:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.605426Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306670:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.605486Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491638876326306671:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:05:01.669288Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306828:2424] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.686256Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306833:2428] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.720782Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306842:2435] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.721129Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306849:2439] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.725935Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306852:2442] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.726281Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306851:2441] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.726470Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306850:2440] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.726662Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306853:2443] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.726849Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306865:2448] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:01.727036Z node 7 :TX_PROXY ERROR: Actor# [7:7491638876326306866:2449] txid# 281474976710677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: 
path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:10.792380Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:05:10.792408Z node 7 :IMPORT WARN: Table profiles were not loaded Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001599/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/chunk46/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001599/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/chunk46/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr |82.0%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |82.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2025-04-10T11:04:45.012424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:04:45.012652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:04:45.012875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f98/r3tmp/tmp0OzsR6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8315, node 1 TClient is connected to server localhost:65502 2025-04-10T11:04:45.953351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:04:45.998280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:46.003973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:46.004062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:46.004102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:46.004588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:04:46.047835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:46.047993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:46.060513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-10T11:04:58.143537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:58.143728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:58.147074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-10T11:04:58.343802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:873:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:58.343968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:58.344386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:878:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:04:58.350138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:04:58.487282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:880:2719], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:04:58.807540Z node 1 :TX_PROXY ERROR: Actor# [1:977:2787] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:04:59.426561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:04:59.965157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-10T11:05:00.650167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:05:01.393617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:05:02.044478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:05:03.380252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:05:03.732923Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 SeqNoGeneration: 2 SeqNoRound: 1 ScanSettings { } row version v13000/281474976710758 2025-04-10T11:05:03.734370Z node 1 :TX_DATASHARD NOTICE: FinishTBuildIndexScan: datashard: 72075186224037889, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-04-10T11:05:03.737751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:20.476229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-10T11:05:21.050010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:05:21.050117Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:23.015302Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-10T11:05:23.015422Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-10T11:05:23.015496Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-10T11:05:23.015616Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-10T11:05:23.015945Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-10T11:05:23.016303Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-10T11:05:23.016370Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-10T11:05:23.016438Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-10T11:05:23.016514Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:05:23.018645Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-10T11:05:23.020208Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-10T11:05:23.020386Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-10T11:05:23.020576Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-10T11:05:23.020697Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:05:23.020778Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION=" ... :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:06:26.147677Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-10T11:06:26.147725Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-10T11:06:37.924439Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:06:37.924571Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:06:37.924616Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:06:37.924915Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:06:37.925033Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:06:37.925103Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-10T11:06:37.925191Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-10T11:06:37.925279Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-10T11:06:37.925354Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-10T11:06:37.925485Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-10T11:06:37.925585Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:37.925644Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:06:37.925684Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-10T11:06:37.925727Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-10T11:06:37.925768Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 
72075186224037893 2025-04-10T11:06:37.925798Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-10T11:06:37.925840Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-10T11:06:37.925888Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:37.925929Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:06:37.925970Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-10T11:06:37.926005Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-10T11:06:37.926045Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-10T11:06:37.926079Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-10T11:06:37.926121Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-10T11:06:37.926192Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:37.926314Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:06:37.926454Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:06:37.926491Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-10T11:06:37.926529Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:06:37.926567Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-10T11:06:37.926600Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:37.926644Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:06:37.926688Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:37.926965Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3116:4393];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:06:37.927089Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3121:4396];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:06:37.927175Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3124:4399];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:06:37.927249Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:06:37.927289Z node 1 :TX_TIERING INFO: 
fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-10T11:06:37.927336Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:06:37.927377Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-10T11:06:37.927406Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:37.927452Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:06:37.927497Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-10T11:06:49.405479Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.405572Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.405611Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.405687Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.405820Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.405970Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.406025Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:06:49.406088Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:49.406196Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.406259Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.406288Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:06:49.406346Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:49.406386Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.406481Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:49.406551Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.406592Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-10T11:06:49.406625Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-10T11:06:49.406676Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.406711Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.406737Z 
node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-10T11:06:49.406765Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-10T11:06:49.406804Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.406837Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.406864Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-10T11:06:49.406892Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-10T11:06:49.406929Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.407024Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:49.407054Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:06:49.407088Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:49.407128Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:49.407268Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3116:4393];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:06:49.407358Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3121:4396];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:06:49.407411Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3124:4399];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> KqpNewEngine::BlindWrite |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] >> KqpNewEngine::LiteralKeys [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> KqpNewEngine::LookupColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 19082, MsgBus: 10924 2025-04-10T11:05:47.435325Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639073061548406:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.440669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010bc/r3tmp/tmp1mt0ub/pdisk_1.dat 2025-04-10T11:05:48.004024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:48.024530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.024687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.031144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19082, node 1 2025-04-10T11:05:48.150943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.150971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.151002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.151162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10924 TClient is connected to server localhost:10924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:48.910706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.940271Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:51.021836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090241418248:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.022018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.309586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.437489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090241418350:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.437605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.437894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639090241418355:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.443523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:05:51.454761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639090241418357:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:05:51.522660Z node 1 :TX_PROXY ERROR: Actor# [1:7491639090241418410:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.433361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639073061548406:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.444105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:52.958193Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 30842, MsgBus: 10044 2025-04-10T11:05:53.572429Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639101361305577:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:53.572493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010bc/r3tmp/tmpy33QfS/pdisk_1.dat 2025-04-10T11:05:53.682130Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30842, node 2 2025-04-10T11:05:53.706449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:53.706552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:53.708578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:53.754336Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:53.754362Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:53.754369Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:53.754478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10044 TClient is connected to server localhost:10044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
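The WorkloadService warnings above trace a benign bootstrap race: several sessions race to create the default resource pool, the scheme shard answers the losers with "path exists but creating right now" or "path exist, request accepts it", and each TPoolCreatorActor schedules a retry until the pool is actually visible ("Transaction ... completed, doublechecking"). A minimal Python sketch of that idempotent create-then-verify loop — illustrative names only (ensure_pool, AlreadyExists are hypothetical), not YDB's actual actor code:

import time

class AlreadyExists(Exception):
    """Raised by create() when a concurrent writer won the race (hypothetical)."""

def ensure_pool(create, fetch, retries=10, backoff=0.1):
    """Idempotent bootstrap: treat 'already exists' as success,
    then poll until the object is actually visible (the 'doublechecking' step)."""
    try:
        create()                  # e.g. propose CREATE RESOURCE POOL default
    except AlreadyExists:
        pass                      # a concurrent session created it first; fine
    for attempt in range(retries):
        pool = fetch()            # e.g. look up .metadata/workload_manager/pools/default
        if pool is not None:
            return pool
        time.sleep(backoff * (attempt + 1))   # back off between scheduled retries
    raise TimeoutError("pool did not become visible after create")

Each losing actor in the log follows the same shape: swallow the "exists" error, then re-fetch on a retry schedule instead of failing the query, which is why these WARN/ERROR lines are expected noise during test startup.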
2025-04-10T11:05:54.188615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.205221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.278344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.457891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.532162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:56.918070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639114246209231:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:56.918207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:56.982576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.043776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.080505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.152218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.187143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:57.225704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T ... you don't have access permissions } 2025-04-10T11:06:39.080948Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.179383Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.279687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.373398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.453998Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.568052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.775190Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.890559Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639296539473775:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.890696Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.891013Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639296539473780:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.897401Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:39.917182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639296539473782:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:39.995293Z node 6 :TX_PROXY ERROR: Actor# [6:7491639296539473849:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:41.714592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28181, MsgBus: 6964 2025-04-10T11:06:43.721047Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639314962733575:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:43.721137Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010bc/r3tmp/tmpKs8uhE/pdisk_1.dat 2025-04-10T11:06:43.878698Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:43.909729Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:43.909863Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:43.911786Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28181, node 7 2025-04-10T11:06:44.032773Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.032803Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.032821Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.032978Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6964 TClient is connected to server localhost:6964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:44.923588Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:44.934999Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:44.953404Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.050399Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.331587Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.496314Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.724966Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639314962733575:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:48.725077Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:48.976524Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639336437571852:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.976854Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.002918Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.073743Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.142879Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.239526Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.306277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.409798Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.530469Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639340732539671:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.530631Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.531085Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639340732539676:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.542838Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:49.635632Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639340732539678:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:49.705967Z node 7 :TX_PROXY ERROR: Actor# [7:7491639340732539737:3470] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:51.135906Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 3388, MsgBus: 1255 2025-04-10T11:05:47.782744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639075026721883:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.782815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010b4/r3tmp/tmprpbZwm/pdisk_1.dat 2025-04-10T11:05:48.484341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.484454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.484583Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:48.495676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3388, node 1 2025-04-10T11:05:48.627248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.627285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.627296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.627426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1255 TClient is connected to server localhost:1255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:49.746233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.778005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.919132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.081261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.167552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.956365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639092206592846:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.956541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.298369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.329021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.398080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.433482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.466872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.499207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.548886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639096501560657:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.548978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.549101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639096501560662:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.552949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.563746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639096501560664:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.628044Z node 1 :TX_PROXY ERROR: Actor# [1:7491639096501560717:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.782727Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639075026721883:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.782785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:53.551990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.810953Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 14430, MsgBus: 15792 2025-04-10T11:05:54.578106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639103171999349:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:54.578180Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010b4/r3tmp/tmpq5bDcC/pdisk_1.dat 2025-04-10T11:05:54.685974Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14430, node 2 2025-04-10T11:05:54.722530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:54.722622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:54.724071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:54.762742Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:54.762772Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:54.762783Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:54.762957Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15792 TClient is connected to server localhost:15792 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:55.242910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.284799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.373832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.577038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... 59514Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:37.452593Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639287771282884:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:37.452746Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:37.527205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.596730Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.650387Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.712061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.815111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.934121Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.060920Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639292066250705:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.061047Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.061488Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639292066250710:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.066582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:38.081646Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639292066250712:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:38.177049Z node 6 :TX_PROXY ERROR: Actor# [6:7491639292066250768:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21385, MsgBus: 27525 2025-04-10T11:06:44.545235Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639320272054600:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:44.545399Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010b4/r3tmp/tmpCfngi1/pdisk_1.dat 2025-04-10T11:06:44.802501Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:44.834189Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.834327Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.836057Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21385, node 7 2025-04-10T11:06:44.936226Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.936258Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.936269Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.936449Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27525 TClient is connected to server localhost:27525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:45.947486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:45.954819Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:45.967158Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.110137Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.496530Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.643891Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:49.545639Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639320272054600:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.545749Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:49.975812Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639341746892848:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.975936Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.019998Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.101265Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.157501Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.227110Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.314127Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.397483Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.473190Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639346041860665:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.473315Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.473656Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639346041860670:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.479869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:50.500752Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639346041860672:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:50.588850Z node 7 :TX_PROXY ERROR: Actor# [7:7491639346041860729:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |82.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |82.2%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 65231, MsgBus: 5921 2025-04-10T11:05:47.280499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639074248350483:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.280962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010c6/r3tmp/tmpFTTIoW/pdisk_1.dat 2025-04-10T11:05:47.734074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:47.734267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:47.738405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:47.781489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65231, node 1 2025-04-10T11:05:47.978358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:47.978393Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:47.978401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:47.979806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5921 TClient is connected to server localhost:5921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:48.968962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:49.015381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:49.188056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.389990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.509008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.311400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639091428221256:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.311512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.647669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.684129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.717566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.747330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.783212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.862083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.953841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639091428221775:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.953972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.956418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639091428221780:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.960531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:51.972145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639091428221782:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.055124Z node 1 :TX_PROXY ERROR: Actor# [1:7491639095723189132:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.320642Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639074248350483:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.332641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28925, MsgBus: 2276 2025-04-10T11:05:53.921014Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639100879379774:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:53.921074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010c6/r3tmp/tmpvLD7j6/pdisk_1.dat 2025-04-10T11:05:54.078498Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:54.084436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:54.084534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:54.086366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28925, node 2 2025-04-10T11:05:54.132509Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:54.132534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:54.132542Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:54.132678Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2276 TClient is connected to server localhost:2276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:54.638078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:54.663923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.728602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.893198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.960662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:57.390971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPo ... 54501Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:37.708387Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639287315983006:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:37.708493Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:37.787510Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:37.908848Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.089694Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.164093Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.259779Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.347042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:38.458278Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639291610950834:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.458429Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.462497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639291610950839:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:38.472098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:38.494505Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639291610950841:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:38.590305Z node 6 :TX_PROXY ERROR: Actor# [6:7491639291610950900:3472] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61114, MsgBus: 62981 2025-04-10T11:06:44.106423Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639316463415829:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:44.106509Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010c6/r3tmp/tmpxGWunT/pdisk_1.dat 2025-04-10T11:06:44.305461Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:44.328522Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.328667Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.330377Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61114, node 7 2025-04-10T11:06:44.442919Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.442950Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.442964Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.443146Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62981 TClient is connected to server localhost:62981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:45.409271Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:45.427491Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:45.440893Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.558254Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.926689Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.062891Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:49.110354Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639316463415829:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.110466Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:49.927593Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639337938254118:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.927720Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.007616Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.092198Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.139869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.227775Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.286225Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.347177Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.418555Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639342233221933:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.418686Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.418972Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639342233221939:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.423070Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:50.525999Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639342233221941:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:50.614402Z node 7 :TX_PROXY ERROR: Actor# [7:7491639342233222005:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNewEngine::DqSourceLocksEffects [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |82.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests >> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink >> KqpMergeCn::SortBy_Int32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD] Test command err: Trying to start YDB, gRPC: 9130, MsgBus: 17684 2025-04-10T11:05:47.156763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639072315547819:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.158399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010d8/r3tmp/tmpQq0neW/pdisk_1.dat 2025-04-10T11:05:47.611682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:47.619519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:47.619638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9130, node 1 2025-04-10T11:05:47.625157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:47.681919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:47.681952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:47.681967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:47.682131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17684 TClient is connected to server localhost:17684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:48.420070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.461432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.612234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:48.921278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.011854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.808586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639085200451464:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.808730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.206420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.238857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.311307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.343493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.382825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.418124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:51.482345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639089495419279:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.482447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.482821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639089495419284:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.487144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:51.505054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639089495419286:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:51.573462Z node 1 :TX_PROXY ERROR: Actor# [1:7491639089495419340:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.155162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639072315547819:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.155234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16788, MsgBus: 21634 2025-04-10T11:05:53.570549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639098789891641:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:53.570630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010d8/r3tmp/tmpwyBzMD/pdisk_1.dat 2025-04-10T11:05:53.709051Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:53.730791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:53.730882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:53.732550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16788, node 2 2025-04-10T11:05:53.814751Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:53.814783Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:53.814793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:53.814945Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21634 TClient is connected to server localhost:21634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:54.301071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.319320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.399314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.555678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.642217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:57.276727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... 5.884275Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:39.414069Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639296759394315:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.417300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:39.452519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.528079Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.594993Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.675767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.800177Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:39.949187Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:40.082468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639301054362136:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:40.082625Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:40.089054Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639301054362141:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:40.100351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:40.131884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639301054362143:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:40.199313Z node 6 :TX_PROXY ERROR: Actor# [6:7491639301054362207:3482] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31524, MsgBus: 3300 2025-04-10T11:06:44.634217Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639319545509165:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:44.640533Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010d8/r3tmp/tmpWsbKDb/pdisk_1.dat 2025-04-10T11:06:44.841971Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:44.847708Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.847837Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.849509Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31524, node 7 2025-04-10T11:06:44.916010Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.916041Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.916052Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.916224Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3300 TClient is connected to server localhost:3300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:45.915287Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.938499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:46.046496Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.345513Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.456789Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:49.626468Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639319545509165:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.626580Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:50.607804Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639345315314738:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.607923Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.693643Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.786443Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.855138Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.942496Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:51.004003Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:51.057958Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:51.158683Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639349610282563:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:51.158867Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:51.159113Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639349610282569:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:51.170395Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:51.190211Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:06:51.192612Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639349610282571:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:51.260797Z node 7 :TX_PROXY ERROR: Actor# [7:7491639349610282625:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |82.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |82.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |82.3%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect1 >> KqpReturning::ReturningWorks+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 6784, MsgBus: 16501 2025-04-10T11:05:47.970290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639072354907087:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.970802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001098/r3tmp/tmpFJvAKO/pdisk_1.dat 2025-04-10T11:05:48.646439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.646559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.646840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:48.651194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6784, node 1 2025-04-10T11:05:48.882727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.882744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.882754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.882867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16501 TClient is connected to server localhost:16501 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:49.667615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.687135Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:49.704051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.871676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.048929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.122311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.814127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639089534777901:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.814250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.164247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.198353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.234493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.270744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.308254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.349040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.397821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639093829745710:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.397901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.397957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639093829745715:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.402280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.418069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639093829745717:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.498213Z node 1 :TX_PROXY ERROR: Actor# [1:7491639093829745770:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.960099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639072354907087:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.960169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26688, MsgBus: 6075 2025-04-10T11:05:54.578469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639102666538697:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:54.578534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001098/r3tmp/tmpmFm3Mt/pdisk_1.dat 2025-04-10T11:05:54.717391Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26688, node 2 2025-04-10T11:05:54.746009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:54.746093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:54.757947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:54.793058Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:54.793088Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:54.793096Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:54.793243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6075 TClient is connected to server localhost:6075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:55.257226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.277339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:55.351492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:05:55.556375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:55.672681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... t, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:42.900471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:42.919569Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639310832716029:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:42.994189Z node 6 :TX_PROXY ERROR: Actor# [6:7491639310832716087:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15855, MsgBus: 16349 2025-04-10T11:06:46.295841Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639328546333953:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:46.295911Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001098/r3tmp/tmpRtXRfS/pdisk_1.dat 2025-04-10T11:06:46.522583Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:46.548019Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:46.548191Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:46.550378Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15855, node 7 2025-04-10T11:06:46.666591Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:46.666618Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:46.666631Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:46.666827Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16349 TClient is connected to server localhost:16349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:47.572016Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:47.592409Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:47.613818Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.817452Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.154376Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.329080Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.298410Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639328546333953:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:51.298510Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:52.129186Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639354316139430:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.129310Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.163498Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.254921Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.319379Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.365392Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.452163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.540963Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.659421Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639354316139959:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.659590Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.659676Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639354316139964:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.665133Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:52.680119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639354316139966:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:52.754030Z node 7 :TX_PROXY ERROR: Actor# [7:7491639354316140022:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:54.681542Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-10T11:06:54.681882Z node 7 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:06:54.682137Z node 7 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:06:54.682525Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491639362906075060:2503], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [7:7491639362906074932:2503]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[7:7491639362906075060:2503].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:06:54.683291Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491639362906075053:2503], SessionActorId: [7:7491639362906074932:2503], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7491639362906074932:2503]. isRollback=0 2025-04-10T11:06:54.683710Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OTA1NDZjZmMtZDJjMGFiOTgtMTFiMWViOWEtMjAzMDUwMDc=, ActorId: [7:7491639362906074932:2503], ActorState: ExecuteState, TraceId: 01jrfp38qzf3aajjvxt5pz7zhd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7491639362906075054:2503] from: [7:7491639362906075053:2503] 2025-04-10T11:06:54.683856Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7491639362906075054:2503] TxId: 281474976710673. Ctx: { TraceId: 01jrfp38qzf3aajjvxt5pz7zhd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTA1NDZjZmMtZDJjMGFiOTgtMTFiMWViOWEtMjAzMDUwMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:06:54.684158Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OTA1NDZjZmMtZDJjMGFiOTgtMTFiMWViOWEtMjAzMDUwMDc=, ActorId: [7:7491639362906074932:2503], ActorState: ExecuteState, TraceId: 01jrfp38qzf3aajjvxt5pz7zhd, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 29411, MsgBus: 30033 2025-04-10T11:05:46.253739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639069354540812:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:46.253795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ec/r3tmp/tmpj3BFw3/pdisk_1.dat 2025-04-10T11:05:46.646660Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29411, node 1 2025-04-10T11:05:46.689533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:46.689641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:46.693976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:46.763431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:46.763458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:46.763470Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:46.763653Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30033 TClient is connected to server localhost:30033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:47.413611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.857350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639082239443351:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:49.857498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:50.116734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:05:50.266592Z node 1 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 12406, MsgBus: 14572 2025-04-10T11:05:51.051638Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639091771054906:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:51.051734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ec/r3tmp/tmpN99qcR/pdisk_1.dat 2025-04-10T11:05:51.173101Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:51.212772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:51.212866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:51.214585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12406, node 2 2025-04-10T11:05:51.273719Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:51.273741Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:51.273748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:51.273836Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14572 TClient is connected to server localhost:14572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:51.722832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.741062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:05:51.834108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:52.026137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:52.117231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:54.295416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639104655958565:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.295516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.390754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.424461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.462824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.497855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.535362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.570767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:54.617485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639104655959075:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.617575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.617680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639104655959080:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:54.623177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:54.634554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639104655959082:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:05:54.702746Z node 2 :TX_PROXY ERROR: Actor# [2:7491639104655959136:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:55.854595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:56.051737Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639091771054906:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:56.051855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base ... posed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.145862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.233694Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.312572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.434117Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.579942Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:41.691109Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639306067247578:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:41.691261Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:41.691636Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639306067247583:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:41.698077Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:41.734462Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639306067247585:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:41.801943Z node 6 :TX_PROXY ERROR: Actor# [6:7491639306067247642:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:43.380383Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:44.505207Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283204517, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 20200, MsgBus: 25422 2025-04-10T11:06:46.031548Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639327951947260:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:46.031616Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ec/r3tmp/tmpQNy5e2/pdisk_1.dat 2025-04-10T11:06:46.343092Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:46.343233Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:46.346835Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20200, node 7 2025-04-10T11:06:46.362739Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:06:46.372786Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:46.372947Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:06:46.542971Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:46.543000Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:46.543013Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:46.543205Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25422 TClient is connected to server localhost:25422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:47.603669Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.631408Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.798670Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.126027Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.328711Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.031850Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639327951947260:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:51.031961Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:52.222526Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639353721752764:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.222645Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.305262Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.380401Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.442287Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.506685Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.575042Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.628838Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.704024Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639353721753280:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.704188Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.704624Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639353721753285:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.710278Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:52.729138Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639353721753287:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:52.813704Z node 7 :TX_PROXY ERROR: Actor# [7:7491639353721753345:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:54.486423Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.595779Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283215626, txId: 281474976710673] shutting down >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> KqpSqlIn::InWithCast [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |82.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common >> KqpNotNullColumns::ReplaceNotNull >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> KqpSort::TopSortResults [GOOD] >> KqpSort::TopParameterFilter >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::FullScanCount >> KqpSqlIn::SecondaryIndex_PgKey >> TKeyValueTest::TestWriteReadPatchRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 17240, MsgBus: 19545 2025-04-10T11:05:47.954397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639074449369528:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.956021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010a1/r3tmp/tmpTfmIyp/pdisk_1.dat 2025-04-10T11:05:48.534520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.534640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.538251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:48.546371Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17240, node 1 2025-04-10T11:05:48.830284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.830308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.830318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.830420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19545 TClient is connected to server localhost:19545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:49.741348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.765483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:05:49.770384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:05:49.912965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:05:50.086537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.165021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:51.752287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639091629240339:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:51.752419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.106366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.146369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.220275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.257445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.289476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.360203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.413299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639095924208154:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.413382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.413499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639095924208159:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.416998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.428573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639095924208161:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.492802Z node 1 :TX_PROXY ERROR: Actor# [1:7491639095924208215:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.945208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639074449369528:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.945302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:53.287508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.350392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.387589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
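Warning 1108 above is actionable: under YQL's legacy IN semantics, an IN over nullable or empty operands may not follow standard SQL three-valued logic, which is why the compiler suggests the pragma named in the message. A minimal sketch of the suggested fix follows; the table path and column names are hypothetical illustrations, not taken from this log:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT Key, Value
FROM `/Root/Test`           -- hypothetical table with a nullable Value column
WHERE Value IN (1, 2, 3);   -- with the pragma, IN uses ANSI semantics and warning 1108 is not emitted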
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 32402, MsgBus: 13640 2025-04-10T11:05:58.351427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639120476311685:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:58.351545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010a1/r3tmp/tmpinhF8K/pdisk_1.dat 2025-04-10T11:05:58.497160Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:58.524419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:58.524491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:58.526254Z node 2 :HIVE WARN: HIVE#72057594 ... ccess permissions } 2025-04-10T11:06:45.240534Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:45.294083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.357614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.403669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.482385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.595105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.678844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:45.787484Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639324484924432:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:45.787606Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:45.788064Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639324484924437:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:45.793972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:45.821146Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639324484924439:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:45.914250Z node 5 :TX_PROXY ERROR: Actor# [5:7491639324484924496:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
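The NOT_FOUND / "path exist, request accepts it" sequence above appears to be the workload manager bootstrapping its default resource pool: TPoolFetcherActor fails to find the pool, TPoolCreatorActor creates it (retrying with "doublechecking" after the transaction completes), and a concurrent creator then hits the already-existing path, which the idempotent create accepts. For reference, a pool can also be created explicitly in YQL; the sketch below is illustrative only, with a hypothetical pool name and limits, and the available settings depend on the YDB version:

CREATE RESOURCE POOL my_pool WITH (  -- hypothetical pool name
    CONCURRENT_QUERY_LIMIT = 10,     -- illustrative limits, not taken from this log
    QUEUE_SIZE = 100
);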
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 1252, MsgBus: 6440 2025-04-10T11:06:49.562609Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639340585631974:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.562747Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010a1/r3tmp/tmp5epYQ0/pdisk_1.dat 2025-04-10T11:06:49.729444Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:49.765395Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:49.765505Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:49.770909Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1252, node 6 2025-04-10T11:06:49.921155Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:49.921181Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:49.921191Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:49.921393Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6440 TClient is connected to server localhost:6440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:50.852097Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:50.867666Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:50.879900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:50.977988Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.265571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.384332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:54.566255Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639340585631974:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:54.566371Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:54.686394Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639362060470173:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:54.686520Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:54.753890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:54.843202Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:54.907513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:54.959990Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.046102Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.129299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.250455Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639366355437994:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.250590Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.251277Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639366355437999:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.256536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:55.268990Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639366355438001:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:55.364013Z node 6 :TX_PROXY ERROR: Actor# [6:7491639366355438056:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::LiteralOrCompisite |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |82.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> KqpSort::OffsetTopSort [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |82.5%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 28725, MsgBus: 19574 2025-04-10T11:06:02.734646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639139071543165:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:02.737563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00093b/r3tmp/tmpRTHsp1/pdisk_1.dat 2025-04-10T11:06:03.465917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:03.484509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:03.484616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:03.498308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28725, 
node 1 2025-04-10T11:06:03.587399Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:03.587423Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:03.587431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:03.588429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19574 TClient is connected to server localhost:19574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:04.301866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.355187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.513215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.819080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.919045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:06.980237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639156251413906:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:06.980385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.319179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.367121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.438754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.473542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.524068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.569199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.660424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639160546381718:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.660548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.660618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639160546381723:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.664755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:07.678124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639160546381725:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:07.731614Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639139071543165:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:07.731687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:07.759385Z node 1 :TX_PROXY ERROR: Actor# [1:7491639160546381780:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10538, MsgBus: 9505 2025-04-10T11:06:11.285012Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639175147904107:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:11.285085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00093b/r3tmp/tmpdtL2lv/pdisk_1.dat 2025-04-10T11:06:11.561207Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:11.562560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:11.562658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:11.604380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10538, node 2 2025-04-10T11:06:11.778024Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:11.778050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:11.778061Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:11.778222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9505 TClient is connected to server localhost:9505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:12.718406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.738280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.851562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:13.198808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:13.308843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:16.224011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [ ... [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639327691460453:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.808111Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:46.867845Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.888580Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639306216622316:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:46.888689Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:46.925078Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.982650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.038578Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.085339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.173176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.315458Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639331986428275:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:47.315600Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:47.315673Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639331986428280:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:47.323922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:47.345438Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639331986428282:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:47.442565Z node 5 :TX_PROXY ERROR: Actor# [5:7491639331986428338:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9226, MsgBus: 63864 2025-04-10T11:06:51.556499Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639349478570357:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:51.556589Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00093b/r3tmp/tmph7akIk/pdisk_1.dat 2025-04-10T11:06:51.787538Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:51.825879Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:51.826005Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:51.828922Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9226, node 6 2025-04-10T11:06:51.932748Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:51.932777Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:51.932790Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:51.932980Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63864 TClient is connected to server localhost:63864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:06:52.742439Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:52.779425Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:52.869646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:06:53.194996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:53.330675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:56.556800Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639349478570357:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:56.556920Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:56.881391Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639370953408612:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:56.881528Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:56.955402Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.000966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.046773Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.107193Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.153453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.276421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.333825Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639375248376425:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.333917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.333928Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639375248376430:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.338657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:57.350648Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639375248376432:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:57.447855Z node 6 :TX_PROXY ERROR: Actor# [6:7491639375248376487:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteON >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter >> KqpReturning::ReturningWorks+QueryService [GOOD] >> KqpReturning::ReturningWorks-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 14270, MsgBus: 32615 2025-04-10T11:05:47.867739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639072240564981:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:47.867798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f96/r3tmp/tmpPx14z5/pdisk_1.dat 2025-04-10T11:05:48.443285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:48.443398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:48.445662Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:48.451451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14270, node 1 2025-04-10T11:05:48.600421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:48.600456Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:48.600468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:48.600643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32615 TClient is connected to server localhost:32615 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:49.609714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.637425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:49.826810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.025550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:50.101693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:52.005413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639093715403244:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.005537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.431693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.467114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.511911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.543255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.577735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.619188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:05:52.682808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639093715403757:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.682894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.683180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639093715403762:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:52.687385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:05:52.699274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639093715403764:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:05:52.770479Z node 1 :TX_PROXY ERROR: Actor# [1:7491639093715403817:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:52.868379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639072240564981:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:52.868470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:05:53.796550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.924797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:05:53.968421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
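The sequence that repeats throughout this output — TPoolFetcherActor reporting NOT_FOUND for pool "default", TPoolCreatorActor scheduling a retry on "Transaction ... completed, doublechecking", and TX_PROXY then logging "path exist, request accepts it" at severity 1 — is the workload service lazily bootstrapping /Root/.metadata/workload_manager/pools/default on first use; the final "path exist" message is a benign race between concurrent creators rather than a test failure. For comparison, a minimal sketch of creating a pool explicitly, assuming YDB's CREATE RESOURCE POOL syntax (the pool name and both settings below are illustrative, not values from this run):

    -- Assumed YQL syntax for YDB workload-manager resource pools;
    -- the pool name and both settings are hypothetical examples.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- cap on queries executing at once
        QUEUE_SIZE = 100             -- how many queries may wait beyond the cap
    );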
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 9170, MsgBus: 17029 2025-04-10T11:05:57.591834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639116441478125:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:57.591952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f96/r3tmp/tmpEmoQs3/pdisk_1.dat 2025-04-10T11:05:57.776175Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:57.806366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:57.806478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:57.807988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9170, node 2 2025-04-10T11:05:57.869301Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:57.869329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:57.869338Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:57.869484Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17029 TClient is connected to server localhost:17029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:45.058644Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:45.095029Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639321268863533:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:45.194893Z node 5 :TX_PROXY ERROR: Actor# [5:7491639321268863594:3469] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:46.892171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:46.961803Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.120841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 5380, MsgBus: 16993 2025-04-10T11:06:52.202889Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639353948150746:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f96/r3tmp/tmp0BPZ8D/pdisk_1.dat 2025-04-10T11:06:52.313021Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:06:52.402764Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:52.435470Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:52.435595Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:52.437719Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5380, node 6 2025-04-10T11:06:52.534718Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:52.534751Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:52.534764Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:52.534963Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16993 TClient is connected to server localhost:16993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:53.373321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:53.394067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:53.509896Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
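The code-1108 warnings repeated above fire whenever SqlIn receives nullable arguments: YQL's legacy IN can differ from ANSI three-valued logic when the collection is empty or contains NULLs, and the pragma named in the warning opts into the ANSI behaviour. A minimal sketch with hypothetical table and column names (only the pragma itself is taken verbatim from the warning):

    -- Hypothetical table/column; the pragma name comes from warning code 1108.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    SELECT Key, Value
    FROM `/Root/Test`
    WHERE Value IN (1, 2, NULL); -- under ANSI semantics a non-match against a
                                 -- list containing NULL is NULL, not FALSE

The accompanying code-8001 warnings ("Cost Based Optimizer could not be applied ... couldn't load statistics") simply mean the optimizer fell back to heuristics because the freshly created test tables have no collected statistics yet.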
2025-04-10T11:06:53.846827Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:06:53.969657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:06:57.198373Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639353948150746:2210];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:06:57.198478Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:06:57.302789Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639375422988855:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {

: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.302948Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.377359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.420204Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.500197Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.541557Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.579932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.620132Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.730244Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639375422989377:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.730357Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.730383Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639375422989382:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.735587Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:57.748029Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639375422989384:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:57.854096Z node 6 :TX_PROXY ERROR: Actor# [6:7491639375422989442:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:59.429592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:59.507382Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:06:59.604404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::Update-QueryService-UseSink >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpWorkload::KV >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNewEngine::MultipleBroadcastJoin |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |82.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> KqpSort::TopParameterFilter [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpNotNullColumns::UpsertNotNull >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: Trying to start YDB, gRPC: 29414, MsgBus: 27941 2025-04-10T11:06:05.514950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639150497300241:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:05.520826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008eb/r3tmp/tmpHp2iYa/pdisk_1.dat 2025-04-10T11:06:06.149612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:06.151199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:06.151407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:06.157009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29414, node 1 2025-04-10T11:06:06.334832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:06.334873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:06.334883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:06.335027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27941 TClient is connected to server localhost:27941 WaitRootIsUp 'Root'... 
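The warning chain that closes the previous test's output (:5:21 Sort/SqlProject, :6:26 Filter/Coalesce, :7:37 SqlIn) gives line:column positions into the YQL program text, so the query that triggered it has roughly the shape below — a hypothetical reconstruction in which every identifier is invented for illustration:

    -- Hypothetical reconstruction; only the clause nesting is implied by the
    -- warning chain (Sort over SqlProject over Filter/Coalesce over SqlIn).
    DECLARE $in AS List<Int32?>;  -- nullable items provoke warning 1108
    SELECT Key, Value             -- SqlProject, then Sort via ORDER BY
    FROM `/Root/SecondaryKeys`    -- hypothetical table
    WHERE Value IN $in            -- SqlIn over a nullable collection
    ORDER BY Key;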
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:07.286780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:07.307270Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:07.331022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:07.595487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:07.813674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:07.920824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:10.401749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639171972138369:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:10.401877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:10.510265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639150497300241:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:10.510346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:10.762975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:10.894903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:10.940164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:11.027839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:11.095042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:11.160922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:11.268068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639176267106189:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.268157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.268579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639176267106194:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:11.272138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:11.287005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639176267106196:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:11.349655Z node 1 :TX_PROXY ERROR: Actor# [1:7491639176267106249:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30659, MsgBus: 6927 2025-04-10T11:06:15.132298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639192471591328:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:15.132366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008eb/r3tmp/tmpMuRyQd/pdisk_1.dat 2025-04-10T11:06:15.440830Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:15.472454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:15.472573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:15.476234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30659, node 2 2025-04-10T11:06:15.704202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:15.704227Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:15.704237Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:15.704416Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6927 TClient is connected to server localhost:6927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:06:16.392759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.437165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:16.576023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:06:16.827630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:06:16.928151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESch ... shard: 72057594046644480
2025-04-10T11:06:52.480216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:06:52.548332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:06:52.635960Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639353999802515:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.636108Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.636389Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639353999802520:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.641798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:52.658978Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639353999802522:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:52.748360Z node 5 :TX_PROXY ERROR: Actor# [5:7491639353999802576:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:54.229990Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18199, MsgBus: 2938 2025-04-10T11:06:59.673856Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639383348230339:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:59.673962Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008eb/r3tmp/tmpDuorfW/pdisk_1.dat 2025-04-10T11:06:59.836834Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:59.880274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:59.880411Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:59.882479Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18199, node 6 2025-04-10T11:06:59.968779Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:59.968807Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:59.968825Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:59.968986Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2938 TClient is connected to server localhost:2938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:07:00.827729Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:00.852561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:00.949578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:01.227588Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:01.350792Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:04.676800Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639383348230339:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:07:04.676908Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:07:05.382404Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639409118035885:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.382588Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.464327Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.548193Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.630485Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.680682Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.744067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.791908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.859201Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639409118036405:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.859318Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.859574Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639409118036410:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.864514Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:05.885079Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639409118036412:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:05.943231Z node 6 :TX_PROXY ERROR: Actor# [6:7491639409118036467:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '497) '('"_id" '"745649d9-74431ffc-dadf28dd-81d42fd6") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '510) '('"_id" '"7917149f-aa62a60c-334915be-bcb77abe")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::OverflowLookup >> KqpNewEngine::FullScanCount [GOOD] >> ColumnShardTiers::DSConfigsStub [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |82.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> KqpPg::TempTablesSessionsIsolation [FAIL] >> KqpPg::TempTablesDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 8225, MsgBus: 15077 2025-04-10T11:05:57.625383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639115781888630:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:57.625478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000979/r3tmp/tmp6mcD2B/pdisk_1.dat 2025-04-10T11:05:58.044635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:58.091221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-10T11:05:58.091367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:58.092543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8225, node 1 2025-04-10T11:05:58.154020Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:58.154049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:58.154070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:58.154259Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15077 TClient is connected to server localhost:15077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:05:58.743116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:58.765099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:58.907960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:59.094004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:59.163282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:00.800598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639128666792301:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:00.800743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.306121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.347055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.397732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.480832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.525425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.606518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.706128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639132961760121:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.706269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.706538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639132961760126:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.711054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:01.727683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639132961760128:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:01.811777Z node 1 :TX_PROXY ERROR: Actor# [1:7491639132961760184:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:02.626346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639115781888630:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:02.626431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:02.979568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:04.196852Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283164218, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 15645, MsgBus: 17658 2025-04-10T11:06:05.168836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639148878503655:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:05.168899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000979/r3tmp/tmpLc4C8Y/pdisk_1.dat 2025-04-10T11:06:05.332591Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:05.357265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:05.357367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:05.359065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15645, node 2 2025-04-10T11:06:05.449906Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:05.449931Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:05.449939Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:05.450077Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17658 TClient is connected to server localhost:17658 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:05.981462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:05.991749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:06.000036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:06.070734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 wai ... , opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.572478Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.635172Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-10T11:06:52.645279Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.706592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.764438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.825073Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:52.943825Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639353012879402:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.944051Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.944561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639353012879407:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:52.960126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:52.986555Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639353012879409:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:53.091968Z node 6 :TX_PROXY ERROR: Actor# [6:7491639357307846763:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:54.821416Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:54.908987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.016534Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14472, MsgBus: 17748 2025-04-10T11:06:59.086255Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639384688665988:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:59.091132Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000979/r3tmp/tmp86rJsA/pdisk_1.dat 2025-04-10T11:06:59.325627Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:59.330770Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:59.330903Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:59.337457Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14472, node 7 2025-04-10T11:06:59.457730Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:59.457760Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:59.457778Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:59.457997Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17748 TClient is connected to server localhost:17748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:00.417446Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:00.446688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:00.563100Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:00.821091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:00.936578Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:04.082732Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639384688665988:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:04.082802Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:04.870324Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639406163504260:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.870468Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.967132Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.066894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.169570Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.225916Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.286457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.366285Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.464903Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639410458472076:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.465088Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.466802Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639410458472081:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.474058Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:05.499862Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639410458472083:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:05.590721Z node 7 :TX_PROXY ERROR: Actor# [7:7491639410458472141:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:07.279286Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 20157, MsgBus: 9449 2025-04-10T11:05:57.620939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639116239712220:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:05:57.622651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea6/r3tmp/tmpvKans8/pdisk_1.dat 2025-04-10T11:05:58.029009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:58.038763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:58.038863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:58.041708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20157, node 1 2025-04-10T11:05:58.127875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:58.127907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:58.127920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:58.128089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9449 TClient is connected to server localhost:9449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:05:58.761507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:58.788087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:58.917463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:59.094602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:05:59.164349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:00.943369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639129124615737:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:00.943504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.384134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.434277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.468575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.503941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.538792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.615829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:01.692400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639133419583554:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.692481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.692498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639133419583559:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:01.696331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:01.705935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639133419583561:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:01.790087Z node 1 :TX_PROXY ERROR: Actor# [1:7491639133419583616:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:02.622566Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639116239712220:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:02.622655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15882, MsgBus: 24307 2025-04-10T11:06:04.215907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639144378822842:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:04.215946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea6/r3tmp/tmp6Mdxwb/pdisk_1.dat 2025-04-10T11:06:04.440821Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:04.451764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:04.451858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:04.453704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15882, node 2 2025-04-10T11:06:04.590693Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:04.590724Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:04.590732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:04.590847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24307 TClient is connected to server localhost:24307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:05.095510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:05.103810Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:05.116643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:05.223737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:05.398307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:05.492101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... tion/migrations;error=timeout; 2025-04-10T11:06:55.529720Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.599296Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.648619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.693736Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.741542Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:55.807701Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639367305705229:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.807823Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639367305705234:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.807834Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:55.812844Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:55.825391Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639367305705236:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:06:55.885666Z node 6 :TX_PROXY ERROR: Actor# [6:7491639367305705290:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6603, MsgBus: 5573 2025-04-10T11:06:59.833539Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639382684075292:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:59.834216Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea6/r3tmp/tmpyQrQNf/pdisk_1.dat 2025-04-10T11:06:59.990041Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:00.030052Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:00.030337Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:00.032049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6603, node 7 2025-04-10T11:07:00.118947Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:00.118980Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:00.119001Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:00.119198Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5573 TClient is connected to server localhost:5573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:00.977953Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:00.990481Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:01.003944Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.119950Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.384063Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.502877Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:04.833975Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639382684075292:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:04.834118Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:05.505485Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639408453880835:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.505646Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.550046Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.610132Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.665420Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.718616Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.772901Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.834317Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.956001Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639408453881352:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.956116Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.956413Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639408453881357:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:05.962383Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:05.985543Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639408453881359:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:06.078532Z node 7 :TX_PROXY ERROR: Actor# [7:7491639412748848712:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-10T11:00:55.155724Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.155765Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.155795Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:00:55.156350Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:00:55.156402Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.156433Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.157816Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006097s 2025-04-10T11:00:55.158318Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:00:55.158356Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.158380Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.158423Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006229s 2025-04-10T11:00:55.159105Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:00:55.159129Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.159162Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:00:55.159212Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008435s 2025-04-10T11:00:55.187185Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1744282855187142 2025-04-10T11:00:55.648836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637820616800231:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:55.648897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:00:55.716739Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491637819129192948:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:00:55.716819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c15/r3tmp/tmpoerXao/pdisk_1.dat 2025-04-10T11:00:56.132328Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:00:56.139807Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:00:56.696918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:56.750320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:00:56.856851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:56.856975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:56.858139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:00:56.858214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:00:56.867565Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:00:56.867662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:00:56.872150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9854, node 1 2025-04-10T11:00:56.929792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:00:56.936439Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:56.957559Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:00:57.111575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000c15/r3tmp/yandexpDqknC.tmp 2025-04-10T11:00:57.111610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000c15/r3tmp/yandexpDqknC.tmp 2025-04-10T11:00:57.111788Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000c15/r3tmp/yandexpDqknC.tmp 2025-04-10T11:00:57.111918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:00:57.281263Z INFO: TTestServer started on Port 17608 GrpcPort 9854 TClient is connected to server localhost:17608 PQClient connected to localhost:9854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:00:57.851149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-04-10T11:01:00.650575Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491637820616800231:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.650688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:00.717720Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491637819129192948:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:01:00.717799Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:01:01.177884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637844898997045:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.178025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.178251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491637844898997058:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.187182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:01:01.245591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491637846386605050:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.251384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:01:01.255505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491637844898997060:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:01:01.363627Z node 2 :TX_PROXY ERROR: Actor# [2:7491637844898997088:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:01:02.180060Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.274559s 2025-04-10T11:01:02.180112Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.274648s 2025-04-10T11:01:02.281701Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491637846386605120:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:01:02.285194Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491637844898997095:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:01:02.286385Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjJjZTA3LTFkNmQwNWJkLTk5MDhlMmExLTgxMThlMTMx, ActorId: [2:7491637844898997043:2310], ActorState: ExecuteState, TraceId: 01jrfnrfkeatdqjr1akeybc3c5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:01:02.288643Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. ... 4-10T11:07:03.319006Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-04-10T11:07:03.319044Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-04-10T11:07:03.319064Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:07:03.319115Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:07:03.319158Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:07:03.319247Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:07:03.319417Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-10T11:07:03.334252Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7491639390123090545:2620] 2025-04-10T11:07:03.334478Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:07:03.334584Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.334706Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T11:07:03.334764Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:07:03.334816Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-10T11:07:03.337949Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 18 queued_in_partition_duration_ms: 12 } 2025-04-10T11:07:03.334846Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.334893Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-10T11:07:03.334928Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.338034Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 1 2025-04-10T11:07:03.334972Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-10T11:07:03.335002Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.338109Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 2 2025-04-10T11:07:03.335042Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-10T11:07:03.338173Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 3 2025-04-10T11:07:03.335075Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:07:03.338215Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 4 2025-04-10T11:07:03.335116Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-10T11:07:03.338268Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 5 2025-04-10T11:07:03.335155Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.335209Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-10T11:07:03.338303Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 6 2025-04-10T11:07:03.335242Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.338335Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 7 2025-04-10T11:07:03.335284Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-10T11:07:03.338374Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 8 2025-04-10T11:07:03.335318Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:07:03.338417Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 9 2025-04-10T11:07:03.335357Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-10T11:07:03.335389Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:07:03.338444Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: acknoledged message 10 2025-04-10T11:07:03.335430Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-10T11:07:03.335788Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:07:03.335841Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:07:03.336131Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-10T11:07:03.336331Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:07:03.336948Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-10T11:07:03.337027Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-10T11:07:03.337437Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-10T11:07:03.337483Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:07:03.337524Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-10T11:07:03.337605Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1744283223303 queuesize 0 startOffset 0 2025-04-10T11:07:03.338828Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: close. 
Timeout = 0 ms 2025-04-10T11:07:03.338895Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session will now close 2025-04-10T11:07:03.338972Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: aborting 2025-04-10T11:07:03.339606Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:07:03.339676Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0] Write session: destroy 2025-04-10T11:07:03.341359Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0 grpc read done: success: 0 data: 2025-04-10T11:07:03.341421Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0 grpc read failed 2025-04-10T11:07:03.341479Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0 grpc closed 2025-04-10T11:07:03.341519Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d42fad76-e0985258-43b13203-54cf0e5b_0 is DEAD 2025-04-10T11:07:03.343270Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:07:03.343942Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7491639398713025395:2646] destroyed 2025-04-10T11:07:03.344016Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2025-04-10T11:05:20.748511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:05:20.748689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:05:20.748823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9c/r3tmp/tmp8hRLHN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22994, node 1 TClient is connected to server localhost:30449 2025-04-10T11:05:21.638025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:05:21.694167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:21.704638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:21.704713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:21.704758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:21.705157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:05:21.747461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:21.747596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:21.759369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:05:21.951945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-10T11:05:22.187730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:05:22.187988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:05:22.188304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:05:22.188468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:05:22.188586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:05:22.188706Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:05:22.188836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:05:22.188943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:05:22.189035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:05:22.189105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:05:22.189230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:05:22.189313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:05:22.212307Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-10T11:05:22.214292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:05:22.214398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:05:22.214539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:05:22.214605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:05:22.214836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:05:22.214887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:05:22.215034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:05:22.215097Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:05:22.215200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:05:22.215245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:05:22.215300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:05:22.215346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:05:22.222761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:05:22.222919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:05:22.223179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:05:22.223230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:05:22.223399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:05:22.223438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:05:22.223661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:05:22.223707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:05:22.223840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:05:22.223913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:05:22.255533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:05:22.255685Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:05:22.255921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:05:22.256044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:05:22.256184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:05:22.256288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:05:22.256394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norm ... -04-10T11:06:24.028833Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-10T11:06:24.028947Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:06:24.028995Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-10T11:06:24.029059Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:06:24.029136Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-10T11:06:24.029209Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; Initialization finished REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 2025-04-10T11:06:35.963891Z node 1 :TX_PROXY ERROR: Actor# [1:3594:4758] txid# 281474976715747, issues: { message: "Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable" severity: 1 } REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=
: Error: Execution, code: 1060
:1:27: Error: Executing DROP OBJECT EXTERNAL_DATA_SOURCE
: Error:
: Error: Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable, code: 2003 , code: 2003 ;EXPECTATION=0 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2025-04-10T11:06:47.479754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.406989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715756;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715756; 2025-04-10T11:06:48.408227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715756;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715756; 2025-04-10T11:06:48.408419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715756;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715756; REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-10T11:06:59.023962Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:59.024486Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:59.024555Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:06:59.024620Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:06:59.024688Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-10T11:06:59.024739Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:06:59.024834Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-10T11:06:59.024910Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:59.025562Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:59.025618Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:59.025650Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:06:59.026491Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:59.026539Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2025-04-10T11:06:59.026578Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037888 2025-04-10T11:06:59.026615Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2025-04-10T11:06:59.026644Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-04-10T11:06:59.026689Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037888 2025-04-10T11:06:59.026742Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:59.026777Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:59.026804Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2025-04-10T11:06:59.026840Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037889 2025-04-10T11:06:59.026869Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2025-04-10T11:06:59.026900Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-04-10T11:06:59.026938Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037889 2025-04-10T11:06:59.026982Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:59.027014Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:06:59.027040Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-04-10T11:06:59.027068Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037890 2025-04-10T11:06:59.027097Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-04-10T11:06:59.027121Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-10T11:06:59.027156Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-04-10T11:06:59.027197Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:59.027577Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:744:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:06:59.027678Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:06:59.027753Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:759:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-10T11:07:10.147411Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:10.147484Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:10.147515Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:10.147855Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:10.148078Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:10.148129Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-10T11:07:10.148186Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:07:10.148281Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:10.148468Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:10.148514Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-04-10T11:07:10.148542Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped 
at tablet 72075186224037888 2025-04-10T11:07:10.148580Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:10.148604Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:10.148624Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2025-04-10T11:07:10.148649Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-04-10T11:07:10.148697Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:10.148844Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:10.148877Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2025-04-10T11:07:10.148911Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-10T11:07:10.148956Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:10.149657Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:744:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:07:10.149722Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:07:10.149763Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:759:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |82.5%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink |82.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::Select1 >> KqpSort::TopSortTableExprOffset >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpNewEngine::MultiSelect |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |82.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TExportToS3Tests::RebootDuringAbortion >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TExportToS3Tests::CheckItemProgress >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::ShouldRestartOnScanErrors >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> PgCatalog::CheckSetConfig [FAIL] >> PgCatalog::PgDatabase+useSink >> KqpReturning::ReturningWorks-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
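
The reboot cycles in this TKeyValueTest output all follow one pattern: the harness intercepts a specific event type (TEvTabletPipe::TEvServerConnected, TEvKeyValue::TEvRequest, TEvIntermediate, ...), kills tablet 72057594037927937 at that point, waits until the tablet resolver is refreshed, and then checks that the scenario still converges under the new tablet actor. Below is a minimal self-contained C++ sketch of that reboot-at-every-step strategy; all names are hypothetical and this is an illustration of the idea, not the NKikimr test harness, under the assumption that each client operation is idempotent and replayable after a restart.

#include <cassert>
#include <cstddef>
#include <functional>
#include <map>
#include <string>
#include <vector>

// Hypothetical in-memory "tablet": only `disk` is persisted and
// therefore survives a reboot.
struct FakeTablet {
    std::map<std::string, std::string> disk;
    void Reboot() { /* volatile state would be lost; `disk` survives */ }
};

// One client operation, replayed against whichever tablet actor is live.
using Op = std::function<void(FakeTablet&)>;

// Re-run the scenario once per step index, rebooting the tablet right
// before that step, and check the final state is identical on every path.
void RunRebootAtEveryStep(const std::vector<Op>& steps,
                          const std::map<std::string, std::string>& expected) {
    for (std::size_t kill = 0; kill <= steps.size(); ++kill) {
        FakeTablet tablet;
        for (std::size_t i = 0; i < steps.size(); ++i) {
            if (i == kill) tablet.Reboot();  // "!Reboot ... rebooted!"
            steps[i](tablet);                // retry against the new actor
        }
        assert(tablet.disk == expected);     // same answer regardless of kill point
    }
}

int main() {
    std::vector<Op> steps = {
        [](FakeTablet& t) { t.disk["a"] = "1"; },  // write
        [](FakeTablet& t) { t.disk.erase("a"); },  // delete
        [](FakeTablet& t) { t.disk["b"] = "2"; },  // write
    };
    RunRebootAtEveryStep(steps, {{"b", "2"}});
    return 0;
}
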
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
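
Earlier in this section, the TX_TIERING trace of ColumnShardTiers::DSConfigsStub shows the same reaction to every object_deleted event: the deleted tier is stopped on each tablet, the tiers that remain are restarted, and a fresh configs_updated snapshot is published. A rough self-contained C++ model of that bookkeeping follows; the types are hypothetical, not the TX_TIERING implementation.

#include <iostream>
#include <set>
#include <string>

// Hypothetical model: one manager instance per tablet, keyed by tier path.
struct TieringManager {
    long long tabletId;
    std::set<std::string> tiers;  // tiers currently having has_config=1

    void OnObjectDeleted(const std::string& name) {
        // "Tier '<name>' stopped at tablet <id>"
        tiers.erase(name);
        // Remaining tiers are restarted so they pick up the new snapshot.
        for (const auto& t : tiers)
            std::cout << "Restarting tier '" << t << "' at tablet "
                      << tabletId << "\n";
        PublishConfigsUpdated();
    }

    void PublishConfigsUpdated() const {
        std::cout << "configs_updated; TIERS=";
        for (const auto& t : tiers) std::cout << "{id=" << t << "}";
        std::cout << "\n";
    }
};

int main() {
    TieringManager m{72075186224037888, {"/Root/tier1", "/Root/tier2"}};
    m.OnObjectDeleted("/Root/tier1");  // tier2 restarts, snapshot shrinks
    m.OnObjectDeleted("/Root/tier2");  // snapshot becomes empty
    return 0;
}
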
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:56:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:83:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:84:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:84:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
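
The test name TestWriteReadRangeLimitThenLimitWorksNewApi suggests the contract being exercised: a range read with a limit returns a prefix of the matching entries and tells the caller where to resume. A compact C++ sketch of that contract over an ordered map is below; the names are hypothetical and this is only the semantics the test name implies, not the TEvKeyValue::TEvReadRange protocol.

#include <cassert>
#include <cstddef>
#include <map>
#include <optional>
#include <string>
#include <utility>
#include <vector>

struct RangeResult {
    std::vector<std::pair<std::string, std::string>> items;
    std::optional<std::string> continueFrom;  // set when the limit was hit
};

// Return at most `limit` entries with keys in [from, to].
RangeResult ReadRange(const std::map<std::string, std::string>& kv,
                      const std::string& from, const std::string& to,
                      std::size_t limit) {
    RangeResult res;
    for (auto it = kv.lower_bound(from); it != kv.end() && it->first <= to; ++it) {
        if (res.items.size() == limit) {
            res.continueFrom = it->first;  // resume point for the next read
            break;
        }
        res.items.push_back(*it);
    }
    return res;
}

int main() {
    std::map<std::string, std::string> kv{
        {"a", "1"}, {"b", "2"}, {"c", "3"}, {"d", "4"}};
    auto r = ReadRange(kv, "a", "d", 2);
    // Two items returned, and the caller can continue from "c".
    assert(r.items.size() == 2 && r.continueFrom && *r.continueFrom == "c");
    return 0;
}
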
new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> TPersQueueTest::InflightLimit [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/blobsan/blobsan |82.6%| [LD] {RESULT} $(B)/ydb/tools/blobsan/blobsan |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::MergeRanges >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> KqpNewEngine::MultipleBroadcastJoin [GOOD] |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/blobsan/blobsan >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> KqpPg::TempTablesDrop [FAIL] >> KqpPg::TempTablesWithCache >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::CorruptedDyNumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
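
The PERSQUEUE write trace at the top of this section ("Answering for message sourceid: '\0test-message-group-id' ... SeqNo: 6 ... Offset: 5 is stored on disk") reflects the partition's core invariant: per source id, sequence numbers deduplicate retried writes, while offsets are assigned by the partition in arrival order. A self-contained C++ sketch of that invariant, with hypothetical names only (not the TPartition implementation):

#include <cassert>
#include <cstdint>
#include <map>
#include <optional>
#include <string>

// Hypothetical partition model: dedup by (sourceId, seqNo), assign offsets.
class Partition {
public:
    // Returns the offset the message is stored at, or nullopt for a
    // duplicate (seqNo not greater than the last one seen for the source).
    std::optional<uint64_t> Write(const std::string& sourceId, uint64_t seqNo) {
        auto& last = lastSeqNo_[sourceId];
        if (seqNo <= last) return std::nullopt;  // retry of an acked write
        last = seqNo;
        return nextOffset_++;  // e.g. "SeqNo: 6 ... Offset: 5 is stored on disk"
    }

private:
    std::map<std::string, uint64_t> lastSeqNo_;
    uint64_t nextOffset_ = 0;
};

int main() {
    Partition p;
    for (uint64_t s = 1; s <= 6; ++s)
        assert(p.Write("test-message-group-id", s) == s - 1);  // SeqNo 6 -> Offset 5
    assert(!p.Write("test-message-group-id", 6));  // retried write is deduplicated
    return 0;
}
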
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:104:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:87:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:87:2116] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:88:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:92:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:91:2118] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:111:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:89:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:92:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:91:2120] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:95:2057] recipient: [15:91:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:94:2121] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:148:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24923, MsgBus: 6228 2025-04-10T11:07:09.735319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639424100837715:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.735435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001724/r3tmp/tmpWI9SuL/pdisk_1.dat 2025-04-10T11:07:10.826705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.852239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.852705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.852776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.924925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24923, node 1 2025-04-10T11:07:12.258212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.258250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.258259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.258406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6228 2025-04-10T11:07:14.738357Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639424100837715:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.738438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6228 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.953508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.082602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.394835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.547724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.649570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.873435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639449870643183:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.873545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.881825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.940458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.992017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.035342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.081968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.187734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.335133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462755545709:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462755545714:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.356365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.374950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639462755545716:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.458715Z node 1 :TX_PROXY ERROR: Actor# [1:7491639462755545773:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17625, MsgBus: 26927 2025-04-10T11:07:09.736100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639426947586884:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.736151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171b/r3tmp/tmpqUxQbt/pdisk_1.dat 2025-04-10T11:07:10.831722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.844885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.887271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.887366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.888966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17625, node 1 2025-04-10T11:07:12.255648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.255675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.255685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.255805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26927 2025-04-10T11:07:14.738250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639426947586884:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.738314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:26927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.951233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.080660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.405836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.581423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.683380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.996793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639452717392359:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.996935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.884708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.944484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.979837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.012417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.047929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.185272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.337546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465602294865:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.337671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.338076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465602294870:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.356002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.377759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639465602294872:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.461266Z node 1 :TX_PROXY ERROR: Actor# [1:7491639465602294930:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1655, MsgBus: 6485 2025-04-10T11:07:09.738723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639426485310929:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.738772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001712/r3tmp/tmpBCLRsP/pdisk_1.dat 2025-04-10T11:07:10.817514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.856870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.856984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.886818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:10.887323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 1655, node 1 2025-04-10T11:07:12.306650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.306682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.306693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.306894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6485 2025-04-10T11:07:14.740390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639426485310929:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.740479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.954046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.080456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.388871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.583576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.713312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.975536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639452255116395:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.975684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.884053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.944061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.986104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.025161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.062902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.142531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.335520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465140018905:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465140018910:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.351927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.379058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639465140018912:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.458784Z node 1 :TX_PROXY ERROR: Actor# [1:7491639465140018967:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 32230, MsgBus: 23690 2025-04-10T11:06:09.550794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639167318442117:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:09.582723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008b2/r3tmp/tmp40Zu1h/pdisk_1.dat 2025-04-10T11:06:10.290836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:10.290982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:10.295602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32230, node 1 2025-04-10T11:06:10.559126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:10.834659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:10.834733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:10.834741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:10.834854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23690 TClient is connected to server localhost:23690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:12.238200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:12.269907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.485985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.818508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.974044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:14.554028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639167318442117:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:14.581814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:16.107977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639197383214958:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:16.108117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:16.612312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.692647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.728267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.772689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.817045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.892224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:16.993359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639197383215478:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:16.993513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:16.994015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639197383215483:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:16.999516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:17.029209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639197383215485:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:17.110357Z node 1 :TX_PROXY ERROR: Actor# [1:7491639201678182839:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:19.284925Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7491639210268117846:2505] TxId: 281474976710671. Ctx: { TraceId: 01jrfp25h83vkjkjy1dq8qcqst, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjMDkxYWMtNzI0NTQyOGYtZWY5ZDg3NzItNWJkNTZhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 3, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:06:19.297521Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710671. Ctx: { TraceId: 01jrfp25h83vkjkjy1dq8qcqst, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjMDkxYWMtNzI0NTQyOGYtZWY5ZDg3NzItNWJkNTZhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491639210268117854:2517] 2025-04-10T11:06:19.297641Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710671. Ctx: { TraceId: 01jrfp25h83vkjkjy1dq8qcqst, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjMDkxYWMtNzI0NTQyOGYtZWY5ZDg3NzItNWJkNTZhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491639210268117855:2518] 2025-04-10T11:06:19.298062Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710671. Ctx: { TraceId: 01jrfp25h83vkjkjy1dq8qcqst, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjMDkxYWMtNzI0NTQyOGYtZWY5ZDg3NzItNWJkNTZhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [1:7491639210268117856:2519] Trying to start YDB, gRPC: 29598, MsgBus: 62239 2025-04-10T11:06:20.783192Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639213136491381:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:20.783267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008b2/r3tmp/tmpoYQ6Rx/pdisk_1.dat 2025-04-10T11:06:20.982122Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:21.009619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:21.009971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:21.011417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29598, node 2 2025-04-10T11:06:21.189652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:21.189678Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:21.189687Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:21.189892Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62239 TClient is connected to server localhost:62239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 ... 
6715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.434698Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639376826953135:2151];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:03.435066Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:03.501394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.571554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.674894Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.767932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.842869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.946371Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639398301791819:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:03.946564Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:03.947006Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639398301791824:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:03.953466Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:03.979937Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639398301791826:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:04.046459Z node 6 :TX_PROXY ERROR: Actor# [6:7491639402596759177:3464] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26944, MsgBus: 10030 2025-04-10T11:07:08.629821Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639419981191508:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:08.629876Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008b2/r3tmp/tmpE0qH1P/pdisk_1.dat 2025-04-10T11:07:08.859387Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:08.868251Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:08.868422Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:08.870446Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26944, node 7 2025-04-10T11:07:08.974272Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:08.974302Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:08.974313Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:08.974486Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10030 TClient is connected to server localhost:10030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:07:09.899477Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:09.914355Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:10.052568Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:10.435285Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:10.579273Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:13.634464Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639419981191508:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:13.654259Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:15.382173Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639450045964381:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.382351Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.498060Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.602353Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.693807Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.792151Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.871100Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.941102Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:16.090727Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639454340932218:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.090906Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.091208Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639454340932223:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.098099Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:16.114812Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639454340932225:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:16.196549Z node 7 :TX_PROXY ERROR: Actor# [7:7491639454340932281:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:18.079662Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.133950Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.210389Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiOutput >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Checksums >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::UnionAllSortLimit >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> KqpQueryPerf::Update-QueryService+UseSink >> KqpNewEngine::DeleteByKey [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink >> KqpNewEngine::SimpleUpsertSelect |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator |82.7%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::Changefeeds >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteByKey [GOOD] Test command err: Trying to start YDB, gRPC: 11963, MsgBus: 30190 2025-04-10T11:06:18.484833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639205413166639:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:18.485164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000892/r3tmp/tmpAVw5u5/pdisk_1.dat 2025-04-10T11:06:19.358529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:19.362681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:19.362793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:19.367259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11963, node 1 2025-04-10T11:06:19.618694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:19.618723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:19.618743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:19.618866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30190 TClient is connected to server localhost:30190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:20.804542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:20.852972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:21.133866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:21.652869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:21.765782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:23.468554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639205413166639:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:23.468634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:25.094936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639235477939435:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.095063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.440358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.482623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.583333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.679122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.758313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.842134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:25.940233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639235477939965:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.940309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.940573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639235477939970:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:25.945658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:25.974384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639235477939972:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:26.047031Z node 1 :TX_PROXY ERROR: Actor# [1:7491639239772907327:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9040, MsgBus: 8072 2025-04-10T11:06:29.996130Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639254633955967:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:30.031167Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000892/r3tmp/tmppUWFed/pdisk_1.dat 2025-04-10T11:06:30.282620Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:30.353058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:30.353136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:30.359059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9040, node 2 2025-04-10T11:06:30.546781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:30.546805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:30.546813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:30.546925Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8072 TClient is connected to server localhost:8072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:32.406487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:32.460639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:06:32.860113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:33.399956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:33.622857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:34.938811Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp: ... .520037Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:10.004918Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639429178506815:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:10.005030Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:10.062378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.144846Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.240803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.345580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.398307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.495350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:10.619864Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639429178507346:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:10.619986Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:10.620477Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639429178507351:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:10.626464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:10.655093Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639429178507353:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:10.748787Z node 6 :TX_PROXY ERROR: Actor# [6:7491639429178507414:3469] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2064, MsgBus: 14813 2025-04-10T11:07:14.946103Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639446809764199:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.946198Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000892/r3tmp/tmp2lfk8G/pdisk_1.dat 2025-04-10T11:07:15.107615Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:15.140372Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:15.140495Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:15.142370Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2064, node 7 2025-04-10T11:07:15.224755Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:15.224787Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:15.224798Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:15.224957Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14813 TClient is connected to server localhost:14813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:16.226677Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:16.238378Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:16.261249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:16.379297Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:16.717194Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:16.837350Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.946273Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639446809764199:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:19.946377Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:20.438423Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639472579569743:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.438551Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.468597Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.512577Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.553816Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.619910Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.664449Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.718302Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.789313Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639472579570254:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.789423Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639472579570259:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.789429Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.795223Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:20.812141Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639472579570261:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:20.909945Z node 7 :TX_PROXY ERROR: Actor# [7:7491639472579570317:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::EncryptedExport >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.583529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.599104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.599213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.599339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.599507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.606340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.793694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.793766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.814002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.851395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.851665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-10T11:07:20.903764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.949722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.049680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.049805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.049919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.050189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.064092Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.214039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.239061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.263680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.272048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.272176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.291461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.291632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.291858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.292021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.292064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.292140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.299121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.299192Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.299256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.301247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.301300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.301347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.301450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.319144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.322056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.340718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.342419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.342630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.342755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.363108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.363216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.363408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.363502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.379891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.379955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.380164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.380241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.380526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.380577Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.380679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.380713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.380766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.380815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.380866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.380919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.380957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.380994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.381094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.381140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.381174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.389177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.389329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.389389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-10T11:07:25.214924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.215104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.215162Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:25.215240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:25.215382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:25.217665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-10T11:07:25.217782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-10T11:07:25.218121Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:25.218244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:25.218303Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:07:25.218437Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-10T11:07:25.218571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:25.257948Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:25.258004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:25.258316Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:25.258357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-10T11:07:25.258868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.258926Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:25.259852Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:25.259939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:25.259970Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-10T11:07:25.260003Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:07:25.260037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:07:25.260117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:10366 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DF18545D-47E0-4603-932C-FBAE2E5D4383 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-04-10T11:07:25.265957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:10366 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 524435F8-B154-4DBF-9E92-FF06AD30569A amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:10366 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A083EDA3-6C2F-441B-B452-83E5159209F9 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:10366 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9382F1EE-5378-41F1-B2FD-10574916CD68 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-04-10T11:07:25.285263Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 12884904309 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:25.285327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-10T11:07:25.285494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 12884904309 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:25.285639Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 452 RawX2: 12884904309 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:25.285725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:25.285773Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.285808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:07:25.285851Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-10T11:07:25.286022Z node 3 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:25.288699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.288878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:25.288918Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-10T11:07:25.289029Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:25.289057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:25.289089Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:25.289113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:25.289142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-10T11:07:25.289224Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:122:2148] message: TxId: 281474976710759 2025-04-10T11:07:25.289281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:25.289321Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-10T11:07:25.289350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-10T11:07:25.289491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:25.291739Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-10T11:07:25.291810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-10T11:07:25.293827Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:07:25.293895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:483:2444] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> KqpSort::ReverseOptimized >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 >> KqpNewEngine::JoinIdxLookup >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.583265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2025-04-10T11:07:20.595095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.595184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.595279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.595389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.603506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.784150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.784213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.814007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.848770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.849216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.902943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.947638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.054330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.054428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.054500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.054697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.067067Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.214284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.238571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.262694Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.267917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.268067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.288403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.288588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.288843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.289029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.289070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.289135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.295470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.295583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.295634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.297925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.297989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.298055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.298117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.314740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.324736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.344202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.345604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.345816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.345890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.358775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.358903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.359140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.359286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.367464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.367543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.367738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.367802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.368086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.368138Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.368251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.368289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.368359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.368443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.368529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.368610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.368661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.368704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.376174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
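
The trace above walks a single schemeshard operation through its numbered states: TCreateParts (2 -> 3), TConfigureParts (3 -> 128), TPropose planned by the fake coordinator, then straight to TDone (128 -> 240) for the subdomain alter, or via TProposedWaitParts (128 -> 129 -> 240) when, as in the backup below, the operation must wait for TEvSchemaChanged from the datashards. A minimal C++ sketch of that transition table, using the state numbers and names visible in the log (the enum and the table are illustrative only, not YDB's actual types):

// Illustrative model of the operation states observed in this log.
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

enum class ETxState : uint8_t {
    CreateParts       = 2,   // "Change state for txid 1:0 2 -> 3"
    ConfigureParts    = 3,   // "... 3 -> 128"
    Propose           = 128, // "... 128 -> 240" or "... 128 -> 129"
    ProposedWaitParts = 129, // backup waits for TEvSchemaChanged from shards
    Done              = 240, // "TDone opId# ... ProgressState"
};

// Allowed transitions as observed above; an assumption for illustration.
static const std::map<ETxState, std::vector<ETxState>> kNext = {
    {ETxState::CreateParts,       {ETxState::ConfigureParts}},
    {ETxState::ConfigureParts,    {ETxState::Propose}},
    {ETxState::Propose,           {ETxState::Done, ETxState::ProposedWaitParts}},
    {ETxState::ProposedWaitParts, {ETxState::Done}},
};

bool IsValidTransition(ETxState from, ETxState to) {
    auto it = kNext.find(from);
    if (it == kNext.end()) return false;
    for (ETxState next : it->second)
        if (next == to) return true;
    return false;
}

int main() {
    // The backup path seen later in the log: 128 -> 129 -> 240.
    std::cout << IsValidTransition(ETxState::Propose, ETxState::ProposedWaitParts)
              << IsValidTransition(ETxState::ProposedWaitParts, ETxState::Done)
              << '\n'; // prints "11"
}
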
2025-04-10T11:07:21.376352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.376429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... el: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:27.689645Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-10T11:07:27.689774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-10T11:07:27.690115Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:27.690258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:27.690328Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:07:27.690473Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-10T11:07:27.690612Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 24EC1457-A171-448A-929E-75211CA0B822 amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-10T11:07:27.746920Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:27.746971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:27.747212Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:27.747248Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-04-10T11:07:27.748005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.748069Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A4FF65C3-ADFA-4922-B27B-76988117225D amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 2025-04-10T11:07:27.777093Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:27.777210Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:27.777244Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-10T11:07:27.777282Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:07:27.777324Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:07:27.777437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D47E2C3E-5B1F-4886-8E0B-A3FEBC6074E9 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-04-10T11:07:27.783451Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 09BF001A-52FF-4157-8C03-C5A21E93AF3B amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A4190A59-0FCD-46C2-90B3-B79A387B173C amz-sdk-request: attempt=1 content-length: 27 content-md5: CTqKvdXJPw0OgRdlsoR71Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 27 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:10072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c 
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D2989747-3F10-4826-8919-FDDFE2823A22 amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / / 76 2025-04-10T11:07:27.823850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-10T11:07:27.823916Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-10T11:07:27.824057Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-10T11:07:27.824191Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-10T11:07:27.824270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:27.824325Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.824368Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:07:27.824417Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-10T11:07:27.824601Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:27.829574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.829910Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.829960Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-10T11:07:27.830085Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:27.830123Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.830186Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:27.830222Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.830257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-10T11:07:27.830319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759 2025-04-10T11:07:27.830365Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.830402Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-10T11:07:27.830433Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-10T11:07:27.830548Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:27.832752Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-10T11:07:27.832810Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-10T11:07:27.838960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:07:27.839031Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:477:2438] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.582551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.582774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.582892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.582945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.594326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.594451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.594591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.594743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
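
The PUT requests in the export test above show the on-bucket layout written through the S3 mock: metadata.json and scheme.pb, the table data as data_00.csv (or data_00.csv.zst when compression is on), plus a .sha256 companion per object when checksums are enabled. Note that, as logged, the checksum key keeps the uncompressed name (data_00.csv.sha256 sits next to data_00.csv.zst). A small C++ sketch that enumerates this layout (a hypothetical helper inferred from the log, not a YDB API):

// Hypothetical helper: the object keys an export like the one above is
// expected to write, derived from the PUT requests in this log.
#include <cstdio>
#include <string>
#include <vector>

std::vector<std::string> ExpectedExportKeys(int dataParts,
                                            bool compressed,  // data_NN.csv.zst vs data_NN.csv
                                            bool checksums) { // *.sha256 companions
    std::vector<std::string> keys = {"metadata.json", "scheme.pb"};
    if (checksums) {
        keys.push_back("metadata.json.sha256");
        keys.push_back("scheme.pb.sha256");
    }
    char buf[32];
    for (int i = 0; i < dataParts; ++i) {
        std::snprintf(buf, sizeof(buf), "data_%02d.csv", i);
        keys.push_back(std::string(buf) + (compressed ? ".zst" : ""));
        // Observed: the checksum key uses the uncompressed file name.
        if (checksums) keys.push_back(std::string(buf) + ".sha256");
    }
    return keys;
}

int main() {
    for (const auto& key : ExpectedExportKeys(1, /*compressed=*/true, /*checksums=*/true))
        std::puts(key.c_str());
}

Called with (1, true, true) this yields, order aside, exactly the six keys uploaded in the first export above; the later exports in this log, which run without checksums and without compression, correspond to (1, false, false).
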
2025-04-10T11:07:20.601805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.792331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.792415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.814882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.815205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.815357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.850128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.850376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.903745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.947602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.049684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.049806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.049942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.050205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.057974Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.246399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.246658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.263364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.270919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.271030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.293900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:07:21.294062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.294274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.294369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.294408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.294454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.298624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.298689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.298731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.304008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.304073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.304130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.304197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.349142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.362170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.362468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.363683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.363952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.364106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.364708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.364793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.365013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.365106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.387664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.387735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.387977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.388052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.388406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.388486Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.388635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.388676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.388716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.388753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.388795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.388843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.388884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.388921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.389026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.389074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.389117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.403049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.403337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.403445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ardId: 72075186233409548 CpuTimeUsec: 289 } } 2025-04-10T11:07:27.868454Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-10T11:07:27.868569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.868610Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-10T11:07:27.871059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.871243Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.871301Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:27.871385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.871537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:27.874348Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-10T11:07:27.874469Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-04-10T11:07:27.875069Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:27.875197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:27.875268Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:07:27.875400Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-10T11:07:27.875516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:5720 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4055F1F1-6F60-4220-AD1E-1578CB5A0633 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== 
content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-10T11:07:27.927008Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:27.927062Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-04-10T11:07:27.927316Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:27.927370Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-04-10T11:07:27.927491Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.927538Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-04-10T11:07:27.928451Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:27.928518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-10T11:07:27.928539Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-10T11:07:27.928561Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-10T11:07:27.928582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-10T11:07:27.928655Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:5720 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FC1B335E-E73A-4879-88D5-F50D75FD1B48 amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 2025-04-10T11:07:27.933220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:5720 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AB500DCC-FC01-41B9-84DA-72420D337195 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-04-10T11:07:27.959332Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 508 RawX2: 17179871650 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:27.959373Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-04-10T11:07:27.959466Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 508 RawX2: 17179871650 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:27.959547Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 508 RawX2: 17179871650 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:27.959604Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:27.959636Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.959677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:07:27.959719Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-10T11:07:27.959853Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:27.961418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.961697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:27.961747Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-10T11:07:27.961863Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:27.961894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.961930Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:07:27.961959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.962007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-10T11:07:27.962067Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759 2025-04-10T11:07:27.962112Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:07:27.962164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-10T11:07:27.962197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-10T11:07:27.962321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T11:07:27.964797Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-10T11:07:27.964858Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-10T11:07:27.966605Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:07:27.966667Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:536:2486] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::Changefeeds [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.583752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.595915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.595989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.596123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.596246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.604575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.784868Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.784943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.814536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.814811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.858644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.858918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.902938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.947250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.039028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.039114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.051920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.052024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.052148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.052383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.075549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.251893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.252148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.263776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.270437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.270576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.291572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.291814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-10T11:07:21.292084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.292199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.292263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.292316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.297574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.297671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.297716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.310450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.310542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.310612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.310695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.351134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.353791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.354097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.358112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.358355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.358434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.358833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.358913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.359141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.359257Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.365497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.365593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.365754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.365803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.366040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.366089Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.366213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.366255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.366295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.366350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.366400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.366444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.366483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.366515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.366602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.366667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.366715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.398888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.399085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.399143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ardId: 72075186233409547 CpuTimeUsec: 322 } } 2025-04-10T11:07:28.546014Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-10T11:07:28.546175Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710760:0, left await: 0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.546241Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 3 -> 128 2025-04-10T11:07:28.548562Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.548714Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.548775Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:28.548861Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:07:28.549029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:28.551214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-10T11:07:28.551366Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710760 at step: 5000006 2025-04-10T11:07:28.551989Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:28.552091Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:28.552148Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 HandleReply TEvOperationPlan, stepId: 5000006, at schemeshard: 72057594046678944 2025-04-10T11:07:28.552293Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 129 2025-04-10T11:07:28.552441Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:7894 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 25BDFD7A-C2B5-4988-BE4A-EBE47808CA85 amz-sdk-request: attempt=1 content-length: 73 content-md5: fOPVvXe4lXvxEe0jvYDDBA== 
content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2025-04-10T11:07:28.607319Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:28.607386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710760, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:28.607600Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:28.607644Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710760, path id: 4 2025-04-10T11:07:28.608216Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.608283Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:28.608985Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760 2025-04-10T11:07:28.609078Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760 2025-04-10T11:07:28.609111Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710760 2025-04-10T11:07:28.609159Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710760, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:07:28.609199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:07:28.609290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710760 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:7894 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 843C0C27-EB67-4312-97F2-CAE87E6209D0 amz-sdk-request: attempt=1 content-length: 465 content-md5: I8OyVo4ze5n8ehz5iBQr/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 465 2025-04-10T11:07:28.612105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710760 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:7894 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A08DDA15-23DC-4F86-9F6D-A284D1EFBEFE amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-04-10T11:07:28.632284Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 485 RawX2: 17179871638 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:28.632358Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710760, tablet: 72075186233409547, partId: 0 2025-04-10T11:07:28.632509Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944, message: Source { RawX1: 485 RawX2: 17179871638 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:28.632642Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 485 RawX2: 17179871638 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-10T11:07:28.632725Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710760:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:28.632775Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.632817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710760:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:07:28.632870Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 129 -> 240 2025-04-10T11:07:28.633081Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710760:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:28.636759Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.637076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:07:28.637126Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T11:07:28.637256Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:07:28.637298Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:07:28.637342Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:07:28.637391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:07:28.637430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-10T11:07:28.637519Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710760 2025-04-10T11:07:28.637575Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:07:28.637621Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-10T11:07:28.637653Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T11:07:28.637785Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:28.639848Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T11:07:28.639918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T11:07:28.641829Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:07:28.641888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:515:2476] TestWaitNotification: OK eventTxId 102
>> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD]
>> KqpNotNullColumns::UpdateTable_UniqIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 7502, MsgBus: 17212 2025-04-10T11:07:09.735380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639424276734112:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.735438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ec/r3tmp/tmpMtHIIY/pdisk_1.dat 2025-04-10T11:07:10.875688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.887401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.935563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.935638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.937947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7502, node 1 2025-04-10T11:07:12.276615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.276648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.276659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.276794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17212 TClient is connected to server localhost:17212 2025-04-10T11:07:14.735391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639424276734112:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.735481Z
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.955100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.027189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:15.083187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.373425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.561988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.647774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.884524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639450046539588:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.884743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.881477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.924750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.965566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.037780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.067813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.146062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.335289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462931442117:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.335791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462931442122:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.356013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.380834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639462931442124:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.458713Z node 1 :TX_PROXY ERROR: Actor# [1:7491639462931442180:3479] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28663, MsgBus: 63722 2025-04-10T11:07:23.083457Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639484483724049:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:23.083533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ec/r3tmp/tmp6n1DPZ/pdisk_1.dat 2025-04-10T11:07:23.278206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:23.278308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:23.311466Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:23.312688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28663, node 2 2025-04-10T11:07:23.378804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:23.378835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:23.378842Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:23.378996Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63722 TClient is connected to server localhost:63722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:24.011811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:24.038507Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:24.049984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:24.163448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:24.375372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:24.452581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.963253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639497368627689:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:26.963344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.037834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.085600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.115387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.162320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.210743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.334205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.446996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639501663595505:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.447079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.447415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639501663595511:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.450650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:27.467380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639501663595513:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:27.552117Z node 2 :TX_PROXY ERROR: Actor# [2:7491639501663595566:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:28.086263Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639484483724049:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:28.086408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TExportToS3Tests::RebootDuringCompletion
>> KqpNewEngine::MultiOutput [GOOD]
>> KqpNewEngine::MultiStatement
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.583321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.595201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.595301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.595432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.595636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.601884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.782025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.782132Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.813951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.849614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.849908Z
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.903658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.947404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.049697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.049822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.049918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.050203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.079451Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.269958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.270386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.270621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.270984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.271071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.298978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.299191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.299417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.299540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.299587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.299636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2025-04-10T11:07:21.311206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.311299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.311359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.315622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.315697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.315743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.315805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.319879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.327130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.346301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.347649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.347876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.347950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.358693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.358810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.359049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.359192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.362422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.362493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.362714Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.362784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.363098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.363153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.363275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.363319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.363364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.363418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.363459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.363501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.363537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.363570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.363639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.363685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.363723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.370352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.370568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.370640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-04-10T11:07:29.012104Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-10T11:07:29.012173Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:29.012332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:29.013162Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.013273Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.013307Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:29.013335Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-10T11:07:29.013382Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:29.014626Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.014697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.014726Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:29.014773Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-10T11:07:29.014807Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:07:29.014888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-10T11:07:29.017127Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:29.017276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-10T11:07:29.017323Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-10T11:07:29.017395Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-10T11:07:29.018176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-10T11:07:29.018304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:07:29.018550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-04-10T11:07:29.019506Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:29.019622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:29.019674Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-10T11:07:29.019803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-10T11:07:29.019876Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-10T11:07:29.019917Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:29.019981Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-10T11:07:29.020021Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:29.020094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:29.020167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:29.020204Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-10T11:07:29.020261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:29.020308Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-10T11:07:29.020345Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-10T11:07:29.020440Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:07:29.020480Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-10T11:07:29.020518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-10T11:07:29.020561Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-10T11:07:29.021182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-10T11:07:29.022702Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:29.022737Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:29.022884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:29.022981Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:29.023016Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-10T11:07:29.023049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 2025-04-10T11:07:29.023826Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.023921Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.023959Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:29.024009Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-10T11:07:29.024060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:29.024894Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.024970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.024999Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:29.025046Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:07:29.025076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:29.025153Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-10T11:07:29.025206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-10T11:07:29.027868Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.028271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:29.028359Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-10T11:07:29.028410Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-10T11:07:29.028455Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-10T11:07:29.028483Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-10T11:07:29.028514Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-10T11:07:29.030121Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:29.030232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:07:29.030280Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1120:2998] TestWaitNotification: OK eventTxId 103
>> KqpRanges::MergeRanges [GOOD]
>> KqpRanges::Like
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.584171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.584317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.584382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.584439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.599811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.599906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.600011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.600191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.600812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.782095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.782364Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.814167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.849474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.849848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.902972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.946040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.049682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.049812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.049917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.050200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.062194Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.229109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.238525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.263368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.271016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.271165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.288529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.288714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.288939Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.289045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.289090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.289142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.294256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.294351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.294398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.296696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.296777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.296823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.296882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.312257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.322722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.341953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.343592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.343830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.343959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.356341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.356467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.356714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.356892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.366164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.366228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.366419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.366478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.366778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.366832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.366957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.367014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.367063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.367103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.367139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.367184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.367223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.367259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.367341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.367389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.367427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.369893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.370056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.370103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944 2025-04-10T11:07:29.343457Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-10T11:07:29.343532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:07:29.343703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:29.344974Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.345074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.345108Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-10T11:07:29.345176Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:07:29.345234Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:29.346893Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.346993Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.347026Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-10T11:07:29.347058Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-04-10T11:07:29.347098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-10T11:07:29.347214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-10T11:07:29.349788Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:29.349966Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-10T11:07:29.350019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-10T11:07:29.350075Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-10T11:07:29.350646Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-10T11:07:29.350794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-04-10T11:07:29.351936Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:29.352048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:29.352109Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-10T11:07:29.352250Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-10T11:07:29.352342Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-10T11:07:29.352388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:07:29.352450Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-10T11:07:29.352494Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:07:29.352569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:29.352965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-10T11:07:29.353042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-04-10T11:07:29.353103Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:07:29.353150Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-10T11:07:29.353191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-10T11:07:29.353268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-10T11:07:29.353340Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-04-10T11:07:29.353420Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-04-10T11:07:29.353470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-04-10T11:07:29.355185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.355320Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.357115Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:29.357163Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:29.357300Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-04-10T11:07:29.357437Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:29.357492Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-10T11:07:29.357529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-10T11:07:29.358661Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.358774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.358821Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-10T11:07:29.358876Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-10T11:07:29.358926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:29.359445Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.359527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.359557Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-10T11:07:29.359590Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-10T11:07:29.359619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-10T11:07:29.359742Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-10T11:07:29.359827Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-10T11:07:29.364820Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.365270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-10T11:07:29.365394Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-10T11:07:29.365476Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-10T11:07:29.365541Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-10T11:07:29.365590Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-10T11:07:29.365624Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-04-10T11:07:29.367978Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:29.368092Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:07:29.368170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1381:3171] TestWaitNotification: OK eventTxId 105 >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg-useSink >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-04-10T11:05:23.646478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:05:23.646681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:05:23.646863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e73/r3tmp/tmpDCPSSs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7647, node 1 TClient is connected to server localhost:4128 2025-04-10T11:05:24.369319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:05:24.404714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:24.409569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:24.409629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:24.409670Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:24.410017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:05:24.447680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:24.447821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:24.460140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:36.315768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:36.315917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:757:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:36.316008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:36.324707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:05:36.351891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:05:36.413975Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:36.716603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T11:05:37.817001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:05:38.374124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-10T11:05:39.532114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:05:40.523137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:05:41.081942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:05:42.554285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:05:42.895772Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 SeqNoGeneration: 2 SeqNoRound: 1 ScanSettings { } row version v13000/281474976710758 2025-04-10T11:05:42.897115Z node 1 :TX_DATASHARD NOTICE: FinishTBuildIndexScan: datashard: 72075186224037889, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-04-10T11:05:42.900256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:05:47.765729Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfp0w9hap1w1x54a88qm6da", SessionId: ydb://session/3?node_id=1&id=MzU5MDViOWItYWZlNDVhYWMtY2UzYmFjNDMtN2FkNzcyNGY=, Slow query, duration: 11.452546s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n ", parameters: 0b REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:59.095090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-10T11:06:01.090494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:06:01.090583Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:06:01.450079Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-10T11:06:01.450178Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-10T11:06:01.450243Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-10T11:06:01.450332Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-10T11:06:01.450582Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-10T11:06:01.450854Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-10T11:06:01.450907Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-10T11:06:01.450968Z node 1 :TX_TIERING 
DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-10T11:06:01.451039Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:06:01.452580Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-10T11:06:01.454038Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-10T11:06:01.454737Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-10T11:06:01.454876Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-10T11:06:01.454966Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:06:01.455037Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025- ... :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:07:05.902435Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-10T11:07:05.902485Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-10T11:07:17.195633Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:17.195735Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:17.195775Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:17.195822Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:17.196575Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:17.196650Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-10T11:07:17.196716Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-10T11:07:17.196775Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-10T11:07:17.196827Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-10T11:07:17.196900Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-10T11:07:17.196987Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:17.197043Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:17.197073Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-10T11:07:17.197103Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-10T11:07:17.197134Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-04-10T11:07:17.197159Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-10T11:07:17.197192Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-10T11:07:17.197234Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:17.197271Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:17.197299Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-10T11:07:17.197326Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-10T11:07:17.197355Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-10T11:07:17.197398Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-10T11:07:17.197432Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-10T11:07:17.197472Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:17.197553Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:17.197581Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-10T11:07:17.197639Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:07:17.197672Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-10T11:07:17.197699Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:07:17.197734Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:07:17.197771Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:17.198892Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-10T11:07:17.199310Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-10T11:07:17.199354Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-10T11:07:17.199390Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-10T11:07:17.199426Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-10T11:07:17.199486Z node 
1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:07:17.199533Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-10T11:07:17.199574Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:17.199866Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3157:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:07:17.199977Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3166:4434];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-10T11:07:17.200126Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3173:4440];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-10T11:07:28.426213Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.426341Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.426389Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.426438Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.426616Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.427168Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-10T11:07:28.427652Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.427730Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:07:28.427802Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:07:28.427915Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.428081Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.428124Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-10T11:07:28.428167Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-10T11:07:28.428228Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.428272Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.428307Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-10T11:07:28.428342Z node 1 
:TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-10T11:07:28.428390Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.428571Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.428607Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-10T11:07:28.428643Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-10T11:07:28.428691Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.428841Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.428876Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:07:28.428912Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:07:28.428960Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.429232Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-10T11:07:28.429266Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-10T11:07:28.429300Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-10T11:07:28.429342Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-10T11:07:28.430320Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3157:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:07:28.430441Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3166:4434];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-10T11:07:28.430514Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3173:4440];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink >> TExportToS3Tests::AuditCancelledExport [GOOD] >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded >> KqpPg::TempTablesWithCache [GOOD] >> KqpPg::TableDeleteWhere+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10480, MsgBus: 15118 2025-04-10T11:07:25.515236Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639495231210194:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:25.518190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c5/r3tmp/tmpHbvpvV/pdisk_1.dat 2025-04-10T11:07:25.949182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.949451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.969334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.971401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10480, node 1 2025-04-10T11:07:26.098587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:26.098607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:26.098614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:26.098738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15118 TClient is connected to server localhost:15118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:26.722311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.748293Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:26.764709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.953780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:27.142256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:27.242503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:29.059023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639512411081138:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.059166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.408482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.444210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.484683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.518954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.570028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.611029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.699611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639512411081653:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.699695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.699899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639512411081658:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.704404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:29.715821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639512411081660:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:29.780645Z node 1 :TX_PROXY ERROR: Actor# [1:7491639512411081715:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:30.517855Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639495231210194:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:30.517924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::SchemaMapping >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:07:20.582943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.598333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.598460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.598590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.598769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.606011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.783991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.784068Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.819182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.819295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-10T11:07:20.819453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.851476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.851828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.903996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.950537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.054381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.054519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.054649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.054771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.054941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.055405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.063154Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.243918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.244237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.264086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.272489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.272626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.292008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.293078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.293341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.293495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.293555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.293599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.300501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.300580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.300618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.307411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.307484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.307549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.307618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.314988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.323111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.343144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.344627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.344831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.344907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.358852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.358961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.359188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.359309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.367806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-10T11:07:21.367897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.368080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.368134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.368455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.368508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.368619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.368656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.368731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.368829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.368868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.368898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.368969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.369007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.369059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.371739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.371907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.371965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
es: 0 S3Settings { Endpoint: "localhost:9058" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:31.808576Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.808726Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:07:31.809159Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:31.809221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.810639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:07:31.810708Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:07:31.812662Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:31.813042Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-04-10T11:07:31.813439Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-04-10T11:07:31.813527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-04-10T11:07:31.813910Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.813998Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-04-10T11:07:31.814068Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-04-10T11:07:31.814117Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-04-10T11:07:31.817010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-04-10T11:07:31.817116Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-04-10T11:07:31.817658Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.817719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.817980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-10T11:07:31.821422Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-04-10T11:07:31.821673Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.821724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.821865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-10T11:07:31.822705Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-04-10T11:07:31.822845Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-04-10T11:07:31.823799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-10T11:07:31.823979Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-04-10T11:07:31.833580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-10T11:07:31.833946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:07:31.834008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:562:2521] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2025-04-10T11:07:30.859750Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:07:30.929742Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:07:31.412363Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2025-04-10T11:07:31.419751Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:07:31.448379Z: component=schemeshard, 
tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:07:31.812898Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:07:31.823171Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-10T11:07:30.922511Z, end_time=2025-04-10T11:08:00.970511Z AUDIT LOG checked line: 2025-04-10T11:07:31.823171Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-10T11:07:30.922511Z, end_time=2025-04-10T11:08:00.970511Z >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] >> KqpNewEngine::PrimaryView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29019, MsgBus: 8160 2025-04-10T11:07:09.735286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639426504360019:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.735355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001708/r3tmp/tmp2B1GcW/pdisk_1.dat 2025-04-10T11:07:10.875650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.890554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.890649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.902359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.936804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 29019, node 1 2025-04-10T11:07:12.282644Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.282677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.282690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.282864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8160 2025-04-10T11:07:14.736936Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639426504360019:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.737048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.958843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.084908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.404151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.555750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.639893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.900804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639452274165479:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.900927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.883204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.923061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.993788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.028769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.065017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.148519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.338540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465159068002:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.338648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.338861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639465159068007:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.356278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.375761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:07:18.376207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639465159068009:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.461631Z node 1 :TX_PROXY ERROR: Actor# [1:7491639465159068066:3478] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:20.731935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.770055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.815779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7813, MsgBus: 26415 2025-04-10T11:07:25.047435Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639496098893015:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:25.047483Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001708/r3tmp/tmpsJcixm/pdisk_1.dat 2025-04-10T11:07:25.179192Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.211979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.212077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.214836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7813, node 2 2025-04-10T11:07:25.298573Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:25.298612Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:25.298622Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:25.298775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26415 TClient is connected to server localhost:26415 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:25.859141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:25.874486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.946839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.160455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.262222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.740145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639508983796660:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.740253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.805298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.844049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.882987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.921473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.962550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.040281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.103471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639513278764473:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.103595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.103747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639513278764478:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.107348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:29.116961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639513278764480:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:29.187948Z node 2 :TX_PROXY ERROR: Actor# [2:7491639513278764535:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:30.047949Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639496098893015:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:30.048029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:30.191441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.281399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.324797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ReadRangeWithParams >> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31677, MsgBus: 3158 2025-04-10T11:07:09.738623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639425003105995:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.738677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fe/r3tmp/tmpvL6tiZ/pdisk_1.dat 2025-04-10T11:07:10.844496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.874338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.903157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.903830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.907628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31677, node 1 2025-04-10T11:07:12.252775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.252801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.252806Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.252933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3158 2025-04-10T11:07:14.742277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639425003105995:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.742367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.951406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.084269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting...2025-04-10T11:07:15.406749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:07:15.599526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.682466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.978959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639450772911465:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.979077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.881296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.923273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.968730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.043819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.089311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.126278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.336633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639463657813980:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.336725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.336940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639463657813987:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.361601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.373716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639463657813989:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.458742Z node 1 :TX_PROXY ERROR: Actor# [1:7491639463657814045:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:20.729808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.808988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.859925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64686, MsgBus: 12212 2025-04-10T11:07:24.977156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639489227713492:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:24.977265Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fe/r3tmp/tmpTok82o/pdisk_1.dat 2025-04-10T11:07:25.140415Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.157412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.157543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.159133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64686, node 2 2025-04-10T11:07:25.230763Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:25.230791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:25.230798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:25.230943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12212 TClient is connected to server localhost:12212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:25.773453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.792552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.875822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.101261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.214039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.853661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639506407584443:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.853764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.919867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.996798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.067547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.140221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.213524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.269837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.330424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639510702552265:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.330515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.332157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639510702552270:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.339152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:29.358018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639510702552272:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:29.424051Z node 2 :TX_PROXY ERROR: Actor# [2:7491639510702552325:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:29.977199Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639489227713492:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:29.977274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:30.628717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.707131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.776177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16283, MsgBus: 3351 2025-04-10T11:07:09.768235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639427748056398:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.769243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e0/r3tmp/tmpvzjbIg/pdisk_1.dat 2025-04-10T11:07:10.866596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.870352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.963137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.963252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.966880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16283, node 1 2025-04-10T11:07:12.251896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.251920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.251944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.252176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3351 
2025-04-10T11:07:14.767507Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639427748056398:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.767599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.953073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.082640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.425058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.582673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.648284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.928844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639453517861852:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.928956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.882583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:17.939632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.011261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.049626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.079521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.152383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.336196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639466402764368:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.336287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.336627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639466402764373:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.351927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.369112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639466402764375:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:18.458937Z node 1 :TX_PROXY ERROR: Actor# [1:7491639466402764432:3474] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:20.729696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.842012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:20.875756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1358, MsgBus: 6602 2025-04-10T11:07:25.201494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639492838754572:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:25.201547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e0/r3tmp/tmpVguLPg/pdisk_1.dat 2025-04-10T11:07:25.370653Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.380277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.380378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.383414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1358, node 2 2025-04-10T11:07:25.478605Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:25.478635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:25.478644Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:25.478774Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6602 TClient is connected to server localhost:6602 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:26.044465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.053590Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:26.064293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:26.156320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.395039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.530970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.869024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639505723658212:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.869105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:28.916542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.954667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.029465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.102048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.179208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.245947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.306088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639510018626028:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.306215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.306436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639510018626033:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:29.310286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:29.336091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639510018626035:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:29.428403Z node 2 :TX_PROXY ERROR: Actor# [2:7491639510018626092:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:30.202275Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639492838754572:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:30.202349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:30.440298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.500404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.553571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> VDiskBalancing::TestStopOneNode_Mirror3dc >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6901, MsgBus: 6287 2025-04-10T11:07:17.313998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639462014832457:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:17.314033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016cc/r3tmp/tmpSpBjh7/pdisk_1.dat 2025-04-10T11:07:17.873044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:17.890963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:17.891081Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:17.893826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6901, node 1 2025-04-10T11:07:18.026816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:18.026853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:18.026867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:18.027019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6287 TClient is connected to server localhost:6287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:18.686341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:18.706791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:18.721141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:18.909717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.112888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.199934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:21.145039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639479194703416:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.145185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.645932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.717447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.750231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.798366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.831051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.864203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.910409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639479194703929:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.910477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.910512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639479194703934:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.914001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:21.924059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639479194703936:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:22.010744Z node 1 :TX_PROXY ERROR: Actor# [1:7491639479194703989:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:22.316081Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639462014832457:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:22.316186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:23.320909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:23.404112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:23.529418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2718, MsgBus: 15093 2025-04-10T11:07:26.880333Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639499156378479:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:26.880385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016cc/r3tmp/tmpDNL2WI/pdisk_1.dat 2025-04-10T11:07:27.050177Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:27.085463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:27.085564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:27.087423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2718, node 2 2025-04-10T11:07:27.162675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:27.162697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:27.162716Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:27.162880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15093 TClient is connected to server localhost:15093 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:27.717489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:27.738094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:27.825038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.048329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.136406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.610953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639516336249424:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.611028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.679771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.758546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.805970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.882143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.962557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.018931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.075555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639520631217243:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.075641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.075874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639520631217248:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.079307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:31.100757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639520631217250:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:31.155899Z node 2 :TX_PROXY ERROR: Actor# [2:7491639520631217304:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:31.880575Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491639499156378479:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:31.880667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:32.259084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:32.335983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:32.425480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::StaleRO >> TQueueBackpressureTest::PerfTrivial >> TBlobStorageQueueTest::TMessageLost [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |82.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> HullReplWriteSst::Basic |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TActorTest::TestWaitForFirstEvent >> TActorTest::TestCreateChildActor >> TActorTest::TestScheduleEvent |83.0%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TActorTest::TestStateSwitch >> KqpNewEngine::PrecomputeKey [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex >> TActorTest::TestCreateChildActor [GOOD] >> TActorTest::TestBlockEvents >> TActorTest::TestScheduleEvent [GOOD] >> TActorTest::TestScheduleReaction >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> TActorTest::TestStateSwitch [GOOD] >> TActorTest::TestBlockEvents [GOOD] >> TActorTest::TestScheduleReaction [GOOD] >> TActorTest::TestWaitForFirstEvent [GOOD] >> TQueueBackpressureTest::PerfTrivial [GOOD] >> KqpRanges::NullInKey |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> KqpRanges::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... 
unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... waiting for processed 3 more events (done) |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... 
waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestStateSwitch [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:31.604927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:31.605069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:31.605127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:31.605171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:31.605228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:31.605258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:31.605331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:31.605432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:31.605787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:31.700994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:31.701088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:31.727712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:31.728246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:31.728652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:31.739319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:31.739720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:31.741107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:31.741662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-04-10T11:07:31.744424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:31.746732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:31.746828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:31.747155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:31.747258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:31.747355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:31.747589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.757213Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:31.927498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:31.927813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.928035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:31.928332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:31.928435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.931368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:31.931624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:31.931867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.931977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:31.932024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:31.932071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:31.934715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.934784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:31.934836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-04-10T11:07:31.937124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.937192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:31.937242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:31.937311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:31.941491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:31.944042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:31.944256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:31.945536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:31.945758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:31.945831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:31.946193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:31.946264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:31.946495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:31.946598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:31.949264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:31.949313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:31.949592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:31.949651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:31.949960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:07:31.950025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:31.950135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:31.950191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:31.950234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:31.950266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:31.950306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:31.950351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:31.950388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:31.950423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:31.950528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:31.950586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:31.950623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:31.953268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:31.953408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:31.953450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
36Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-10T11:07:39.126899Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:07:39.126966Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:39.128450Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.128555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.128591Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-10T11:07:39.128627Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:07:39.128665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:07:39.128752Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-10T11:07:39.130770Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:39.131031Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-10T11:07:39.131088Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-04-10T11:07:39.131178Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-04-10T11:07:39.132644Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-04-10T11:07:39.132831Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:07:39.133036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-04-10T11:07:39.133638Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:39.133815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:39.133883Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:07:39.134031Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-04-10T11:07:39.134126Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-10T11:07:39.134206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-10T11:07:39.134273Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-04-10T11:07:39.134334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-10T11:07:39.134435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:39.134522Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:07:39.134580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-04-10T11:07:39.134657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-04-10T11:07:39.134714Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-04-10T11:07:39.134755Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-04-10T11:07:39.134842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:07:39.134896Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-04-10T11:07:39.134946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:07:39.134997Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-10T11:07:39.136790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.138518Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:39.138564Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:39.138748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:39.138930Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:39.138971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-04-10T11:07:39.139020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-04-10T11:07:39.139934Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.140034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.140073Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-10T11:07:39.140130Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:07:39.140184Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:39.141141Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.141234Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.141271Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-04-10T11:07:39.141317Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:07:39.141359Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:07:39.141481Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-04-10T11:07:39.141542Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-04-10T11:07:39.141939Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:07:39.142000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:07:39.142123Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:39.144409Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.145923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-04-10T11:07:39.146042Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-04-10T11:07:39.146126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-04-10T11:07:39.146229Z node 4 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-04-10T11:07:39.146294Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-04-10T11:07:39.146352Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-04-10T11:07:39.146833Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:07:39.149576Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-04-10T11:07:39.149936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:07:39.150015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:07:39.150719Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:07:39.150856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:07:39.150923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:546:2505] TestWaitNotification: OK eventTxId 103
>> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD]
>> KqpNewEngine::AutoChooseIndexOrderByLimit
>> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService
>> TActorTest::TestDie [GOOD]
>> TActorTest::TestFilteredGrab
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:07:20.582549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.582726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.582787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.582847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.594346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.594476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.594619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.594747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-04-10T11:07:20.600815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.784747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.784818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.813987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.851512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.851747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.894256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.905257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.948485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.053766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.053912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.054057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.054122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.054202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.054512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.083661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.384413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.384694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.384995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.385412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.385521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.389555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:07:21.389798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.389989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.390069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.390107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.390136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.392419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.392491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.392541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.400568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.400653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.400723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.400801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.432026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.439596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.439942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.441143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.441414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.441484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.441838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.441896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.442101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.447840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.460012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.460084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.460354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.460418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.460865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.460953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.461092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.461137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.461194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.461234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.461295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.461389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.461459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.461600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.461704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.461776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.461826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.463867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.464017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.464058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 2025-04-10T11:07:39.578893Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-10T11:07:39.578969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:39.579131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:39.580001Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.580112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.580146Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:39.580180Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-10T11:07:39.580228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:39.582026Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.582138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.582204Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:39.582234Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-10T11:07:39.582283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:39.582388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-10T11:07:39.583894Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:39.584000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-10T11:07:39.584034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-10T11:07:39.584069Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-10T11:07:39.585309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-10T11:07:39.585432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-04-10T11:07:39.585888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.586310Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:39.586419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904044 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:39.586468Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-04-10T11:07:39.586595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-10T11:07:39.586687Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-10T11:07:39.586723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:39.586781Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-10T11:07:39.586820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:39.586876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:07:39.586959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:07:39.587006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-10T11:07:39.587067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-10T11:07:39.587108Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-10T11:07:39.587146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-10T11:07:39.587218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:07:39.587256Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-10T11:07:39.587297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-10T11:07:39.587327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-10T11:07:39.588468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.590160Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:39.590228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:39.590422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:07:39.590525Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:39.590565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-10T11:07:39.590601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:208:2210], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-10T11:07:39.591360Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.591455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.591494Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:39.591540Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-10T11:07:39.591586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:07:39.591990Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.592070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.592120Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-10T11:07:39.592160Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:07:39.592194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:07:39.592261Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-10T11:07:39.592307Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:134:2157] 2025-04-10T11:07:39.595230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.595661Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-10T11:07:39.595748Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-10T11:07:39.595800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-10T11:07:39.595864Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-10T11:07:39.595897Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-10T11:07:39.595931Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-10T11:07:39.597567Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:07:39.597680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:07:39.597727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:776:2711] TestWaitNotification: OK eventTxId 103
|83.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrecomputeKey [GOOD]
Test command err:
Trying to start YDB, gRPC: 62910, MsgBus: 61042
2025-04-10T11:06:43.789144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639313415857177:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:43.789212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088b/r3tmp/tmpatLu5V/pdisk_1.dat 2025-04-10T11:06:44.206942Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62910, node 1 2025-04-10T11:06:44.234570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.234689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.240155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:06:44.291071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.291109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.291121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.291297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61042 TClient is connected to server localhost:61042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:44.941969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:44.980604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.201700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.418474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.516986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.411622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639330595728130:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:47.411783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:47.879524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.919848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:47.959828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.043036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.099742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.157696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.240654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639334890695948:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.240732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.240816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639334890695953:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.244415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:48.259478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639334890695955:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:48.317155Z node 1 :TX_PROXY ERROR: Actor# [1:7491639334890696009:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:48.792915Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639313415857177:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:48.792995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27344, MsgBus: 61724 2025-04-10T11:06:50.727284Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639342364761129:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:50.727549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088b/r3tmp/tmpT4wk4L/pdisk_1.dat 2025-04-10T11:06:50.904176Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:50.905100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:50.905201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:50.919450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27344, node 2 2025-04-10T11:06:50.998804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:50.998829Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:50.998836Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:50.998956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61724 TClient is connected to server localhost:61724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:51.569147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.582747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.686011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:51.890516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:52.006914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:54.575411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639501381456418:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.334634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.407285Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.459998Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639479906618143:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:27.460091Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:27.500968Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.554454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.604418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.651180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.707074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.765427Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639501381456936:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.765537Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.765624Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639501381456942:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.771104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:27.785998Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639501381456944:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:27.841378Z node 6 :TX_PROXY ERROR: Actor# [6:7491639501381456998:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29720, MsgBus: 23524 2025-04-10T11:07:30.703618Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639516218896519:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:30.703737Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088b/r3tmp/tmplxal8j/pdisk_1.dat 2025-04-10T11:07:31.017667Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:31.051764Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:31.051887Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:31.053853Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29720, node 7 2025-04-10T11:07:31.156720Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:31.156749Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:31.156761Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:31.156912Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23524 TClient is connected to server localhost:23524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:32.061023Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.075085Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:32.159706Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.445464Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.604932Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:35.710277Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639516218896519:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:35.710371Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:35.983739Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639537693734763:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.983864Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.054081Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.103443Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.152222Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.200922Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.247184Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.324403Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.401534Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639541988702576:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.401618Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.401710Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639541988702581:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.413124Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:36.433232Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639541988702583:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:36.520693Z node 7 :TX_PROXY ERROR: Actor# [7:7491639541988702639:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TActorTest::TestFilteredGrab [GOOD]
>> KqpSqlIn::CantRewrite [GOOD]
>> KqpSqlIn::ComplexKey
>> KqpSort::ReverseOptimizedWithPredicate [GOOD]
>> KqpSort::ReverseMixedOrderNotOptimized
|83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD]
Test command err:
Trying to start YDB, gRPC: 18082, MsgBus: 16165
2025-04-10T11:06:44.180247Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639318046455409:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:44.180343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000876/r3tmp/tmpeqZVyF/pdisk_1.dat 2025-04-10T11:06:44.686643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:44.687521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:44.687616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:44.694880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18082, node 1 2025-04-10T11:06:44.950758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:44.950787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:44.950795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:44.950911Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16165 TClient is connected to server localhost:16165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-10T11:06:45.845316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:45.899015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:45.924914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.157965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.461043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:46.581518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:48.429058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639335226326371:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.429161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:48.791444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.831585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:48.927051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.005876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.058813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.148055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:49.185500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639318046455409:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:49.185597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:49.265784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639339521294191:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.265874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.266520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639339521294196:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.271951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:49.293771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639339521294199:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:49.386390Z node 1 :TX_PROXY ERROR: Actor# [1:7491639339521294254:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:50.492053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.760369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.979668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:06:51.135563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:06:51.468877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18219, MsgBus: 22611 2025-04-10T11:06:52.878576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639353978656296:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:52.898981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000876/r3tmp/tmpgewExP/pdisk_1.dat 2025-04-10T11:06:53.040626Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:53.071939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:53.072064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:53.073889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18219, node 2 2025-04-10T11:06:53.159556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:53.159582Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:53.159592Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:53.159721Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22611 TClient is connected to server localhost:22611 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRo ... operation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.080720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.134703Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.184083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.238295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.303564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:27.305887Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639481411888649:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:27.306558Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:27.410341Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639502886727442:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.410480Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.411199Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639502886727447:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:27.417756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:27.433152Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639502886727449:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:27.537008Z node 5 :TX_PROXY ERROR: Actor# [5:7491639502886727506:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:29.005010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.773325Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283249807, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 28413, MsgBus: 28445 2025-04-10T11:07:31.435418Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639518511662301:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:31.435479Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000876/r3tmp/tmp89wRGP/pdisk_1.dat 2025-04-10T11:07:31.693722Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:31.733441Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:31.733575Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:31.737173Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28413, node 6 2025-04-10T11:07:31.848289Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:31.848318Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:31.848330Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:31.848505Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28445 TClient is connected to server localhost:28445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:07:32.762961Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.789140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.877995Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.136371Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.242126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:36.436016Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639518511662301:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:36.436138Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:36.978325Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639539986500570:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.978448Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:37.121898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.170406Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.212461Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.298067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.375949Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.425869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:37.485917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639544281468386:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:37.486011Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:37.486461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639544281468391:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:37.491891Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:37.510737Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639544281468393:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:37.586590Z node 6 :TX_PROXY ERROR: Actor# [6:7491639544281468447:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:38.959522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |83.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ScalarFunctions >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 |83.1%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 9510500465022068161 2025-04-10T11:07:37.795586Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.128329s 2025-04-10T11:07:37.795721Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.128504s SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-10T11:07:40.449426Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::JoinPure >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiEffects |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 25799, MsgBus: 62536 2025-04-10T11:06:58.925124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639378767280919:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:58.925769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000820/r3tmp/tmpe5Ji5L/pdisk_1.dat 2025-04-10T11:06:59.398953Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25799, node 1 2025-04-10T11:06:59.409732Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:06:59.421244Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:06:59.423140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:59.423601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:59.427130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:06:59.552263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:59.552285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:59.552296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:59.552428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62536 TClient is connected to server localhost:62536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:00.102434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:02.281414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639395947150766:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.281551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.613965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:02.778989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639395947150869:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.779091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.779523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639395947150874:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.783210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:02.793055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639395947150876:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:07:02.862721Z node 1 :TX_PROXY ERROR: Actor# [1:7491639395947150927:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:03.020531Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639400242118269:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-10T11:07:03.020792Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2I0NmQ2Yi1hNmZhZGM0OC1hOTBjZDg2Zi1mZWQyNWVhNg==, ActorId: [1:7491639395947150748:2329], ActorState: ExecuteState, TraceId: 01jrfp3gyv4m9qaw1819hbmvgg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:07:03.070805Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639400242118279:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:07:03.071189Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2I0NmQ2Yi1hNmZhZGM0OC1hOTBjZDg2Zi1mZWQyNWVhNg==, ActorId: [1:7491639395947150748:2329], ActorState: ExecuteState, TraceId: 01jrfp3h0edsdn22wxv6axzs83, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 7112, MsgBus: 19948 2025-04-10T11:07:04.080346Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639404061068956:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:04.080482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000820/r3tmp/tmpVX4o9T/pdisk_1.dat 2025-04-10T11:07:04.271702Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:04.279267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:04.279379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:04.281920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7112, node 2 2025-04-10T11:07:04.462049Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:04.462072Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:04.462079Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:04.462211Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19948 TClient is connected to server localhost:19948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:07:05.239737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:08.575315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639421240938802:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:08.575418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:08.594034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:08.674174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639421240938903:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:08.674269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:08.674563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639421240938908:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:08.678822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... de 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.436319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.516495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.552503Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639499448981536:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:31.552584Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:31.564354Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.641610Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639520923820337:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.641740Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.642725Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639520923820342:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.648014Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:31.660593Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639520923820344:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:31.715794Z node 5 :TX_PROXY ERROR: Actor# [5:7491639520923820397:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:32.867928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16659, MsgBus: 16780 2025-04-10T11:07:35.130914Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639535946353452:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:35.131020Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000820/r3tmp/tmpVKOgTy/pdisk_1.dat 2025-04-10T11:07:35.274277Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:35.297268Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:35.297396Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:35.302317Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16659, node 6 2025-04-10T11:07:35.362297Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:35.362329Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:35.362346Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:35.362562Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16780 TClient is connected to server localhost:16780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:36.057451Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:36.086125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:36.172847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:36.343023Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.428464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:39.979784Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639553126224406:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:39.979912Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.039187Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.085629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.128583Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.132197Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639535946353452:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:40.132315Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:40.173157Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.216812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.256369Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.322038Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639557421192217:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.322140Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.322301Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639557421192222:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.326440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:40.337071Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639557421192224:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:40.407179Z node 6 :TX_PROXY ERROR: Actor# [6:7491639557421192277:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:41.599734Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:42.148115Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283262183, txId: 281474976715673] shutting down 2025-04-10T11:07:42.366576Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283262400, txId: 281474976715675] shutting down 2025-04-10T11:07:42.524888Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283262561, txId: 281474976715677] shutting down >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::SqlInFromCompact |83.1%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |83.1%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.1%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> KqpNewEngine::PrimaryView [GOOD] >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> TVersions::Wreck0Reverse [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 15422, MsgBus: 28311 2025-04-10T11:06:45.566212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639321205102226:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:45.574744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00084f/r3tmp/tmpAVnfnj/pdisk_1.dat 2025-04-10T11:06:46.118853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:46.133026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:46.133152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:46.140956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15422, node 1 2025-04-10T11:06:46.408823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:46.408845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:46.408852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:46.408987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28311 TClient is connected to server localhost:28311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:47.158790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.199196Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:47.212215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.402907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.602025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:47.693417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:49.670389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639338384973048:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:49.670538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.044035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.080674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.152478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.207996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.248421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.326201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:50.387608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639342679940860:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.387766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.388147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639342679940865:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:50.392358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:50.404567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639342679940867:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:50.463418Z node 1 :TX_PROXY ERROR: Actor# [1:7491639342679940920:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:50.559508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639321205102226:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:50.559600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30538, MsgBus: 12708 2025-04-10T11:06:52.936199Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639351820552598:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:52.936263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00084f/r3tmp/tmpPQI9ef/pdisk_1.dat 2025-04-10T11:06:53.067992Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30538, node 2 2025-04-10T11:06:53.104257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:53.104348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:53.107333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:06:53.218769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:53.218796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:53.218804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:53.219145Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12708 TClient is connected to server localhost:12708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:53.720397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:53.743170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:53.835786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:54.051549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:54.132182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... pool default not found or you don't have access permissions } 2025-04-10T11:07:31.091347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.149187Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.207739Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.263977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.319319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.442003Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:31.534137Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639521971813460:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.534268Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.534359Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639521971813465:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:31.539127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:31.559860Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639521971813467:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:31.632481Z node 6 :TX_PROXY ERROR: Actor# [6:7491639521971813521:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21737, MsgBus: 63138 2025-04-10T11:07:35.489065Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639537297664312:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:35.489140Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00084f/r3tmp/tmp8RQI8g/pdisk_1.dat 2025-04-10T11:07:35.703644Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:35.749109Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:35.749247Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:35.751567Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21737, node 7 2025-04-10T11:07:35.848736Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:35.848761Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:35.848774Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:35.848931Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63138 TClient is connected to server localhost:63138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:36.619757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:07:36.630620Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:36.641061Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.748694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.048419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.166570Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:40.459963Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639558772502554:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.460101Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:40.489276Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639537297664312:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:40.489386Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:40.544455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.596776Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.657458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.710138Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.808511Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:40.884959Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:41.032268Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639563067470374:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:41.032381Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:41.032578Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639563067470379:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:41.040255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:41.057621Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639563067470381:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:41.124536Z node 7 :TX_PROXY ERROR: Actor# [7:7491639563067470437:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:42.458173Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:42.524170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:42.592851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseRangeOptimized >> TPartBtreeIndexIteration::NoNodes_History >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.28982 00000.000 II| FAKE_ENV: Born at 2025-04-10T11:03:34.519843Z 00000.013 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.014 II| FAKE_ENV: Starting storage for BS group 0 00000.015 II| FAKE_ENV: Starting storage for BS group 1 00000.015 II| FAKE_ENV: Starting storage for BS group 2 00000.015 II| FAKE_ENV: Starting storage for BS group 3 00000.021 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.021 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.024 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.024 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.024 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 
00000.050 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.056 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.056 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.056 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.056 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.057 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.057 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.058 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.058 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.058 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... 
CHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 5 ] owner [12:659:2684] 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.538 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [4 4] 00000.538 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.538 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.539 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 4 ] 00000.539 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.539 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.539 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.539 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 4 ] 00000.539 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 4 ] owner [12:659:2684] 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.540 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [3 4] 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.540 TT| TABLET_SAUSAGECACHE: Request page 
collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 3 ] 00000.540 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 3 ] 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.540 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.540 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 3 ] 00000.540 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 3 ] owner [12:659:2684] 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.541 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [2 4] 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.541 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.541 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 2 ] 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.541 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.542 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 2 ] 00000.542 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 2 ] owner [12:659:2684] 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.542 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [1 4] 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.542 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.542 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 1 ] 00000.542 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.543 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 1 100 ] 00000.543 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 1 ] owner [12:659:2684] 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.543 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [0 4] 00000.543 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.543 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.543 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 0 ] 00000.544 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.544 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 
0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.544 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.544 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 0 100 ] 00000.544 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 0 ] owner [12:659:2684] Counters: Active:0/0, Passive:2772, MemLimit:-1 00000.544 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.545 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 10249619b +(0, 0b), 1 trc, -48685b acc} 00000.546 DD| TABLET_SAUSAGECACHE: Unregister owner [12:659:2684] 00000.547 NN| TABLET_SAUSAGECACHE: Poison cache serviced 201 reqs hit {0 0b} miss {202 20491164b} 00000.547 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.547 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.547 II| FAKE_ENV: DS.1 gone, left {10250914b, 5}, put {10299737b, 107} 00000.553 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.553 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.553 II| FAKE_ENV: All BS storage groups are stopped 00000.553 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.554 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 3357}, stopped >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] Test command err: Trying to start YDB, gRPC: 2715, MsgBus: 7353 2025-04-10T11:06:53.876244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639357001200716:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:53.876302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000843/r3tmp/tmppJ7sG6/pdisk_1.dat 2025-04-10T11:06:54.359021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:54.408177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:54.408314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:54.410579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2715, node 1 2025-04-10T11:06:54.486941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:54.486971Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-10T11:06:54.486984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:54.487174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7353 TClient is connected to server localhost:7353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:55.118851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:55.146779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:55.170300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:55.323872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:55.502142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:55.564214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:57.252701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639374181071459:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.252786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.576985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.604713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.633588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.665491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.702184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.736003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:57.789004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639374181071968:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.789120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.789211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639374181071973:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:57.793710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:57.805521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639374181071975:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:57.888452Z node 1 :TX_PROXY ERROR: Actor# [1:7491639374181072031:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:58.877083Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639357001200716:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:58.878530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25065, MsgBus: 10626 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000843/r3tmp/tmp6Ycsh9/pdisk_1.dat 2025-04-10T11:07:00.286465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:00.396562Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:00.442473Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:00.442579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:00.445745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25065, node 2 2025-04-10T11:07:00.509068Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:00.509103Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:00.509115Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:00.509242Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10626 TClient is connected to server localhost:10626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:07:01.032081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.043620Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:01.060767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.151895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.373284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.452592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard ... _base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:38.194313Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639551329239378:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:38.194432Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:38.261191Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.303696Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.339650Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.385259Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.431088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.478269Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:38.576802Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639551329239889:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:38.576874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:38.576928Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639551329239894:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:38.581817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:38.597484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639551329239896:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:38.688954Z node 6 :TX_PROXY ERROR: Actor# [6:7491639551329239954:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19327, MsgBus: 6590 2025-04-10T11:07:41.668516Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639563001951785:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:41.668947Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000843/r3tmp/tmpfXRYuj/pdisk_1.dat 2025-04-10T11:07:41.833687Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:41.873347Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:41.873489Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:41.875285Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19327, node 7 2025-04-10T11:07:41.923385Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:41.923419Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:41.923430Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:41.923625Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6590 TClient is connected to server localhost:6590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:42.645443Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:42.663534Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:42.743688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:42.930554Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:43.017105Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:46.149122Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639584476790043:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.149280Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.204817Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.243764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.280764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.320595Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.358394Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.430182Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.500918Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639584476790557:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.501017Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.501022Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639584476790562:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.505554Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:46.520652Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639584476790564:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:46.586646Z node 7 :TX_PROXY ERROR: Actor# [7:7491639584476790617:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:46.674429Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639563001951785:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:46.674495Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:47.956744Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> TBackupCollectionTests::CreateAbsolutePath >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD] >> KqpReturning::ReturningTypes >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 2337791226696083865 >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups |83.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ColumnShardTiers::TieringUsage [GOOD] >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 16535802054009692109 2025-04-10T11:02:21.256366Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:02:21.284498Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:02:21.284596Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:02:21.287927Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-10T11:02:21.304755Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:02:21.308275Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:02:28.185850Z 8 00h01m01.399777s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 295 2025-04-10T11:03:59.288833Z 3 00h01m05.443111s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 6759 2025-04-10T11:07:31.466512Z 2 00h01m09.620165s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 30583 2025-04-10T11:07:49.193480Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:07:49.193634Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:07:49.193714Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:07:49.193764Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:07:49.433325Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-10T11:07:49.433455Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} |83.1%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |83.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-04-10T11:05:15.772848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:05:15.773066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:05:15.773253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f30/r3tmp/tmpIM3V7u/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26440, node 1 TClient is connected to server localhost:11367 2025-04-10T11:05:16.441477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:05:16.489966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:05:16.503204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:05:16.503292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:05:16.503330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:05:16.503649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:05:16.540140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:05:16.540277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:05:16.552072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:26.945325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:684:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:26.945540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:27.067027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-10T11:05:27.428232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:826:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:27.428318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:27.428557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:831:2671], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:05:27.431946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:05:27.565137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:833:2673], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:05:27.846072Z node 1 :TX_PROXY ERROR: Actor# [1:928:2739] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:05:28.491363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:05:29.024404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-10T11:05:29.809294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:05:30.495199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:05:30.887688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:05:31.905778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:05:32.245553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:05:49.534951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:06:00.971582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715709:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-10T11:06:04.207430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715726:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715726 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-04-10T11:06:04.633826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2931:4243];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:06:04.662051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2931:4243];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:06:04.662604Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-04-10T11:06:04.673396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:06:04.673727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:06:04.674116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:06:04.674451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:06:04.674655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:06:04.674819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:06:04.674981Z node ... 
Cleanup;portions_count=0;portions_prepared=2;drop=0;skip=0;portions_counter=2;chunks=18;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:07:53.702901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=c7d2f48-15fc11f0-b96ccdca-c78d4781;tablet_id=72075186224037893;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::cac020a-15fc11f0-963f2e28-bb8d476d; 2025-04-10T11:07:53.703028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=c7d2f48-15fc11f0-b96ccdca-c78d4781;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:07:53.703173Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=c7d2f48-15fc11f0-b96ccdca-c78d4781;tablet_id=72075186224037893;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-10T11:07:53.703239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=c7d2f48-15fc11f0-b96ccdca-c78d4781;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:07:53.705972Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:16 Blob count: 1 2025-04-10T11:07:53.706236Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=19;external_task_id=c7d2f48-15fc11f0-b96ccdca-c78d4781;mem=5382;cpu=0; 2025-04-10T11:07:53.706463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:07:53.706594Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:17 Blob count: 1 2025-04-10T11:07:53.707389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=cac020a-15fc11f0-963f2e28-bb8d476d;type=CS::TTL;priority=0;; 2025-04-10T11:07:53.709130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19999998864); 2025-04-10T11:07:53.709263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=optimizer.h:894;stop_instant=NO_VALUE_OPTIONAL;size=2656;next=;count=2;info={bytes=1136;count=1;records=1};event=start_optimization;stop_point=;main_portion=19; 2025-04-10T11:07:53.709574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:10;event=lock;process_id=CS::GENERAL::cad057e-15fc11f0-8e0f30c9-550f701f; 2025-04-10T11:07:53.709756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5382;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f;type=CS::GENERAL;priority=0;; 2025-04-10T11:07:53.710748Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=20;task=cpu=0;mem=504;external_task_id=cac020a-15fc11f0-963f2e28-bb8d476d;type=CS::TTL;priority=0;; 2025-04-10T11:07:53.710829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=cac020a-15fc11f0-963f2e28-bb8d476d;mem=504;cpu=0; 2025-04-10T11:07:53.710890Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=cac020a-15fc11f0-963f2e28-bb8d476d;task_id=20;mem=504;cpu=0; 2025-04-10T11:07:53.711163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=21;task=cpu=0;mem=5382;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f;type=CS::GENERAL;priority=0;; 2025-04-10T11:07:53.711201Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f;mem=5382;cpu=0; 2025-04-10T11:07:53.711240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f;task_id=21;mem=5382;cpu=0; 2025-04-10T11:07:53.711318Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=manager.cpp:82;event=ask_data;request=request_id=41;16={portions_count=2};; 2025-04-10T11:07:53.711538Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:1035;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 2 portions(portion_id:18;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1735645370000;tx_id=18446744073709551615;);)(portion_id:17;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1735645370000;tx_id=18446744073709551615;);));; 2025-04-10T11:07:53.711783Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-04-10T11:07:53.712011Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[45] (CS::CLEANUP::PORTIONS) apply at tablet 72075186224037893 2025-04-10T11:07:53.712789Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4120;raw_bytes=59809;count=2;records=53} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109952;raw_bytes=3578032;count=2;records=2983} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2025-04-10T11:07:53.713104Z node 1 :TX_COLUMNSHARD INFO: 
self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=manager.cpp:82;event=ask_data;request=request_id=42;16={portions_count=2};; 2025-04-10T11:07:53.713333Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:881;event=compaction;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f; 2025-04-10T11:07:53.713464Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:620;event=start_changes;type=CS::GENERAL;task_id=cad057e-15fc11f0-8e0f30c9-550f701f; 2025-04-10T11:07:53.713737Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-10T11:07:53.715237Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=cad057e-15fc11f0-8e0f30c9-550f701f; 2025-04-10T11:07:53.721741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent_id=[1:2941:4246];fline=general_compaction.cpp:133;event=blobs_created_diff;appended=0;;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:264];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:264:256];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:520:232];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:752:192];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:944:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;;;switched=(portion_id:20;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;)););(portion_id:19;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-10T11:07:53.721860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;parent_id=[1:2941:4246];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:07:53.722195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-10T11:07:53.722679Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-04-10T11:07:53.723026Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[47] (CS::GENERAL) apply at tablet 72075186224037893 2025-04-10T11:07:53.725000Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:18 Blob count: 1 2025-04-10T11:07:53.725221Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4120;raw_bytes=59809;count=2;records=53} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109952;raw_bytes=3578032;count=2;records=2983} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> PgCatalog::PgTables [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 9137, MsgBus: 19894 2025-04-10T11:07:27.160553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639502665592386:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:27.160656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/00073e/r3tmp/tmp4wboA9/pdisk_1.dat 2025-04-10T11:07:27.612110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:27.612212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:27.612403Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:27.617060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9137, node 1 2025-04-10T11:07:27.707222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:27.707237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:27.707242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:27.707341Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19894 TClient is connected to server localhost:19894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:28.299228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:28.312584Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:30.441033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639515550494948:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.441173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.738069Z node 1 :TX_PROXY ERROR: Actor# [1:7491639515550494969:2309] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-10T11:07:30.767330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639515550494977:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.767404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.782319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25107, MsgBus: 18398 2025-04-10T11:07:31.712398Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639521804154973:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:31.712636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00073e/r3tmp/tmpYw9d0y/pdisk_1.dat 2025-04-10T11:07:31.862304Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:31.890861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:31.891006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:31.893140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25107, node 2 2025-04-10T11:07:31.944460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:31.944486Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:31.944494Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:31.944652Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18398 TClient is connected to server localhost:18398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:32.647912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.662050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:32.755131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.962337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.057928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:35.510622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639538984025920:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.510717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.598471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.687774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.733268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.770141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.809289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.846197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:35.937141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639538984026440:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.937250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.937478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639538984026445:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.971475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:35.983450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491639538984026447:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:36.055010Z node 2 :TX_PROXY ERROR: Actor# [2:7491639543278 ... missions } 2025-04-10T11:07:42.218390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:42.226279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10264, MsgBus: 28070 2025-04-10T11:07:43.073881Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491639571923916613:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:43.073956Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00073e/r3tmp/tmpJasQgD/pdisk_1.dat 2025-04-10T11:07:43.222066Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:43.259495Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:43.259602Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:43.261373Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10264, node 4 2025-04-10T11:07:43.318840Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:43.318874Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:43.318884Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:43.319027Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28070 TClient is connected to server localhost:28070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:43.770807Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:46.558530Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491639584808819159:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.558621Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.584884Z node 4 :TX_PROXY ERROR: Actor# [4:7491639584808819180:2305] txid# 281474976715658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-10T11:07:46.611248Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491639584808819188:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.611339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.636769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16512, MsgBus: 31402 2025-04-10T11:07:47.690060Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491639589379211991:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:47.690226Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00073e/r3tmp/tmpj1WcZm/pdisk_1.dat 2025-04-10T11:07:47.838193Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:47.870573Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:47.870693Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:47.873048Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16512, node 5 2025-04-10T11:07:47.955182Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:47.955213Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:47.955222Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:47.955365Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31402 TClient is connected to server localhost:31402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:48.504086Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:51.593206Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639606559081836:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:51.593313Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:51.653717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19850, MsgBus: 25448 2025-04-10T11:07:52.640866Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639610388177297:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:52.640932Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00073e/r3tmp/tmppQZRim/pdisk_1.dat 2025-04-10T11:07:52.782981Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:52.806833Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:52.806955Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:52.810709Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19850, node 6 2025-04-10T11:07:52.865006Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:52.865043Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:52.865054Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:52.865285Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25448 TClient is connected to server localhost:25448 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:53.408518Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:56.467713Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection |83.2%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> TExportToS3Tests::ShouldCheckQuotas >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27362, MsgBus: 65374 2025-04-10T11:02:39.529129Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638264878202467:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.529271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c7b/r3tmp/tmpV36M2T/pdisk_1.dat 2025-04-10T11:02:40.251848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:40.291576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:40.293726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:40.315851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27362, node 1 2025-04-10T11:02:40.594508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:40.594532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:40.594542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:40.594666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65374 TClient is connected to server localhost:65374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:02:42.484021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:02:42.503525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-04-10T11:02:44.530382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638264878202467:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:44.530488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:45.902809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-04-10T11:02:46.161577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638294942974185:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.161722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.162397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638294942974197:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:46.166678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:02:46.184634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638294942974199:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:02:46.255498Z node 1 :TX_PROXY ERROR: Actor# [1:7491638294942974251:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-04-10T11:02:46.824033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:02:46.892776Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2025-04-10T11:02:47.699156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:02:47.762396Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2025-04-10T11:02:48.444125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-04-10T11:02:49.677785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-04-10T11:02:50.488596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:02:50.547315Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2025-04-10T11:02:51.106298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float8, '1.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float8, '2.5'::float8] ); 25 2025-04-10T11:02:52.004753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '0'::int2, ARRAY ['text 0'::text, 'text 0'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '1'::int2, ARRAY ['text 1'::text, 'text 1'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '2'::int2, ARRAY ['text 2'::text, 'text 2'::text] ); 1042 2025-04-10T11:02:53.005023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-10T11:02:53.153608Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '0'::int2, ARRAY ['bpchar 0'::bpchar, 'bpchar 0'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '1'::int2, ARRAY ['bpchar 1'::bpchar, 'bpchar 1'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '2'::int2, ARRAY ['bpchar 2'::bpchar, 'bpchar 2'::bpchar] ); 1043 2025-04-10T11:02:53.791217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-10T11:02:53.862794Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1015_b (key, value) VALUES ( '0'::int2, ARRAY ['varchar 0'::varchar, 'varchar 0'::varchar] ); --!syntax_pg INSERT INTO Pg1015_b (key, value) VALUES ( '1'::int2, ARRA ... : true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:40.965131Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:07:40.981520Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:44.742351Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491639554502628437:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:44.742452Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:46.262536Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491639584567400182:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.262686Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.289518Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.384688Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491639584567400287:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.384780Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491639584567400293:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.384850Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.390743Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:46.404361Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7491639584567400295:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:07:46.485276Z node 9 :TX_PROXY ERROR: Actor# [9:7491639584567400346:2405] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:46.786896Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7491639584567400394:2363], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-10T11:07:46.790228Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZWQyOTg1YzUtNjY0OWI0NjMtMWFhZWY2N2YtNGM4NGMxMzk=, ActorId: [9:7491639584567400385:2358], ActorState: ExecuteState, TraceId: 01jrfp4vnq6pdf3a1q5q78en1x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:07:46.847478Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 22203, MsgBus: 24955 2025-04-10T11:07:48.972289Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491639594084609699:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:48.972404Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c7b/r3tmp/tmpjwJSMb/pdisk_1.dat 2025-04-10T11:07:49.178335Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:49.230889Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:49.231038Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:49.232470Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22203, node 10 2025-04-10T11:07:49.311753Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:49.311785Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:49.311808Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:49.312071Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24955 TClient is connected to server localhost:24955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:50.274347Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:53.972890Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491639594084609699:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:53.973021Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:55.270444Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491639624149381446:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.270662Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.281079Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.367871Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491639624149381551:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.368030Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.368317Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491639624149381557:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.373796Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:55.387056Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491639624149381559:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:07:55.467532Z node 10 :TX_PROXY ERROR: Actor# [10:7491639624149381610:2405] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:56.034687Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491639628444348976:2370], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-10T11:07:56.037920Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YjliZTkzNTYtYWRhNjc2ZDgtNzRjNWZkZTYtYjVjYTRiYmI=, ActorId: [10:7491639628444348969:2366], ActorState: ExecuteState, TraceId: 01jrfp54q02x9d86cqnws4ewde, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:07:56.044613Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> KqpReturning::ReturningTypes [GOOD] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.3%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExtractPredicateLookup::SqlInJoin [GOOD] >> KqpKv::BulkUpsert |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopParameter >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 14804, MsgBus: 25254 2025-04-10T11:06:57.851145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639373512342780:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:57.851613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000835/r3tmp/tmpTKZYo9/pdisk_1.dat 2025-04-10T11:06:58.242341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:58.292295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:58.292401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14804, node 1 2025-04-10T11:06:58.297547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:06:58.379940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:58.379960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:58.379969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:58.380187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25254 TClient is connected to server localhost:25254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-10T11:06:58.981614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:59.013144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:59.193455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:59.388199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:59.458306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.337921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639390692213749:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:01.338038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:01.702744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.751089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.802593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.875835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.933335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.979925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:02.042189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639394987181561:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.042345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.042753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639394987181566:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:02.047303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:02.062316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639394987181568:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:02.148105Z node 1 :TX_PROXY ERROR: Actor# [1:7491639394987181624:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:02.854672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639373512342780:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:02.854740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:03.134257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.172891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:03.220168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62192, MsgBus: 13669 2025-04-10T11:07:07.408904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639416353974346:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:07.408963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000835/r3tmp/tmpkt7dy7/pdisk_1.dat 2025-04-10T11:07:07.653299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:07.653411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:07.664283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:07.667216Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62192, node 2 2025-04-10T11:07:07.954820Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:07.954842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:07.954851Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:07.954994Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13669 TClient is connected to server localhost:13669 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:07:08.744252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:08.756493Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:08.769428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0 ... 4976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.301911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.336091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.371365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.411638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.482271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.543847Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639583760539399:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.543975Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.544102Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639583760539404:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:46.548118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:46.560205Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639583760539406:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:46.628375Z node 5 :TX_PROXY ERROR: Actor# [5:7491639583760539458:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:46.704725Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639562285700623:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:46.704804Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:48.142360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:48.187629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:48.238593Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21041, MsgBus: 28814 2025-04-10T11:07:52.351079Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639611594589154:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:52.351207Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000835/r3tmp/tmpWMxQEy/pdisk_1.dat 2025-04-10T11:07:52.481175Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:52.518004Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:52.518112Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:52.519688Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21041, node 6 2025-04-10T11:07:52.578273Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:52.578300Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:52.578310Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:52.578489Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28814 TClient is connected to server localhost:28814 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:53.179394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:53.194544Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:53.276504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:53.549078Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:53.669754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:57.201264Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639633069427417:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:57.201391Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:57.300451Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.334887Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.351447Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639611594589154:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:57.351535Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:57.367682Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.412203Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.455930Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.500179Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.548772Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639633069427931:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:57.548805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639633069427936:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:57.548882Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:57.552460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:57.564507Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639633069427938:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:57.643329Z node 6 :TX_PROXY ERROR: Actor# [6:7491639633069427991:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:55.062681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:55.062842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:55.062891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:55.062924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:55.068801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:55.068885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:55.068987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:55.069063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:55.077892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:55.231008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:55.231114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:55.288033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:55.288424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:55.288618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:55.309684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:55.310125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:55.360150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:55.364275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:55.400682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:55.465759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:55.465852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:55.476917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:55.477021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:55.477175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:55.477404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.483736Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:55.660707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:55.678473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.694779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:55.703026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:55.703217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.715258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:55.715452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:55.715702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.715845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:55.715903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:55.715966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:55.718350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.718433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:55.718479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:55.720479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.720543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.720604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:55.720657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:55.731433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:55.733989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:55.744911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:55.746126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:55.746344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:55.746416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:55.756080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:55.756189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:55.756412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:55.756558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:55.769321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:55.769424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:55.769697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:55.769759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:55.770202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:55.770294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:55.770415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:55.770455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:55.770518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:55.770561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:55.770607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:55.770688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:55.770733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:55.770790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:55.770895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:55.770954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:55.770988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:55.773417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:55.773565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:55.773614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Complete at tablet# 72057594046678944 2025-04-10T11:08:01.962698Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-04-10T11:08:01.962883Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:08:01.962927Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:08:01.962998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-10T11:08:01.963046Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-10T11:08:01.963183Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:08:01.963221Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-10T11:08:01.963266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-10T11:08:01.963329Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-10T11:08:01.963387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-10T11:08:01.963444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-04-10T11:08:01.963538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:303:2294] message: TxId: 105 2025-04-10T11:08:01.963606Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-10T11:08:01.963668Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:08:01.963709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T11:08:01.963795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-10T11:08:01.963834Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-10T11:08:01.963856Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-10T11:08:01.963959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-10T11:08:01.963994Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:08:01.966080Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:08:01.966258Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:303:2294] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-04-10T11:08:01.966496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:08:01.966562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:528:2489] 2025-04-10T11:08:01.966778Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:530:2491], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:08:01.966818Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:08:01.966847Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-10T11:08:01.967534Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:600:2559], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-04-10T11:08:01.967605Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:08:01.969914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:08:01.970373Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:08:01.970661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-04-10T11:08:01.970728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-04-10T11:08:01.970812Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:08:01.970913Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-10T11:08:01.970957Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-10T11:08:01.972758Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-04-10T11:08:01.972883Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:08:01.973118Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:08:01.976641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:08:01.976889Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path:
/MyRoot/.backups/collections/MyCollection1 2025-04-10T11:08:01.976982Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:08:01.977399Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T11:08:01.977472Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T11:08:01.977997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:606:2565], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:08:01.978071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:08:01.978117Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:08:01.978304Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:303:2294], Recipient [7:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-10T11:08:01.978348Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:08:01.978445Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T11:08:01.978568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:08:01.978625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:604:2563] 2025-04-10T11:08:01.978901Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:606:2565], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:08:01.978944Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:08:01.979000Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-04-10T11:08:01.979589Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:607:2566], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:08:01.979675Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:08:01.979807Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:08:01.980134Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 324us result status StatusSuccess 2025-04-10T11:08:01.980652Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 
3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.582650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.582848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.582921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.594356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.594467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.594634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.594798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.601899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.782019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.782190Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.813992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.857829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.858107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.906706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.944950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.036166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.049686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.049809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.049920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.050195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.062310Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.238646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.238950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.263312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.268603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.268746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.291977Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.292164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.292375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.292452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.292497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.292546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.307798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.307899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.307953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.314831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.314900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.315004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.315078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.324722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.331412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.350481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.352071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.352278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.352357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.361171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.361309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.361531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.361651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.371702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.371779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.372103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.372225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.372558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.372634Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.372742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.372784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.372828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.372866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.372911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.372957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.373001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.373041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.373122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.373198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.373242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.385532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.385704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.385754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 2025-04-10T11:08:01.825347Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-10T11:08:01.825440Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-04-10T11:08:01.825614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:08:01.826224Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.826326Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.826380Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-10T11:08:01.826424Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:08:01.826469Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:08:01.827077Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.827161Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.827190Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-10T11:08:01.827225Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-10T11:08:01.827260Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:08:01.827333Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-10T11:08:01.830708Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:08:01.830879Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-10T11:08:01.830938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-10T11:08:01.830998Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-10T11:08:01.832012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-04-10T11:08:01.832180Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-04-10T11:08:01.832746Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:08:01.832871Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 21474838637 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:08:01.832947Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-10T11:08:01.833085Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-10T11:08:01.833178Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-04-10T11:08:01.833227Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-10T11:08:01.833287Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-04-10T11:08:01.833336Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-10T11:08:01.833436Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:08:01.833517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:08:01.833568Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-04-10T11:08:01.833629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-10T11:08:01.833677Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-04-10T11:08:01.833727Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-04-10T11:08:01.833811Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:08:01.833860Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-04-10T11:08:01.833907Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:08:01.833955Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-10T11:08:01.834861Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.834971Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.837055Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:08:01.837104Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:08:01.837286Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:08:01.837428Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:08:01.837467Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-04-10T11:08:01.837511Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-04-10T11:08:01.838488Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.838593Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.838635Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-10T11:08:01.838705Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:08:01.838763Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:08:01.839322Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.839420Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.839458Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-10T11:08:01.839494Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:08:01.839525Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:08:01.839603Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-04-10T11:08:01.839680Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:287:2274] 2025-04-10T11:08:01.843675Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.844077Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-10T11:08:01.844169Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-04-10T11:08:01.844231Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-04-10T11:08:01.844278Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-10T11:08:01.844310Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-04-10T11:08:01.844345Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-04-10T11:08:01.846173Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-10T11:08:01.846277Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:08:01.846348Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:707:2647] TestWaitNotification: OK eventTxId 102 >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> KqpSqlIn::PhasesCount [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |83.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 7004, MsgBus: 11617 2025-04-10T11:06:59.969000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639383157351136:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:59.971900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000814/r3tmp/tmpVLXDx6/pdisk_1.dat 2025-04-10T11:07:00.497720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:00.497837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:00.500492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7004, node 1 2025-04-10T11:07:00.552187Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:07:00.552227Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:07:00.664565Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:00.684401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:00.684429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:00.684441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:00.684581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11617 TClient is connected to server localhost:11617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:01.284177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:01.301658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:01.309071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:01.474218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:07:01.656945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:01.738216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:03.687230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639400337222100:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:03.687378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.127571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.160481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.207143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.269158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.319913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.363548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:04.422889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639404632189908:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.422982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.423204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639404632189913:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:04.427227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:04.444742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639404632189915:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:04.539951Z node 1 :TX_PROXY ERROR: Actor# [1:7491639404632189971:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:04.967680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639383157351136:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:04.967777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:05.768561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.815410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:05.904284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:07:06.005137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 12965, MsgBus: 17470 2025-04-10T11:07:11.102681Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639434116410894:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:11.102816Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000814/r3tmp/tmpwPOEcF/pdisk_1.dat 2025-04-10T11:07:11.362526Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:11.408803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:11.408933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:11.414677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12965, node 2 2025-04-10T11:07:11.594851Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:11.594882Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:11.594890Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:11.595069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17470 TClient is connected to server localhost:17470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" Path ... 4976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.079165Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.117195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.151905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.187072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.261701Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.339382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639601541159766:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.339475Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639601541159771:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.339489Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.343856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:50.353922Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639601541159773:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:50.435881Z node 5 :TX_PROXY ERROR: Actor# [5:7491639601541159828:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:51.052647Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639584361288306:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:51.052744Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:51.813116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:51.890251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:51.934812Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61469, MsgBus: 27210 2025-04-10T11:07:56.676349Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639625910182354:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:56.676430Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000814/r3tmp/tmpGx3lsD/pdisk_1.dat 2025-04-10T11:07:56.801978Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:56.836359Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:56.836459Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:56.838213Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61469, node 6 2025-04-10T11:07:56.886864Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:56.886893Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:56.886900Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:56.887066Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27210 TClient is connected to server localhost:27210 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:57.541167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:57.559533Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:57.640332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:57.928982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:58.079349Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:01.357318Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639647385020623:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:01.357460Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:01.390192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.428094Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.467558Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.519820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.568094Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.616082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:01.676761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639647385021135:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:01.676876Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:01.677020Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639625910182354:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:01.677161Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:01.677714Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639647385021140:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:01.684785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:01.699713Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639647385021143:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:01.762605Z node 6 :TX_PROXY ERROR: Actor# [6:7491639647385021196:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 26282, MsgBus: 31537 2025-04-10T11:07:10.972544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639428741145786:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:10.979350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007f7/r3tmp/tmpBoU00L/pdisk_1.dat 2025-04-10T11:07:11.880007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:11.891357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:11.891466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:11.894012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26282, node 1 2025-04-10T11:07:12.282912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.282958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.282970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.283106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31537 TClient is connected to server localhost:31537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:13.643969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
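Note: the recurring sequence above (TPoolFetcherActor reporting NOT_FOUND for pool "default", an ESchemeOpCreateResourcePool suboperation, a "Transaction ... completed, doublechecking" retry, and finally a TX_PROXY issue saying "path exist, request accepts it") looks like the workload service lazily creating the built-in "default" resource pool on the first query against a fresh database; the closing "error" appears to be the benign already-exists outcome when several actors race to create the same pool. Below is a minimal YQL sketch of an explicit resource pool creation; the pool name and both settings are illustrative assumptions, not values taken from this log:

    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- assumed setting: caps queries running in the pool at once
        QUEUE_SIZE = 100             -- assumed setting: bounds how many queries may wait in line
    );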
2025-04-10T11:07:15.974174Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639428741145786:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:15.978232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:16.145712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639454510950233:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.145852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.533476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:16.690631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639454510950339:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.690750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.691104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639454510950344:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:16.695741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:16.716293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639454510950346:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:07:16.807601Z node 1 :TX_PROXY ERROR: Actor# [1:7491639454510950398:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:17.085260Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639458805917741:2358], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-10T11:07:17.086362Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJiMjI5MDQtY2FmMTI5YTgtNjFiMWMzNjQtYzVhOGYyNTc=, ActorId: [1:7491639454510950221:2329], ActorState: ExecuteState, TraceId: 01jrfp3yneddyqhep0tj3c3scg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:07:17.144656Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491639458805917753:2363], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:07:17.146422Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJiMjI5MDQtY2FmMTI5YTgtNjFiMWMzNjQtYzVhOGYyNTc=, ActorId: [1:7491639454510950221:2329], ActorState: ExecuteState, TraceId: 01jrfp3yqpaffyhb3gz34jc2xr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13499, MsgBus: 13106 2025-04-10T11:07:17.947022Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639458258910557:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:17.947245Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007f7/r3tmp/tmpIwVfJi/pdisk_1.dat 2025-04-10T11:07:18.150558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:18.153587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:18.153660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:18.163716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13499, node 2 2025-04-10T11:07:18.257773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:18.257794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:18.257800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:18.257890Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13106 TClient is connected to server localhost:13106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:18.721934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
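Note: the two compile failures above are NOT NULL constraint violations caught at different points. Code 2032 ("Missing not null column in input: Value") rejects a write that omits a required column, while code 2031 rejects an explicit NULL that cannot be converted to the non-optional String column (the log shows Struct<'Key':Int32,'Value':Null> failing to convert to Struct<'Key':Uint64?,'Value':String>). A minimal YQL sketch consistent with those error texts follows; the table and column names are reconstructed for illustration, not taken from the test source. Later in this same test, KqpNotNullColumns::UpdateTable_Immediate trips the equivalent check at runtime, which surfaces further down as the "TKqpLiteralExecuter, TKqpEnsure failed" errors:

    CREATE TABLE example_table (
        Key Uint64,
        Value String NOT NULL,
        PRIMARY KEY (Key)
    );

    UPSERT INTO example_table (Key) VALUES (1);               -- rejected, code 2032: NOT NULL column Value is not initialized
    UPSERT INTO example_table (Key, Value) VALUES (2, NULL);  -- rejected, code 2031: Null does not convert to String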
2025-04-10T11:07:18.730486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:21.223294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639475438780400:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.223380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.260604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.356864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639475438780500:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.356973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.357434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [W ... disk_1.dat 2025-04-10T11:07:41.280888Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:41.333850Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:41.333958Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:41.337890Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23787, node 5 2025-04-10T11:07:41.472781Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:41.472810Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:41.472819Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:41.472980Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25196 TClient is connected to server localhost:25196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:07:42.131157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:45.200539Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639579857267225:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:45.200653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:45.201182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639579857267237:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:45.207569Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:07:45.218125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639579857267239:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:07:45.279091Z node 5 :TX_PROXY ERROR: Actor# [5:7491639579857267290:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:45.326111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:07:46.058434Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639562677397569:2233];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:46.058521Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:53.550746Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfp50dqddfb4rcnazzvbze2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjIwMGU3MTQtNTA2Y2I0ZWEtMzFhZDY2N2QtYWY5NzIwNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:07:53.551086Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MjIwMGU3MTQtNTA2Y2I0ZWEtMzFhZDY2N2QtYWY5NzIwNDI=, ActorId: [5:7491639605627071979:2527], ActorState: ExecuteState, TraceId: 01jrfp50dqddfb4rcnazzvbze2, Create QueryResponse for error on request, msg: 2025-04-10T11:07:55.387682Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfp52bad0hmvdcha897c82d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NTUwYTgwMTMtMzU3YjA5MDctY2NkMWE2OS1iMzI4NzA2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-10T11:07:55.388012Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NTUwYTgwMTMtMzU3YjA5MDctY2NkMWE2OS1iMzI4NzA2Ng==, ActorId: [5:7491639614217006674:2554], ActorState: ExecuteState, TraceId: 01jrfp52bad0hmvdcha897c82d, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14887, MsgBus: 30275 2025-04-10T11:07:56.224943Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639628542145296:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:56.225076Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007f7/r3tmp/tmpJN5H7m/pdisk_1.dat 2025-04-10T11:07:56.396271Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:56.401404Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:56.401524Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:56.403328Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14887, node 6 2025-04-10T11:07:56.476254Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:56.476284Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:56.476296Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:56.476456Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30275 TClient is connected to server localhost:30275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:57.104174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.838634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639645722015146:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:00.838815Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:00.869977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:08:00.936146Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639645722015295:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:00.936271Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:00.936322Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639645722015300:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:00.941397Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:08:00.953394Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639645722015302:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:08:01.030706Z node 6 :TX_PROXY ERROR: Actor# [6:7491639650016982649:2431] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:01.225434Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639628542145296:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:01.225644Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:07:20.583310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:07:20.583487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.583548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:07:20.583596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:07:20.594531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:07:20.594661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:07:20.594778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:07:20.594926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:07:20.603559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:07:20.787998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:07:20.788074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:20.813712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:07:20.814195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:07:20.814371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:07:20.848882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:07:20.849163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:07:20.891074Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:20.903916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:07:20.947431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.037386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:07:21.051002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:07:21.051099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.051178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:07:21.051408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.067228Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:07:21.299799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:07:21.300112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.300319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:07:21.300649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:07:21.300741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.312177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.312372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:07:21.312651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.312778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:07:21.312832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:07:21.312883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:07:21.320144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.320253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:07:21.320312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:07:21.323346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.323415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.323468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.323541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.327866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:07:21.330074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:07:21.344160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:07:21.345612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:07:21.345853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:07:21.345933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.361022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:07:21.361126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:07:21.361322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:07:21.361457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:07:21.364595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:07:21.364655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:07:21.364852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-10T11:07:21.364916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:07:21.365238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:07:21.365298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:07:21.365435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.365474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.365521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:07:21.365553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.365588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:07:21.365631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:07:21.365671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:07:21.365701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:07:21.365781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:07:21.365825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:07:21.365860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:07:21.368290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.368429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:07:21.368479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0036 2025-04-10T11:07:54.798800Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0029 2025-04-10T11:07:54.829838Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:07:54.830074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-10T11:07:54.830189Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-04-10T11:07:54.830332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:07:54.830386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-04-10T11:07:54.840809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:07:58.301291Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0028 2025-04-10T11:07:58.323101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0024 2025-04-10T11:07:58.353951Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:07:58.354140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-10T11:07:58.354251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-04-10T11:07:58.354393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:07:58.354444Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-04-10T11:07:58.364787Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:08:01.796125Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at 
tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0019 2025-04-10T11:08:01.818290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0018 2025-04-10T11:08:01.860327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:08:01.860514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-10T11:08:01.860586Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-04-10T11:08:01.860689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:08:01.860722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-04-10T11:08:01.871117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:08:04.627302Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:571:2529], attempt# 1 2025-04-10T11:08:04.654945Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:570:2528] 2025-04-10T11:08:04.665660Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:571:2529], sender# [4:570:2528] 2025-04-10T11:08:04.665802Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:570:2528] 2025-04-10T11:08:04.665995Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:571:2529], sender# [4:570:2528], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-04-10T11:08:04.666282Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:571:2529], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2369 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 85E2A02A-66B9-408A-9CDD-4D378B5EACC7 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-10T11:08:04.671010Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:571:2529], result# 2025-04-10T11:08:04.671331Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:570:2528], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 
2025-04-10T11:08:04.684208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 444 RawX2: 17179871597 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:08:04.684317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-10T11:08:04.684545Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 444 RawX2: 17179871597 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:08:04.684724Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 444 RawX2: 17179871597 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:08:04.684837Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:08:04.684892Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:08:04.684948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:08:04.685005Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-10T11:08:04.685206Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:08:04.687856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:08:04.688253Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-10T11:08:04.688314Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-10T11:08:04.688456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:08:04.688498Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:08:04.688541Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-10T11:08:04.688577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-10T11:08:04.688616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-04-10T11:08:04.688693Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759
2025-04-10T11:08:04.688743Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1
2025-04-10T11:08:04.688789Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0
2025-04-10T11:08:04.688842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0
2025-04-10T11:08:04.688959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-10T11:08:04.691248Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759
2025-04-10T11:08:04.691355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759
2025-04-10T11:08:04.693269Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-10T11:08:04.693322Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:594:2548]
TestWaitNotification: OK eventTxId 102
>> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD]
>> KqpMergeCn::TopSortBy_Decimal_Limit5
|83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
>> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD]
>> TPartBtreeIndexIteration::OneNode_Groups_History_Slices
>> KqpNewEngine::ScalarMultiUsage [GOOD]
>> KqpNewEngine::SequentialReadsPragma+Enabled
>> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD]
|83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD]
|83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> Secret::ValidationQueryService
>> Secret::Simple
>> Secret::Validation
>> KqpRanges::NullInPredicateRow [GOOD]
>> Secret::SimpleQueryService
>> KqpRanges::NoFullScanAtScanQuery
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> Secret::Deactivated
>> Secret::DeactivatedQueryService
>> KqpNewEngine::MultiUsageInnerConnection [GOOD]
|83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan
|83.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan
|83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan
|83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire
|83.6%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire
|83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire
|83.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpPg::TableDeleteWhere+useSink [GOOD]
>> KqpPg::TableDeleteWhere-useSink
>> KqpSort::TopParameter [GOOD]
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|83.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|83.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD]
Test command err: Trying to start YDB, gRPC: 7210, MsgBus: 13756 2025-04-10T11:02:34.279840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638243708566754:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:34.279909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cb2/r3tmp/tmpZOPHix/pdisk_1.dat 2025-04-10T11:02:34.996112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:02:35.021267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:02:35.021386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:02:35.024419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7210, node 1 2025-04-10T11:02:35.134048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:02:35.134075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:02:35.134087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:02:35.134230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13756 TClient is connected to server localhost:13756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:02:36.067514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:02:36.106865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2025-04-10T11:02:38.682769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-04-10T11:02:38.975911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260888436696:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.975992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638260888436704:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.976049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:02:38.981134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:02:39.001350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638260888436710:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:02:39.100205Z node 1 :TX_PROXY ERROR: Actor# [1:7491638265183404057:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:02:39.281872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638243708566754:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:02:39.281978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:02:39.569661Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-10T11:02:39.573901Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-10T11:02:39.574231Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491638265183404117:2340] TxId: 281474976710663. Ctx: { TraceId: 01jrfnvf3sefgcnvjs9sdq9ypv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI2YTRlZDMtYmY3NDE4YmQtZTZkNjI5NmYtODYxYmExMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-10T11:02:39.584372Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzI2YTRlZDMtYmY3NDE4YmQtZTZkNjI5NmYtODYxYmExMg==, ActorId: [1:7491638260888436692:2340], ActorState: ExecuteState, TraceId: 01jrfnvf3sefgcnvjs9sdq9ypv, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-10T11:02:39.622118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
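The two failure modes above are the same PostgreSQL rule surfacing through two paths: BulkUpsert rejects the row up front ("Typemod mismatch") because the incoming pgbpchar value carries no typemod while the column was declared with typemod 2, and the SQL INSERT path accepts the statement but aborts at apply time when the datashard coerces 'abcd' into character(2). A minimal pg-syntax sketch of the failing assignment coercion (the table and column names here are hypothetical, not the generated ones from the test):

    --!syntax_pg
    -- hypothetical table; created with a primary key, as the pg-syntax tables in these tests are
    CREATE TABLE coerce_demo (key int2 PRIMARY KEY, value character(2));
    INSERT INTO coerce_demo (key, value) VALUES ('0'::int2, 'ab'::bpchar);   -- fits typemod 2, succeeds
    INSERT INTO coerce_demo (key, value) VALUES ('1'::int2, 'abcd'::bpchar); -- ERROR: value too long for type character(2)

Note the stock-PostgreSQL subtlety the test leans on: an explicit cast 'abcd'::character(2) silently truncates to 'ab', but assigning 'abcd' to a character(2) column raises "value too long" unless the excess characters are all spaces; YDB's pg layer is expected to match that behavior here.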
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-04-10T11:02:40.030773Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-10T11:02:40.038811Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-10T11:02:40.039035Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491638269478371550:2375] TxId: 281474976710668. Ctx: { TraceId: 01jrfnvfw145zwd733rssrsm5v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE1N2RjNWMtOTVmNDFkZTUtNWVkMzZmMTctMjVkMjY4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-10T11:02:40.039254Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjE1N2RjNWMtOTVmNDFkZTUtNWVkMzZmMTctMjVkMjY4Mg==, ActorId: [1:7491638265183404208:2375], ActorState: ExecuteState, TraceId: 01jrfnvfw145zwd733rssrsm5v, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 1042 2025-04-10T11:02:40.095458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-04-10T11:02:40.668309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_2169371982377735806_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce__pgbpchar_2169371982377735806_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) {abcd,abcd} 1042 2025-04-10T11:02:41.187183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
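The array variants repeat the pattern with _bpchar (PostgreSQL's internal name for bpchar[]): BulkUpsert rejects the untypemodded array outright, while INSERT coerces element by element and fails on the first over-long element; the second pair of tables, declared with typemod 4, then accepts 'abcd' and '{abcd,abcd}', as the echoed values show. The bare "1042" printed between blocks matches the PostgreSQL type oid of bpchar, presumably echoed by the test harness. A sketch of the element-wise coercion, assuming array-column DDL like the test's generated tables (names hypothetical):

    --!syntax_pg
    CREATE TABLE coerce_arr_demo (key int2 PRIMARY KEY, value character(2)[]);
    INSERT INTO coerce_arr_demo (key, value) VALUES ('0'::int2, '{ab,cd}'::_bpchar);     -- each element fits, succeeds
    INSERT INTO coerce_arr_demo (key, value) VALUES ('1'::int2, '{abcd,abcd}'::_bpchar); -- ERROR: value too long for type character(2)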
: Err ... : SetPath # /home/runner/.ya/build/build_root/2gt4/000cb2/r3tmp/tmpzUlwo5/pdisk_1.dat 2025-04-10T11:07:36.969305Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:37.029318Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:37.029923Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:37.033355Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12958, node 10 2025-04-10T11:07:37.131968Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:37.132002Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:37.132016Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:37.132201Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20999 TClient is connected to server localhost:20999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:38.200011Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:41.724420Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491639540223551480:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:41.724542Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:42.988559Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491639565993355887:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:42.988559Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491639565993355910:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:42.988670Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:42.993324Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:07:43.007313Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491639565993355916:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:07:43.079639Z node 10 :TX_PROXY ERROR: Actor# [10:7491639570288323263:2346] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26908, MsgBus: 2269 2025-04-10T11:07:44.474771Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491639574833349193:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:44.475755Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cb2/r3tmp/tmpdjYIDi/pdisk_1.dat 2025-04-10T11:07:44.696353Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:44.738105Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:44.738258Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:44.740844Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26908, node 11 2025-04-10T11:07:44.797527Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:44.797554Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:44.797569Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:44.797756Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2269 TClient is connected to server localhost:2269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:45.758231Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
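The NOT_FOUND / "doublechecking" / "path exist, request accepts it" triple that recurs in every bootstrap above is a benign race, not a failure: the first query arrives before the default workload-manager pool exists, the pool fetcher reports NOT_FOUND, TPoolCreatorActor creates /Root/.metadata/workload_manager/pools/default, and a concurrent creation attempt finds the path already present and accepts it. Pools can also be declared explicitly; a hedged sketch, assuming the CREATE RESOURCE POOL statement from YDB's workload-manager documentation (the pool name and limits are illustrative only):

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- at most 10 queries execute in the pool at once
        QUEUE_SIZE = 100              -- further queries wait in a bounded queue
    );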
2025-04-10T11:07:49.478320Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7491639574833349193:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:49.478447Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:50.590875Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491639600603153630:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.591066Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491639600603153655:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.591258Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:50.596792Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:07:50.619385Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491639600603153658:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:07:50.693441Z node 11 :TX_PROXY ERROR: Actor# [11:7491639600603153709:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:50.734662Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:07:50.807009Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.784775Z node 11 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 11, TabletId: 72075186224037888 not found 2025-04-10T11:07:55.812220Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.191565Z node 11 :KQP_COMPUTE ERROR: SelfId: [11:7491639626372958064:2452], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=11&id=NDliZDhhYzQtM2U2MWE0YTItM2I3MzI1NjMtNWZhNGIwNWE=. TraceId : 01jrfp54kkavcfj4a5w4b77kxm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-04-10T11:07:56.192212Z node 11 :KQP_COMPUTE ERROR: SelfId: [11:7491639626372958065:2453], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=11&id=NDliZDhhYzQtM2U2MWE0YTItM2I3MzI1NjMtNWZhNGIwNWE=. CustomerSuppliedId : . TraceId : 01jrfp54kkavcfj4a5w4b77kxm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [11:7491639626372958061:2449], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:07:56.193005Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=NDliZDhhYzQtM2U2MWE0YTItM2I3MzI1NjMtNWZhNGIwNWE=, ActorId: [11:7491639622077990758:2449], ActorState: ExecuteState, TraceId: 01jrfp54kkavcfj4a5w4b77kxm, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultiUsageInnerConnection [GOOD] Test command err: Trying to start YDB, gRPC: 61055, MsgBus: 29988 2025-04-10T11:07:17.321644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639459180792729:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:17.321944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007c7/r3tmp/tmpRWAt6D/pdisk_1.dat 2025-04-10T11:07:17.971696Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:17.975768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:17.975857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:17.980142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61055, node 1 2025-04-10T11:07:18.139086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:18.139109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:18.139123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:18.139253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29988 TClient is connected to server localhost:29988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:18.697613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
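An aside on the PRECONDITION_FAILED that closes the PgCatalog::PgTables log above: reason(57) wraps a stock PostgreSQL cast error, raised when the query tried to coerce the string 'pg_proc' to boolean. PostgreSQL accepts only boolean-literal spellings ('t', 'true', 'yes', 'on', '1' and their negatives); anything else fails the same way:

    --!syntax_pg
    SELECT 'true'::bool;     -- t
    SELECT 'pg_proc'::bool;  -- ERROR: invalid input syntax for type boolean: "pg_proc"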
2025-04-10T11:07:18.712403Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:18.719957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:18.861137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.074491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.156440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:21.016419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639476360663558:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.016565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.472859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.512971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.581537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.614829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.657213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.731351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.809623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639476360664081:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.809705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639476360664086:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.809720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.813219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:21.823471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639476360664088:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:21.914218Z node 1 :TX_PROXY ERROR: Actor# [1:7491639476360664143:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:22.317078Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639459180792729:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:22.317194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30387, MsgBus: 20260 2025-04-10T11:07:24.724273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639489025902271:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:24.724441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007c7/r3tmp/tmpJv9b2E/pdisk_1.dat 2025-04-10T11:07:24.862440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:24.890964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:24.891052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:24.893137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30387, node 2 2025-04-10T11:07:24.950061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:24.950082Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:24.950091Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:24.950290Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20260 TClient is connected to server localhost:20260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:07:25.441438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.447103Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:25.460078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.559508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.749325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ...
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.061185Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.111298Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.145971Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.179539Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.213812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.252292Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.303267Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.398675Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639627646129988:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.398799Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639627646129993:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.398797Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.403628Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:56.427132Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639627646129995:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:56.498407Z node 6 :TX_PROXY ERROR: Actor# [6:7491639627646130050:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:56.622837Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639606171291203:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:56.622942Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25395, MsgBus: 10219 2025-04-10T11:07:59.152550Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639640419199655:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:59.152660Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007c7/r3tmp/tmpe0HRLg/pdisk_1.dat 2025-04-10T11:07:59.284412Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:59.306342Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:59.306466Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:59.309444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25395, node 7 2025-04-10T11:07:59.377258Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:59.377290Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:59.377305Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:59.377504Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10219 TClient is connected to server localhost:10219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:08:00.097542Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.106401Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:08:00.113045Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.202500Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.491414Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.611765Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.800160Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639657599070613:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.800298Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.864896Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.906956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.950405Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.987858Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.025077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.064461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.109005Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639661894038420:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.109119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.109145Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639661894038425:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.113245Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:04.125460Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639661894038427:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:04.152516Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639640419199655:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:04.152610Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:04.208986Z node 7 :TX_PROXY ERROR: Actor# [7:7491639661894038484:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 62207, MsgBus: 62389 2025-04-10T11:07:26.608871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639499945363710:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:26.609027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000772/r3tmp/tmpjQKQqo/pdisk_1.dat 2025-04-10T11:07:27.053848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:27.053953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:27.055634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:27.094498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62207, node 1 2025-04-10T11:07:27.195173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:27.195195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:27.195206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:27.195337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62389 TClient is connected to server localhost:62389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:27.845474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.068017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639517125233416:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.068289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.394036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.571732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639517125233518:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.571810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.572062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639517125233523:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.575976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:30.589180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639517125233525:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:07:30.693114Z node 1 :TX_PROXY ERROR: Actor# [1:7491639517125233579:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:31.079783Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491639521420200933:2329], TxId: 281474976710662, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=N2UwZWJmNjQtNWIwNzJmZGItZTFjY2ZjLTMyNjJhMWQ3. TraceId : 01jrfp4c6wdwqyrq9qf05graeq. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-10T11:07:31.080523Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2UwZWJmNjQtNWIwNzJmZGItZTFjY2ZjLTMyNjJhMWQ3, ActorId: [1:7491639517125233398:2329], ActorState: ExecuteState, TraceId: 01jrfp4c6wdwqyrq9qf05graeq, Create QueryResponse for error on request, msg: 2025-04-10T11:07:31.179057Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491639521420200950:2329], TxId: 281474976710663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2UwZWJmNjQtNWIwNzJmZGItZTFjY2ZjLTMyNjJhMWQ3. CustomerSuppliedId : . TraceId : 01jrfp4ccq82csg405ht0twt3q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-10T11:07:31.179508Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2UwZWJmNjQtNWIwNzJmZGItZTFjY2ZjLTMyNjJhMWQ3, ActorId: [1:7491639517125233398:2329], ActorState: ExecuteState, TraceId: 01jrfp4ccq82csg405ht0twt3q, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 19546, MsgBus: 12302 2025-04-10T11:07:32.066792Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639526101547295:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000772/r3tmp/tmpz01zh3/pdisk_1.dat 2025-04-10T11:07:32.226410Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:07:32.293517Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:32.310483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:32.310573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19546, node 2 2025-04-10T11:07:32.313874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:32.450683Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:32.450719Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:32.450727Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:32.450845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12302 TClient is connected to server localhost:12302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:32.929935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.936242Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:35.879511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639538986449641:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.879609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.938920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.035365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639543281417041:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.035451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.035974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639543281417046:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.041131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 1474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.732766Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.762837Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.792124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.825727Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.864809Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639625357057495:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.864907Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.864910Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639625357057500:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.868544Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:55.878908Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639625357057502:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:55.951721Z node 5 :TX_PROXY ERROR: Actor# [5:7491639625357057555:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:56.612506Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639608177186025:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:56.612566Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:56.894412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:57.028640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30645, MsgBus: 1508 2025-04-10T11:07:59.194290Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639641346294592:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:59.194384Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000772/r3tmp/tmpd6Pa5p/pdisk_1.dat 2025-04-10T11:07:59.323278Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:59.359717Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:59.359842Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:59.361649Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30645, node 6 2025-04-10T11:07:59.431052Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:59.431093Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:59.431109Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:59.431295Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1508 TClient is connected to server localhost:1508 WaitRootIsUp 'Root'... 
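
The InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE entries earlier in this fragment (code 2031, "Tried to insert NULL value into NOT NULL column: key") are an exercised negative path: KQP rejects a write that would put NULL into a NOT NULL column. A minimal YQL sketch of the failing pattern follows; the table and column names are illustrative, not taken from the test source:

    CREATE TABLE `/Root/NotNullDemo` (
        key Uint64 NOT NULL,   -- NOT NULL key column, as in the failing test
        value String,
        PRIMARY KEY (key)
    );

    -- Expected to fail with:
    -- "Tried to insert NULL value into NOT NULL column: key, code: 2031"
    UPSERT INTO `/Root/NotNullDemo` (key, value) VALUES (NULL, "a");
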
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:00.074716Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.088909Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.174688Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.400931Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:00.502549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.422187Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639658526165556:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.422290Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.492860Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.533262Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.631905Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.710772Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.755247Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.798093Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.845522Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639658526166071:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.845612Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639658526166076:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.845650Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.850095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:03.862584Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639658526166078:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:03.958565Z node 6 :TX_PROXY ERROR: Actor# [6:7491639658526166133:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:04.194795Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639641346294592:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:04.194885Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:05.094573Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.278135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 4025, MsgBus: 9253 2025-04-10T11:07:26.532315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639497518450759:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:26.536993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000782/r3tmp/tmpucXpuk/pdisk_1.dat 2025-04-10T11:07:27.084388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:27.084819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:27.084924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:27.088858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4025, node 1 2025-04-10T11:07:27.230881Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:27.230904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:27.230917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:27.231067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9253 TClient is connected to server localhost:9253 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:27.944449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:27.987262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:30.151481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639514698320601:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.151646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.441163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:30.607766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639514698320706:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.607854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.608207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639514698320711:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:30.612501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:30.622918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639514698320713:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:07:30.707438Z node 1 :TX_PROXY ERROR: Actor# [1:7491639514698320765:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64962, MsgBus: 4131 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000782/r3tmp/tmpTLwGDh/pdisk_1.dat 2025-04-10T11:07:32.234564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:32.329707Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:32.337531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:32.337642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:32.343617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64962, node 2 2025-04-10T11:07:32.558898Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:32.558928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:32.558937Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:32.559070Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4131 TClient is connected to server localhost:4131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:33.032122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.056738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.165659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
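
The FLAT_TX_SCHEMESHARD warning "Operation part proposed ok, but propose itself is undo unsafe", repeated above for every ESchemeOpCreateTable and ESchemeOpAlterSubDomain suboperation, appears to flag that the proposed suboperation cannot be rolled back once accepted; in these runs it accompanies every successful DDL and reads as routine noise rather than a failure. Each ESchemeOpCreateTable corresponds to an ordinary YQL DDL statement of roughly this shape (the schema here is illustrative):

    CREATE TABLE `/Root/KeyValue` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );
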
2025-04-10T11:07:33.358293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.435174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:35.931708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639537335467728:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.931809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:35.995140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.051694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.086965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.170107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.203831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.242210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:36.321581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639541630435543:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.321663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:36.321710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639541 ... WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639627174981195:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.316381Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.344525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.380201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.426746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.501010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.569340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.610721Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.669844Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639627174981712:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.669956Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.670062Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639627174981717:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.673907Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:56.685142Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639627174981719:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:56.750663Z node 5 :TX_PROXY ERROR: Actor# [5:7491639627174981774:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:57.726398Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639609995110232:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:57.735625Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28514, MsgBus: 14790 2025-04-10T11:08:00.394318Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639644301042331:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:00.394409Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000782/r3tmp/tmpWa4USe/pdisk_1.dat 2025-04-10T11:08:00.595725Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:00.618678Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:00.618769Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:00.620274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28514, node 6 2025-04-10T11:08:00.671005Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:00.671040Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:00.671052Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:00.671300Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14790 TClient is connected to server localhost:14790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
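
The sequence just above repeats for every fresh test server: the workload service asks for the `default` resource pool and gets NOT_FOUND, a TPoolCreatorActor creates it under /Root/.metadata/workload_manager/pools/default ("Scheduled retry for error ... doublechecking"), and a racing second creation is rejected by TX_PROXY with "path exist, request accepts it" — benign, since the pool exists by then. For comparison, user-defined pools are declared with DDL along these lines; the option names are sketched from the workload-manager feature and may differ across YDB versions:

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on simultaneously running queries
        QUEUE_SIZE = 100              -- cap on queued queries beyond the limit
    );
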
2025-04-10T11:08:01.280281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:01.298838Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:01.364419Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:01.557093Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:01.689665Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:04.763197Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639661480913285:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.763319Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.823122Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.861258Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.935715Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.977092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.024176Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.104256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.187886Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639665775881102:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:05.188002Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:05.188077Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639665775881108:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:05.192747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:05.204960Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639665775881110:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:08:05.301785Z node 6 :TX_PROXY ERROR: Actor# [6:7491639665775881164:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:05.394628Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639644301042331:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:05.394842Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |83.8%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas >> KqpNewEngine::DqSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameter [GOOD] Test command err: Trying to start YDB, gRPC: 12522, MsgBus: 22391 2025-04-10T11:07:29.259665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639510777752149:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:29.259738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006fc/r3tmp/tmp3KtkIF/pdisk_1.dat 2025-04-10T11:07:29.743268Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:29.747151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:29.747370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:29.751879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12522, node 1 2025-04-10T11:07:29.847239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:29.847264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:29.847277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:29.847430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22391 TClient is connected to server localhost:22391 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:30.530320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.549725Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:30.561263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.684754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.860264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.946488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:32.818012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639523662655815:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:32.818202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.170723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.213775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.251226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.291803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.344520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.415081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.477050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639527957623629:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.477133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.477260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639527957623634:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.481407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:33.492693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639527957623636:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:33.599556Z node 1 :TX_PROXY ERROR: Actor# [1:7491639527957623692:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:34.266270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639510777752149:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:34.266347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1124, MsgBus: 11597 2025-04-10T11:07:36.035574Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639540294896710:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:36.035879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006fc/r3tmp/tmpi6156N/pdisk_1.dat 2025-04-10T11:07:36.180841Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:36.192778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:36.192854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:36.194374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1124, node 2 2025-04-10T11:07:36.256244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:36.256269Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:36.256284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:36.256430Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11597 TClient is connected to server localhost:11597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
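
Each "TClient::Ls response" above is the schemeshard's PathDescription for the requested path, cut off by the harness at "(TRUNCATED)"; the misspelled SchemeShardId_Depricated / PathId_Depricated fields are the actual protobuf field names, reproduced verbatim. Outside the test harness the same description can be pulled with the YDB CLI; the endpoint and database below are placeholders:

    ydb -e grpc://localhost:2135 -d /Root scheme describe /Root
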
2025-04-10T11:07:36.828718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:36.841062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:36.940516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.138107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.221708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639640434313227:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.367524Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.442893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.495373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.529239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.563517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.604059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.651798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.705638Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639640434313739:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.705729Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639640434313744:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.705781Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.710271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:59.721907Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639640434313746:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:59.783793Z node 5 :TX_PROXY ERROR: Actor# [5:7491639640434313799:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:00.630977Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639623254442271:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:00.631076Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21900, MsgBus: 15898 2025-04-10T11:08:02.358292Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639652353226729:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:02.358392Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006fc/r3tmp/tmp3CjdKf/pdisk_1.dat 2025-04-10T11:08:02.553864Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:02.554004Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:02.578437Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:02.580079Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21900, node 6 2025-04-10T11:08:02.684305Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:02.684333Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:02.684345Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:02.684540Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15898 TClient is connected to server localhost:15898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:08:03.379629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.398667Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.486031Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.756356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.853503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:06.675553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639669533097694:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.675660Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.734720Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.771700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.813206Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.856875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.900635Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.961726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.062860Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639673828065504:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:07.063002Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639673828065509:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:07.063007Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:07.069578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:07.082275Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639673828065511:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:08:07.146162Z node 6 :TX_PROXY ERROR: Actor# [6:7491639673828065565:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:07.358686Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639652353226729:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:07.358787Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpSqlIn::Delete [GOOD] >> TCowBTreeTest::SeekForwardPermutationsInplace >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted >> TCowBTreeTest::ClearAndReuse >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> THyperLogCounterTest::TestIncrement >> THazardTest::AutoProtectedPointers [GOOD] >> 
TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> THazardTest::CachedPointers [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TQueueInplaceTests::CleanInDestructor [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TULID::ParseAndFormat [GOOD] >> TULID::HeadByteOrder [GOOD] >> TULID::TailByteOrder [GOOD] >> TULID::EveryBitOrder [GOOD] >> TULID::Generate >> TIntervalSetTest::IntervalVecTestAdd >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularQueueTest::Empty [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAdd >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> TCowBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::ClearAndReuse >> AddressClassifierTest::TestAddressParsing [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> TCowBTreeTest::MultipleSnapshots >> AddressClassifierTest::TestAddressExtraction [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> TBTreeTest::Basics [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TIntrusiveStackTest::TestPushPop [GOOD] >> 
TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestAddRandom >> TULID::Generate [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TWildcardTest::TestWildcards [GOOD] >> TIntervalSetTest::IntervalVecTestAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpNewEngine::JoinSameKey >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout >> THyperLogCounterTest::TestAddFixed [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSource [GOOD] Test command err: Trying to start YDB, gRPC: 5007, MsgBus: 18718 2025-04-10T11:07:18.244902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639464268649295:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:18.244958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007ad/r3tmp/tmpNXcVnh/pdisk_1.dat 2025-04-10T11:07:18.654028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:18.688068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 5007, node 1 2025-04-10T11:07:18.688205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:18.695565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:18.779984Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:18.780015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-10T11:07:18.780024Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:18.780205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18718 TClient is connected to server localhost:18718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:19.410964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.438911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.455094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:19.622500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.789936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.915893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:21.792193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639477153552959:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.792299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:22.141483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.172707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.206417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.286379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.320403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.395094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:22.461832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639481448520778:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:22.461918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:22.462447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639481448520783:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:22.467420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:22.490926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639481448520785:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:22.570956Z node 1 :TX_PROXY ERROR: Actor# [1:7491639481448520841:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:23.283457Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639464268649295:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:23.283573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13034, MsgBus: 29147 2025-04-10T11:07:25.301738Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639494516652024:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:25.301793Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007ad/r3tmp/tmpzmkg92/pdisk_1.dat 2025-04-10T11:07:25.429139Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.459549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.459643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.461382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13034, node 2 2025-04-10T11:07:25.570415Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:25.570444Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:25.570454Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:25.570579Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29147 TClient is connected to server localhost:29147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:07:26.079596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.088919Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:26.101341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:26.179113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.345720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting.. ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639640261229578:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.332702Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.401164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.475683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.509969Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.616413Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.657692Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.707868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:59.793726Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639618786391311:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:59.793790Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:59.796553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639640261230098:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.796677Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.796965Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639640261230103:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:59.804418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:59.820465Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639640261230105:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:59.880908Z node 6 :TX_PROXY ERROR: Actor# [6:7491639640261230164:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3474, MsgBus: 10188 2025-04-10T11:08:02.756082Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639653294311018:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:02.756157Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007ad/r3tmp/tmpUpWNiF/pdisk_1.dat 2025-04-10T11:08:02.960978Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:02.978842Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:02.978953Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:02.981826Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3474, node 7 2025-04-10T11:08:03.034514Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:03.034547Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:03.034558Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:03.034751Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10188 TClient is connected to server localhost:10188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:03.839163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:03.860975Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:08:03.947827Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:04.180065Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:04.285467Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:07.490321Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639674769149283:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:07.490462Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:07.567479Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.612409Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.667889Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.758366Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639653294311018:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:07.758475Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:07.791875Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.862365Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.922652Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:08.062574Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639679064117106:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:08.062707Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:08.063011Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639679064117111:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:08.069706Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:08.092185Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639679064117113:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:08.185684Z node 7 :TX_PROXY ERROR: Actor# [7:7491639679064117169:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapIntersection >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TWildcardTest::TestWildcards [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalSetUnion >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 3464, MsgBus: 15075 2025-04-10T11:07:17.336830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639461019821882:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:17.336896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007d2/r3tmp/tmpojPzyl/pdisk_1.dat 2025-04-10T11:07:17.970560Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3464, node 1 2025-04-10T11:07:17.981817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:17.981942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T11:07:17.991052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:18.050084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:18.050135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:18.050172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:18.050305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15075 TClient is connected to server localhost:15075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:18.666659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:18.696152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:18.857352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.037356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:19.123386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:20.997603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639473904725330:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.997739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.493809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.526392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.561782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.601987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.689690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.766430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:21.816664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639478199693144:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.816761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.816842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639478199693149:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:21.820580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:21.829651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639478199693151:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:21.883466Z node 1 :TX_PROXY ERROR: Actor# [1:7491639478199693204:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:22.337411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639461019821882:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:22.337507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12432, MsgBus: 18909 2025-04-10T11:07:25.390024Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639493202720940:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:25.390101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007d2/r3tmp/tmpptVBYH/pdisk_1.dat 2025-04-10T11:07:25.580249Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:25.612965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:25.613067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:25.614574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12432, node 2 2025-04-10T11:07:25.700116Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:25.700142Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:25.700154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:25.700304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18909 TClient is connected to server localhost:18909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:07:26.225244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.233884Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:26.251057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.345277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.555590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:26.662395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.699809Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491639623175208571:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.703957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:55.714329Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491639623175208573:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:07:55.770936Z node 5 :TX_PROXY ERROR: Actor# [5:7491639623175208626:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:56.295109Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491639605995337092:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:56.295207Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:56.835742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.876653Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.917436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
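The warning above names a concrete remedy. A minimal YQL sketch of where the recommended pragma applies (the table and column names are hypothetical illustrations, not taken from this run):

    -- Legacy IN semantics can yield NULL instead of FALSE when the items
    -- collection is empty or contains NULLs; the pragma below enables the
    -- ANSI-compliant behavior the warning recommends.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/TestTable`            -- hypothetical table
    WHERE Value IN ("a", "b", NULL);  -- nullable item in the collection
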
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27851, MsgBus: 17922 2025-04-10T11:08:01.235162Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491639648789540764:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:01.235289Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007d2/r3tmp/tmpgv8mTr/pdisk_1.dat 2025-04-10T11:08:01.365024Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:01.385525Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:01.385645Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:01.387168Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27851, node 6 2025-04-10T11:08:01.442063Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:01.442098Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:01.442111Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:01.442335Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17922 TClient is connected to server localhost:17922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:02.121637Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:02.138275Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:02.204636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:08:02.438601Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:02.553837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:05.999362Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639665969411725:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:05.999493Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.073524Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.119676Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.156669Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.196668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.235196Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639648789540764:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:06.235280Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:06.239042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.299269Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:06.357384Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639670264379530:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.357488Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.357612Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639670264379536:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:06.363075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:06.376568Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639670264379538:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:06.436458Z node 6 :TX_PROXY ERROR: Actor# [6:7491639670264379593:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:07.870223Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:08:07.929082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:08:08.048405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |83.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> TCacheCacheTest::Random [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCacheTest::Test2QCache [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] Test command err: 0.2755 |83.9%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |83.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] >> TCowBTreeTest::MultipleSnapshots [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, 
{Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 
2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row 
| ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b {0, 1} | 1 2 50b {0, 4} | 2 4 50b {0, 7} | 3 6 50b {0, 10} | 4 8 50b {1, 3} | 5 10 50b {1, 6} | 6 12 50b {1, 8} | 7 14 50b {2, NULL} | 8 16 50b {2, 4} | 10 18 50b {2, 7} | 11 20 50b {2, 10} | 12 22 50b {3, 3} | 13 24 50b {3, 6} | 15 26 50b {3, 8} | 16 28 50b {4, NULL} | 17 30 50b {4, 4} | 18 32 50b {4, 7} | 19 34 50b {4, 10} | 21 36 50b {5, 3} | 22 38 50b {5, 6} | 22 39 50b {5, 7} + BTreeIndex ... xxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 
21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 
DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} 
Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersection >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |84.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] |84.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf >> 
TConsoleConfigTests::TestAddConfigItem |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |84.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |84.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifference ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 
[0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] Test command err: Trying to start YDB, gRPC: 20847, MsgBus: 12619 2025-04-10T11:07:21.862411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639476034808327:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:21.862507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000791/r3tmp/tmpGW6MOI/pdisk_1.dat 2025-04-10T11:07:22.306251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:22.327276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:22.327697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:22.330301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20847, node 1 2025-04-10T11:07:22.418886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:22.418926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:22.418935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:22.419053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12619 TClient is connected to server localhost:12619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:23.289769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.326410Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:23.339391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.523452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.709560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.811692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.594290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639493214679304:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.594418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.900475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.932290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.968678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.002291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.036335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.086386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:26.180329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639497509647115:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:26.180444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:26.180836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639497509647120:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:26.185105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:26.195960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639497509647122:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:26.292414Z node 1 :TX_PROXY ERROR: Actor# [1:7491639497509647177:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:26.866209Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639476034808327:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:26.866305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:27.349020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:07:28.249880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283248274, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 20072, MsgBus: 24269 2025-04-10T11:07:29.160162Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639511459185754:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:29.160219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000791/r3tmp/tmpRUtXk4/pdisk_1.dat 2025-04-10T11:07:29.310069Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:29.325044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:29.325396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:29.327414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20072, node 2 2025-04-10T11:07:29.390760Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:29.390788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:29.390798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:29.390934Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24269 TClient is connected to server localhost:24269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:29.884723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:07:29.898479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:29.982449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 w ... T11:08:03.113141Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.154346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.196145Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.236191Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:03.327320Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639657979047097:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.327426Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.327505Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639657979047102:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:03.331906Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:03.347692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639657979047104:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:03.447310Z node 6 :TX_PROXY ERROR: Actor# [6:7491639657979047160:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:03.691817Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639636504208312:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:03.691902Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:04.691098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.809487Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283285843, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 10609, MsgBus: 4949 2025-04-10T11:08:06.841465Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639669739914661:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:06.841534Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000791/r3tmp/tmpWtL9xl/pdisk_1.dat 2025-04-10T11:08:07.011830Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:07.046914Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:07.047015Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:07.048530Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10609, node 7 2025-04-10T11:08:07.104755Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:07.104789Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:07.104802Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:07.105001Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4949 TClient is connected to server localhost:4949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:07.813262Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:07.870335Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:07.890329Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:08:08.034733Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:08.399230Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:08.552633Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:11.841686Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639669739914661:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:11.841816Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:12.036672Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639695509720215:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:12.036793Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:12.107367Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.239375Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.316755Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.370993Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.448904Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.502603Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:12.581526Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639695509720737:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:12.581660Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:12.581961Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639695509720742:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:12.587522Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:12.602696Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639695509720744:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:08:12.666786Z node 7 :TX_PROXY ERROR: Actor# [7:7491639695509720799:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:16.327981Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:08:19.452270Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283299458, txId: 281474976710673] shutting down |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAutoKind ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:05:18.280955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:05:18.281189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:18.281256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:05:18.281294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:05:18.289237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:05:18.289392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:05:18.289562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:05:18.289676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:05:18.301372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:05:18.545787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:05:18.546009Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:05:18.613037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:05:18.613412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:05:18.613615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:05:18.653295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:05:18.653813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:05:18.701949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:18.713737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:05:18.753221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:18.825419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:18.825529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:18.835492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:05:18.835601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:18.835739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:05:18.835996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:05:18.845453Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:05:19.050479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:05:19.066629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.089967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:05:19.097982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:05:19.098140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.126268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.126437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:05:19.126677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.126823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:05:19.126878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:05:19.126915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:05:19.129839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.129917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:05:19.129958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:05:19.133049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.133124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.133176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.133236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.150015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:05:19.152757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:05:19.166310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:05:19.167917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:05:19.168116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:05:19.168168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.181888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:05:19.182020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:05:19.182327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:05:19.182434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:05:19.193771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:05:19.193849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:05:19.194072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:05:19.194129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:05:19.194442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:05:19.194514Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:05:19.194642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.194681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:05:19.194723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:05:19.194763Z no ... 
048295Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:08:20.048537Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:08:20.048664Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-10T11:08:20.048696Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-10T11:08:20.048730Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-10T11:08:20.048758Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-10T11:08:20.048793Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-10T11:08:20.054580Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.054695Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.054729Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:08:20.054953Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.055029Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.055058Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:08:20.055092Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-10T11:08:20.055130Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:08:20.055219Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-10T11:08:20.056638Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:08:20.056691Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:08:20.056916Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:08:20.057033Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-10T11:08:20.057065Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:08:20.057102Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1003:0 progress is 3/3 2025-04-10T11:08:20.057129Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:08:20.057162Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-10T11:08:20.057195Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:08:20.057237Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-10T11:08:20.057264Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-10T11:08:20.057353Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:08:20.057402Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-10T11:08:20.057424Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-10T11:08:20.057452Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:08:20.057477Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-10T11:08:20.057499Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-10T11:08:20.057539Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:08:20.057930Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.063112Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.063232Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.063270Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.083587Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.083747Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:08:20.091432Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 489626274075 } TabletId: 72075186233409546 State: 4 2025-04-10T11:08:20.091533Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-10T11:08:20.098991Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:08:20.099539Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-10T11:08:20.099721Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:08:20.099987Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-04-10T11:08:20.102806Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:08:20.102856Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T11:08:20.102920Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:08:20.102958Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:08:20.102996Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:08:20.115172Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:08:20.115256Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-10T11:08:20.115480Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-10T11:08:20.115772Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-10T11:08:20.115815Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-10T11:08:20.117053Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-10T11:08:20.117155Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-10T11:08:20.117191Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:631:2557] 2025-04-10T11:08:20.131445Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 350 RawX2: 489626274077 } TabletId: 72075186233409547 State: 4 2025-04-10T11:08:20.131539Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-10T11:08:20.139013Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:08:20.139578Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-10T11:08:20.140267Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:08:20.140538Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:08:20.141069Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:08:20.141116Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:08:20.141186Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:08:20.174903Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:08:20.175004Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-10T11:08:20.175556Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-10T11:08:20.175982Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T11:08:20.176048Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |84.1%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.1%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.1%| [LD] {RESULT} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |84.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/btree_benchmark/btree_benchmark >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TCowBTreeTest::SnapshotCascade [GOOD] >> TCowBTreeTest::SnapshotRollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 3779, MsgBus: 20369 2025-04-10T11:07:16.915108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639457706921811:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:16.915167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007e8/r3tmp/tmplprp6g/pdisk_1.dat 2025-04-10T11:07:17.629450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:17.629565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:17.639252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:07:17.667100Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3779, node 1 2025-04-10T11:07:17.806832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:17.806857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:17.806865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:17.806996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20369 TClient is connected to server localhost:20369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:18.476956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:18.495184Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:20.399473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639474886791434:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.399478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639474886791426:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.399629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:20.404038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:07:20.416681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639474886791440:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:07:20.486871Z node 1 :TX_PROXY ERROR: Actor# [1:7491639474886791491:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21811, MsgBus: 61978 2025-04-10T11:07:21.861076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639478933894404:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:21.861154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007e8/r3tmp/tmp9U5Nn8/pdisk_1.dat 2025-04-10T11:07:21.998883Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:22.021181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:22.021280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:22.023247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21811, node 2 2025-04-10T11:07:22.160553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:22.160598Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:22.160607Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:22.160734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61978 TClient is connected to server localhost:61978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:22.791680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:22.803181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:07:22.820630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.025151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.227106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:23.319855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:25.405075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639496113765364:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.405147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.498407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.538894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.578820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.627626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.667892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.712913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:25.780245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639496113765875:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.780346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.780614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491639496113765880:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:25.785228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2 ... : [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639662108991673:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.579196Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.638697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.680479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.716607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.755936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.796593Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.836006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:04.891919Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639662108992186:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.892040Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639662108992191:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.892045Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:04.896666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:04.909769Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639662108992193:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:04.970831Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639640634153402:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:04.970958Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:05.010862Z node 6 :TX_PROXY ERROR: Actor# [6:7491639666403959546:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11736, MsgBus: 9272 2025-04-10T11:08:08.356887Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639680357556824:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:08.357013Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007e8/r3tmp/tmpyrYLTZ/pdisk_1.dat 2025-04-10T11:08:08.541806Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:08.574790Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:08.574929Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:08.578506Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11736, node 7 2025-04-10T11:08:08.716996Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:08.717024Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:08.717036Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:08.717226Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9272 TClient is connected to server localhost:9272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:08:09.686190Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:09.709085Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:09.795390Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:10.106804Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:10.205975Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:13.357000Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639680357556824:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:13.357093Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:16.958449Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639714717297011:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:16.958593Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:17.082995Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.172528Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.265295Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.446517Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.536948Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.652112Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:17.817028Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639719012264835:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:17.817144Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:17.817549Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639719012264840:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:17.828463Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:17.859571Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639719012264842:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:08:17.957887Z node 7 :TX_PROXY ERROR: Actor# [7:7491639719012264901:3486] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |84.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicatesDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 16489, MsgBus: 13052 2025-04-10T11:06:02.781203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639136902142374:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:02.781466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000916/r3tmp/tmp1HCB7X/pdisk_1.dat 2025-04-10T11:06:03.236870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:03.265133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:03.265240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:03.270309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16489, node 1 2025-04-10T11:06:03.418763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:03.418787Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:03.418796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:03.418925Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13052 TClient is connected to server localhost:13052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:04.125959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.143215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:04.149407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.297633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.557234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.667599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:06.699244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639154082013135:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:06.699382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.053867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.103683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.153881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.208923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.249314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.336410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.420111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639158376980949:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.420256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.420789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639158376980954:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.425362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:07.442394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639158376980956:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:07.506701Z node 1 :TX_PROXY ERROR: Actor# [1:7491639158376981011:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:07.778620Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639136902142374:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:07.778709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:06:08.616437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:06:08.908158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:06:09.103531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:06:09.376943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:06:09.823833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14313, MsgBus: 13292 2025-04-10T11:06:11.730038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639177843059931:2222];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000916/r3tmp/tmpWY3x1j/pdisk_1.dat 2025-04-10T11:06:11.769868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:06:11.870598Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:11.915700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:11.915824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:11.918163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14313, node 2 2025-04-10T11:06:12.150853Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:12.150875Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:12.150887Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:12.151040Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13292 TClient is connected to server localhost:13292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 Scheme ... 
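For reference, the pragma named by the SqlIn warning above (code 1108) is applied at the top of a query. A minimal YQL sketch follows; the IN list is hypothetical, and the table and column names are borrowed from the queries that appear later in this log, so this is an illustration of the suggested fix, not output from this run:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

-- With the pragma enabled, IN over a collection containing NULL follows
-- ANSI three-valued logic (a non-matching lookup yields NULL rather than
-- false), and the warning above should no longer be emitted for nullable
-- arguments. The 1000u/2000u literals and the NULL item are hypothetical.
SELECT `Value`
FROM `/Root/TestPredicates`
WHERE Key1 IN (1000u, 2000u, NULL)
ORDER BY `Value`;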
907445Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283266936, txId: 281474976715785] shutting down 2025-04-10T11:07:47.185176Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283267223, txId: 281474976715787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-04-10T11:07:47.488943Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283267524, txId: 281474976715789] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] 2025-04-10T11:07:47.776969Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283267811, txId: 281474976715791] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-04-10T11:07:48.018210Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283268056, txId: 281474976715793] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] 2025-04-10T11:07:48.302852Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283268336, txId: 281474976715795] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-04-10T11:07:48.874583Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283268910, txId: 281474976715797] shutting down 2025-04-10T11:07:49.357600Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283269386, txId: 281474976715799] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-04-10T11:07:49.507518Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283269502, txId: 281474976715801] shutting down 2025-04-10T11:07:49.607685Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283269604, txId: 281474976715803] shutting down EXPECTED: [] RECEIVED: [] 2025-04-10T11:07:49.651530Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-04-10T11:07:49.651562Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found 2025-04-10T11:07:49.651574Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037919 not found Trying to start YDB, 
gRPC: 26526, MsgBus: 4991 2025-04-10T11:07:50.759101Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639599629539374:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:50.759160Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000916/r3tmp/tmpZ1BOHa/pdisk_1.dat 2025-04-10T11:07:50.952904Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:50.981222Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:50.981374Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:50.983143Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26526, node 7 2025-04-10T11:07:51.064294Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:51.064333Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:51.064347Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:51.064532Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4991 TClient is connected to server localhost:4991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:51.863766Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:51.878188Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:51.958746Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:52.192098Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:52.311939Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:55.744344Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639621104377639:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.744430Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:55.759177Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639599629539374:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:55.759277Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:07:55.808236Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.848736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.884859Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.928113Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:55.968232Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.012086Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:56.063958Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639625399345455:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.064023Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639625399345460:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.064095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:56.069293Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:56.084055Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639625399345462:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:56.165237Z node 7 :TX_PROXY ERROR: Actor# [7:7491639625399345515:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:57.481987Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:08:05.937745Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:08:05.937770Z node 7 :IMPORT WARN: Table profiles were not loaded >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/yql/tools/yqlrun/yqlrun
ld.lld: warning: version script assignment of 'global' failed: symbol not defined — emitted once per symbol, in original order (some symbols repeat): __after_morecore_hook, daylight, environ, _environ, __free_hook, __malloc_hook, __malloc_initialize_hook, __memalign_hook, program_invocation_name, program_invocation_short_name, __realloc_hook, timezone, tzname, __libc_start_main, AnnotateHappensAfter, AnnotateHappensBefore, AnnotateIgnoreWritesBegin, AnnotateIgnoreWritesEnd, AnnotateIgnoreReadsBegin, AnnotateIgnoreReadsEnd, abort, bind, close, __close, closedir, connect, creat, creat64, dl_iterate_phdr, dup, dup2, dup3, epoll_create, epoll_create1, epoll_ctl, epoll_pwait, epoll_wait, eventfd, fork, __fxstat, __fxstat64, gettimeofday, inotify_init, inotify_init1, kill, listen, nanosleep, on_exit, open, open64, pipe, pipe2, pthread_barrier_destroy, pthread_barrier_init, pthread_barrier_wait, pthread_cond_broadcast, pthread_cond_destroy, pthread_cond_init, pthread_cond_signal, pthread_cond_timedwait, pthread_cond_wait, pthread_kill, pthread_mutex_destroy, pthread_mutex_init, pthread_mutex_lock, pthread_mutex_timedlock, pthread_mutex_trylock, pthread_mutex_unlock, pthread_once, pthread_rwlock_destroy, pthread_rwlock_init, pthread_rwlock_rdlock, pthread_rwlock_timedrdlock, pthread_rwlock_timedwrlock, pthread_rwlock_tryrdlock, pthread_rwlock_trywrlock, pthread_rwlock_unlock, pthread_rwlock_wrlock, pthread_spin_destroy, pthread_spin_init, pthread_spin_lock, pthread_spin_trylock, pthread_spin_unlock, raise, __res_iclose, rmdir, setjmp, _setjmp, signalfd, sigsetjmp, __sigsetjmp, sigsuspend, sleep, socket, socketpair, tmpfile, tmpfile64, unlink, usleep, bcopy, dladdr, dlerror, dl_iterate_phdr, epoll_pwait, epoll_wait, fcvt, fgets_unlocked, fork, forkpty, fread_unlocked, __fxstat, __fxstat64, __fxstatat, __fxstatat64, gcvt, getenv, gethostname, getrlimit, getrlimit64, getrusage, gettimeofday, mbrtowc, mbtowc, memccpy, mempcpy, openpty, pipe, pipe2, prlimit, prlimit64, pthread_key_create, pthread_mutex_lock, pthread_mutex_unlock, putenv, setenv, shmat, socketpair, stpcpy, strftime, __strftime_l, strftime_l, strtod, __strtod_internal, __strtod_l, strtod_l, strtof, __strtof_internal, __strtof_l, strtof_l, strtold, __strtold_internal, __strtold_l, strtold_l, __strtol_internal, __strtol_l, strtol_l, __strtoll_internal, __strtoll_l, strtoll_l, strtoul, __strtoul_internal, strtoull, __strtoul_l, strtoul_l, __strtoull_internal, __strtoull_l, strtoull_l, swprintf, tzset, vswprintf, wcschr, wcscmp, wcscpy, wcsftime, __wcsftime_l, wcsftime_l, wcstod, __wcstod_internal, __wcstod_l, wcstod_l, wcstof, __wcstof_internal, __wcstof_l, wcstof_l, wcstol, wcstold, __wcstold_internal, __wcstold_l, wcstold_l, __wcstol_internal, wcstoll, __wcstol_l, wcstol_l, __wcstoll_internal, __wcstoll_l, wcstoll_l, wcstoul, __wcstoul_internal, wcstoull, __wcstoul_l, wcstoul_l, __wcstoull_internal, __wcstoull_l, wcstoull_l, wmemcpy, wmemmove, wmempcpy, wmemset
|84.1%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TConsoleConfigTests::TestAffectedConfigs [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheCacheTest::MoveToWarm [GOOD] >> TCacheCacheTest::EvictNext [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> CompressionTest::lz4_generator_deflates [GOOD] >> StLog::Basic >> StLog::Basic [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> Secret::DeactivatedQueryService [GOOD] >> TCowBTreeTest::Concurrent [GOOD] >> Secret::Deactivated [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] >> TCowBTreeTest::Alignment [GOOD] >> KqpNewEngine::JoinSameKey [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err:
testing erasure none main# 0 main# 1 Checked 2 cases, took 22 us
testing erasure block-4-2 main# 0 … main# 63 Checked 262144 cases, took 830704 us
testing erasure mirror-3-2 main# 0 … main# 7 Checked 512 cases, took 456808 us
testing erasure block-2-2 main# 0 … main# 15 Checked 4096 cases, took 514673 us
testing erasure mirror-3 main# 0 … main# 7 Checked 64 cases, took 21 us
testing erasure block-3-2 main# 0 … main# 31 Checked 32768 cases, took 1314637 us
testing erasure stripe-2-2 main# 0 … main# 15 Checked 4096 cases, took 391158 us
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.1485937105 seconds Producer 1 worked for 0.1763011765 seconds Consumer 0 worked for 0.3546925569 seconds Consumer 1 worked for 0.5409489182 seconds Consumer 2 worked for 0.4370091913 seconds Consumer 3 worked for 0.4149014132 seconds
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2025-04-10T11:08:15.483503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00150e/r3tmp/tmporBBkK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18707, node 1 TClient is connected to server localhost:26810 2025-04-10T11:08:20.119921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:08:20.318721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:20.371753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:20.371819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:20.371862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:20.372153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:08:20.430874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:20.447098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:20.478978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:35.607846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:686:2576], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:35.608016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 16738, MsgBus: 26212 2025-04-10T11:07:29.447601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639511704680348:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:29.447656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006ba/r3tmp/tmpbyQvxZ/pdisk_1.dat 2025-04-10T11:07:29.877973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:29.893692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:29.893777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:29.896460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16738, node 1 2025-04-10T11:07:30.030519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:30.030634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:30.030643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:30.030790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26212 TClient is connected to server localhost:26212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:30.729309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:07:30.751360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:07:30.758288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:30.926543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:31.110262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:31.195607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:33.044888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639528884551309:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.044985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.392052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.428031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.475278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.547196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.606112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.704214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:07:33.807725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639528884551829:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.807860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.808194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639528884551834:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:33.812955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:07:33.844007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639528884551836:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:07:33.920121Z node 1 :TX_PROXY ERROR: Actor# [1:7491639528884551891:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:07:34.450877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639511704680348:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:34.450955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28038, MsgBus: 11889 2025-04-10T11:07:36.870320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639539766052547:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:36.870376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006ba/r3tmp/tmpGnYpUe/pdisk_1.dat 2025-04-10T11:07:37.104462Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:37.139991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:37.140094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:37.143560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28038, node 2 2025-04-10T11:07:37.210422Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:37.210447Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:37.210459Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:37.210588Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11889 TClient is connected to server localhost:11889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:07:37.741220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.758165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:37.835701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:38.052983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:38.135248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:09.073119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.176777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.259545Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.324557Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.399048Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.508019Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:09.614502Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491639660507419948:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:09.614651Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:09.619513Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639681982258741:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:09.619652Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:09.619956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491639681982258746:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:09.630553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:09.659228Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491639681982258748:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:08:09.752559Z node 6 :TX_PROXY ERROR: Actor# [6:7491639681982258807:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61015, MsgBus: 2790 2025-04-10T11:08:13.591552Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491639700997015387:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:13.591641Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006ba/r3tmp/tmpbUGrW8/pdisk_1.dat 2025-04-10T11:08:14.131301Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:14.131418Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:14.155176Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:14.171979Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61015, node 7 2025-04-10T11:08:14.466780Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:14.466803Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:14.466817Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:14.466965Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2790 TClient is connected to server localhost:2790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:16.548642Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:08:16.555411Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:08:16.565624Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:16.701518Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:17.435761Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:17.837105Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:18.594216Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491639700997015387:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:18.594305Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:28.358398Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639765421526715:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:28.358544Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:28.568880Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:28.697576Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:08:28.782657Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:28.916898Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:08:29.043215Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:08:29.106281Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:08:29.106324Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:29.235527Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:08:29.366609Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639769716494548:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:29.366754Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:29.367279Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491639769716494553:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:29.372919Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:08:29.399549Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491639769716494555:2507], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:08:29.474101Z node 7 :TX_PROXY ERROR: Actor# [7:7491639769716494611:3518] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2025-04-10T11:08:15.373573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001501/r3tmp/tmpY070lw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21892, node 1 TClient is connected to server localhost:19513 2025-04-10T11:08:20.117904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:08:20.317699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:20.366549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:20.366648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:20.366690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:20.367072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:08:20.430869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:20.446492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:20.477421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:32.702240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.702477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:694:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.702592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.803933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:08:32.853114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2583], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:08:32.982911Z node 1 :TX_PROXY ERROR: Actor# [1:748:2615] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:35.638589Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:758:2624], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2025-04-10T11:08:35.640754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWE5ZTY4Yi00YTdhODMwNC03NzM4ZTUxLWQ3MzE5NmM5, ActorId: [1:681:2573], ActorState: ExecuteState, TraceId: 01jrfp68c7a9jprhvgga33g5y7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1]
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus
------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD]
Test command err: Producer 0 worked for 0.3427413589 seconds Producer 1 worked for 0.09592949749 seconds Consumer 0 worked for 0.4201563435 seconds on a snapshot of size 20000 Consumer 1 worked for 0.1945865937 seconds on a snapshot of size 40000 Consumer 2 worked for 0.7437888843 seconds on a snapshot of size 60000 Consumer 3 worked for 0.5039928386 seconds on a snapshot of size 80000 Consumers had 1199983 successful seeks
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo]
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|84.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std]
|84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|84.3%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std]
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD]
Test command err:
2025-04-10T11:00:49.843010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:49.843096Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:49.893037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:00:51.895066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:51.895163Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:51.937970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:00:53.476238Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:53.476318Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:53.524841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:00:55.095926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:55.096004Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:55.170741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:00:56.856781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:56.856863Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:56.997129Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:00:58.726933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:00:58.727006Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:00:58.787446Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:00.315392Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:00.315477Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:00.383644Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:02.254076Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:02.254197Z node 8 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:02.307626Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:05.053713Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:05.053789Z node 9 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:05.106194Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:08.078048Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:08.078178Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:08.135995Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:11.189806Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:11.189900Z node 11 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:11.247406Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:14.336027Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:14.336136Z node 12 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:14.418316Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:17.570946Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:17.571046Z node 13 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:17.699153Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:20.256260Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:20.256347Z node 14 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:20.306603Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:22.617516Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:22.617602Z node 15 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:22.690177Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:25.014596Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:25.014711Z node 16 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:25.078020Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:28.327387Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:28.327482Z node 17 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:28.391921Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:29.145273Z node 17 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1
2025-04-10T11:01:30.401706Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:30.401796Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:30.486323Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:31.510704Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:01:31.510820Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:31.630339Z node 18 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig=
2025-04-10T11:01:32.717157Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:32.717261Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:32.766666Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:33.987329Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:01:33.987454Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:34.069671Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[19:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig=
2025-04-10T11:01:35.193906Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:35.194012Z node 20 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:35.260209Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:39.216091Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:39.216183Z node 21 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:39.294371Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:43.498583Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:43.498683Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:43.545191Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:45.166962Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:45.167059Z node 23 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:45.215799Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:46.852232Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:01:46.852335Z node 24 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:01:46.936767Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:01:53.961616Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:01:53.961695Z node 24 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:06:09.522703Z node 24 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig=
2025-04-10T11:06:11.189053Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:06:11.189153Z node 25 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:06:11.271890Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:06:18.824731Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:06:18.824846Z node 25 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:23.128859Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:08:23.128949Z node 26 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:23.183502Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:08:27.553332Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:08:27.553436Z node 27 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:27.599453Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:08:31.299941Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:08:31.300037Z node 28 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:31.379721Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-10T11:08:34.463224Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:08:34.463345Z node 29 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:34.515916Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|84.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
>> TPageMapTest::TestRandom [GOOD]
>> TPageMapTest::TestIntrusive [GOOD]
>> TPageMapTest::TestSimplePointer [GOOD]
>> TPageMapTest::TestSharedPointer [GOOD]
>> TPageMapTest::TestSimplePointerFull
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TPageMapTest::TestSimplePointerFull [GOOD]
>> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD]
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|84.4%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD]
|84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
|84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
|84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std]
|84.5%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge [GOOD]
>> KqpKv::ReadRows_Decimal
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|84.5%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|84.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|84.5%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD]
Test command err: === Server->StartServer(false);
2025-04-10T11:00:58.431064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491637831199493855:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:00:58.431137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:00:58.680442Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab6/r3tmp/tmpky7fdO/pdisk_1.dat
2025-04-10T11:00:58.932266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:00:58.932375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:00:58.938821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:00:58.939118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16270, node 1
2025-04-10T11:00:59.055042Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000ab6/r3tmp/yandexHwSIxL.tmp
2025-04-10T11:00:59.055066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000ab6/r3tmp/yandexHwSIxL.tmp
2025-04-10T11:00:59.055254Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000ab6/r3tmp/yandexHwSIxL.tmp
2025-04-10T11:00:59.055409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:00:59.118850Z INFO: TTestServer started on Port 16023 GrpcPort 16270 TClient is connected to server localhost:16023 PQClient connected to localhost:16270 === TenantModeEnabled() = 0 === Init PQ - start server on port 16270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-10T11:00:59.701948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:00:59.711085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.711405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:00:59.711701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:00:59.711759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.712609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:00:59.712743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:00:59.712933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.712970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:00:59.712992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:00:59.713009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T11:00:59.713636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.713660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:00:59.713672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:00:59.713995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.714016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:00:59.714040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:00:59.714069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 
2025-04-10T11:00:59.718753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:00:59.719145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:00:59.719158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true
2025-04-10T11:00:59.719176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:00:59.719432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816
2025-04-10T11:00:59.719561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545
2025-04-10T11:00:59.721025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744282859767, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-10T11:00:59.721246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744282859767 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480
2025-04-10T11:00:59.721271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480
2025-04-10T11:00:59.721549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240
2025-04-10T11:00:59.721586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480
2025-04-10T11:00:59.721771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1
2025-04-10T11:00:59.721832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480
2025-04-10T11:00:59.722782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480
2025-04-10T11:00:59.722805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1]
2025-04-10T11:00:59.723004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480
2025-04-10T11:00:59.723020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491637831199494372:2246], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1
2025-04-10T11:00:59.723059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480
2025-04-10T11:00:59.723080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState
2025-04-10T11:00:59.723156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1
2025-04-10T11:00:59.723166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1
2025-04-10T11:00:59.723188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1
2025-04-10T11:00:59.723199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1
2025-04-10T11:00:59.723213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false
2025-04-10T11:00:59.723235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1
2025-04-10T11:00:59.723248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0
2025-04-10T11:00:59.723257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0
2025-04-10T11:00:59.723311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2
2025-04-10T11:00:59.723331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1
2025-04-10T11:00:59.723343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3
2025-04-10T11:00:59.725843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657
2025-04-10T11:00:59.725955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657
2025-04-10T11:00:59.725971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657
2025-04-10T11:00:59.725985Z node 1 :FLAT ...
nter &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:715:1
#7 0x1a3aa4ed in __union<2UL, const grpc_core::ChannelArgs::Pointer &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:715:1
#8 0x1a3aa4ed in construct_at >, grpc_core::ChannelArgs::Pointer>, const std::__y1::in_place_index_t<2UL> &, const grpc_core::ChannelArgs::Pointer &, std::__y1::__variant_detail::__union<(std::__y1::__variant_detail::_Trait)1, 0UL, int, TBasicString >, grpc_core::ChannelArgs::Pointer> *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/construct_at.h:41:46
#9 0x1a3aa4ed in __construct_at >, grpc_core::ChannelArgs::Pointer>, const std::__y1::in_place_index_t<2UL> &, const grpc_core::ChannelArgs::Pointer &, std::__y1::__variant_detail::__union<(std::__y1::__variant_detail::_Trait)1, 0UL, int, TBasicString >, grpc_core::ChannelArgs::Pointer> *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/construct_at.h:49:10
#10 0x1a3aa4ed in operator() &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:825:13
#11 0x1a3aa4ed in __invoke<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:824:11), const std::__y1::__variant_detail::__alt<2UL, grpc_core::ChannelArgs::Pointer> &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#12 0x1a3aa4ed in decltype(auto) std::__y1::__variant_detail::__visitation::__base::__dispatcher<2ul>::__dispatch[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>>::__generic_construct[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&>(std::__y1::__variant_detail::__ctor>, grpc_core::ChannelArgs::Pointer>>&, std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&)::'lambda'(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&)&&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&>(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&) /-S/contrib/libs/cxxsupp/libcxx/include/variant:540:14
#13 0x1a3a5c48 in __visit_alt_at<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:824:11), const std::__y1::__variant_detail::__copy_constructor >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:502:12
#14 0x1a3a5c48 in __generic_construct >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:822:7
#15 0x1a3a5c48 in __copy_constructor /-S/contrib/libs/cxxsupp/libcxx/include/variant:897:1
#16 0x1a3a5c48 in __assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:909:28
#17 0x1a3a5c48 in __move_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:995:1
#18 0x1a3a5c48 in __copy_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:1025:1
#19 0x1a3a5c48 in __impl /-S/contrib/libs/cxxsupp/libcxx/include/variant:1045:25
#20 0x1a3a5c48 in variant /-S/contrib/libs/cxxsupp/libcxx/include/variant:1192:35
#21 0x1a3a5c48 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Rebalance(TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>, std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:252:30
#22 0x1a3a4d6a in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:265:14
#23 0x1a3a4d39 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:266:24
#24 0x1a39cc47 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Add(TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:36:16
#25 0x1a39c62a in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:158:28
#26 0x1a39bcf5 in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, grpc_core::ChannelArgs::Pointer) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:150:10
#27 0x1a406266 in grpc_core::Channel::Create(char const*, grpc_core::ChannelArgs, grpc_channel_stack_type, grpc_transport*) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:218:19
#28 0x1a78c951 in CreateChannel /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:323:10
#29 0x1a78c951 in grpc_channel_create /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:365:14
#30 0x1aff05c0 in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelWithInterceptors(TBasicString> const&, grpc::ChannelArguments const&, std::__y1::vector>, std::__y1::allocator>>>) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:55:13
#31 0x1aff039b in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelImpl(TBasicString> const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:40:12
#32 0x1afe8b54 in grpc::CreateCustomChannel(TBasicString> const&, std::__y1::shared_ptr const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/create_channel.cc:50:25
#33 0x18f2a6a6 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::Connect(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:824:23
#34 0x18c260f2 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::TDirectReadTestSetup(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:806:13
#35 0x18c5f92a in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadRestartTablet::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1397:30
#36 0x18f09367 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1
#37 0x18f09367 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#38 0x18f09367 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#39 0x18f09367 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#40 0x18f09367 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#41 0x199b0a95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#42 0x199b0a95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#43 0x199b0a95 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#44 0x199805e8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#45 0x18f08313 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1
#46 0x19981eb5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#47 0x199ab00c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#48 0x7fd17955dd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 6429421 byte(s) leaked in 1605 allocation(s).
>> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo]
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD]
>> test_auditlog.py::test_single_dml_query_logged[replace]
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update]
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|84.6%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|84.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test
|84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test
|84.7%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay
|84.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay
|84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
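Editor's note: the leak report above bottoms out in grpc channel construction. Frames #25 to #27 allocate grpc_core::ChannelArgs AVL nodes, frames #30 to #32 are the public grpc::CreateCustomChannel path with insecure credentials, and frames #33 to #35 show the call site, TDirectReadTestSetup::Connect in persqueue_ut.cpp, reached from the DirectReadRestartTablet test case. A minimal sketch of that call pattern follows; the endpoint and the channel argument are illustrative, not taken from the test, and this is not the test's actual code.

// Sketch only: the shape of the allocation site in frames #27-#32 above.
// The returned shared_ptr owns the channel (and its ChannelArgs storage);
// if the fixture holding it is leaked at process exit, LeakSanitizer
// attributes every byte under it to this Create call, as in the report.
#include <grpcpp/create_channel.h>
#include <grpcpp/security/credentials.h>

std::shared_ptr<grpc::Channel> ConnectInsecure(const std::string& endpoint) {
    grpc::ChannelArguments args;
    // Hypothetical argument; each Set on the args inserts into the AVL
    // seen in frames #21-#26.
    args.SetInt(GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH, 64 * 1024 * 1024);
    return grpc::CreateCustomChannel(
        endpoint, grpc::InsecureChannelCredentials(), args);
}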
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple Test command err:
2025-04-10T11:08:14.104697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:08:14.104921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T11:08:14.105081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014d9/r3tmp/tmpIRXzaE/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 6641, node 1
TClient is connected to server localhost:24946
2025-04-10T11:08:20.121276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T11:08:20.318338Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:08:20.370645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:08:20.370718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:08:20.370758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:08:20.371123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:08:20.431623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:08:20.450332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:08:20.480206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
2025-04-10T11:08:35.613676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:811:2677], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:08:35.613846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:08:35.675046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480
2025-04-10T11:08:36.586596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:08:36.586707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:08:36.592076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2760], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:08:36.596461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480
2025-04-10T11:08:36.673052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:935:2762], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-10T11:08:39.950419Z node 1 :TX_PROXY ERROR: Actor# [1:1034:2832] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:08:41.799422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:08:42.649280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480
2025-04-10T11:08:43.919774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480
2025-04-10T11:08:45.481516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-10T11:08:46.595649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480
2025-04-10T11:08:50.725709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
2025-04-10T11:08:51.477939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:6641 ;EXPECTATION=1 VERIFY failed (2025-04-10T11:08:54.083999Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/testlib/common_helper.cpp:157, auto NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(const TString &, const bool, const bool)::(anonymous class)::operator()(NThreading::TFuture)::(anonymous class)::operator()(NYdb::TAsyncStatus) const: (expectation == f.GetValueSync().IsSuccess()) library/cpp/testing/unittest/registar.cpp:37 RaiseError(): requirement UnittestThread failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18D578C9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18D45B5B) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+1218 (0x191CBDA2) ??+0 (0x35CDA331) ??+0 (0x35CDB5D3) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+1053 (0x1F14BE1D) NThreading::TFuture NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)::operator()(NThreading::TFuture)+874 (0x1F4EE13A) std::__y1::__function::__func NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture), std::__y1::allocator NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)>, void (NThreading::TFuture const&)>::operator()(NThreading::TFuture const&)+67 (0x1F4EFC53) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+1053 (0x1F14BE1D) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe190000] NYdb::Dev::TClientImplCommon::RunSimple(Ydb::Table::ExecuteSchemeQueryRequest&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, NYdb::Dev::TRpcRequestSettings const&)::'lambda'(google::protobuf::Any*, NYdb::Dev::TPlainStatus)&, google::protobuf::Any*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, google::protobuf::Any*&&, NYdb::Dev::TPlainStatus&&)+312 (0x1F83C038) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)::operator()(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)+576 (0x1F83B6C0) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe190000](Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)&, Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, Ydb::Operations::Operation*&&, NYdb::Dev::TPlainStatus&&)+218 (0x1F83B2BA) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, bool, 
std::__y1::shared_ptr)::'lambda'(Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus)::operator()(Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus)+1075 (0x1F83A823) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe190000](Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, bool, std::__y1::shared_ptr)::'lambda'(Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus)&, Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, Ydb::Table::ExecuteSchemeQueryResponse*&&+218 (0x1F83A22A) NYdb::Dev::TGRpcErrorResponse::Process(void*)+1678 (0x1F83510E) TAdaptiveThreadPool::TImpl::TThread::DoExecute()+966 (0x19B353C6) ??+0 (0x19B31F0D) ??+0 (0x18D5BD15) ??+0 (0x18A09A39) ??+0 (0x7F34767B7AC3) ??+0 (0x7F3476849850) >> KqpKv::ReadRows_Decimal [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |84.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |84.8%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |84.8%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 17252, MsgBus: 1976 2025-04-10T11:03:52.959873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638581371000526:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:52.959926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001122/r3tmp/tmpLZStXT/pdisk_1.dat 2025-04-10T11:03:53.554354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:03:53.556812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:03:53.556914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:03:53.561513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17252, node 1 2025-04-10T11:03:53.685996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-04-10T11:03:53.686034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:03:53.686050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:03:53.686227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1976 TClient is connected to server localhost:1976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:03:54.432781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:54.448171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:54.487050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:03:54.682966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:03:54.906606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:03:55.002774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:03:56.926263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638598550871414:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:56.926390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.304789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.391011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.470035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.511458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.571314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.628321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:03:57.701509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638602845839229:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.701596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.701794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491638602845839234:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:03:57.708341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:03:57.724226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491638602845839236:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:03:57.796253Z node 1 :TX_PROXY ERROR: Actor# [1:7491638602845839291:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:03:57.960709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638581371000526:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:03:58.043372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:03:59.177338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.283076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.347950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.434539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.486286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.523514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.558563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.638831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.686927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:03:59.736217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5198, MsgBus: 18781 2025-04-10T11:04:02.726137Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491638623669109887:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:04:02.726323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001122/r3tmp/tmpcSEenI/pdisk_1.dat 2025-04-10T11:04:02.965047Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:04:03.013492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:04:03.013590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:04:03.016960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5198, node 2 2025-04-10T11:04:03.096119Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:04:03.096142Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:04:03.096153Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:04:03.096282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18781 TClient is conn ... ARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7491639693870417490:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:11.774871Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001122/r3tmp/tmp70480l/pdisk_1.dat 2025-04-10T11:08:12.067168Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:12.112570Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:12.112758Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:12.114733Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62765, node 18 2025-04-10T11:08:12.214776Z node 18 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:12.216950Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:12.216972Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:12.217245Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20088 TClient is connected to server localhost:20088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:08:14.752112Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:08:16.775399Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7491639693870417490:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:16.775511Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:25.616471Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:08:27.050355Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:08:27.050386Z node 18 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 6350, MsgBus: 4437 2025-04-10T11:08:31.847590Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7491639779319257567:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:31.847668Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001122/r3tmp/tmpnKnV26/pdisk_1.dat 2025-04-10T11:08:32.685353Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:32.769350Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:32.769527Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:32.780260Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6350, node 19 2025-04-10T11:08:33.198941Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:33.198969Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:33.198984Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:33.199203Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4437 TClient is connected to server localhost:4437 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:35.572931Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:36.849487Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7491639779319257567:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:36.849635Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:08:47.331543Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:08:47.562330Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:08:47.562365Z node 19 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 17077, MsgBus: 17779 2025-04-10T11:08:51.443870Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7491639864545344405:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:51.443955Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001122/r3tmp/tmpejrIIF/pdisk_1.dat 2025-04-10T11:08:52.480695Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:52.588017Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:52.588214Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:52.631447Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17077, node 20 2025-04-10T11:08:53.032079Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:53.032116Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:53.032134Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:53.032394Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17779 TClient 
is connected to server localhost:17779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:08:56.022842Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:08:56.454310Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7491639864545344405:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:08:56.454438Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:09:06.239731Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7491639928969854561:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:06.239924Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:06.280922Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:09:06.551729Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-04-10T11:09:06.580879Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |84.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpWorkload::KV [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |84.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 12993, MsgBus: 2123 2025-04-10T11:07:09.746520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639424077868069:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:09.746601Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d3/r3tmp/tmpPbHzDb/pdisk_1.dat 2025-04-10T11:07:10.850106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:07:10.850248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:07:10.850560Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:07:10.851227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:07:10.921931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12993, node 1 2025-04-10T11:07:12.276628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:07:12.276650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:07:12.276657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:07:12.278648Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2123 TClient is connected to server localhost:2123 2025-04-10T11:07:14.735953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639424077868069:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:07:14.736021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:07:14.952238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:07:15.385240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639449847672511:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:15.385421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:17.883780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:07:18.483413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462732576041:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.483529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.483815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639462732576047:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:07:18.488773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:07:18.501337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639462732576049:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-10T11:07:18.589172Z node 1 :TX_PROXY ERROR: Actor# [1:7491639462732576100:3429] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:07:25.844757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:07:25.844813Z node 1 :IMPORT WARN: Table profiles were not loaded
took: 0.352384s
took: 0.311036s
took: 0.408751s
took: 0.397081s
took: 0.411793s
took: 0.333830s
took: 0.404313s
took: 0.378387s
took: 0.416533s
took: 0.413607s
took: 0.532343s
took: 0.541219s
took: 0.563496s
took: 0.573687s
took: 0.576682s
took: 0.583491s
took: 0.586273s
took: 0.589308s
took: 0.589366s
took: 0.591883s
took: 0.723527s
took: 0.733154s
took: 0.715711s
took: 0.740349s
took: 0.742975s
took: 0.774225s
took: 0.779886s
took: 0.750447s
took: 0.796688s
took: 0.833390s
took: 0.140309s
took: 0.141491s
took: 0.144215s
took: 0.142644s
took: 0.143405s
took: 0.145092s
took: 0.140139s
took: 0.150507s
took: 0.152071s
took: 0.140408s
took: 0.321123s
took: 0.322896s
took: 0.295452s
took: 0.328293s
took: 0.333135s
took: 0.353187s
took: 0.363426s
took: 0.370616s
took: 0.331732s
took: 0.441390s
2025-04-10T11:09:12.973155Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found
2025-04-10T11:09:12.974170Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-04-10T11:09:12.974721Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found
2025-04-10T11:09:12.974764Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found
2025-04-10T11:09:12.974783Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found
2025-04-10T11:09:12.974801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found
2025-04-10T11:09:12.974820Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found
2025-04-10T11:09:12.974839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found
2025-04-10T11:09:12.974857Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found
2025-04-10T11:09:12.974875Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found
2025-04-10T11:09:12.974892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found
2025-04-10T11:09:12.974918Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found
2025-04-10T11:09:12.974946Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found
2025-04-10T11:09:13.036331Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-04-10T11:09:13.036379Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found
2025-04-10T11:09:13.036400Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found
2025-04-10T11:09:13.036422Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found
2025-04-10T11:09:13.036447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-04-10T11:09:13.036467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found
2025-04-10T11:09:13.036487Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found
2025-04-10T11:09:13.036508Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found
2025-04-10T11:09:13.036529Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found
2025-04-10T11:09:13.036553Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found
2025-04-10T11:09:13.036584Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found
2025-04-10T11:09:13.036618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found
2025-04-10T11:09:13.041766Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found
2025-04-10T11:09:13.044342Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found
2025-04-10T11:09:13.044384Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found
2025-04-10T11:09:13.044416Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found
2025-04-10T11:09:13.044441Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found
2025-04-10T11:09:13.044461Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found
2025-04-10T11:09:13.044481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found
2025-04-10T11:09:13.044499Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found
2025-04-10T11:09:13.044519Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found
2025-04-10T11:09:13.044537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found
2025-04-10T11:09:13.044560Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found
2025-04-10T11:09:13.044580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found
2025-04-10T11:09:13.044598Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found
2025-04-10T11:09:13.044617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found
2025-04-10T11:09:13.044635Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|85.0%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD]
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD]
|85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|85.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap
|85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap
|85.1%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap
>> Secret::SimpleQueryService [FAIL]
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD]
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD]
|85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots
|85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots
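An aside on the long runs of `took: N.NNNNNNs` lines earlier in this chunk: they are per-iteration timings emitted by the tests, and clusters of similar values usually correspond to one parameterized test batch. A minimal stdlib-only helper like the sketch below (illustrative only, not part of the YDB test suite; the only format assumption is `took: <float>s`) is enough to summarize such runs when triaging a log:

```python
# Summarize "took: N.NNNNNNs" timing runs extracted from a raw log chunk.
# Illustrative helper, not part of the YDB repository.
import re
from statistics import mean, median

TOOK_RE = re.compile(r"took:\s*([0-9]+\.[0-9]+)s")

def summarize_took(log_text: str) -> dict:
    """Extract all 'took:' durations from a log chunk and summarize them."""
    values = [float(m.group(1)) for m in TOOK_RE.finditer(log_text)]
    if not values:
        return {}
    return {
        "count": len(values),
        "min": min(values),
        "median": median(values),
        "mean": mean(values),
        "max": max(values),
    }

if __name__ == "__main__":
    sample = "took: 0.352384s took: 0.833390s took: 0.140309s"
    print(summarize_took(sample))  # {'count': 3, 'min': 0.140309, ...}
```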
|85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD]
>> KqpRanges::NoFullScanAtScanQuery [GOOD]
>> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous
>> KqpRanges::NoFullScanAtDNFPredicate
|85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy
|85.1%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy
|85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD]
>> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD]
>> KqpPg::TableDeleteWhere-useSink [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD]
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD]
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete]
>> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD]
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD]
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD]
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD]
>> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|85.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD]
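The progress lines above all share one shape: an overall percentage, a node kind (`[LD]` link, `[AR]` archive, `[TM]`/`[TA]` test modules/aggregates), a tag set in braces, a target path, and optionally a `>> Test::Name [STATUS]` result. A rough stdlib parser is sketched below for ad-hoc log analysis; the regex is an assumption inferred from the lines in this log, not an official specification of ya's output, and standalone `>> Test` lines without the percent prefix are deliberately left unmatched:

```python
# Parse ya make progress/result lines such as:
#   |85.1%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy
#   |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test::name [GOOD]
# Sketch for ad-hoc analysis; format assumptions inferred from this log only.
import re
from typing import NamedTuple, Optional

LINE_RE = re.compile(
    r"^\|(?P<pct>[0-9.]+)%\|\s+"    # overall progress, e.g. |85.1%|
    r"\[(?P<kind>[A-Z]{2})\]\s+"    # node kind: LD, AR, TM, TA, ...
    r"\{(?P<tags>[^}]*)\}\s+"       # platform/status tags inside braces
    r"(?P<target>\S+)"              # build target or test binary path
    r"(?:\s+>>\s+(?P<test>.+?)\s+\[(?P<status>[A-Z]+)\])?\s*$"  # optional result
)

class Progress(NamedTuple):
    pct: float
    kind: str
    tags: str
    target: str
    test: Optional[str]
    status: Optional[str]

def parse_progress(line: str) -> Optional[Progress]:
    """Return a structured record for a progress line, or None if it doesn't match."""
    m = LINE_RE.match(line.strip())
    if not m:
        return None
    return Progress(float(m["pct"]), m["kind"], m["tags"],
                    m["target"], m["test"], m["status"])
```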
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut
|85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut
|85.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
|85.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
|85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
>> test_auditlog.py::test_single_dml_query_logged[update] [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD]
|85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|85.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|85.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/001c36/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt
2025-04-10T11:09:25.462645Z: {"tx_id":"01jrfp7w2b8htk3fj7eq83zg7d","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:25.462603Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-10T11:09:25.451225Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"}
2025-04-10T11:09:26.116824Z: {"tx_id":"01jrfp7w2b8htk3fj7eq83zg7d","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:26.116785Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:09:25.469871Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
2025-04-10T11:09:26.156004Z: {"tx_id":"01jrfp7w2b8htk3fj7eq83zg7d","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:26.155960Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-10T11:09:26.135125Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"}
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD]
>> test_auditlog.py::test_single_dml_query_logged[upsert]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD]
|85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|85.4%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 9168, MsgBus: 14411
2025-04-10T11:02:36.286458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491638251702884337:2078];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:02:36.293685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c8e/r3tmp/tmpQT1iAJ/pdisk_1.dat
2025-04-10T11:02:37.023111Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:02:37.046059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:02:37.046487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:02:37.051535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9168, node 1
2025-04-10T11:02:37.276906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:02:37.276948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:02:37.276960Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:02:37.277159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14411
TClient is connected to server localhost:14411
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:02:38.804671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:02:38.833315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:02:41.286721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491638251702884337:2078];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:02:41.286814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:02:42.312876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)
2025-04-10T11:02:42.569121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)
2025-04-10T11:02:42.672850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
abcd
2025-04-10T11:02:42.892614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-04-10T11:02:43.181476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
abcd
2025-04-10T11:02:43.419926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
{"abcd ","abcd "}
2025-04-10T11:02:43.646295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2)
2025-04-10T11:02:43.784975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2)
2025-04-10T11:02:43.940696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
abcd
2025-04-10T11:02:44.139531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-04-10T11:02:44.336756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
abcd
2025-04-10T11:02:44.537781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-04-10T11:02:44.748175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2)
2025-04-10T11:02:44.860918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2)
2025-04-10T11:02:44.977542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480
1111
2025-04-10T11:02:45.110404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480
{1111,1111}
2025-04-10T11:02:45.255124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6)
2025-04-10T11:02:45.399034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6)
2025-04-10T11:02:45.512909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2)
2025-04-10T11:02:45.671740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2)
2025-04-10T11:02:45.828330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480
1111
2025-04-10T11:02:46.025369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480
{1111,1111}
2025-04-10T11:02:46.202723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480
1111
2025-04-10T11:02:46.417690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480
{1111,1111}
2025-04-10T11:02:46.629413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710 ...
01'::interval
1 year 2 mons 3 days 04:05:00
1 year 2 mons 3 days 04:05:00
2025-04-10T11:09:15.620910Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710793:0, at schemeshard: 72057594046644480
2025-04-10T11:09:15.785241Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
--!syntax_pg
DELETE FROM Pg1187_b WHERE key = '{"P01-02-03T04:05:01","P01-02-03T04:05:01"}'::_interval
{"1 year 2 mons 3 days 04:05:00","1 year 2 mons 3 days 04:05:00"}
{"1 year 2 mons 3 days 04:05:00","1 year 2 mons 3 days 04:05:00"}
1560
2025-04-10T11:09:16.733997Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710797:0, at schemeshard: 72057594046644480
2025-04-10T11:09:16.894540Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:16.930955Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710799:0, at schemeshard: 72057594046644480
1562
2025-04-10T11:09:17.069506Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:17.101126Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710801:0, at schemeshard: 72057594046644480
2025-04-10T11:09:17.250661Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710802:0, at schemeshard: 72057594046644480
600
2025-04-10T11:09:17.402698Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:17.434785Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710804:0, at schemeshard: 72057594046644480
2025-04-10T11:09:17.603659Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710805:0, at schemeshard: 72057594046644480
628
2025-04-10T11:09:17.790061Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710806:0, at schemeshard: 72057594046644480
2025-04-10T11:09:17.895210Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:17.944481Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710808:0, at schemeshard: 72057594046644480
601
2025-04-10T11:09:18.101377Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710809:0, at schemeshard: 72057594046644480
2025-04-10T11:09:18.240942Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710810:0, at schemeshard: 72057594046644480
603
2025-04-10T11:09:18.450990Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710811:0, at schemeshard: 72057594046644480
2025-04-10T11:09:18.623215Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710812:0, at schemeshard: 72057594046644480
602
2025-04-10T11:09:18.794863Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:18.856375Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710814:0, at schemeshard: 72057594046644480
2025-04-10T11:09:19.051976Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710815:0, at schemeshard: 72057594046644480
604
2025-04-10T11:09:19.244216Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:19.271303Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710817:0, at schemeshard: 72057594046644480
2025-04-10T11:09:19.448164Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710818:0, at schemeshard: 72057594046644480
718
2025-04-10T11:09:19.649489Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710819:0, at schemeshard: 72057594046644480
2025-04-10T11:09:19.872358Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:19.962600Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710821:0, at schemeshard: 72057594046644480
869
2025-04-10T11:09:20.289978Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710822:0, at schemeshard: 72057594046644480
2025-04-10T11:09:20.577195Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710823:0, at schemeshard: 72057594046644480
650
2025-04-10T11:09:20.777116Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:20.816709Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710825:0, at schemeshard: 72057594046644480
2025-04-10T11:09:21.069708Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710826:0, at schemeshard: 72057594046644480
829
2025-04-10T11:09:21.365583Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:21.409747Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710828:0, at schemeshard: 72057594046644480
2025-04-10T11:09:21.674448Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710829:0, at schemeshard: 72057594046644480
774
2025-04-10T11:09:21.888241Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710830:0, at schemeshard: 72057594046644480
2025-04-10T11:09:22.191843Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710831:0, at schemeshard: 72057594046644480
2950
2025-04-10T11:09:22.784930Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710832:0, at schemeshard: 72057594046644480
2025-04-10T11:09:23.119345Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:23.158582Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710834:0, at schemeshard: 72057594046644480
114
2025-04-10T11:09:23.414861Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480
2025-04-10T11:09:23.573170Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710836:0, at schemeshard: 72057594046644480
3802
2025-04-10T11:09:23.735339Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:23.769637Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710838:0, at schemeshard: 72057594046644480
2025-04-10T11:09:24.061769Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:24.100678Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710840:0, at schemeshard: 72057594046644480
4072
2025-04-10T11:09:24.309806Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710841:0, at schemeshard: 72057594046644480
2025-04-10T11:09:24.498072Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:24.531794Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710843:0, at schemeshard: 72057594046644480
142
2025-04-10T11:09:24.790938Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710844:0, at schemeshard: 72057594046644480
2025-04-10T11:09:25.086104Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480
3615
2025-04-10T11:09:25.307688Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:25.339664Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480
2025-04-10T11:09:25.643331Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:25.700126Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710849:0, at schemeshard: 72057594046644480
3614
2025-04-10T11:09:25.964921Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710850:0, at schemeshard: 72057594046644480
2025-04-10T11:09:26.279621Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480
22
2025-04-10T11:09:26.445253Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-10T11:09:26.488376Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710853:0, at schemeshard: 72057594046644480
2025-04-10T11:09:26.729635Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710854:0, at schemeshard: 72057594046644480
2025-04-10T11:09:26.976734Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD]
>> KqpRanges::NoFullScanAtDNFPredicate [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log}
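The KqpPg::TableDeleteWhere output above is dominated by expected coercion failures: "value too long for type character(2)", "bit string length 4 does not match type bit(2)", and "bit string too long for type bit varying(2)". These are the standard PostgreSQL typmod rules that YDB's pg-compatible types enforce on bulk upsert. The sketch below models those rules in plain Python purely to make the failures easy to reason about outside the test; it is an explanatory assumption-based model, not YDB or PostgreSQL code:

```python
# Model of the PostgreSQL length-check semantics behind the coercion errors
# above. Explanatory sketch only -- not the actual YDB/PostgreSQL implementation.
def coerce_bpchar(value: str, max_len: int) -> str:
    """character(n): pad shorter values; reject longer ones unless the excess is spaces."""
    if len(value) > max_len and value[max_len:].strip():
        raise ValueError(f"value too long for type character({max_len})")
    return value[:max_len].ljust(max_len)

def coerce_bit(bits: str, max_len: int, varying: bool) -> str:
    """bit(n) requires an exact length; bit varying(n) only caps the length."""
    if not varying and len(bits) != max_len:
        raise ValueError(f"bit string length {len(bits)} does not match type bit({max_len})")
    if varying and len(bits) > max_len:
        raise ValueError(f"bit string too long for type bit varying({max_len})")
    return bits

coerce_bpchar("ab", 2)             # ok -> "ab"
coerce_bit("1111", 4, False)       # ok -> "1111"
# coerce_bpchar("abcd", 2)         # raises: value too long for type character(2)
# coerce_bit("1111", 2, False)     # raises: bit string length 4 does not match type bit(2)
# coerce_bit("1111", 2, True)      # raises: bit string too long for type bit varying(2)
```

This also explains why `{"abcd ","abcd "}` succeeds in the log where `abcd` against character(2) fails: trailing spaces beyond the declared length are permitted, other characters are not.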
|85.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log}
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD]
>> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtDNFPredicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 24924, MsgBus: 30409
2025-04-10T11:07:40.649412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639558993063036:2135];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:07:40.650588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000687/r3tmp/tmpcMM7JC/pdisk_1.dat
2025-04-10T11:07:41.173983Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 24924, node 1
2025-04-10T11:07:41.203152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:07:41.203287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:07:41.207840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:07:41.281838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:07:41.281873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:07:41.281886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:07:41.282013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:30409
TClient is connected to server localhost:30409
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:07:41.940166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:41.961171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:42.101398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:42.267066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:42.344716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:07:43.816927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639571877966633:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:07:43.817066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:07:44.106924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.137910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.163922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.190295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.218510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.291090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:07:44.338624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639576172934444:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:07:44.338697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:07:44.338734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639576172934449:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:07:44.342339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:07:44.353422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639576172934451:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:07:44.411273Z node 1 :TX_PROXY ERROR: Actor# [1:7491639576172934504:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:07:45.192443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:07:45.453474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:07:45.588678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:07:45.649356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639558993063036:2135];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:07:45.649432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:07:45.709707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-10T11:07:46.014381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 64780, MsgBus: 4778
2025-04-10T11:07:47.132974Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639589511033853:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:07:47.133068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000687/r3tmp/tmp5SftzM/pdisk_1.dat
2025-04-10T11:07:47.244813Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:07:47.278004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:07:47.278106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:07:47.279954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 64780, node 2
2025-04-10T11:07:47.321775Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:07:47.321803Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:07:47.321812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:07:47.321936Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4778
TClient is connected to server localhost:4778
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:07:47.784658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... 0, at schemeshard: 72057594046644480
2025-04-10T11:09:15.883885Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491639966678509016:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:15.884088Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:15.885257Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491639966678509022:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:15.892607Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:09:15.925964Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7491639966678509024:2513], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:09:15.989830Z node 8 :TX_PROXY ERROR: Actor# [8:7491639966678509080:3553] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:09:20.994978Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:09:24.871002Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283364873, txId: 281474976710673] shutting down
---------QUERY----------
--!syntax_v1
SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1;
---------RESULT---------
[[[2001];#];[[2002];[2]]]
------------------------
Trying to start YDB, gRPC: 2621, MsgBus: 26183
2025-04-10T11:09:27.026466Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491640014908379404:2071];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:09:27.026526Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000687/r3tmp/tmpVs6RAV/pdisk_1.dat
2025-04-10T11:09:27.534975Z node 9 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:09:27.547674Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:09:27.547795Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:09:27.557168Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2621, node 9
2025-04-10T11:09:27.830978Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:09:27.831007Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:09:27.831021Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:09:27.831189Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26183
TClient is connected to server localhost:26183
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:09:29.312138Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:09:29.321276Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:09:29.337340Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:09:29.551103Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:09:29.963264Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:09:30.417460Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:09:32.027080Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491640014908379404:2071];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:09:32.027172Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:09:35.712284Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491640053563086844:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:35.712418Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:35.893198Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:09:35.982467Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:09:36.170176Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:09:36.295123Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:09:36.399573Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:09:36.529490Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:09:36.709306Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491640057858054681:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:36.709458Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:36.710003Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491640057858054686:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:09:36.716809Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:09:36.770996Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7491640057858054688:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:09:36.843221Z node 9 :TX_PROXY ERROR: Actor# [9:7491640057858054745:3474] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:09:40.551046Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:09:42.530327Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:09:42.530363Z node 9 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:09:43.103917Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283383115, txId: 281474976710673] shutting down
---------QUERY----------
--!syntax_v1
SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value;
---------RESULT---------
[[[5u]];[[9u]]]
------------------------
2025-04-10T11:09:44.851055Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283384879, txId: 281474976710675] shutting down
---------QUERY----------
--!syntax_v1
SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value;
---------RESULT---------
[[[5u]];[[9u]];[[10u]]]
------------------------
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD]
>> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD]
|85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|85.5%| [TM] {asan, default-linux-x86_64, release}
ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> VDiskTest::HugeBlobWrite [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001c2d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-04-10T11:09:38.241552Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:38.241494Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:09:37.818940Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite [GOOD] Test command err: Put id# [29:1:1:0:0:1048576:1] totalSize# 0 blobValueIndex# 45 Trim Put id# [25:1:1:0:0:1572864:1] totalSize# 1048576 blobValueIndex# 56 Put id# [8:1:1:0:0:40960:1] totalSize# 2621440 blobValueIndex# 20 Put id# [70:1:1:0:0:589824:1] totalSize# 2662400 blobValueIndex# 30 Change MinHugeBlobSize# 8192 Put id# [84:1:1:0:0:10:1] totalSize# 3252224 blobValueIndex# 7 Put id# [68:1:1:0:0:1048576:1] totalSize# 3252234 blobValueIndex# 47 Put id# [40:1:1:0:0:589824:1] totalSize# 4300810 blobValueIndex# 37 Put id# [31:1:1:0:0:10:1] totalSize# 4890634 blobValueIndex# 3 Put id# [38:1:1:0:0:10:1] totalSize# 4890644 blobValueIndex# 8 Put id# [5:1:1:0:0:1572864:1] totalSize# 4890654 blobValueIndex# 54 Put id# [30:1:1:0:0:1048576:1] totalSize# 6463518 blobValueIndex# 40 Put id# [29:1:2:0:0:1048576:1] totalSize# 7512094 blobValueIndex# 44 Put id# [100:1:1:0:0:40960:1] totalSize# 8560670 blobValueIndex# 26 Change MinHugeBlobSize# 524288 Restart Put id# [14:1:1:0:0:40960:1] totalSize# 8601630 blobValueIndex# 29 Change MinHugeBlobSize# 8192 Trim Put id# [23:1:1:0:0:1572864:1] totalSize# 8642590 blobValueIndex# 52 Put id# 
[36:1:1:0:0:1572864:1] totalSize# 10215454 blobValueIndex# 59 Trim Put id# [14:1:2:0:0:589824:1] totalSize# 11788318 blobValueIndex# 37 Change MinHugeBlobSize# 61440 Put id# [18:1:1:0:0:40960:1] totalSize# 12378142 blobValueIndex# 25 Trim Put id# [61:1:1:0:0:10:1] totalSize# 12419102 blobValueIndex# 0 Trim Put id# [89:1:1:0:0:1572864:1] totalSize# 12419112 blobValueIndex# 51 Put id# [5:1:2:0:0:40960:1] totalSize# 13991976 blobValueIndex# 20 Change MinHugeBlobSize# 65536 Put id# [81:1:1:0:0:1048576:1] totalSize# 14032936 blobValueIndex# 41 Change MinHugeBlobSize# 61440 Put id# [68:1:2:0:0:10:1] totalSize# 15081512 blobValueIndex# 2 Put id# [79:1:1:0:0:40960:1] totalSize# 15081522 blobValueIndex# 29 Trim Put id# [18:1:2:0:0:40960:1] totalSize# 15122482 blobValueIndex# 27 Trim Put id# [9:1:1:0:0:1572864:1] totalSize# 15163442 blobValueIndex# 51 Put id# [90:1:1:0:0:40960:1] totalSize# 16736306 blobValueIndex# 23 Put id# [18:1:3:0:0:1572864:1] totalSize# 16777266 blobValueIndex# 59 Put id# [31:1:2:0:0:1024:1] totalSize# 18350130 blobValueIndex# 15 Put id# [98:1:1:0:0:1024:1] totalSize# 18351154 blobValueIndex# 11 Change MinHugeBlobSize# 524288 Put id# [79:1:2:0:0:1048576:1] totalSize# 18352178 blobValueIndex# 46 Put id# [15:1:1:0:0:10:1] totalSize# 19400754 blobValueIndex# 5 Put id# [37:1:1:0:0:1048576:1] totalSize# 19400764 blobValueIndex# 40 Change MinHugeBlobSize# 65536 Put id# [27:1:1:0:0:1048576:1] totalSize# 20449340 blobValueIndex# 47 Put id# [84:1:2:0:0:1572864:1] totalSize# 21497916 blobValueIndex# 52 Put id# [56:1:1:0:0:1024:1] totalSize# 23070780 blobValueIndex# 15 Restart Put id# [25:1:2:0:0:1048576:1] totalSize# 23071804 blobValueIndex# 49 Put id# [65:1:1:0:0:40960:1] totalSize# 24120380 blobValueIndex# 25 Put id# [68:1:3:0:0:10:1] totalSize# 24161340 blobValueIndex# 6 Put id# [2:1:1:0:0:1048576:1] totalSize# 24161350 blobValueIndex# 45 Put id# [76:1:1:0:0:589824:1] totalSize# 25209926 blobValueIndex# 36 Put id# [23:1:2:0:0:1024:1] totalSize# 25799750 blobValueIndex# 14 Trim Put id# [20:1:1:0:0:1024:1] totalSize# 25800774 blobValueIndex# 18 Put id# [17:1:1:0:0:1024:1] totalSize# 25801798 blobValueIndex# 10 Trim Put id# [59:1:1:0:0:1048576:1] totalSize# 25802822 blobValueIndex# 41 Put id# [47:1:1:0:0:589824:1] totalSize# 26851398 blobValueIndex# 34 Change MinHugeBlobSize# 12288 Put id# [99:1:1:0:0:10:1] totalSize# 27441222 blobValueIndex# 7 Trim Put id# [61:1:2:0:0:1048576:1] totalSize# 27441232 blobValueIndex# 49 Change MinHugeBlobSize# 65536 Put id# [89:1:2:0:0:1048576:1] totalSize# 28489808 blobValueIndex# 44 Put id# [82:1:1:0:0:1024:1] totalSize# 29538384 blobValueIndex# 11 Put id# [2:1:2:0:0:589824:1] totalSize# 29539408 blobValueIndex# 30 Put id# [62:1:1:0:0:40960:1] totalSize# 30129232 blobValueIndex# 25 Restart Put id# [45:1:1:0:0:40960:1] totalSize# 30170192 blobValueIndex# 28 Trim Put id# [47:1:2:0:0:1572864:1] totalSize# 30211152 blobValueIndex# 53 Put id# [93:1:1:0:0:589824:1] totalSize# 31784016 blobValueIndex# 32 Put id# [4:1:1:0:0:1572864:1] totalSize# 32373840 blobValueIndex# 55 Change MinHugeBlobSize# 12288 Put id# [19:1:1:0:0:589824:1] totalSize# 33946704 blobValueIndex# 32 Change MinHugeBlobSize# 8192 Put id# [28:1:1:0:0:1572864:1] totalSize# 34536528 blobValueIndex# 58 Put id# [47:1:3:0:0:1048576:1] totalSize# 36109392 blobValueIndex# 42 Put id# [64:1:1:0:0:1024:1] totalSize# 37157968 blobValueIndex# 16 Trim Put id# [15:1:2:0:0:1572864:1] totalSize# 37158992 blobValueIndex# 52 Put id# [60:1:1:0:0:1048576:1] totalSize# 38731856 blobValueIndex# 40 Put id# 
[89:1:3:0:0:1572864:1] totalSize# 39780432 blobValueIndex# 58 Put id# [24:1:1:0:0:10:1] totalSize# 41353296 blobValueIndex# 0 Put id# [28:1:2:0:0:10:1] totalSize# 41353306 blobValueIndex# 9 Put id# [96:1:1:0:0:40960:1] totalSize# 41353316 blobValueIndex# 24 Put id# [37:1:2:0:0:1572864:1] totalSize# 41394276 blobValueIndex# 51 Put id# [92:1:1:0:0:1024:1] totalSize# 42967140 blobValueIndex# 15 Put id# [92:1:2:0:0:1572864:1] totalSize# 42968164 blobValueIndex# 56 Put id# [32:1:1:0:0:1048576:1] totalSize# 44541028 blobValueIndex# 48 Put id# [75:1:1:0:0:1024:1] totalSize# 45589604 blobValueIndex# 15 Put id# [62:1:2:0:0:589824:1] totalSize# 45590628 blobValueIndex# 31 Put id# [82:1:2:0:0:1024:1] totalSize# 46180452 blobValueIndex# 15 Put id# [52:1:1:0:0:1024:1] totalSize# 46181476 blobValueIndex# 18 Put id# [83:1:1:0:0:589824:1] totalSize# 46182500 blobValueIndex# 34 Put id# [51:1:1:0:0:10:1] totalSize# 46772324 blobValueIndex# 2 Put id# [37:1:3:0:0:10:1] totalSize# 46772334 blobValueIndex# 7 Trim Put id# [16:1:1:0:0:10:1] totalSize# 46772344 blobValueIndex# 9 Put id# [34:1:1:0:0:1572864:1] totalSize# 46772354 blobValueIndex# 55 Change MinHugeBlobSize# 12288 Put id# [44:1:1:0:0:589824:1] totalSize# 48345218 blobValueIndex# 36 Restart Put id# [80:1:1:0:0:10:1] totalSize# 48935042 blobValueIndex# 7 Put id# [13:1:1:0:0:1572864:1] totalSize# 48935052 blobValueIndex# 52 Put id# [88:1:1:0:0:40960:1] totalSize# 50507916 blobValueIndex# 21 Trim Put id# [89:1:4:0:0:1572864:1] totalSize# 50548876 blobValueIndex# 50 Put id# [66:1:1:0:0:10:1] totalSize# 52121740 blobValueIndex# 3 Trim Put id# [100:1:2:0:0:40960:1] totalSize# 52121750 blobValueIndex# 23 Change MinHugeBlobSize# 524288 Put id# [75:1:2:0:0:1024:1] totalSize# 52162710 blobValueIndex# 11 Put id# [57:1:1:0:0:1024:1] totalSize# 52163734 blobValueIndex# 16 Change MinHugeBlobSize# 65536 Put id# [53:1:1:0:0:1572864:1] totalSize# 52164758 blobValueIndex# 58 Put id# [62:1:3:0:0:1048576:1] totalSize# 53737622 blobValueIndex# 42 Put id# [72:1:1:0:0:589824:1] totalSize# 54786198 blobValueIndex# 39 Put id# [41:1:1:0:0:1048576:1] totalSize# 55376022 blobValueIndex# 42 Put id# [89:1:5:0:0:1048576:1] totalSize# 56424598 blobValueIndex# 48 Put id# [72:1:2:0:0:589824:1] totalSize# 57473174 blobValueIndex# 39 Put id# [17:1:2:0:0:1572864:1] totalSize# 58062998 blobValueIndex# 51 Put id# [83:1:2:0:0:589824:1] totalSize# 59635862 blobValueIndex# 31 Put id# [55:1:1:0:0:589824:1] totalSize# 60225686 blobValueIndex# 32 Change MinHugeBlobSize# 61440 Put id# [91:1:1:0:0:1048576:1] totalSize# 60815510 blobValueIndex# 46 Put id# [34:1:2:0:0:1048576:1] totalSize# 61864086 blobValueIndex# 45 Put id# [64:1:2:0:0:1572864:1] totalSize# 62912662 blobValueIndex# 55 Put id# [31:1:3:0:0:1024:1] totalSize# 64485526 blobValueIndex# 15 Change MinHugeBlobSize# 12288 Put id# [59:1:2:0:0:1048576:1] totalSize# 64486550 blobValueIndex# 49 Trim Put id# [89:1:6:0:0:1024:1] totalSize# 65535126 blobValueIndex# 18 Put id# [49:1:1:0:0:40960:1] totalSize# 65536150 blobValueIndex# 21 Put id# [84:1:3:0:0:10:1] totalSize# 65577110 blobValueIndex# 4 Put id# [52:1:2:0:0:40960:1] totalSize# 65577120 blobValueIndex# 29 Trim Put id# [65:1:2:0:0:1024:1] totalSize# 65618080 blobValueIndex# 15 Trim Put id# [62:1:4:0:0:40960:1] totalSize# 65619104 blobValueIndex# 21 Trim Put id# [24:1:2:0:0:10:1] totalSize# 65660064 blobValueIndex# 4 Trim Put id# [99:1:2:0:0:40960:1] totalSize# 65660074 blobValueIndex# 24 Put id# [96:1:2:0:0:589824:1] totalSize# 65701034 blobValueIndex# 32 Put id# [45:1:2:0:0:589824:1] 
totalSize# 66290858 blobValueIndex# 36 Put id# [62:1:5:0:0:1048576:1] totalSize# 66880682 blobValueIndex# 45 Put id# [47:1:4:0:0:10:1] totalSize# 67929258 blobValueIndex# 7 Put id# [16:1:2:0:0:40960:1] totalSize# 67929268 blobValueIndex# 25 Trim Put id# [6:1:1:0:0:1048576:1] totalSize# 67970228 blobValueIndex# 49 Put id# [33:1:1:0:0:1024:1] totalSize# 69018804 blobValueIndex# 10 Put id# [11:1:1:0:0:1572864:1] totalSize# 69019828 blobValueIndex# 53 Put id# [43:1:1:0:0:589824:1] totalSize# 70592692 blobValueIndex# 30 Put id# [76:1:2:0:0:40960:1] totalSize# 71182516 blobValueIndex# 28 Put id# [56:1:2:0:0:589824:1] totalSize# 71223476 blobValueIndex# 33 Change MinHugeBlobSize# 65536 Put id# [7:1:1:0:0:10:1] totalSize# 71813300 blobValueIndex# 0 Trim Put id# [52:1:3:0:0:1048576:1] totalSize# 71813310 blobValueIndex# 41 Put id# [1:1:1:0:0:589824:1] totalSize# 72861886 blobValueIndex# 34 Put id# [3:1:1:0:0:1024:1] totalSize# 73451710 blobValueIndex# 16 Put id# [39:1:1:0:0:40960:1] totalSize# 73452734 blobValueIndex# 22 Put id# [100:1:3:0:0:1572864:1] totalSize# 73493694 blobValueIndex# 53 Put id# [17:1:3:0:0:10:1] totalSize# 75066558 blobValueIndex# 0 Put id# [2:1:3:0:0:1048576:1] totalSize# 75066568 blobValueIndex# 47 Put id# [34:1:3:0:0:1048576:1] totalSize# 76115144 blobValueIndex# 41 Change MinHugeBlobSize# 8192 Put id# [23:1:3:0:0:1572864:1] totalSize# 77163720 blobValueIndex# 58 Put id# [44:1:2:0:0:589824:1] totalSize# 78736584 blobValueIndex# 31 Change MinHugeBlobSize# 61440 Trim Put id# [31:1:4:0:0:40960:1] totalSize# 79326408 blobValueIndex# 23 Put id# [22:1:1:0:0:40960:1] totalSize# 79367368 blobValueIndex# 20 Put id# [83:1:3:0:0:10:1] totalSize# 79408328 blobValueIndex# 2 Trim Put id# [90:1:2:0:0:10:1] totalSize# 79408338 blobValueIndex# 7 Trim Restart Put id# [77:1:1:0:0:1572864:1] totalSize# 79408348 blobValueIndex# 58 Put id# [9:1:2:0:0:40960:1] totalSize# 80981212 blobValueIndex# 21 Put id# [79:1:3:0:0:1572864:1] totalSize# 81022172 blobValueIndex# 50 Change MinHugeBlobSize# 524288 Put id# [49:1:2:0:0:10:1] totalSize# 82595036 blobValueIndex# 8 Put id# [74:1:1:0:0:1048576:1] totalSize# 82595046 blobValueIndex# 42 Restart Put id# [90:1:3:0:0:1572864:1] totalSize# 83643622 blobValueIndex# 58 Put id# [56:1:3:0:0:1024:1] totalSize# 85216486 blobValueIndex# 18 Put id# [86:1:1:0:0:1048576:1] totalSize# 85217510 blobValueIndex# 40 Put id# [30:1:2:0:0:40960:1] totalSize# 86266086 blobValueIndex# 27 Put id# [35:1:1:0:0:10:1] totalSize# 86307046 blobValueIndex# 7 Put id# [46:1:1:0:0:40960:1] totalSize# 86307056 blobValueIndex# 25 Put id# [87:1:1:0:0:40960:1] totalSize# 86348016 blobValueIndex# 29 Trim Put id# [42:1:1:0:0:1572864:1] totalSize# 86388976 blobValueIndex# 56 Trim Put id# [3:1:2:0:0:1024:1] totalSize# 87961840 blobValueIndex# 18 Put id# [28:1:3:0:0:1572864:1] totalSize# 87962864 blobValueIndex# 59 Trim Put id# [73:1:1:0:0:1024:1] totalSize# 89535728 blobValueIndex# 19 Put id# [95:1:1:0:0:1572864:1] totalSize# 89536752 blobValueIndex# 55 Put id# [94:1:1:0:0:1572864:1] totalSize# 91109616 blobValueIndex# 57 Put id# [79:1:4:0:0:10:1] totalSize# 92682480 blobValueIndex# 1 Put id# [66:1:2:0:0:1048576:1] totalSize# 92682490 blobValueIndex# 47 Restart Put id# [59:1:3:0:0:40960:1] totalSize# 93731066 blobValueIndex# 25 Put id# [30:1:3:0:0:1024:1] totalSize# 93772026 blobValueIndex# 19 Put id# [72:1:3:0:0:1572864:1] totalSize# 93773050 blobValueIndex# 56 Put id# [24:1:3:0:0:1048576:1] totalSize# 95345914 blobValueIndex# 47 Restart Put id# [84:1:4:0:0:1024:1] totalSize# 96394490 
blobValueIndex# 13 Put id# [6:1:2:0:0:1048576:1] totalSize# 96395514 blobValueIndex# 41 Put id# [58:1:1:0:0:10:1] totalSize# 97444090 blobValueIndex# 0 Put id# [30:1:4:0:0:1024:1] totalSize# 97444100 blobValueIndex# 10 Change MinHugeBlobSize# 819 ... 40960:1] totalSize# 1095533986 blobValueIndex# 22 Trim Put id# [63:1:16:0:0:1572864:1] totalSize# 1095574946 blobValueIndex# 59 Put id# [4:1:23:0:0:1572864:1] totalSize# 1097147810 blobValueIndex# 56 Put id# [76:1:20:0:0:1572864:1] totalSize# 1098720674 blobValueIndex# 50 Trim Put id# [28:1:26:0:0:589824:1] totalSize# 1100293538 blobValueIndex# 36 Put id# [22:1:17:0:0:10:1] totalSize# 1100883362 blobValueIndex# 5 Trim Put id# [80:1:20:0:0:1572864:1] totalSize# 1100883372 blobValueIndex# 50 Put id# [23:1:35:0:0:1572864:1] totalSize# 1102456236 blobValueIndex# 55 Put id# [40:1:20:0:0:40960:1] totalSize# 1104029100 blobValueIndex# 29 Put id# [77:1:22:0:0:40960:1] totalSize# 1104070060 blobValueIndex# 27 Trim Put id# [63:1:17:0:0:1048576:1] totalSize# 1104111020 blobValueIndex# 45 Restart Put id# [69:1:25:0:0:589824:1] totalSize# 1105159596 blobValueIndex# 32 Put id# [74:1:22:0:0:1024:1] totalSize# 1105749420 blobValueIndex# 16 Change MinHugeBlobSize# 65536 Put id# [53:1:30:0:0:589824:1] totalSize# 1105750444 blobValueIndex# 35 Put id# [37:1:25:0:0:1048576:1] totalSize# 1106340268 blobValueIndex# 45 Put id# [16:1:20:0:0:10:1] totalSize# 1107388844 blobValueIndex# 9 Put id# [37:1:26:0:0:10:1] totalSize# 1107388854 blobValueIndex# 9 Change MinHugeBlobSize# 61440 Restart Put id# [92:1:17:0:0:10:1] totalSize# 1107388864 blobValueIndex# 4 Change MinHugeBlobSize# 8192 Put id# [19:1:23:0:0:589824:1] totalSize# 1107388874 blobValueIndex# 35 Put id# [46:1:28:0:0:1572864:1] totalSize# 1107978698 blobValueIndex# 53 Put id# [19:1:24:0:0:1048576:1] totalSize# 1109551562 blobValueIndex# 41 Restart Put id# [27:1:24:0:0:10:1] totalSize# 1110600138 blobValueIndex# 8 Put id# [30:1:24:0:0:40960:1] totalSize# 1110600148 blobValueIndex# 22 Put id# [83:1:26:0:0:1572864:1] totalSize# 1110641108 blobValueIndex# 50 Put id# [17:1:21:0:0:40960:1] totalSize# 1112213972 blobValueIndex# 26 Trim Put id# [41:1:24:0:0:40960:1] totalSize# 1112254932 blobValueIndex# 23 Put id# [32:1:21:0:0:10:1] totalSize# 1112295892 blobValueIndex# 9 Trim Put id# [95:1:22:0:0:589824:1] totalSize# 1112295902 blobValueIndex# 38 Put id# [66:1:20:0:0:1024:1] totalSize# 1112885726 blobValueIndex# 17 Put id# [11:1:25:0:0:1572864:1] totalSize# 1112886750 blobValueIndex# 56 Put id# [32:1:22:0:0:1024:1] totalSize# 1114459614 blobValueIndex# 16 Put id# [53:1:31:0:0:1024:1] totalSize# 1114460638 blobValueIndex# 16 Change MinHugeBlobSize# 65536 Restart Put id# [64:1:30:0:0:10:1] totalSize# 1114461662 blobValueIndex# 1 Put id# [6:1:23:0:0:1572864:1] totalSize# 1114461672 blobValueIndex# 53 Put id# [9:1:27:0:0:1048576:1] totalSize# 1116034536 blobValueIndex# 48 Put id# [60:1:27:0:0:589824:1] totalSize# 1117083112 blobValueIndex# 32 Put id# [80:1:21:0:0:1572864:1] totalSize# 1117672936 blobValueIndex# 57 Put id# [31:1:22:0:0:40960:1] totalSize# 1119245800 blobValueIndex# 22 Put id# [55:1:30:0:0:1572864:1] totalSize# 1119286760 blobValueIndex# 56 Put id# [29:1:18:0:0:1024:1] totalSize# 1120859624 blobValueIndex# 15 Put id# [95:1:23:0:0:1048576:1] totalSize# 1120860648 blobValueIndex# 41 Put id# [92:1:18:0:0:1572864:1] totalSize# 1121909224 blobValueIndex# 57 Put id# [83:1:27:0:0:1024:1] totalSize# 1123482088 blobValueIndex# 15 Put id# [1:1:29:0:0:589824:1] totalSize# 1123483112 blobValueIndex# 35 Put id# 
[13:1:17:0:0:40960:1] totalSize# 1124072936 blobValueIndex# 23 Restart Put id# [53:1:32:0:0:1024:1] totalSize# 1124113896 blobValueIndex# 14 Put id# [71:1:18:0:0:589824:1] totalSize# 1124114920 blobValueIndex# 35 Restart Put id# [63:1:18:0:0:40960:1] totalSize# 1124704744 blobValueIndex# 29 Put id# [11:1:26:0:0:10:1] totalSize# 1124745704 blobValueIndex# 1 Put id# [19:1:25:0:0:589824:1] totalSize# 1124745714 blobValueIndex# 32 Put id# [2:1:29:0:0:1048576:1] totalSize# 1125335538 blobValueIndex# 44 Put id# [71:1:19:0:0:1024:1] totalSize# 1126384114 blobValueIndex# 17 Trim Put id# [69:1:26:0:0:1572864:1] totalSize# 1126385138 blobValueIndex# 54 Trim Put id# [13:1:18:0:0:10:1] totalSize# 1127958002 blobValueIndex# 5 Put id# [63:1:19:0:0:1024:1] totalSize# 1127958012 blobValueIndex# 12 Put id# [47:1:17:0:0:40960:1] totalSize# 1127959036 blobValueIndex# 21 Put id# [42:1:28:0:0:589824:1] totalSize# 1127999996 blobValueIndex# 38 Change MinHugeBlobSize# 61440 Put id# [87:1:11:0:0:10:1] totalSize# 1128589820 blobValueIndex# 0 Change MinHugeBlobSize# 65536 Put id# [41:1:25:0:0:589824:1] totalSize# 1128589830 blobValueIndex# 36 Put id# [82:1:21:0:0:1572864:1] totalSize# 1129179654 blobValueIndex# 54 Put id# [56:1:17:0:0:589824:1] totalSize# 1130752518 blobValueIndex# 37 Trim Put id# [13:1:19:0:0:1572864:1] totalSize# 1131342342 blobValueIndex# 55 Put id# [91:1:18:0:0:10:1] totalSize# 1132915206 blobValueIndex# 0 Put id# [52:1:21:0:0:589824:1] totalSize# 1132915216 blobValueIndex# 33 Put id# [53:1:33:0:0:1024:1] totalSize# 1133505040 blobValueIndex# 17 Put id# [23:1:36:0:0:40960:1] totalSize# 1133506064 blobValueIndex# 21 Restart Put id# [58:1:20:0:0:1024:1] totalSize# 1133547024 blobValueIndex# 16 Put id# [47:1:18:0:0:10:1] totalSize# 1133548048 blobValueIndex# 3 Put id# [95:1:24:0:0:1024:1] totalSize# 1133548058 blobValueIndex# 11 Trim Put id# [52:1:22:0:0:10:1] totalSize# 1133549082 blobValueIndex# 1 Put id# [27:1:25:0:0:40960:1] totalSize# 1133549092 blobValueIndex# 29 Put id# [98:1:19:0:0:589824:1] totalSize# 1133590052 blobValueIndex# 35 Put id# [54:1:26:0:0:1572864:1] totalSize# 1134179876 blobValueIndex# 59 Trim Put id# [66:1:21:0:0:1024:1] totalSize# 1135752740 blobValueIndex# 19 Change MinHugeBlobSize# 12288 Put id# [73:1:16:0:0:1048576:1] totalSize# 1135753764 blobValueIndex# 48 Put id# [47:1:19:0:0:1048576:1] totalSize# 1136802340 blobValueIndex# 44 Put id# [54:1:27:0:0:1048576:1] totalSize# 1137850916 blobValueIndex# 40 Put id# [54:1:28:0:0:10:1] totalSize# 1138899492 blobValueIndex# 9 Put id# [62:1:26:0:0:1572864:1] totalSize# 1138899502 blobValueIndex# 55 Trim Put id# [81:1:21:0:0:589824:1] totalSize# 1140472366 blobValueIndex# 36 Restart Put id# [47:1:20:0:0:589824:1] totalSize# 1141062190 blobValueIndex# 35 Trim Put id# [95:1:25:0:0:40960:1] totalSize# 1141652014 blobValueIndex# 21 Put id# [47:1:21:0:0:1048576:1] totalSize# 1141692974 blobValueIndex# 41 Change MinHugeBlobSize# 65536 Put id# [7:1:18:0:0:1024:1] totalSize# 1142741550 blobValueIndex# 16 Put id# [94:1:17:0:0:10:1] totalSize# 1142742574 blobValueIndex# 8 Put id# [5:1:22:0:0:1572864:1] totalSize# 1142742584 blobValueIndex# 57 Put id# [21:1:19:0:0:1048576:1] totalSize# 1144315448 blobValueIndex# 44 Put id# [11:1:27:0:0:1048576:1] totalSize# 1145364024 blobValueIndex# 40 Trim Put id# [38:1:21:0:0:1048576:1] totalSize# 1146412600 blobValueIndex# 47 Trim Put id# [7:1:19:0:0:1024:1] totalSize# 1147461176 blobValueIndex# 16 Put id# [95:1:26:0:0:1572864:1] totalSize# 1147462200 blobValueIndex# 53 Change MinHugeBlobSize# 12288 Put 
id# [23:1:37:0:0:10:1] totalSize# 1149035064 blobValueIndex# 0 Put id# [50:1:16:0:0:589824:1] totalSize# 1149035074 blobValueIndex# 32 Put id# [91:1:19:0:0:589824:1] totalSize# 1149624898 blobValueIndex# 30 Put id# [57:1:21:0:0:1024:1] totalSize# 1150214722 blobValueIndex# 10 Put id# [74:1:23:0:0:1024:1] totalSize# 1150215746 blobValueIndex# 17 Put id# [72:1:17:0:0:1048576:1] totalSize# 1150216770 blobValueIndex# 44 Put id# [58:1:21:0:0:10:1] totalSize# 1151265346 blobValueIndex# 2 Change MinHugeBlobSize# 61440 Put id# [5:1:23:0:0:589824:1] totalSize# 1151265356 blobValueIndex# 31 Put id# [63:1:20:0:0:1024:1] totalSize# 1151855180 blobValueIndex# 15 Put id# [39:1:19:0:0:40960:1] totalSize# 1151856204 blobValueIndex# 26 Trim Put id# [67:1:21:0:0:1048576:1] totalSize# 1151897164 blobValueIndex# 46 Put id# [8:1:22:0:0:1572864:1] totalSize# 1152945740 blobValueIndex# 58 Trim Put id# [16:1:21:0:0:40960:1] totalSize# 1154518604 blobValueIndex# 26 Put id# [78:1:27:0:0:1024:1] totalSize# 1154559564 blobValueIndex# 13 Put id# [73:1:17:0:0:1024:1] totalSize# 1154560588 blobValueIndex# 16 Put id# [93:1:23:0:0:1572864:1] totalSize# 1154561612 blobValueIndex# 52 Put id# [28:1:27:0:0:589824:1] totalSize# 1156134476 blobValueIndex# 31 Trim Put id# [72:1:18:0:0:1048576:1] totalSize# 1156724300 blobValueIndex# 41 Put id# [97:1:16:0:0:40960:1] totalSize# 1157772876 blobValueIndex# 27 Put id# [97:1:17:0:0:1048576:1] totalSize# 1157813836 blobValueIndex# 45 Put id# [91:1:20:0:0:1572864:1] totalSize# 1158862412 blobValueIndex# 54 Put id# [86:1:14:0:0:1572864:1] totalSize# 1160435276 blobValueIndex# 55 Change MinHugeBlobSize# 12288 Put id# [54:1:29:0:0:589824:1] totalSize# 1162008140 blobValueIndex# 31 Put id# [44:1:17:0:0:10:1] totalSize# 1162597964 blobValueIndex# 0 Trim Put id# [79:1:22:0:0:1048576:1] totalSize# 1162597974 blobValueIndex# 46 Trim Put id# [7:1:20:0:0:1048576:1] totalSize# 1163646550 blobValueIndex# 43 Put id# [94:1:18:0:0:589824:1] totalSize# 1164695126 blobValueIndex# 38 Trim Put id# [88:1:26:0:0:1048576:1] totalSize# 1165284950 blobValueIndex# 48 Put id# [59:1:15:0:0:40960:1] totalSize# 1166333526 blobValueIndex# 21 Put id# [87:1:12:0:0:589824:1] totalSize# 1166374486 blobValueIndex# 32 Put id# [29:1:19:0:0:1024:1] totalSize# 1166964310 blobValueIndex# 12 Trim Put id# [89:1:29:0:0:10:1] totalSize# 1166965334 blobValueIndex# 2 Put id# [50:1:17:0:0:1048576:1] totalSize# 1166965344 blobValueIndex# 46 Put id# [62:1:27:0:0:1572864:1] totalSize# 1168013920 blobValueIndex# 55 Put id# [64:1:31:0:0:1572864:1] totalSize# 1169586784 blobValueIndex# 50 Put id# [87:1:13:0:0:1048576:1] totalSize# 1171159648 blobValueIndex# 44 Put id# [24:1:25:0:0:589824:1] totalSize# 1172208224 blobValueIndex# 31 Put id# [54:1:30:0:0:1048576:1] totalSize# 1172798048 blobValueIndex# 40 Put id# [10:1:19:0:0:1024:1] totalSize# 1173846624 blobValueIndex# 14 Put id# [39:1:20:0:0:1024:1] totalSize# 1173847648 blobValueIndex# 14 Put id# [99:1:20:0:0:40960:1] totalSize# 1173848672 blobValueIndex# 22 Trim Put id# [38:1:22:0:0:1024:1] totalSize# 1173889632 blobValueIndex# 12 Put id# [87:1:14:0:0:10:1] totalSize# 1173890656 blobValueIndex# 7 Trim Put id# [54:1:31:0:0:40960:1] totalSize# 1173890666 blobValueIndex# 20 Put id# [63:1:21:0:0:1572864:1] totalSize# 1173931626 blobValueIndex# 54 Put id# [80:1:22:0:0:1048576:1] totalSize# 1175504490 blobValueIndex# 45 Change MinHugeBlobSize# 8192 Put id# [40:1:21:0:0:10:1] totalSize# 1176553066 blobValueIndex# 1 Put id# [93:1:24:0:0:1572864:1] totalSize# 1176553076 blobValueIndex# 52 Put 
id# [26:1:20:0:0:40960:1] totalSize# 1178125940 blobValueIndex# 29 Put id# [7:1:21:0:0:589824:1] totalSize# 1178166900 blobValueIndex# 39 Put id# [88:1:27:0:0:1024:1] totalSize# 1178756724 blobValueIndex# 19 Put id# [80:1:23:0:0:1024:1] totalSize# 1178757748 blobValueIndex# 19 Change MinHugeBlobSize# 61440 Put id# [70:1:14:0:0:1024:1] totalSize# 1178758772 blobValueIndex# 12 Put id# [55:1:31:0:0:40960:1] totalSize# 1178759796 blobValueIndex# 20 Put id# [55:1:32:0:0:40960:1] totalSize# 1178800756 blobValueIndex# 26 Put id# [18:1:25:0:0:10:1] totalSize# 1178841716 blobValueIndex# 4 Put id# [35:1:27:0:0:1048576:1] totalSize# 1178841726 blobValueIndex# 46 Put id# [93:1:25:0:0:1572864:1] totalSize# 1179890302 blobValueIndex# 51 Put id# [93:1:26:0:0:1024:1] totalSize# 1181463166 blobValueIndex# 19 Put id# [18:1:26:0:0:10:1] totalSize# 1181464190 blobValueIndex# 9 Restart Put id# [65:1:20:0:0:1024:1] totalSize# 1181464200 blobValueIndex# 17 Put id# [43:1:27:0:0:10:1] totalSize# 1181465224 blobValueIndex# 4 Put id# [49:1:22:0:0:1572864:1] totalSize# 1181465234 blobValueIndex# 50 Put id# [17:1:22:0:0:589824:1] totalSize# 1183038098 blobValueIndex# 38 Trim Put id# [48:1:23:0:0:1024:1] totalSize# 1183627922 blobValueIndex# 17 Put id# [25:1:21:0:0:10:1] totalSize# 1183628946 blobValueIndex# 6 Put id# [31:1:23:0:0:10:1] totalSize# 1183628956 blobValueIndex# 0 Put id# [16:1:22:0:0:1572864:1] totalSize# 1183628966 blobValueIndex# 54 Put id# [34:1:23:0:0:1048576:1] totalSize# 1185201830 blobValueIndex# 47 Put id# [83:1:28:0:0:40960:1] totalSize# 1186250406 blobValueIndex# 28 Trim Restart |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001c41/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-04-10T11:09:26.992106Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:26.992060Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-10T11:09:26.859512Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:27.691761Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:27.691719Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-10T11:09:27.103127Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:28.139325Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:28.139277Z","sanitized_token":"othe****ltin 
(27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-10T11:09:27.807474Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:28.668458Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:28.668423Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:09:28.264176Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:29.031458Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:29.031407Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-10T11:09:28.783705Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:29.219132Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:29.219082Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-10T11:09:29.147171Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001c99/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-04-10T11:09:31.906191Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:31.906131Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-10T11:09:31.659463Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/service_node/service_node |85.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpExtractPredicateLookup::ComplexRange [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/2gt4/001bbe/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-04-10T11:09:42.070842Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |85.7%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |85.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |85.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001bf6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 16018, MsgBus: 12024 2025-04-10T11:06:02.374383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491639138088757229:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:02.374451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000955/r3tmp/tmpxr67GY/pdisk_1.dat 2025-04-10T11:06:02.897072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:02.900263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:02.900370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:02.905757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16018, node 1 2025-04-10T11:06:03.060408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:03.060444Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:03.060455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:03.060574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12024 TClient is connected to server localhost:12024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:06:03.790681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:03.819574Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:06:03.837021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.004600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.190877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:04.259900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:06.236237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639155268628207:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:06.236412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:06.600413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:06:06.642471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:06:06.684523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:06:06.748678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:06:06.810756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:06:06.999609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:06:07.066269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639159563596022:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.066375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.066696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491639159563596027:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:06:07.071205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:06:07.088637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491639159563596029:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:06:07.142820Z node 1 :TX_PROXY ERROR: Actor# [1:7491639159563596083:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:06:07.378270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491639138088757229:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:07.378419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8842, MsgBus: 22000 2025-04-10T11:06:10.746904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491639171525930045:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:06:10.747795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000955/r3tmp/tmpxEYYOn/pdisk_1.dat 2025-04-10T11:06:10.924768Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:06:10.940792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:06:10.940910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:06:10.942390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8842, node 2 2025-04-10T11:06:11.234674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:06:11.234699Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:06:11.234710Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:06:11.234827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22000 TClient is connected to server localhost:22000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:06:12.028651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.036365Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:06:12.052832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.161753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:06:12.369171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting.. ... 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:09:31.084898Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:09:31.290699Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:09:31.507624Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:09:31.699987Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:09:31.886331Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:09:33.546550Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:09:33.546592Z node 13 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 29960, MsgBus: 61586 2025-04-10T11:09:39.289922Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7491640070667687247:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:09:39.305841Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000955/r3tmp/tmpjm3HoQ/pdisk_1.dat 2025-04-10T11:09:39.664935Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:09:39.759698Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:09:39.759879Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:09:39.762354Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
29960, node 14 2025-04-10T11:09:40.132835Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:09:40.132873Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:09:40.132892Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:09:40.133122Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61586 TClient is connected to server localhost:61586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:09:43.539731Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:09:43.566724Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:09:43.601702Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:09:43.812736Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:09:44.259547Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7491640070667687247:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:09:44.289695Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:09:44.405423Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:09:44.621435Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:09:51.091994Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7491640122207296488:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:51.092152Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:51.273949Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:09:51.382933Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:09:51.506845Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:09:51.659513Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:09:51.770673Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:09:51.914093Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:09:52.078091Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7491640126502264321:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:52.078237Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:52.086392Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7491640126502264326:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:09:52.100957Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:09:52.166135Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7491640126502264328:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:09:52.258859Z node 14 :TX_PROXY ERROR: Actor# [14:7491640126502264387:3491] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:09:54.654264Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:09:54.654298Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:09:55.079532Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.181752Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.313752Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.430992Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.531066Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.587660Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.691938Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.797419Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.881540Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:09:55.986810Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001c02/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-04-10T11:09:49.602757Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:49.602702Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-10T11:09:49.572847Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:49.762600Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:49.762560Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-10T11:09:49.717480Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:49.922567Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:49.922528Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-10T11:09:49.876052Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:50.094973Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:50.094929Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:09:50.043906Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:50.286686Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:50.286646Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-10T11:09:50.212675Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-10T11:09:50.466636Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:09:50.466592Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-10T11:09:50.404369Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |85.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001bbb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001baf/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-04-10T11:10:04.323392Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:10:04.323341Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-10T11:10:03.791059Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> 
test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 5194 us testing erasure 
stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 2633945 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 4426471 us testing erasure stripe-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 165956 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 1432673 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 2867529 us >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001b65/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-04-10T11:10:10.855048Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:10:10.855007Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-10T11:10:10.750468Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |86.1%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> data_correctness.py::TestDataCorrectness::test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> unstable_connection.py::TestUnstableConnection::test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001b6a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-04-10T11:10:13.916243Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:10:13.916196Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-10T11:10:13.793171Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001b52/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-04-10T11:10:11.713102Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:10:11.713065Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:10:11.427232Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001b44/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-04-10T11:10:21.194782Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_auditlog.py::test_dynconfig [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001ab6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-04-10T11:10:28.149197Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2gt4/001ab0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-04-10T11:10:28.855351Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [FAIL] Test command err: 2025-04-10T11:08:14.102788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:08:14.103075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:08:14.103259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00151e/r3tmp/tmpAU3AaC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8639, node 1 TClient is connected to server localhost:23437 2025-04-10T11:08:20.121598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:08:20.318225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:20.367828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:20.367885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:20.367922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:20.368287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:08:20.431227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:20.446487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:20.476358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-10T11:08:32.702804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.702972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.703054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.800581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:08:32.854040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:822:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:08:32.977023Z node 1 :TX_PROXY ERROR: Actor# [1:873:2716] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:35.685628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T11:08:40.304318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:41.599689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-10T11:08:43.373197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:08:44.310412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:08:45.739268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:08:50.881661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-10T11:08:51.795346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side ;EXPECTATION=1 GRpc shutdown warning: left infly: 1, spent: 3.654614 sec GRpc shutdown warning: left infly: 1, spent: 7.330445 sec GRpc shutdown warning: left infly: 1, spent: 11.069119 sec GRpc shutdown warning: left infly: 1, spent: 14.790627 sec GRpc shutdown warning: left infly: 1, spent: 18.582474 sec GRpc shutdown warning: left infly: 1, spent: 22.354644 sec GRpc shutdown warning: left infly: 1, spent: 26.050736 sec GRpc shutdown warning: left infly: 1, spent: 29.754755 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.006624 sec assertion failed at ydb/core/testlib/common_helper.cpp:191, void NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(const TString &, const bool, const bool) const: (*rrPtr) TBackTrace::Capture()+28 (0x18D0FC9C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x191CBB30) NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(TBasicString> const&, bool, bool) const+4011 (0x35CC54BB) NKikimr::NTestSuiteSecret::SimpleImpl(bool)+2914 (0x189507F2) std::__y1::__function::__func, void ()>::operator()()+280 (0x18965078) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19202B76) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x191D26A9) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()+1204 (0x18963F24) NUnitTest::TTestFactory::Execute()+2438 (0x191D3F76) NUnitTest::RunMain(int, char**)+5213 (0x191FD0ED) ??+0 (0x7FB82CFB9D90) __libc_start_main+128 (0x7FB82CFB9E40) _start+41 (0x162DA029) ================================================================= ==1336036==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 26080 byte(s) in 1 object(s) allocated from: #0 0x18a3fa7d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x1f055018 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x1f055018 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x1f055018 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x1f055018 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x1f055018 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25 #6 0x1f055018 in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:442:7 #7 0x1f055018 in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #8 0x1f055018 in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x1f05488d in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr const&) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:763:10 #10 0x273f0e78 in grpc_core::(anonymous namespace)::Chttp2ServerListener::ActiveConnection::HandshakingState::OnHandshakeDone(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/server/chttp2_server.cc:479:52 #11 0x1e6dc51e in exec_ctx_run /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:45:3 #12 0x1e6dc51e in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:72:9 #13 0x1e8c38dd in end_worker 
/-S/contrib/libs/grpc/src/core/lib/iomgr/ev_epoll1_linux.cc:981:34 #14 0x1e8c38dd in pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/ev_epoll1_linux.cc:1049:3 #15 0x1e8bf738 in pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/ev_posix.cc:249:7 #16 0x1e8e3258 in grpc_pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/pollset.cc:48:10 #17 0x1e89db67 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #18 0x1f0868fe in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #19 0x2737a306 in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #20 0x2737a306 in PullEvents /-S/ydb/library/grpc/server/grpc_server.cpp:41:17 #21 0x2737a306 in operator() /-S/ydb/library/grpc/server/grpc_server.cpp:266:13 #22 0x2737a306 in __invoke<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #23 0x2737a306 in __call<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #24 0x2737a306 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #25 0x2737a306 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #26 0x19b319be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #27 0x19b319be in operator() /-S/contrib/libs/cxxsupp/libcxx/include ... 
d::__y1::char_traits> const&, std::__y1::shared_ptr, std::__y1::shared_ptr, std::__y1::shared_ptr const&) /-S/ydb/services/metadata/initializer/accessor_init.cpp:109:30 #15 0x48713653 in NKikimr::NMetadata::NProvider::TBehaviourRegistrator::Handle(TAutoPtr, TDelete>&) /-S/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp:45:5 #16 0x48714cc4 in NKikimr::NMetadata::NProvider::TBehaviourRegistrator::StateMain(TAutoPtr&) /-S/ydb/services/metadata/ds_table/behaviour_registrator_actor.h:47:13 #17 0x19f3dbec in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #18 0x35a94854 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #19 0x35a8d0c9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #20 0x35a97443 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #21 0x35c64673 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #22 0x35c63792 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #23 0x35c63792 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #24 0x35c5b9f2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #25 0x35c5b5ca in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #26 0x35cc51a4 in NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(TBasicString> const&, bool, bool) const /-S/ydb/core/testlib/common_helper.cpp:189:34 #27 0x189507f1 in NKikimr::NTestSuiteSecret::SimpleImpl(bool) /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:189:21 #28 0x18965077 in operator() /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1 #29 0x18965077 in __invoke<(lambda at /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #30 0x18965077 in __call<(lambda at /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #31 0x18965077 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0x18965077 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x19202b75 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x19202b75 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x19202b75 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x191d26a8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0x18963f23 in NKikimr::NTestSuiteSecret::TCurrentTest::Execute() /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1 #38 0x191d3f75 in 
NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x191fd0ec in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7fb82cfb9d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x18a3fa7d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x487230a6 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x487230a6 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x487230a6 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x487230a6 in __allocate_at_least *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x487230a6 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25 #6 0x487230a6 in std::__y1::deque, std::__y1::allocator>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51 #7 0x4871da75 in emplace_back &> /-S/contrib/libs/cxxsupp/libcxx/include/deque:1569:5 #8 0x4871da75 in NKikimr::NMetadata::NInitializer::TDSAccessorInitialized::OnPreparationFinished(TVector, std::__y1::allocator>> const&) /-S/ydb/services/metadata/initializer/accessor_init.cpp:70:19 #9 0x23edcc0b in NKikimr::NMetadata::NSecret::TSecretInitializer::DoPrepare(std::__y1::shared_ptr) const /-S/ydb/services/metadata/secret/initializer.cpp:49:17 #10 0x48720aad in Prepare /-S/ydb/services/metadata/abstract/initialization.h:14:16 #11 0x48720aad in NKikimr::NMetadata::NInitializer::TDSAccessorInitialized::Execute(NKikimr::NMetadata::NRequest::TConfig const&, TBasicString> const&, std::__y1::shared_ptr, std::__y1::shared_ptr, std::__y1::shared_ptr const&) /-S/ydb/services/metadata/initializer/accessor_init.cpp:109:30 #12 0x48713653 in NKikimr::NMetadata::NProvider::TBehaviourRegistrator::Handle(TAutoPtr, TDelete>&) /-S/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp:45:5 #13 0x48714cc4 in NKikimr::NMetadata::NProvider::TBehaviourRegistrator::StateMain(TAutoPtr&) /-S/ydb/services/metadata/ds_table/behaviour_registrator_actor.h:47:13 #14 0x19f3dbec in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #15 0x35a94854 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #16 0x35a8d0c9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #17 0x35a97443 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #18 0x35c64673 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #19 0x35c63792 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #20 0x35c63792 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #21 0x35c5b9f2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #22 
0x35c5b5ca in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #23 0x35cc51a4 in NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(TBasicString> const&, bool, bool) const /-S/ydb/core/testlib/common_helper.cpp:189:34 #24 0x189507f1 in NKikimr::NTestSuiteSecret::SimpleImpl(bool) /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:189:21 #25 0x18965077 in operator() /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1 #26 0x18965077 in __invoke<(lambda at /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #27 0x18965077 in __call<(lambda at /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #28 0x18965077 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #29 0x18965077 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #30 0x19202b75 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #31 0x19202b75 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #32 0x19202b75 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #33 0x191d26a8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #34 0x18963f23 in NKikimr::NTestSuiteSecret::TCurrentTest::Execute() /-S/ydb/services/metadata/secret/ut/ut_secret.cpp:28:1 #35 0x191d3f75 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #36 0x191fd0ec in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #37 0x7fb82cfb9d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 413511 byte(s) leaked in 5078 allocation(s). 
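The LeakSanitizer report above attributes the leaked allocations to gRPC server shutdown paths (grpc_core::Server::ChannelData::InitTransport and the metadata initializer actors) reached from the test harness. Note that the leak report is secondary output here: Secret::SimpleQueryService fails first on the assertion at ydb/core/testlib/common_helper.cpp:191, and suppressing the leak would not make the test pass. For investigation, LeakSanitizer supports suppression files matched against frames in each leak's stack; the snippet below is a generic, hedged illustration of that mechanism, with a hypothetical test-binary path, not a recommendation to suppress this particular failure.

    # Generic illustration of LSan suppressions; binary path is hypothetical.
    import os
    import subprocess
    import tempfile

    # Each "leak:<pattern>" line suppresses leaks whose stack contains a frame
    # matching the pattern.
    SUPPRESSIONS = """\
    leak:grpc_core::Server::ChannelData::InitTransport
    leak:NKikimr::NMetadata::NInitializer::TDSAccessorInitialized
    """

    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
        f.write(SUPPRESSIONS)
        supp_path = f.name

    env = dict(os.environ)
    # print_suppressions=1 makes LSan report which suppressions matched.
    env["LSAN_OPTIONS"] = f"suppressions={supp_path}:print_suppressions=1"

    # Hypothetical path; under ya make the binary lives in the build output tree.
    subprocess.run(["./ydb-services-metadata-secret-ut"], env=env, check=False)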
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/001ac3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt
2025-04-10T11:10:26.076279Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std]
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std]
>> TStorageServiceTest::ShouldRegister
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/001a5e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt
2025-04-10T11:10:33.525664Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-10T11:10:33.525615Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-10T11:10:33.191484Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo]
>> TStorageServiceTest::ShouldRegister [GOOD]
>> TStorageServiceTest::ShouldRegisterNextGeneration
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/001a2b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt
2025-04-10T11:10:38.846402Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1]
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
>> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD]
>> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/0019ce/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt
2025-04-10T11:10:42.834845Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/0019b5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt
2025-04-10T11:10:43.869991Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-04-10T11:10:43.869938Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-10T11:10:43.638274Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std]
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/0019b4/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt
2025-04-10T11:10:41.672914Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo]
>> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD]
>> TStorageServiceTest::ShouldSaveState
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/2gt4/0019c0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt
2025-04-10T11:10:43.298295Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD]
>> TStorageServiceTest::ShouldSaveState [GOOD]
>> TStorageServiceTest::ShouldUseGc
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo]
|86.2%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log}
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std]
|86.2%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode]
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0]
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1]
>> TStorageServiceTest::ShouldCreateCheckpoint
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std]
>> TStorageServiceTest::ShouldCreateCheckpoint [GOOD]
>> TStorageServiceTest::ShouldGetCheckpoints
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0]
>> TStorageServiceTest::ShouldUseGc [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD]
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD]
Test command err:
2025-04-10T11:10:53.108916Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7491640377578405715:2048] with connection to localhost:15740:local
2025-04-10T11:10:53.130379Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:54.375529Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:10:54.375576Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:54.376009Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:10:55.961390Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:10:55.961434Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:10:55.961872Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:56.280787Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-10T11:10:56.280831Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:56.282510Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-10T11:10:56.771027Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-04-10T11:10:56.771102Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-10T11:10:58.322605Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7491640403745188714:2048] with connection to localhost:15740:local
2025-04-10T11:10:58.322698Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:58.717381Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:10:58.717424Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:58.718339Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-10T11:10:59.219351Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-04-10T11:10:59.219400Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:00.584918Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7491640413497460588:2048] with connection to localhost:15740:local
2025-04-10T11:11:00.585027Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:00.779147Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:11:00.779186Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:00.779699Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2025-04-10T11:11:01.226915Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-04-10T11:11:01.226954Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2025-04-10T11:11:02.401402Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7491640423938937904:2048] with connection to localhost:15740:local
2025-04-10T11:11:02.401509Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:02.770518Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:11:02.770551Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:02.770806Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:11:03.985550Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:11:03.985581Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:11:03.985907Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:04.367594Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080
2025-04-10T11:11:04.367630Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:06.354231Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7491640438024469176:2048] with connection to localhost:15740:local
2025-04-10T11:11:06.354401Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:06.764075Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:11:06.764107Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:06.764609Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:11:07.827800Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:11:07.827840Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:11:07.828172Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:08.267388Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-04-10T11:11:08.267437Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:08.267801Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:08.510008Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-10T11:11:08.510049Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:08.510366Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:08.654819Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-04-10T11:11:08.654860Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1]
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1]
>> TCheckpointStorageTest::ShouldCreateCheckpoint
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1]
>> TStorageServiceTest::ShouldGetCheckpoints [GOOD]
>> TStorageServiceTest::ShouldAbortCheckpoint
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD]
>> TStateStorageTest::ShouldLoadLastSnapshot
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD]
Test command err:
2025-04-10T11:10:53.811527Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7491640384975832902:2048] with connection to localhost:15472:local
2025-04-10T11:10:53.811678Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:54.668233Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:10:54.668265Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:56.511762Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7491640394324040809:2048] with connection to localhost:15472:local
2025-04-10T11:10:56.511868Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:56.828583Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:10:56.828637Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:56.833319Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:57.288714Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-10T11:10:57.288746Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:57.289180Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:57.602721Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130
2025-04-10T11:10:57.602762Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:58.793094Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7491640407083967012:2048] with connection to localhost:15472:local
2025-04-10T11:10:58.793187Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:10:59.222988Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:10:59.223031Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:10:59.223475Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:11:00.409376Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:11:00.409415Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:11:00.409727Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:01.075622Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-04-10T11:11:01.075665Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:01.076105Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-10T11:11:01.527479Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-04-10T11:11:01.527515Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-10T11:11:01.528266Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:01.730483Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit'
2025-04-10T11:11:01.730530Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:01.738368Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:02.107151Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed'
2025-04-10T11:11:02.107189Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:02.111421Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:02.527527Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:03.990185Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7491640432788926547:2048] with connection to localhost:15472:local
2025-04-10T11:11:03.994256Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:04.283740Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:11:04.283785Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:04.286427Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:11:05.478966Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:11:05.479004Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:11:05.482371Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317
2025-04-10T11:11:05.680440Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317
2025-04-10T11:11:05.681383Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317
2025-04-10T11:11:07.325164Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7491640443890362656:2048] with connection to localhost:15472:local
2025-04-10T11:11:07.325183Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7491640448185330055:2130]
2025-04-10T11:11:07.325249Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-10T11:11:07.553561Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-10T11:11:07.554239Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-10T11:11:07.554853Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-10T11:11:08.593590Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-10T11:11:08.593626Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-10T11:11:08.593972Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:09.010728Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-04-10T11:11:09.010764Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:09.011320Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:09.298469Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed'
2025-04-10T11:11:09.302228Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded
2025-04-10T11:11:09.302272Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:09.302330Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich'
2025-04-10T11:11:09.339455Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-10T11:11:09.554437Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-04-10T11:11:09.554475Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-10T11:11:09.555015Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:09.688123Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit'
2025-04-10T11:11:09.688158Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:09.688492Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:09.845895Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed'
2025-04-10T11:11:09.845936Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded
2025-04-10T11:11:09.845962Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:09.846054Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich'
2025-04-10T11:11:09.846669Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest
2025-04-10T11:11:10.025095Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created
2025-04-10T11:11:10.025131Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse
2025-04-10T11:11:10.031109Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-10T11:11:10.132563Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2
2025-04-10T11:11:10.136922Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1
2025-04-10T11:11:10.181063Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit'
2025-04-10T11:11:10.181101Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-10T11:11:10.181483Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest
2025-04-10T11:11:10.369195Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed'
2025-04-10T11:11:10.369238Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded
2025-04-10T11:11:10.369272Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse
2025-04-10T11:11:10.369379Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich'
2025-04-10T11:11:10.369831Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:10.396383Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3
2025-04-10T11:11:10.590696Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:10.694235Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:10.706519Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:10.807204Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:10.824387Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:10.925120Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:10.946708Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.047372Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.066761Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.170540Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.185270Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.285962Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.302791Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.403466Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.416638Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.517542Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.529811Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.630855Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.640994Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.741649Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.757764Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.860890Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.878260Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:11.982259Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:11.994719Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:12.095623Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:12.128808Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:12.234700Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:12.247601Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:12.350901Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:12.372348Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-10T11:11:12.480685Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-10T11:11:12.563277Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
>> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD]
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints
>>
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> ttl_unavailable_s3.py::TestUnavailableS3::test |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> TStorageServiceTest::ShouldGetState [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-04-10T11:11:10.558314Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7491640457741817858:2048] with connection to localhost:2096:local 2025-04-10T11:11:10.558420Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got 
TEvRegisterCoordinatorRequest 2025-04-10T11:11:10.742684Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:10.742746Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:10.743597Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:11.736141Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:11:11.736184Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:13.183474Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7491640470664793500:2048] with connection to localhost:2096:local 2025-04-10T11:11:13.183606Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:13.420614Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:13.420652Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:13.421019Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:14.585306Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:11:14.585365Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:14.585714Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-10T11:11:14.838862Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-10T11:11:14.838900Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-10T11:11:14.839159Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-10T11:11:15.190128Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-10T11:11:15.190195Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-10T11:11:15.190913Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-10T11:11:15.557067Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-10T11:11:16.649139Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7491640486934478177:2048] with connection to localhost:2096:local 2025-04-10T11:11:16.649268Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:16.997540Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:16.997577Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:16.997875Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:18.212036Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:11:18.212073Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:18.212466Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-10T11:11:18.645850Z node 3 
:STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-10T11:11:18.645888Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:11:18.646313Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-10T11:11:18.941597Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-10T11:11:18.941633Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-10T11:11:18.942339Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-10T11:11:19.336761Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-10T11:11:19.336794Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:11:19.337164Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-10T11:11:19.493600Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-10T11:11:19.494074Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-10T11:11:19.495176Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-04-10T11:11:19.662856Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-04-10T11:11:19.662895Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-04-10T11:11:19.663467Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2025-04-10T11:11:19.838757Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-04-10T11:11:19.838799Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-04-10T11:11:19.839191Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-10T11:11:20.143391Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-10T11:11:21.532920Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7491640503090872613:2048] with connection to localhost:2096:local 2025-04-10T11:11:21.533075Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:21.772597Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:21.772637Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:21.774206Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:22.836156Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:11:22.836188Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:22.836588Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-10T11:11:23.061391Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-10T11:11:23.061444Z node 4 :STREAMS_STORAGE_SERVICE 
DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-10T11:11:23.061767Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-04-10T11:11:23.061801Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-04-10T11:11:23.672958Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-04-10T11:11:23.673066Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-04-10T11:11:23.673113Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-04-10T11:11:23.694428Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-04-10T11:11:24.205500Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2025-04-10T11:11:24.205572Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-04-10T11:11:24.218342Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> TCheckpointStorageTest::ShouldRegisterCoordinator >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldGetMultipleStates >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> TStateStorageTest::ShouldSaveGetIncrementBigState |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> Secret::ValidationQueryService [GOOD] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2025-04-10T11:08:14.405309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:08:14.405534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:08:14.405696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00151b/r3tmp/tmpNG5HNP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21349, node 1 TClient is connected to server localhost:3161 2025-04-10T11:08:20.125237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:08:20.318317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:20.366733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:20.366788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:20.366825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:20.367196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:08:20.431318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:20.447041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:20.475323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:32.703232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.703397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:757:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.703488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:32.804419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:08:32.837970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:08:32.973558Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:33.878625Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:823:2677], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2025-04-10T11:08:33.946100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWRmY2I2MGMtN2FkMTA4NzMtYzE0NTYwOGQtMTNmM2JjNTM=, ActorId: [1:746:2626], ActorState: ExecuteState, TraceId: 01jrfp68c88v4n317cygzcvpnd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:45.001872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T11:08:46.797185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:08:47.507609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-10T11:08:49.021337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-10T11:08:50.695560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:08:52.135022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:08:53.253854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:08:55.029935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:08:59.423119Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDgwMDczNzAtOTNlZjkwNzItYTBhM2RkM2MtNWIwZGMzNWE=, ActorId: [1:841:2687], ActorState: ExecuteState, TraceId: 01jrfp6ktbbyssk3chf681pp81, Create QueryResponse for error on request, msg: 2025-04-10T11:08:59.423294Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfp6ktbbyssk3chf681pp81", SessionId: ydb://session/3?node_id=1&id=NDgwMDczNzAtOTNlZjkwNzItYTBhM2RkM2MtNWIwZGMzNWE=, Slow query, duration: 15.181464s, status: GENERIC_ERROR, user: root@builtin, results: 0b, text: "ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`", parameters: 0b 2025-04-10T11:08:59.435843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jrfp6ktbbyssk3chf681pp81, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDgwMDczNzAtOTNlZjkwNzItYTBhM2RkM2MtNWIwZGMzNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-10T11:09:00.722044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:09:00.722115Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T11:09:47.509292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-10T11:09:50.197060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-10T11:09:53.083760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715727:0, at schemeshard: 72057594046644480 2025-04-10T11:09:54.036736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-04-10T11:10:09.641399Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTRhYmY0YjctNmMwNzI3OTctNDMyMjVhLTk1NjMzYzhl, ActorId: [1:3338:4573], ActorState: ExecuteState, TraceId: 01jrfp95dgdjq5tpafa84cdyhr, Create QueryResponse for error on request, msg: 2025-04-10T11:10:09.642893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfp95dgdjq5tpafa84cdyhr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRhYmY0YjctNmMwNzI3OTctNDMyMjVhLTk1NjMzYzhl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T11:10:24.101988Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=15; 2025-04-10T11:10:24.106267Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:10:24.106513Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:10:24.106844Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3735:4799], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3642:4799]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3735:4799].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:10:24.107436Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3728:4799], SessionActorId: [1:3642:4799], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3642:4799]. isRollback=0 2025-04-10T11:10:24.107789Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWExOTU5ZDctMmJmNGIxNGUtOTczZGZiZWQtNjJhNjExYWY=, ActorId: [1:3642:4799], ActorState: ExecuteState, TraceId: 01jrfp9n65bvq2smzf6tn57p45, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3729:4799] from: [1:3728:4799] 2025-04-10T11:10:24.107981Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3729:4799] TxId: 281474976715758. Ctx: { TraceId: 01jrfp9n65bvq2smzf6tn57p45, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWExOTU5ZDctMmJmNGIxNGUtOTczZGZiZWQtNjJhNjExYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:10:24.108266Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWExOTU5ZDctMmJmNGIxNGUtOTczZGZiZWQtNjJhNjExYWY=, ActorId: [1:3642:4799], ActorState: ExecuteState, TraceId: 01jrfp9n65bvq2smzf6tn57p45, Create QueryResponse for error on request, msg: 2025-04-10T11:10:24.118071Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jrfp9mrk7mh57ffjh44vwsja" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OWExOTU5ZDctMmJmNGIxNGUtOTczZGZiZWQtNjJhNjExYWY=" tx_control { tx_id: "01jrfp9mrk7mh57ffjh44vwsja" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-10T11:10:37.298489Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRkODg4YzctYjEyNmFiZDctZTM1ODMzODctZTU3MTA0M2Q=, ActorId: [1:3940:5018], ActorState: ExecuteState, TraceId: 01jrfpa1dw85wj0ma90rnd7vew, Create QueryResponse for error on request, msg: 2025-04-10T11:10:37.301736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715770. Ctx: { TraceId: 01jrfpa1dw85wj0ma90rnd7vew, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRkODg4YzctYjEyNmFiZDctZTM1ODMzODctZTU3MTA0M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-10T11:10:49.027204Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4273:5273], for# root@builtin, access# DescribeSchema 2025-04-10T11:10:49.027899Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4273:5273], for# root@builtin, access# DescribeSchema 2025-04-10T11:10:49.031088Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4266:5266], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:10:49.033674Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgzNGFjNjAtYmZkNTE4NTAtOWZhNzc2My0zNDI0MTNhZA==, ActorId: [1:4254:5255], ActorState: ExecuteState, TraceId: 01jrfpadmz2j004v6mag3g3dnw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:11:01.086486Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-04-10T11:11:02.169259Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzM4ZWYyMS1lOTg5ODIwOC0zZjFkYWIyNy00MmQ5NDVkYQ==, ActorId: [1:4519:5454], ActorState: ExecuteState, TraceId: 01jrfpasdz2gb22x469rwfgvmt, Create QueryResponse for error on request, msg: 2025-04-10T11:11:02.170995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715794. Ctx: { TraceId: 01jrfpasdz2gb22x469rwfgvmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM4ZWYyMS1lOTg5ODIwOC0zZjFkYWIyNy00MmQ5NDVkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:11:15.410458Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDk2M2M4Y2QtZWI0YzY1My03MzllOWU5ZC00NzA5ZDRkYQ==, ActorId: [1:4912:5748], ActorState: ExecuteState, TraceId: 01jrfpb6bz0xssmva3zsckcazw, Create QueryResponse for error on request, msg: 2025-04-10T11:11:15.411819Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715810. Ctx: { TraceId: 01jrfpb6bz0xssmva3zsckcazw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk2M2M4Y2QtZWI0YzY1My03MzllOWU5ZC00NzA5ZDRkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-10T11:11:43.483307Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715843. Ctx: { TraceId: 01jrfpc2acb8xragrk4dp9tacy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRlZWY1MDYtODc2MDUzZTAtZDc1ZTExOWItZmYzNTBmOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 E0410 11:11:47.329415076 1336039 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2025-04-10T11:11:47.329108657+00:00"} |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> Secret::Validation [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] 
Test command err: 2025-04-10T11:08:14.701659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:08:14.701879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:08:14.702023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014dd/r3tmp/tmpdlPP0p/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16122, node 1 TClient is connected to server localhost:28687 2025-04-10T11:08:20.123374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:08:20.319393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:08:20.367902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:08:20.367950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:08:20.367984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:08:20.368329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:08:20.431500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:08:20.447447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:08:20.478399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:33.961706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:33.961839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-10T11:08:44.779540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:44.779669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:44.793438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-10T11:08:45.203045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:891:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:45.203250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:45.203641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:896:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:08:45.208219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:08:45.372199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:08:46.087295Z node 1 :TX_PROXY ERROR: Actor# [1:994:2799] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:08:47.105587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:08:47.936605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-10T11:08:49.069166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:08:50.394066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:08:51.960710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:08:55.161769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:08:55.559733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-10T11:09:01.512207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:09:01.512285Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T11:09:48.181699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-10T11:09:49.589302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-10T11:09:52.822956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715724:0, at schemeshard: 72057594046644480 2025-04-10T11:09:54.173314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T11:10:23.054540Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=16; 2025-04-10T11:10:23.062409Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:10:23.062766Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:10:23.063318Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3756:4809], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3663:4809]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3756:4809].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:10:23.074983Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3749:4809], SessionActorId: [1:3663:4809], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3663:4809]. isRollback=0 2025-04-10T11:10:23.075766Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGJkNjg3ZDgtODdlN2RiZGEtMzhiMjdmZGItZmE1MzYyNGI=, ActorId: [1:3663:4809], ActorState: ExecuteState, TraceId: 01jrfp9m0p356vq1ccv8da03w8, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3750:4809] from: [1:3749:4809] 2025-04-10T11:10:23.076013Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3750:4809] TxId: 281474976715756. Ctx: { TraceId: 01jrfp9m0p356vq1ccv8da03w8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJkNjg3ZDgtODdlN2RiZGEtMzhiMjdmZGItZmE1MzYyNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:10:23.091378Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGJkNjg3ZDgtODdlN2RiZGEtMzhiMjdmZGItZmE1MzYyNGI=, ActorId: [1:3663:4809], ActorState: ExecuteState, TraceId: 01jrfp9m0p356vq1ccv8da03w8, Create QueryResponse for error on request, msg: 2025-04-10T11:10:23.166637Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jrfp9kqydp8vbtn8eh38whtt" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OGJkNjg3ZDgtODdlN2RiZGEtMzhiMjdmZGItZmE1MzYyNGI=" tx_control { tx_id: "01jrfp9kqydp8vbtn8eh38whtt" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-10T11:10:50.503127Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4329:5304], for# root@builtin, access# DescribeSchema 2025-04-10T11:10:50.503268Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4329:5304], for# root@builtin, access# DescribeSchema 2025-04-10T11:10:50.505552Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4326:5301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:10:50.508377Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRkZTZmZjMtNTUzMWU2MjgtOWEzNTUzZjgtNjI0MmVmNWQ=, ActorId: [1:4321:5297], ActorState: ExecuteState, TraceId: 01jrfpaf3q2g6vxq50kbzwf1ha, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-10T11:11:03.250740Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-10T11:11:45.907678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715846. Ctx: { TraceId: 01jrfpc4ra21m6g8d2pjahe01g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhhN2E5ZmQtOGNmOTcxNTYtYWUyYzYzMC1jOTYxNWQzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] |86.3%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-04-10T11:11:40.446239Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-10T11:11:40.841294Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-10T11:11:41.566632Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-04-10T11:11:53.952737Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-04-10T11:11:54.672571Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-04-10T11:11:54.672648Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-04-10T11:11:47.478406Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7491640611919028523:2048] with connection to localhost:2523:local 2025-04-10T11:11:47.478744Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:48.602350Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:48.602391Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:48.602843Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:49.046513Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-04-10T11:11:49.046537Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:51.426280Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7491640626983245728:2048] with connection to localhost:2523:local 2025-04-10T11:11:51.438743Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:52.570444Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-04-10T11:11:52.570477Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:54.992024Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7491640645227846195:2048] with connection to localhost:2523:local 2025-04-10T11:11:54.992119Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:11:55.556330Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:11:55.556361Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:11:55.574388Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:57.181883Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:11:57.181944Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:11:57.182307Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:11:58.026771Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Constraint violated. Table: `local/TStorageServiceTestShouldNotCreateCheckpointTwice/checkpoints_metadata`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-10T11:11:58.026817Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:12:00.386794Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7491640668301744351:2048] with connection to localhost:2523:local 2025-04-10T11:12:00.386891Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:12:00.723750Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:12:00.723797Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:12:00.726672Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-10T11:12:01.261469Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-04-10T11:12:01.261515Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:12:03.558280Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7491640682144072120:2048] with connection to localhost:2523:local 2025-04-10T11:12:03.558391Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-10T11:12:04.073681Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-10T11:12:04.073724Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-10T11:12:04.074327Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-10T11:12:05.553609Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-10T11:12:05.553650Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-10T11:12:05.554752Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-10T11:12:06.105804Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-10T11:12:06.105846Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-10T11:12:06.106611Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-10T11:12:06.342161Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-04-10T11:12:06.342207Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] |86.4%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> 
test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> HullReplWriteSst::Basic [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101232640 Size: 32981748} 749583 commit chunk# 2 {ChunkIdx: 2 Offset: 101195776 Size: 33019676} 750445 commit chunk# 3 {ChunkIdx: 3 Offset: 101220352 Size: 32997368} 749938 commit chunk# 4 {ChunkIdx: 4 Offset: 101224448 Size: 32991824} 749812 commit chunk# 5 {ChunkIdx: 5 Offset: 101236736 Size: 32980956} 749565 commit chunk# 6 {ChunkIdx: 6 Offset: 101203968 Size: 33013736} 750310 commit chunk# 7 {ChunkIdx: 7 Offset: 101240832 Size: 32974708} 749423 commit chunk# 8 
{ChunkIdx: 8 Offset: 101220352 Size: 32995432} 749894 commit chunk# 9 {ChunkIdx: 9 Offset: 101216256 Size: 32998248} 749958 commit chunk# 10 {ChunkIdx: 10 Offset: 101249024 Size: 32964588} 749193 commit chunk# 11 {ChunkIdx: 11 Offset: 101216256 Size: 33001152} 750024 commit chunk# 12 {ChunkIdx: 12 Offset: 101216256 Size: 32997764} 749947 commit chunk# 13 {ChunkIdx: 13 Offset: 101220352 Size: 32995960} 749906 commit chunk# 14 {ChunkIdx: 14 Offset: 101212160 Size: 33005420} 750121 commit chunk# 15 {ChunkIdx: 15 Offset: 101236736 Size: 32977172} 749479 commit chunk# 16 {ChunkIdx: 16 Offset: 101199872 Size: 33016420} 750371 commit chunk# 17 {ChunkIdx: 17 Offset: 101249024 Size: 32965380} 749211 commit chunk# 18 {ChunkIdx: 18 Offset: 101199872 Size: 33015320} 750346 commit chunk# 19 {ChunkIdx: 19 Offset: 101212160 Size: 33005552} 750124 commit chunk# 20 {ChunkIdx: 20 Offset: 101236736 Size: 32978272} 749504 commit chunk# 21 {ChunkIdx: 21 Offset: 101220352 Size: 32993892} 749859 commit chunk# 22 {ChunkIdx: 22 Offset: 101220352 Size: 32996268} 749913 commit chunk# 23 {ChunkIdx: 23 Offset: 101257216 Size: 32957988} 749043 commit chunk# 24 {ChunkIdx: 24 Offset: 101220352 Size: 32994684} 749877 commit chunk# 25 {ChunkIdx: 25 Offset: 101244928 Size: 32971232} 749344 commit chunk# 26 {ChunkIdx: 26 Offset: 101232640 Size: 32985048} 749658 commit chunk# 27 {ChunkIdx: 27 Offset: 101208064 Size: 33007884} 750177 commit chunk# 28 {ChunkIdx: 28 Offset: 101208064 Size: 33009644} 750217 commit chunk# 29 {ChunkIdx: 29 Offset: 101236736 Size: 32978844} 749517 commit chunk# 30 {ChunkIdx: 30 Offset: 101220352 Size: 32996268} 749913 commit chunk# 31 {ChunkIdx: 31 Offset: 101236736 Size: 32980956} 749565 commit chunk# 32 {ChunkIdx: 32 Offset: 101224448 Size: 32989272} 749754 commit chunk# 33 {ChunkIdx: 33 Offset: 101212160 Size: 33002560} 750056 commit chunk# 34 {ChunkIdx: 34 Offset: 101212160 Size: 33005024} 750112 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] |86.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> overlapping_portions.py::TestOverlappingPortions::test |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> VDiskBalancing::TestRandom_Block42 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> 
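
The bracketed test identifiers throughout this run follow pytest's parameterized node-ID convention, file.py::Class::test_name[param1-param2], so test_deduplication[tables_format_v0-content_based] is one generated case of a single test function. A sketch of how such IDs arise; the decorator below is an illustrative reconstruction, not the actual YDB test source, which may declare its parameters differently:

    import pytest

    # pytest joins the parameter values with '-' to form the bracketed suffix,
    # e.g. test_deduplication[tables_format_v0-content_based] as seen in this log.
    @pytest.mark.parametrize("tables_format,dedup_mode", [
        ("tables_format_v0", "by_deduplication_id"),
        ("tables_format_v0", "content_based"),
    ])
    def test_deduplication(tables_format, dedup_mode):
        assert dedup_mode in ("by_deduplication_id", "content_based")
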
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery >> test_retry.py::TestRetry::test_fail_first[kikimr0] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_dispatch.py::TestMapping::test_mapping >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 13137919078563160270 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2025-04-10T11:13:13.776047Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:13:13.776600Z 1 00h01m00.010512s 
:PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-10T11:13:13.954715Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> 
TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options |86.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |86.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |86.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |86.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |86.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |86.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |86.7%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed |86.7%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range |86.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13273, MsgBus: 6779 2025-04-10T11:13:15.997659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491640997866284287:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:13:15.997740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/0016c0/r3tmp/tmpjvgGGn/pdisk_1.dat 2025-04-10T11:13:16.750631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:13:16.758799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:13:16.758919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:13:16.771855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13273, node 1 2025-04-10T11:13:17.072388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:13:17.072422Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:13:17.072432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:13:17.072534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6779 TClient is connected to server localhost:6779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:13:18.256230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:18.326741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:18.615887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:18.873217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:19.008190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:20.790943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641019341122454:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:20.791065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:20.991746Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491640997866284287:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:13:20.991825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:13:21.442327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.521051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.558250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.597060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.637050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.685111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:13:21.814286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641023636090277:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:21.814388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:21.814698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641023636090282:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:21.821127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:13:21.839677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641023636090284:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:13:21.941459Z node 1 :TX_PROXY ERROR: Actor# [1:7491641023636090341:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:13:23.648928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:13:23.720877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:13:23.813603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15899, MsgBus: 25463 2025-04-10T11:13:28.268846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641053303929067:2225];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c0/r3tmp/tmpsBt36C/pdisk_1.dat 2025-04-10T11:13:28.328769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:13:28.472531Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:13:28.507537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:13:28.507630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:13:28.512671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15899, node 2 2025-04-10T11:13:28.646874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:13:28.646901Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:13:28.646910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:13:28.647044Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25463 TClient is connected to server localhost:25463 WaitRootIsUp 'Root'... 
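
The KQP_WORKLOAD_SERVICE warnings repeated through this dump ("Resource pool default not found or you don't have access permissions", then "Scheduled retry ... doublechecking", then a TX_PROXY "path exist, request accepts it" error) appear to be first-query bootstrap noise: the default workload-manager pool seems to be created on demand, and the racing creation attempt finds the path already present. A triage helper that sets such known-benign lines aside when scanning a run like this one; the pattern list is my own heuristic, not an official classification:

    import re
    import sys
    from collections import Counter

    # Assumed-benign bootstrap patterns, taken from the lines seen in this dump.
    NOISE = [
        re.compile(r"Resource pool default not found"),
        re.compile(r"Failed to fetch pool default"),
        re.compile(r"Scheduled retry for error: .*doublechecking"),
        re.compile(r"path exist, request accepts it .*EPathTypeResourcePool"),
    ]

    def triage(lines):
        """Split log lines into suspected-noise counts and everything else."""
        counts, real = Counter(), []
        for line in lines:
            for pat in NOISE:
                if pat.search(line):
                    counts[pat.pattern] += 1
                    break
            else:
                real.append(line)
        return counts, real

    if __name__ == "__main__":
        counts, real = triage(sys.stdin.read().splitlines())
        for pattern, n in counts.items():
            print(f"{n:6d}  {pattern}", file=sys.stderr)
        sys.stdout.write("\n".join(real))
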
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:13:29.388060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:29.399100Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:13:29.421016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:29.539011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:29.701844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:29.808774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:13:32.208272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641070483799825:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:32.208396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:32.242381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.288559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.342814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.390082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.445531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.489288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:13:32.562219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641070483800338:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:32.562330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:32.562529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641070483800343:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:13:32.567137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:13:32.581284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641070483800345:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:13:32.668374Z node 2 :TX_PROXY ERROR: Actor# [2:7491641070483800400:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:13:33.250274Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641053303929067:2225];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:13:33.250377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:13:34.031019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:13:34.089875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:13:34.199067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] |86.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/ttl/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 14095261263977606490 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-10T11:13:49.521647Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-10T11:13:49.521839Z 8 00h01m00.010512s :BS_NODE 
ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:223:17] ServerId# [1:301:63] TabletId# 72057594037932033 PipeClientId# [8:223:17] 2025-04-10T11:13:49.521924Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-10T11:13:49.521997Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:202:17] ServerId# [1:298:60] TabletId# 72057594037932033 PipeClientId# [5:202:17] 2025-04-10T11:13:49.522083Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-10T11:13:49.522274Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-10T11:13:49.522374Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_ttl.py::TestTTLAlterSettings::test_case >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> TStateStorageConfig::TestReplicaSelection >> TBlobStorageGroupTypeTest::TestCorrectLayout [GOOD] >> TGuardianImpl::FollowerTracker [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> TLogoBlobIdHashTest::SimpleTestPartIdDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] >> TStateStorageConfig::TestReplicaSelection [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts >> TLogoBlobTest::LogoBlobSort [GOOD] >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] >> TMemoryStatsAggregator::Aggregate_Single [GOOD] >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_OneHost [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |86.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> Path::Name_EnglishAlphabet [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> Path::Name_ExtraSymbols [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentTabletId [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentSteps [GOOD] >> TLogoBlobTest::LogoBlobParse [GOOD] >> TLogoBlobTest::LogoBlobCompare [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] Test command err:
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162
AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163
AnonRss: 36 CGroupLimit: 66 MemTotal: 65 MemAvailable: 85 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 145 SoftLimit: 165 TargetUtilization: 185 ExternalConsumption: 194 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_ExtraSymbols [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] Test command err:
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162
AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163
AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 ExternalConsumption: 306 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162
AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163
AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162
AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163
AnonRss: 36 CGroupLimit: 66 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 80 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486
AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161
AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162
AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163
AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486
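Each "Test command err" block above lists the per-node TMemoryStats records fed to the aggregator, followed by the aggregate it produced. The numbers are consistent with a simple rule set: per-process counters (AnonRss, CGroupLimit, AllocatedMemory, the cache and query-execution figures) are summed over all records; host-wide metrics (MemTotal, MemAvailable, HardLimit, SoftLimit, TargetUtilization) are combined once per distinct host, which is why the colliding-hosts case reports MemTotal 65 (max(31, 32) + 33) rather than 96; and a host's ExternalConsumption is its largest reported value minus the AnonRss of the other nodes on the same host, giving (102 - 11) + 103 = 194 above and 103 - (11 + 12) = 80 in the one-host case. The sketch below is a minimal model of that reading, with invented host names; it is an inference from the dump, not ydb/core/base's actual TMemoryStatsAggregator.

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Illustrative model of the aggregation rules inferred from the dump above.
// Not YDB's implementation; the host assignments are invented for the example.
struct TMemoryStats {
    std::string Host;
    uint64_t AnonRss = 0;             // per-process counter: summed
    uint64_t MemTotal = 0;            // host-wide metric: counted once per host
    uint64_t ExternalConsumption = 0; // host-wide, net of co-located nodes
};

TMemoryStats Aggregate(const std::vector<TMemoryStats>& records) {
    struct THost {
        uint64_t MemTotal = 0;
        uint64_t MaxExternal = 0;
        uint64_t MaxExternalRss = 0; // AnonRss of the node reporting MaxExternal
        uint64_t AnonRssSum = 0;
    };
    TMemoryStats total;
    std::map<std::string, THost> hosts;
    for (const auto& r : records) {
        total.AnonRss += r.AnonRss;  // process-level counters are plain sums
        THost& h = hosts[r.Host];
        h.MemTotal = std::max(h.MemTotal, r.MemTotal);
        h.AnonRssSum += r.AnonRss;
        if (r.ExternalConsumption >= h.MaxExternal) {
            h.MaxExternal = r.ExternalConsumption;
            h.MaxExternalRss = r.AnonRss;
        }
    }
    for (const auto& [name, h] : hosts) {
        total.MemTotal += h.MemTotal; // one value per host, not per node
        // The other local nodes' RSS is already inside the host's external
        // figure, so subtract it to avoid double counting.
        uint64_t otherRss = h.AnonRssSum - h.MaxExternalRss;
        total.ExternalConsumption +=
            h.MaxExternal > otherRss ? h.MaxExternal - otherRss : 0;
    }
    return total;
}

int main() {
    // Colliding-hosts case: two nodes share "host-a", one runs on "host-b".
    std::vector<TMemoryStats> records = {
        {"host-a", 11, 31, 101},
        {"host-a", 12, 32, 102},
        {"host-b", 13, 33, 103},
    };
    TMemoryStats agg = Aggregate(records);
    std::cout << "AnonRss: " << agg.AnonRss << " MemTotal: " << agg.MemTotal
              << " ExternalConsumption: " << agg.ExternalConsumption << "\n";
}

Built with -std=c++17 this prints AnonRss: 36 MemTotal: 65 ExternalConsumption: 194, matching the aggregate line of the colliding-hosts dump.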
>> Path::CanonizeOld [GOOD] >> Path::CanonizeFast [GOOD] >> Path::CanonizedStringIsSame1 [GOOD] >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_AllSymbols [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobTest::LogoBlobCompare [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_AllSymbols [GOOD] >> TableIndex::CompatibleSecondaryIndex [GOOD] >> TableIndex::NotCompatibleSecondaryIndex [GOOD] >> TableIndex::CompatibleVectorIndex [GOOD] >> TableIndex::NotCompatibleVectorIndex [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TableIndex::NotCompatibleVectorIndex [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpWrite::CastValues >> KqpWrite::Insert >> KqpImmediateEffects::Upsert >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> KqpImmediateEffects::DeleteAfterUpsert >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpEffects::InsertAbort_Params_Success >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> KqpEffects::InsertRevert_Literal_Success >> KqpImmediateEffects::UpdateAfterInsert >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |87.0%| [TM] {asan,
default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations [GOOD] >> TStateStorageConfig::UniformityTest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> KqpImmediateEffects::UpdateOn |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> KqpImmediateEffects::Insert >> KqpImmediateEffects::InsertDuplicates+UseSink |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::DeletePkPrefixWithIndex |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::UpdateOn_Literal >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> 
KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpWrite::UpsertNullKey >> TStateStorageConfig::UniformityTest [GOOD] >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::Interactive |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfig::UniformityTest [GOOD] >> KqpImmediateEffects::MultiShardUpsertAfterRead |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |87.0%| [TA] $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {RESULT} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> KqpEffects::InsertAbort_Literal_Success |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpWrite::CastValuesOptional [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> test_dispatch.py::TestMapping::test_idle >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 10658, MsgBus: 25227 2025-04-10T11:14:10.391585Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641234639482481:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.392177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001227/r3tmp/tmpLuGsFm/pdisk_1.dat 2025-04-10T11:14:10.969942Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.976578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.976710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.983066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10658, node 1 2025-04-10T11:14:11.142743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.142767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.142775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.142891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25227 TClient is connected to server localhost:25227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.906318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:11.932512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:11.944570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.249384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:12.540684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.620903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:14.339371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251819353288:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.339495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.717801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.755318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.847203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.886116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.920698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.975353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:15.062518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641256114321104:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.062601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.063064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641256114321109:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.066904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:15.082531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641256114321111:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:15.159857Z node 1 :TX_PROXY ERROR: Actor# [1:7491641256114321166:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.407116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641234639482481:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.407188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7824, MsgBus: 22316 2025-04-10T11:14:17.495975Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262402998845:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.496257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001227/r3tmp/tmp7K5bWv/pdisk_1.dat 2025-04-10T11:14:17.576185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.621312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.621417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7824, node 2 2025-04-10T11:14:17.623509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:17.714018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.714050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.714058Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.714189Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22316 TClient is connected to server localhost:22316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
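The warning sequence just above recurs for every fresh test database in this log: the workload service's TPoolFetcherActor fails to fetch the default resource pool with NOT_FOUND, a TPoolCreatorActor is scheduled and retries while its transaction reports "completed, doublechecking", and the final TX_PROXY "path exist, request accepts it" error is benign because a concurrent session already created the pool. A minimal sketch of that create-if-missing pattern, with a hypothetical TSchemeCatalog standing in for the schemeshard path registry (an illustration of the observed sequence, not KQP's actual workload-manager code):

#include <iostream>
#include <string>
#include <unordered_set>

enum class EStatus { Success, NotFound, AlreadyExists };

// Hypothetical stand-in for the schemeshard path registry.
struct TSchemeCatalog {
    std::unordered_set<std::string> Paths;
    EStatus Create(const std::string& p) {
        return Paths.insert(p).second ? EStatus::Success : EStatus::AlreadyExists;
    }
    bool Exists(const std::string& p) const { return Paths.count(p) > 0; }
};

EStatus EnsureDefaultPool(TSchemeCatalog& catalog) {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    if (catalog.Exists(path)) {
        return EStatus::Success;  // fetch succeeded, nothing to bootstrap
    }
    // Fetch returned NOT_FOUND: try to create the pool ourselves.
    EStatus st = catalog.Create(path);
    if (st == EStatus::AlreadyExists) {
        // "path exist, request accepts it": another session won the race
        // between our fetch and our create; the pool exists either way.
        return EStatus::Success;
    }
    return st;
}

int main() {
    TSchemeCatalog catalog;
    std::cout << static_cast<int>(EnsureDefaultPool(catalog)) << "\n"; // creates the pool
    std::cout << static_cast<int>(EnsureDefaultPool(catalog)) << "\n"; // finds it existing
}

The AlreadyExists branch corresponds to the "path exist, request accepts it" message in the log: losing the creation race still leaves the pool in place, so bootstrap proceeds.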
2025-04-10T11:14:18.207211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.218861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.236971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.367290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.569684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.668962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.667389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641275287902342:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.667485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.714828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.786783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.867607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.909687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.950931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.027331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.089036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279582870160:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.089157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.089602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279582870165:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.093763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.108527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641279582870167:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:21.173225Z node 2 :TX_PROXY ERROR: Actor# [2:7491641279582870220:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.477706Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262402998845:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.477774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22863, MsgBus: 14396 2025-04-10T11:14:10.206534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641232181216162:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.206634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00122d/r3tmp/tmpWoY6js/pdisk_1.dat 2025-04-10T11:14:10.739448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.741048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.741981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.757379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22863, node 1 2025-04-10T11:14:11.064649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.064670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.064675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.064757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14396 TClient is connected to server localhost:14396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:11.905386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:11.956489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:12.102905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:12.392126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.508092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:13.972117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641245066119844:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:13.972247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.560626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.638413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.671947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.707829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.741532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.815349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.891652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641249361087657:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.891755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.892015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641249361087662:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.895752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:14.911124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641249361087664:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:15.004044Z node 1 :TX_PROXY ERROR: Actor# [1:7491641249361087721:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.206864Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641232181216162:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.206935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8734, MsgBus: 16688 2025-04-10T11:14:17.240325Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262685066796:2084];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00122d/r3tmp/tmpI0SEFt/pdisk_1.dat 2025-04-10T11:14:17.285870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:14:17.333266Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8734, node 2 2025-04-10T11:14:17.384301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.384387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.385422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:17.426608Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.426630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.426636Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.426732Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16688 TClient is connected to server localhost:16688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
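Further down in this dump the InsertAbort tests drive an INSERT into an already-existing key: the shard answers with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", code 2012), the KQP sink wraps it as "Constraint violated. Table: `/Root/TwoShard`.", and the executer fails the transaction with PRECONDITION_FAILED carrying the shard error as a subissue. A small model of that issue nesting (illustrative only, not the real NYql issue classes):

#include <iostream>
#include <string>
#include <vector>

// Minimal issue tree, mimicking how the nested errors render in this log.
struct TIssue {
    std::string Message;
    int Code = 0;
    std::vector<TIssue> Subissues;

    void Print(std::ostream& os, int indent = 0) const {
        os << std::string(indent, ' ') << ": Error: " << Message
           << ", code: " << Code << "\n";
        for (const auto& s : Subissues) {
            s.Print(os, indent + 2);
        }
    }
};

int main() {
    // Shard-level failure, as in "Duplicate keys have been found., code: 2012".
    TIssue shard{"Duplicate keys have been found.", 2012, {}};
    // The sink/executer wraps it with table context before replying.
    TIssue top{"Constraint violated. Table: `/Root/TwoShard`.", 2012, {shard}};
    top.Print(std::cout);  // the PRECONDITION_FAILED issue tree sent back
}

The ": Error: ... subissue: { ... }" fragments in the KQP_EXECUTER log line are such a tree serialized by the server.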
2025-04-10T11:14:17.975510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:17.987231Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.004331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.124868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.377831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.470645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.479910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641275569970410:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.479963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.552515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.592427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.627510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.662588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.696986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.741532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.823589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641275569970924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.823694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.823836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641275569970929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.826952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:20.836894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641275569970932:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:20.894241Z node 2 :TX_PROXY ERROR: Actor# [2:7491641275569970984:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:21.791730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.045160Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715673; 2025-04-10T11:14:22.058296Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641284159906024:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7491641279864938574:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7491641284159906024:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:22.058821Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641284159906013:2496], SessionActorId: [2:7491641279864938574:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7491641279864938574:2496]. isRollback=0 2025-04-10T11:14:22.059051Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTIzMDE2MzktZjkzY2YwYjItMmYzOTFhNjctNWQxZjlmNmU=, ActorId: [2:7491641279864938574:2496], ActorState: ExecuteState, TraceId: 01jrfpgxkb47p8h0x1gzz1qaq9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491641284159906014:2496] from: [2:7491641284159906013:2496] 2025-04-10T11:14:22.059146Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641284159906014:2496] TxId: 281474976715673. Ctx: { TraceId: 01jrfpgxkb47p8h0x1gzz1qaq9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTIzMDE2MzktZjkzY2YwYjItMmYzOTFhNjctNWQxZjlmNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:22.059344Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTIzMDE2MzktZjkzY2YwYjItMmYzOTFhNjctNWQxZjlmNmU=, ActorId: [2:7491641279864938574:2496], ActorState: ExecuteState, TraceId: 01jrfpgxkb47p8h0x1gzz1qaq9, Create QueryResponse for error on request, msg: 2025-04-10T11:14:22.242236Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262685066796:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.242362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpImmediateEffects::Interactive [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 27026, MsgBus: 22101 2025-04-10T11:14:10.231571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641233358416919:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.231641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001234/r3tmp/tmpmdCAqQ/pdisk_1.dat 2025-04-10T11:14:10.854443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.856135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.856227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.864825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27026, node 1 2025-04-10T11:14:11.098041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.098069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.098076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.099016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22101 TClient is connected to server localhost:22101 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.933681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:11.981267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.236258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.464515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.625300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:14.348225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250538287647:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.348368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.735327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.771176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.817147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.854325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.899222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.971878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:15.065228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254833255462:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.065315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.065593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254833255467:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.069527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:15.082496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641254833255469:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:15.187099Z node 1 :TX_PROXY ERROR: Actor# [1:7491641254833255525:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:15.231872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641233358416919:2276];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.231959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 11151, MsgBus: 28106
2025-04-10T11:14:17.264450Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262867498066:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.264523Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001234/r3tmp/tmpJobaKw/pdisk_1.dat
2025-04-10T11:14:17.387102Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11151, node 2
2025-04-10T11:14:17.408000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:17.408090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:17.409513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:14:17.510161Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:17.510186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:17.510194Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:17.510335Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28106
TClient is connected to server localhost:28106
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:18.070301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.078548Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:14:18.092587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.161555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.405914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.529545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:20.830085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641275752401734:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:20.830189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:20.913831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:20.990708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.022231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.062224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.101868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.179439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.256464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641280047369547:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.256578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.256640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641280047369552:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.260653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:21.286600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641280047369554:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:21.342197Z node 2 :TX_PROXY ERROR: Actor# [2:7491641280047369609:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:22.266288Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262867498066:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:22.266374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD]
Test command err:
Trying to start YDB, gRPC: 14139, MsgBus: 62366
2025-04-10T11:14:10.273580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641234392161755:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:10.274700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00124b/r3tmp/tmpH445Ks/pdisk_1.dat
2025-04-10T11:14:10.890771Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:10.901139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:10.901247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:10.903183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 14139, node 1
2025-04-10T11:14:11.055147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:11.055171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:11.055182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:11.055323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62366
TClient is connected to server localhost:62366
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:12.150958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.175044Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:14:12.187730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.448503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.681903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.777758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.375760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251572032587:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.375861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.694241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.732915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.765917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.813474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.852276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.925451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.985199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251572033106:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.985280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.985707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251572033111:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.989690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:15.000914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641251572033113:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:15.084270Z node 1 :TX_PROXY ERROR: Actor# [1:7491641255867000462:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:15.270581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641234392161755:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.270650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:16.141248Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3;
2025-04-10T11:14:16.151427Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-04-10T11:14:16.151603Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-04-10T11:14:16.151850Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641260161968073:2497], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491641260161968051:2497]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491641260161968073:2497].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-10T11:14:16.152419Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641260161968066:2497], SessionActorId: [1:7491641260161968051:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641260161968051:2497]. isRollback=0
2025-04-10T11:14:16.152665Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg2YWY4YWEtY2UzMmEyZmQtODQ1YmNlYjMtY2E2MDgzZjA=, ActorId: [1:7491641260161968051:2497], ActorState: ExecuteState, TraceId: 01jrfpgqvh7jpsqeqs8az46nw6, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641260161968067:2497] from: [1:7491641260161968066:2497]
2025-04-10T11:14:16.152739Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641260161968067:2497] TxId: 281474976710671. Ctx: { TraceId: 01jrfpgqvh7jpsqeqs8az46nw6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg2YWY4YWEtY2UzMmEyZmQtODQ1YmNlYjMtY2E2MDgzZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-04-10T11:14:16.152950Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg2YWY4YWEtY2UzMmEyZmQtODQ1YmNlYjMtY2E2MDgzZjA=, ActorId: [1:7491641260161968051:2497], ActorState: ExecuteState, TraceId: 01jrfpgqvh7jpsqeqs8az46nw6, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 13092, MsgBus: 24251
2025-04-10T11:14:17.315211Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262629902025:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.315274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00124b/r3tmp/tmpxsfC8T/pdisk_1.dat
2025-04-10T11:14:17.480187Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13092, node 2
2025-04-10T11:14:17.526896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:17.526986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:17.532359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:14:17.572912Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:17.572932Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:17.572939Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:17.573057Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24251
TClient is connected to server localhost:24251
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:18.115687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.137267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.257193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.464573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.547437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:21.094040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279809772985:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.094247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.145126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.194989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.235678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.277870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.346818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.436448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.530317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279809773515:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.536451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:21.559685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641279809773517:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:21.571014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279809773510:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.571118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.647447Z node 2 :TX_PROXY ERROR: Actor# [2:7491641279809773572:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:22.317864Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262629902025:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:22.317935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:22.654595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD]
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD]
Test command err:
Trying to start YDB, gRPC: 9348, MsgBus: 28610
2025-04-10T11:14:10.202687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641233533835482:2204];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:10.209545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001233/r3tmp/tmpq4o2tC/pdisk_1.dat
2025-04-10T11:14:10.827919Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:10.856671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:10.856769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:10.859342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9348, node 1
2025-04-10T11:14:11.061984Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:11.062004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:11.062016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:11.062115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28610
TClient is connected to server localhost:28610
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:11.953171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:11.993981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:14:12.253114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T11:14:12.473564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:14:12.573964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.046839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250713706298:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.046927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.560238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.600172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.639528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.672723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.698619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.776021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.861261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250713706814:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.861348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.861667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250713706820:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.864917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:14.875678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641250713706822:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:14.946435Z node 1 :TX_PROXY ERROR: Actor# [1:7491641250713706876:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:15.201635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641233533835482:2204];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.201695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:16.083259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 63028, MsgBus: 23489
2025-04-10T11:14:17.468461Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262666801748:2208];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001233/r3tmp/tmpj8E0iN/pdisk_1.dat
2025-04-10T11:14:17.601769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:14:17.680831Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:17.701471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:17.701550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:17.702830Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63028, node 2
2025-04-10T11:14:17.846624Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:17.846649Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:17.846655Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:17.846760Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23489
TClient is connected to server localhost:23489
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:18.255301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.262118Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:14:18.272789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.378942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.593701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.664775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:21.710294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279846672530:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.710417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.777277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.844253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.890209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.967734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.004096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.063442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.159468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641284141640346:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.159554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.159755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641284141640351:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.163713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:22.175094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641284141640353:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:22.268809Z node 2 :TX_PROXY ERROR: Actor# [2:7491641284141640409:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:22.461105Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262666801748:2208];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:22.461163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:23.240764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> KqpWrite::UpsertNullKey [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 25267, MsgBus: 64640
2025-04-10T11:14:10.317837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641233304417086:2195];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:10.318359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00123a/r3tmp/tmpf6UcQJ/pdisk_1.dat
2025-04-10T11:14:10.926026Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:10.931269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:10.931386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:10.935302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25267, node 1
2025-04-10T11:14:11.070031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:11.070055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:11.070061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:11.070176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64640
TClient is connected to server localhost:64640
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:11.806869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:11.838660Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:14:11.852706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.068631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.285191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.380213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.107454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250484287904:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.107595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.565740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.614670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.642479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.673616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.731649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.806208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.882320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250484288422:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.882448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.886302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250484288427:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.890417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:14.901786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641250484288429:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:14.968717Z node 1 :TX_PROXY ERROR: Actor# [1:7491641250484288482:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:15.285193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641233304417086:2195];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.285253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:16.253620Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3;
2025-04-10T11:14:16.266957Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-04-10T11:14:16.267139Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-04-10T11:14:16.267385Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641259074223417:2497], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491641259074223374:2497]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491641259074223417:2497].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-10T11:14:16.267968Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641259074223410:2497], SessionActorId: [1:7491641259074223374:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641259074223374:2497]. isRollback=0
2025-04-10T11:14:16.268189Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM3MzJiODEtMjk2NWU1NDUtN2M5NGMzNWYtMmVhODZlMmE=, ActorId: [1:7491641259074223374:2497], ActorState: ExecuteState, TraceId: 01jrfpgqvhc9v0zvqepaktkw2b, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641259074223411:2497] from: [1:7491641259074223410:2497]
2025-04-10T11:14:16.268255Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641259074223411:2497] TxId: 281474976710671. Ctx: { TraceId: 01jrfpgqvhc9v0zvqepaktkw2b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM3MzJiODEtMjk2NWU1NDUtN2M5NGMzNWYtMmVhODZlMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-04-10T11:14:16.268455Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM3MzJiODEtMjk2NWU1NDUtN2M5NGMzNWYtMmVhODZlMmE=, ActorId: [1:7491641259074223374:2497], ActorState: ExecuteState, TraceId: 01jrfpgqvhc9v0zvqepaktkw2b, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 28615, MsgBus: 28455
2025-04-10T11:14:17.314083Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641263902453985:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.314135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00123a/r3tmp/tmp8jlMSs/pdisk_1.dat
2025-04-10T11:14:17.453813Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:17.504209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:17.504296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:17.505652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28615, node 2
2025-04-10T11:14:17.570457Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:17.570477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:17.570484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:17.570603Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28455
TClient is connected to server localhost:28455
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:18.095550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.110532Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:14:18.120292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.199046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.420710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.532815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.009860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281082324940:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.009935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.080284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.123847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.197527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.230429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.270577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.317061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.406751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281082325458:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.406834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.407113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281082325463:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.411681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.423777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641281082325465:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:21.508342Z node 2 :TX_PROXY ERROR: Actor# [2:7491641281082325520:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.318335Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641263902453985:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.318415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:23.226420Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641289672260449:2502], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZTcyYTU3MjEtNjEyMjc3YTktZjViNzkyYTEtOTY5OTg0MTU=. TraceId : 01jrfpgye45f4t7r53py54srkg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:14:23.226826Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641289672260450:2503], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jrfpgye45f4t7r53py54srkg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZTcyYTU3MjEtNjEyMjc3YTktZjViNzkyYTEtOTY5OTg0MTU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491641289672260446:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:23.227379Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTcyYTU3MjEtNjEyMjc3YTktZjViNzkyYTEtOTY5OTg0MTU=, ActorId: [2:7491641285377293076:2489], ActorState: ExecuteState, TraceId: 01jrfpgye45f4t7r53py54srkg, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 13688, MsgBus: 13090 2025-04-10T11:14:10.228396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641235329306917:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.228459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001245/r3tmp/tmp72U7ib/pdisk_1.dat 2025-04-10T11:14:10.735468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.828307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.828435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.839043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13688, node 1 2025-04-10T11:14:11.058684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.058712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.058739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.058860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13090 TClient is connected to server localhost:13090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.928827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
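The recurring "Resource pool default not found or you don't have access permissions" warnings are the workload service probing `.metadata/workload_manager/pools/default` before its lazy creation has landed; the TPoolCreatorActor retry ("Transaction ... completed, doublechecking") and the TX_PROXY "path exist, request accepts it" error are the benign end of that race, after which the pool exists. Where this startup noise matters, a pool can be created explicitly up front. A hedged sketch via the query service follows; the pool name and limits are hypothetical, and CREATE RESOURCE POOL assumes a YDB server and SDK recent enough to include workload manager support.

import ydb

# Hedged sketch: create a resource pool explicitly instead of relying on
# the lazy creation path. Connection parameters are assumptions.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

pool = ydb.QuerySessionPool(driver)
# If a concurrent creator wins the race, this fails with "path exist",
# the same outcome the TX_PROXY line above reports and accepts.
pool.execute_with_retries(
    """
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
    """
)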
2025-04-10T11:14:11.951442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:11.967646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.196580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.411282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.548561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:14.073231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641252509177868:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.073387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.562280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.594778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.631845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.667497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.711295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.741423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.825206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641252509178381:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.825303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.825500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641252509178386:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.831015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:14.847613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641252509178388:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:14.903091Z node 1 :TX_PROXY ERROR: Actor# [1:7491641252509178441:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.228617Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641235329306917:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.228699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:16.099767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28145, MsgBus: 12584 2025-04-10T11:14:17.628576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641264264748483:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001245/r3tmp/tmp2Vk3fH/pdisk_1.dat 2025-04-10T11:14:17.714614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:14:17.831872Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.834314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.834385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.835576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28145, node 2 2025-04-10T11:14:17.995508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.995534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.995541Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.995641Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12584 TClient is connected to server localhost:12584 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:14:18.522520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.533408Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.538062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.624314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.859210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.937131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.375461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281444619268:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.375574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.423204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.463246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.501794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.576396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.667025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.731610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.811230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281444619788:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.811341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.811747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281444619794:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.816558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.836547Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:14:21.837040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641281444619796:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:21.901168Z node 2 :TX_PROXY ERROR: Actor# [2:7491641281444619852:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.602070Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641264264748483:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.602178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:22.943383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.698408Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2025-04-10T11:14:23.698790Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 8 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:14:23.700344Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 8 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:14:23.700625Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641290034555048:2499], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641285739587444:2499]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641290034555048:2499].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:14:23.701389Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641290034555041:2499], SessionActorId: [2:7491641285739587444:2499], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491641285739587444:2499]. isRollback=0 2025-04-10T11:14:23.701687Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ1Yzk4YWYtM2JlNWE0MGEtZTk1NTllYTctZWE5YTRlMmI=, ActorId: [2:7491641285739587444:2499], ActorState: ExecuteState, TraceId: 01jrfpgz5x5p1b74m08ednymfw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491641290034555112:2499] from: [2:7491641290034555041:2499] 2025-04-10T11:14:23.701801Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641290034555112:2499] TxId: 281474976715678. Ctx: { TraceId: 01jrfpgz5x5p1b74m08ednymfw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQ1Yzk4YWYtM2JlNWE0MGEtZTk1NTllYTctZWE5YTRlMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:14:23.702001Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ1Yzk4YWYtM2JlNWE0MGEtZTk1NTllYTctZWE5YTRlMmI=, ActorId: [2:7491641285739587444:2499], ActorState: ExecuteState, TraceId: 01jrfpgz5x5p1b74m08ednymfw, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::ManyFlushes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12140, MsgBus: 12142 2025-04-10T11:14:10.200418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641233024274460:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.202377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001250/r3tmp/tmp9K2spd/pdisk_1.dat 2025-04-10T11:14:10.815149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.815256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.818300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.820859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12140, node 1 2025-04-10T11:14:11.060044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.060066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.060082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.060195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12142 TClient is connected to server localhost:12142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.933380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
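The STATUS_LOCKS_BROKEN / "Transaction locks invalidated ... code: 2001" sequence above is optimistic lock invalidation: the transaction under test read a range of `/Root/TestImmediateEffects`, a second transaction committed a write into that range first, and the first commit then aborted once the data shard reported the read lock broken. A hedged sketch of the same interleaving with the ydb Python SDK; the table and connection details are illustrative assumptions, not values from this run.

import ydb

# Hedged sketch: break an optimistic read lock across two sessions.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

pool.retry_operation_sync(lambda s: s.execute_scheme(
    "CREATE TABLE demo (id Uint32, val Utf8, PRIMARY KEY (id));"
))

s1, s2 = pool.acquire(), pool.acquire()
try:
    tx1 = s1.transaction(ydb.SerializableReadWrite())
    tx1.execute("SELECT val FROM demo WHERE id = 1u;")  # tx1 takes a read lock

    # A second transaction commits a conflicting write while tx1 is open.
    s2.transaction(ydb.SerializableReadWrite()).execute(
        "UPSERT INTO demo (id, val) VALUES (1u, 'other');", commit_tx=True
    )

    tx1.execute("UPSERT INTO demo (id, val) VALUES (1u, 'mine');")
    tx1.commit()  # ABORTED: "Transaction locks invalidated", code 2001
except ydb.Error as e:
    print("expected lock invalidation:", e)
finally:
    pool.release(s1)
    pool.release(s2)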
2025-04-10T11:14:11.962341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.139468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:12.377622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:12.505865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:13.989731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641245909178102:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:13.989819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.559746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.595056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.634932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.707274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.755306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.820873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.883885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250204145913:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.883990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.884187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250204145918:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.888528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:14.901589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641250204145920:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:14.965706Z node 1 :TX_PROXY ERROR: Actor# [1:7491641250204145975:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.199623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641233024274460:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.199686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:16.133579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21343, MsgBus: 17784 2025-04-10T11:14:17.450882Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641263864991694:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.450918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001250/r3tmp/tmp5JYxHX/pdisk_1.dat 2025-04-10T11:14:17.656046Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.686661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.686738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.687958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21343, node 2 2025-04-10T11:14:17.825712Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.825742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.825750Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.825878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17784 TClient is connected to server localhost:17784 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:18.661909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.674796Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.696845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.809816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:19.017846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.113374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.541236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281044862656:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.541355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.622280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.656163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.701368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.749157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.831956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.880482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.953238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281044863174:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.953372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.953735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281044863179:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.957015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.976141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641281044863181:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:22.069278Z node 2 :TX_PROXY ERROR: Actor# [2:7491641285339830533:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.451150Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641263864991694:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.451219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:23.307668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpWrite::InsertRevert [GOOD] >> KqpImmediateEffects::Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25649, MsgBus: 19737 2025-04-10T11:14:10.251393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641235864234284:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.253270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00122e/r3tmp/tmpa50a61/pdisk_1.dat 2025-04-10T11:14:10.882836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.900522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.900640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.903262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25649, node 1 2025-04-10T11:14:11.060893Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.060917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.060923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.061042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19737 TClient is connected to server localhost:19737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.909943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:11.949237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:11.974118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.139809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.395720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:12.494617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.244911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641253044105238:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.245071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.609596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.642593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.677601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.716186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.757925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.830638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.915778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641253044105755:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.915881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.916068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641253044105760:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.920979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:14.937582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641253044105762:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:15.000542Z node 1 :TX_PROXY ERROR: Actor# [1:7491641253044105817:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.254138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641235864234284:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.254216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:16.067492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29962, MsgBus: 15221 2025-04-10T11:14:17.654289Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641264935932222:2090];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.662924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00122e/r3tmp/tmpcYynEB/pdisk_1.dat 2025-04-10T11:14:17.818687Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.829723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.829802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.831249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29962, node 2 2025-04-10T11:14:18.001918Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:18.001946Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:18.001954Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:18.002071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15221 TClient is connected to server localhost:15221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:18.720001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.737233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.873630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.095942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.173368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.882803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641282115803133:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.882942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.913714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.986918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.035937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.079190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.121597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.207293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.364123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641286410770953:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.364222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.364671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641286410770958:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.369360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:22.386942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641286410770960:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:22.456619Z node 2 :TX_PROXY ERROR: Actor# [2:7491641286410771016:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.653246Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641264935932222:2090];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.653610Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:23.810181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::BigRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7046, MsgBus: 26176 2025-04-10T11:14:10.225652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641232856205055:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.225711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001228/r3tmp/tmpFnYSAx/pdisk_1.dat 2025-04-10T11:14:10.941601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.952376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.952470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.955864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7046, node 1 2025-04-10T11:14:11.192300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.192349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.192359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.192510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26176 TClient is connected to server localhost:26176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:11.824703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:11.840408Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:14:11.852344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.128828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.410313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.561487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.657353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250036076018:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.657449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.941544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.019560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.050460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.083260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.115079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.184850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.226097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641232856205055:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.226170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:15.248243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254331043829:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.248342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.248577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254331043834:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.253995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:15.278498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641254331043836:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:15.343497Z node 1 :TX_PROXY ERROR: Actor# [1:7491641254331043888:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:16.464924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 20259, MsgBus: 23332
2025-04-10T11:14:17.987064Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262497908813:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.987115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001228/r3tmp/tmpElaJoY/pdisk_1.dat
2025-04-10T11:14:18.152124Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:18.177186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:18.177260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:18.178996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20259, node 2
2025-04-10T11:14:18.381546Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:18.381568Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:18.381575Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:18.381694Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23332
TClient is connected to server localhost:23332
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:19.074389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.088075Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:14:19.101970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.185918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.357790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.472920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:22.054346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641283972747071:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.054408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.129770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.193695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.259384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.345744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.392065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.445767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.508515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641283972747587:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.508627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.509565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641283972747592:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.513732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:22.533386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641283972747594:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:22.619149Z node 2 :TX_PROXY ERROR: Actor# [2:7491641283972747650:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:22.987623Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262497908813:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:22.987694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:23.827649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 64467, MsgBus: 5434
2025-04-10T11:14:10.212880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641232036792202:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:10.212938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001268/r3tmp/tmpwbjE1U/pdisk_1.dat
2025-04-10T11:14:10.874099Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:10.893210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:10.893351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:10.903013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 64467, node 1
2025-04-10T11:14:11.055408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:11.055439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:11.055447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:11.055651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5434
TClient is connected to server localhost:5434
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:12.077675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.111168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:14:12.384362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T11:14:12.612895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.724291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.327387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641249216663156:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.327496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.672109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.704269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.744077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.785620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.827304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.875135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:14.927969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641249216663670:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.928062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.928124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641249216663675:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.932131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:14.945985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641249216663677:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:15.023423Z node 1 :TX_PROXY ERROR: Actor# [1:7491641253511631026:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:15.214384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641232036792202:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.214446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:16.099568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 25057, MsgBus: 16451
2025-04-10T11:14:17.735775Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641262638134468:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.735847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001268/r3tmp/tmpnUDqJU/pdisk_1.dat
2025-04-10T11:14:17.878667Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:17.892854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:17.892954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:17.894786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25057, node 2
2025-04-10T11:14:17.947653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:17.947679Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:17.947689Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:17.947825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16451
TClient is connected to server localhost:16451
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-10T11:14:18.712241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:14:18.745465Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:14:18.760240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.840690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.011327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.101690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:21.513104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279818005427:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.513208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.545343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.613734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.658766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.721139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.773251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.892271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:21.980834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279818005947:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.980931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.981344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641279818005952:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.985183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:22.001246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641279818005954:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:22.084812Z node 2 :TX_PROXY ERROR: Actor# [2:7491641284112973305:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:22.734476Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641262638134468:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:22.734547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:23.491648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD]
Test command err: Trying to start YDB, gRPC: 28871, MsgBus: 28882
2025-04-10T11:14:10.335357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641233335650107:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:10.335394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001239/r3tmp/tmppij7zK/pdisk_1.dat
2025-04-10T11:14:11.194522Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:11.226088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:11.226305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:11.234202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28871, node 1
2025-04-10T11:14:11.372434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:11.372457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:11.372464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:11.372585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28882
TClient is connected to server localhost:28882
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:12.184520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.227232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.414869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.672802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:12.779767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.711898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641250515521056:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:14.712070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.076031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.112062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.146646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.186429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.222008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.297261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:15.335865Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641233335650107:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:15.335942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:15.390279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254810488874:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.390379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.391032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641254810488879:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:15.396253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:15.407292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641254810488881:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:15.508052Z node 1 :TX_PROXY ERROR: Actor# [1:7491641254810488938:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:16.525124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 27945, MsgBus: 2404
2025-04-10T11:14:18.097432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641268795669700:2057];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:18.115590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001239/r3tmp/tmpfFVGpy/pdisk_1.dat
2025-04-10T11:14:18.280866Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:18.294423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:18.294506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:18.296105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27945, node 2
2025-04-10T11:14:18.352240Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:18.352262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:18.352270Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:18.352388Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2404
TClient is connected to server localhost:2404
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-10T11:14:18.712241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:14:18.913650Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:18.924189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T11:14:19.044623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.258018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:19.365037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:21.851280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641281680573363:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.851440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:21.957854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.018044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.057075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.129136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.177648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.257046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:22.344088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285975541181:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.344190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.344482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285975541186:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:22.348429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:22.366227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641285975541188:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:22.434301Z node 2 :TX_PROXY ERROR: Actor# [2:7491641285975541243:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:23.098645Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641268795669700:2057];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:23.098725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:23.378229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> KqpImmediateEffects::Delete
>> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD]
>> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD]
Test command err: Trying to start YDB, gRPC: 2743, MsgBus: 21732
2025-04-10T11:14:12.524213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641240999107315:2078];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:12.525272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001221/r3tmp/tmp4kwjAQ/pdisk_1.dat
2025-04-10T11:14:13.062211Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:13.064714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:13.064806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:13.067959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2743, node 1
2025-04-10T11:14:13.258928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:13.258951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:13.258959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:13.259077Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21732
TClient is connected to server localhost:21732
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:13.879408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:13.895389Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:14:13.909461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.052456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.265028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:14.348505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:16.207989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641258178978241:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:16.208099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:16.505875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.543581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.580669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.613001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.640755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.674922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:14:16.736923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641258178978752:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:16.737002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:16.737485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641258178978757:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:14:16.742347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:14:16.752053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641258178978759:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:16.812355Z node 1 :TX_PROXY ERROR: Actor# [1:7491641258178978813:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:17.514627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641240999107315:2078];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:17.547996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:17.789291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 11769, MsgBus: 3272
2025-04-10T11:14:19.398865Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641272209667000:2198];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:19.398912Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001221/r3tmp/tmpQPM91Y/pdisk_1.dat
2025-04-10T11:14:19.583115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:19.583189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:19.585374Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:19.588483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11769, node 2
2025-04-10T11:14:19.678875Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:19.678900Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:19.678908Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:19.679012Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3272
TClient is connected to server localhost:3272
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:20.124602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.135086Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:20.148937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:20.230205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.410663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.489135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.603799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285094570504:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.603883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.686885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.728999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.770672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.817041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.858447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.924336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.010473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641289389538315:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.010607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.010840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641289389538320:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.015119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:23.029494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641289389538322:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:23.101178Z node 2 :TX_PROXY ERROR: Actor# [2:7491641289389538377:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:24.252103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.402471Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641272209667000:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.402559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:24.828857Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715675; 2025-04-10T11:14:24.851515Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641293684506319:2525], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641293684506217:2525]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641293684506319:2525].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:14:24.852269Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641293684506312:2525], SessionActorId: [2:7491641293684506217:2525], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491641293684506217:2525]. isRollback=0 2025-04-10T11:14:24.852603Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTMzMjU4OGQtNjBmNzBkN2MtY2M5Yjg3MC1lMDU5NjIyOQ==, ActorId: [2:7491641293684506217:2525], ActorState: ExecuteState, TraceId: 01jrfph0djerwhewq2d9wacbvj, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491641293684506313:2525] from: [2:7491641293684506312:2525] 2025-04-10T11:14:24.852718Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641293684506313:2525] TxId: 281474976715675. Ctx: { TraceId: 01jrfph0djerwhewq2d9wacbvj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTMzMjU4OGQtNjBmNzBkN2MtY2M5Yjg3MC1lMDU5NjIyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:14:24.852976Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTMzMjU4OGQtNjBmNzBkN2MtY2M5Yjg3MC1lMDU5NjIyOQ==, ActorId: [2:7491641293684506217:2525], ActorState: ExecuteState, TraceId: 01jrfph0djerwhewq2d9wacbvj, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 20862, MsgBus: 9413 2025-04-10T11:14:17.855776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641265330005999:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.855819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001211/r3tmp/tmpRYsOie/pdisk_1.dat 2025-04-10T11:14:18.482023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:18.482129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:18.486525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:18.493938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20862, node 1 2025-04-10T11:14:18.751703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:18.751738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:18.751748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:18.751877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9413 TClient is connected to server localhost:9413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:19.557848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:19.580256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:19.596545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.740939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.013097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.106111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.112165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641286804844250:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.112285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.765994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.821780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.856925Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641265330005999:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.857025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:22.900617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.948732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.993035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.083009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.208304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641291099812072:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.208399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.209087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641291099812077:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.213761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:23.237071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641291099812079:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:23.315500Z node 1 :TX_PROXY ERROR: Actor# [1:7491641291099812134:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 21507, MsgBus: 28314 2025-04-10T11:14:10.248577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641235953308821:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.272560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001222/r3tmp/tmptIrdlO/pdisk_1.dat 2025-04-10T11:14:10.977524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:10.991654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.991764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:11.023240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21507, node 1 2025-04-10T11:14:11.244291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.244311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.244317Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.244428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28314 TClient is connected to server localhost:28314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.996236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:12.014864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:12.021302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.253113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.528148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.657909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:14.568761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641253133179763:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.568897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.884749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.921283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.957087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.995108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:15.074395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:15.149540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:15.203677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641257428147576:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.203774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.203831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641257428147581:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:15.207974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:15.221980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641257428147583:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:15.250500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641235953308821:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.250579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:15.306598Z node 1 :TX_PROXY ERROR: Actor# [1:7491641257428147637:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:16.272138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:16.460960Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710672;
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-10T11:14:16.474830Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641261723115348:2497], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7491641261723115227:2497]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7491641261723115348:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:16.475321Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641261723115337:2497], SessionActorId: [1:7491641261723115227:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641261723115227:2497]. isRollback=0 2025-04-10T11:14:16.475536Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, ActorId: [1:7491641261723115227:2497], ActorState: ExecuteState, TraceId: 01jrfpgr40byjpjpq88q2skn4z, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641261723115338:2497] from: [1:7491641261723115337:2497] 2025-04-10T11:14:16.475613Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641261723115338:2497] TxId: 281474976710672. Ctx: { TraceId: 01jrfpgr40byjpjpq88q2skn4z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:16.475819Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, ActorId: [1:7491641261723115227:2497], ActorState: ExecuteState, TraceId: 01jrfpgr40byjpjpq88q2skn4z, Create QueryResponse for error on request, msg: 2025-04-10T11:14:16.478846Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710672; 2025-04-10T11:14:16.479118Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744283656507 : 281474976710672] from 72075186224037919 at tablet 72075186224037919, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-10T11:14:16.608143Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673; 2025-04-10T11:14:16.609123Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641261723115397:2497], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7491641261723115227:2497]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7491641261723115397:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:16.609199Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641261723115387:2497], SessionActorId: [1:7491641261723115227:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641261723115227:2497]. isRollback=0 2025-04-10T11:14:16.609394Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, ActorId: [1:7491641261723115227:2497], ActorState: ExecuteState, TraceId: 01jrfpgr94ac34mkn6e3jcb1p8, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641261723115388:2497] from: [1:7491641261723115387:2497] 2025-04-10T11:14:16.609532Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641261723115388:2497] TxId: 281474976710673. Ctx: { TraceId: 01jrfpgr94ac34mkn6e3jcb1p8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:16.609745Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M1NTBiOTgtMTZkZTRjYzctY2U1MzMxNjMtY2JkMDVkZmY=, ActorId: [1:7491641261723115227:2497], ActorState: ExecuteState, TraceId: 01jrfpgr94ac34mkn6e3jcb1p8, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-10T11:14:16.612405Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-04-10T11:14:16.612567Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744283656654 : 281474976710673] from 72075186224037919 at tablet 72075186224037919, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 20951, MsgBus: 14533 2025-04-10T11:14:17.862721Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641263559196959:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.862788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001222/r3tmp/tmpPShUlG/pdisk_1.dat 2025-04-10T11:14:18.229648Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:18.239749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:18.240356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:18.241932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20951, node 2 2025-04-10T11:14:18.434762Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:18.434786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:18.434793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:18.434914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14533 TClient is connected to server localhost:14533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:19.174882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:19.204398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.289517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.484692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.562436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.372461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285034035211:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.372562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.456719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.540909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.607012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.660856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.724390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.778448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.854417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285034035724:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.854487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.854853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641285034035729:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.857993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:22.863656Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641263559196959:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.863742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:22.870846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641285034035731:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:22.938141Z node 2 :TX_PROXY ERROR: Actor# [2:7491641285034035785:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:24.195717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::UpsertDuplicates |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> KqpEffects::UpdateOn_Params >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> KqpImmediateEffects::ReplaceDuplicates >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpEffects::InsertAbort_Select_Success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 17341, MsgBus: 4680 2025-04-10T11:14:10.218831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641234808258932:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:10.219809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00123f/r3tmp/tmpQCFoiP/pdisk_1.dat 2025-04-10T11:14:10.843654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:10.843792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:10.845594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:10.887594Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17341, node 1 2025-04-10T11:14:11.056981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:11.057011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:11.057017Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:11.057132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4680 TClient is connected to server localhost:4680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:11.956711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:11.984438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:12.135701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:12.349058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:12.462916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:14.102428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251988129896:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.102562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.560106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.611124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.647143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.675526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.706653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.793211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:14.889669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251988130415:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.889816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.890059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641251988130420:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:14.894742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:14.912290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641251988130422:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:14.982346Z node 1 :TX_PROXY ERROR: Actor# [1:7491641251988130477:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:15.219326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641234808258932:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.219384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:16.089525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13937, MsgBus: 13674 2025-04-10T11:14:17.412422Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641263678705967:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.412498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00123f/r3tmp/tmpdOVEMV/pdisk_1.dat 2025-04-10T11:14:17.549602Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.565399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.565475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.566914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13937, node 2 2025-04-10T11:14:17.725951Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.725974Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.725981Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.726094Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13674 TClient is connected to server localhost:13674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:18.289022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.310636Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.325123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.443321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.705901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.817040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.071434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641280858576915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.071540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.143565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.183388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.255942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.301035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.351035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.401803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.477153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641280858577430:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.477232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.477422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641280858577435:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.481429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.494539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641280858577437:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:21.595640Z node 2 :TX_PROXY ERROR: Actor# [2:7491641280858577492:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.409408Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641263678705967:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.409514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:22.609662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.678121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.732961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 20980, MsgBus: 16252 2025-04-10T11:14:14.576988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641249801408515:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:14.577374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00121c/r3tmp/tmprOJXGV/pdisk_1.dat 2025-04-10T11:14:14.996253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:14.999582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:14.999692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:15.004840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20980, node 1 2025-04-10T11:14:15.107585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:15.107612Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:15.107623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:15.107818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16252 TClient is connected to server localhost:16252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:15.752084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:15.791360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:15.807749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:15.935502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:16.130690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:16.225738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:17.864670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641262686312029:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:17.864798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:18.235766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.311911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.368935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.422420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.458586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.534566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:18.636934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641266981279852:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:18.637019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:18.637253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641266981279857:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:18.641982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:18.654140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641266981279859:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:18.753701Z node 1 :TX_PROXY ERROR: Actor# [1:7491641266981279914:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:19.570239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641249801408515:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:19.570302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:19.892602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26218, MsgBus: 21938 2025-04-10T11:14:21.273623Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641279847939467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:21.273682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00121c/r3tmp/tmpIN78dZ/pdisk_1.dat 2025-04-10T11:14:21.470694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:21.478564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:21.478634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:21.480197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26218, node 2 2025-04-10T11:14:21.534844Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:21.534865Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:21.534876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:21.535017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21938 TClient is connected to server localhost:21938 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:22.119051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.140954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.287555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.464261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.554444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:25.578262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641297027810387:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.578368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.641865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.679903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.720532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.756256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.797385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.888451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.019527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641301322778199:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.019643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.020023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641301322778204:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.028945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:26.058654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641301322778206:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:26.141224Z node 2 :TX_PROXY ERROR: Actor# [2:7491641301322778262:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:26.274107Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641279847939467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:26.274222Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:27.314790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:28.058455Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=7; 2025-04-10T11:14:28.058856Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 7 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:14:28.060375Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 7 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:14:28.060617Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641305617746194:2499], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641305617745855:2499]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641305617746194:2499].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:14:28.061206Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641305617746187:2499], SessionActorId: [2:7491641305617745855:2499], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491641305617745855:2499]. isRollback=0 2025-04-10T11:14:28.061596Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMzYjU3N2YtMTgzMDRkNTktYzkzMDAyYTQtYjk0MDEzNzg=, ActorId: [2:7491641305617745855:2499], ActorState: ExecuteState, TraceId: 01jrfph3f75prdgj8e2qjxzm52, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491641309912713550:2499] from: [2:7491641305617746187:2499] 2025-04-10T11:14:28.061691Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641309912713550:2499] TxId: 281474976715677. Ctx: { TraceId: 01jrfph3f75prdgj8e2qjxzm52, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDMzYjU3N2YtMTgzMDRkNTktYzkzMDAyYTQtYjk0MDEzNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:14:28.061866Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMzYjU3N2YtMTgzMDRkNTktYzkzMDAyYTQtYjk0MDEzNzg=, ActorId: [2:7491641305617745855:2499], ActorState: ExecuteState, TraceId: 01jrfph3f75prdgj8e2qjxzm52, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 26226, MsgBus: 5543 2025-04-10T11:14:15.119429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641255401940034:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:15.119495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00121b/r3tmp/tmpIm5ayi/pdisk_1.dat 2025-04-10T11:14:15.582852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:15.600551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:15.600748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:15.602834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26226, node 1 2025-04-10T11:14:15.710803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:15.710837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:15.710844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:15.711008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5543 TClient is connected to server localhost:5543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:16.422373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:16.447626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
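The ConflictingKeyW1WRR2 trace above appears to be the intended outcome of the test: two interactive transactions write the same key, the second commit breaks the first transaction's optimistic locks, and KQP aborts it with "Transaction locks invalidated" (issue code 2001). Clients are expected to treat this ABORTED status as retriable. A minimal sketch with the ydb Python SDK; the endpoint, database, table, and column names are placeholders, not values from this run:

```python
# Sketch: retry a transaction aborted with "Transaction locks invalidated"
# (code 2001), the status shown in the ConflictingKeyW1WRR2 trace above.
# Endpoint, database, table, and column names are hypothetical.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def update_row(session):
    # Each attempt runs in a fresh serializable transaction; if a concurrent
    # writer commits the same key first, YDB aborts this one with code 2001.
    session.transaction(ydb.SerializableReadWrite()).execute(
        "UPDATE TestImmediateEffects SET Value = 'updated' WHERE Key = 1u;",
        commit_tx=True,
    )

# retry_operation_sync re-runs the callee on retriable statuses such as ABORTED.
pool.retry_operation_sync(update_row)
```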
2025-04-10T11:14:16.588946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:16.742481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:16.808512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.844396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641268286843695:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:18.844524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:19.301834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.342491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.383363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.423397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.469086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.520503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:19.588392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641272581811504:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:19.588462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:19.588809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641272581811509:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:19.593379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:19.609186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641272581811511:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:19.690047Z node 1 :TX_PROXY ERROR: Actor# [1:7491641272581811566:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:20.122383Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641255401940034:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:20.122461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:20.748122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62606, MsgBus: 6018 2025-04-10T11:14:22.524034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641284399800537:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.524490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00121b/r3tmp/tmpJIyx1b/pdisk_1.dat 2025-04-10T11:14:22.727032Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:22.739881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:22.739969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:22.741090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62606, node 2 2025-04-10T11:14:22.862701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:22.862729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:22.862738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:22.862850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6018 TClient is connected to server localhost:6018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:23.577535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.594632Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:23.608824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:23.715315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.917175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:24.005670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.663297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641301579671485:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.663444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.718104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.757776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.793298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.845432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.889044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.975421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:27.080612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641305874639303:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:27.080713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:27.081367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641305874639308:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:27.086388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:27.107202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641305874639310:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:27.173379Z node 2 :TX_PROXY ERROR: Actor# [2:7491641305874639365:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:27.526293Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641284399800537:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.526387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:28.293701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpWrite::ProjectReplace+UseSink >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpImmediateEffects::ConflictingKeyR1WR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 25671, MsgBus: 8003 2025-04-10T11:14:17.130208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641262701326029:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.130315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001216/r3tmp/tmp3vB7gh/pdisk_1.dat 2025-04-10T11:14:17.656860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:17.661254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.661375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.664823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25671, node 1 2025-04-10T11:14:17.740826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.740854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.740859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.740949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8003 TClient is connected to server localhost:8003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:18.385741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.438823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.661228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.910741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.019838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.990783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641275586229707:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:20.990895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.373404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.447683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.522197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.560167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.591167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.631536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.743849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641279881197527:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.743965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.744419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641279881197532:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.749207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:21.760575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:14:21.761808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641279881197534:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:21.843463Z node 1 :TX_PROXY ERROR: Actor# [1:7491641279881197589:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:22.130528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641262701326029:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.130604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:23.198471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11672, MsgBus: 13690 2025-04-10T11:14:25.168440Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641300025851139:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:25.168501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001216/r3tmp/tmpWvF01u/pdisk_1.dat 2025-04-10T11:14:25.370611Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:25.396862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:25.397077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:25.398926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11672, node 2 2025-04-10T11:14:25.534773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:25.534799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:25.534813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:25.534961Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13690 TClient is connected to server localhost:13690 WaitRootIsUp 'Root'... 
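The repeated "Resource pool default not found or you don't have access permissions" warnings, the "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" error above form one sequence: on first query the workload service lazily creates the default resource pool at /Root/.metadata/workload_manager/pools/default, and the final error is benign because the path already exists when the retry lands. Pools can also be created explicitly; a hedged sketch follows (the pool name and limits are made up, and the CREATE RESOURCE POOL statement assumes a YDB version with workload management enabled):

```python
# Sketch: create a resource pool explicitly instead of relying on the lazy
# creation of "default" seen in the log. Pool name and limits are hypothetical.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
session = driver.table_client.session().create()

# Scheme query; assumes the server supports CREATE RESOURCE POOL.
session.execute_scheme(
    "CREATE RESOURCE POOL test_pool WITH ("
    "    CONCURRENT_QUERY_LIMIT = 10,"
    "    QUEUE_SIZE = 100"
    ");"
)
```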
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:26.114502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.125478Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.134311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.221461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:26.387647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.472579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.031288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641317205722101:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.031372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.082575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.145530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.225971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.268758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.321034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.376431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.476763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641317205722622:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.476850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.477425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641317205722627:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.481798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:29.503185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641317205722629:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:29.584557Z node 2 :TX_PROXY ERROR: Actor# [2:7491641317205722684:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:30.170105Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641300025851139:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:30.170201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:30.660539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:30.976853Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715673; 2025-04-10T11:14:30.988916Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641321500690522:2497], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641321500690275:2497]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641321500690522:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:30.989611Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641321500690513:2497], SessionActorId: [2:7491641321500690275:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7491641321500690275:2497]. isRollback=0 2025-04-10T11:14:30.989872Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWJiY2M4MzMtOWM1MmQzNGYtM2U5Zjg4ODMtYThlMTc2ODU=, ActorId: [2:7491641321500690275:2497], ActorState: ExecuteState, TraceId: 01jrfph69ebnhy5jk4qh9w7f12, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491641321500690514:2497] from: [2:7491641321500690513:2497] 2025-04-10T11:14:30.989957Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641321500690514:2497] TxId: 281474976715673. Ctx: { TraceId: 01jrfph69ebnhy5jk4qh9w7f12, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWJiY2M4MzMtOWM1MmQzNGYtM2U5Zjg4ODMtYThlMTc2ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:30.990236Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWJiY2M4MzMtOWM1MmQzNGYtM2U5Zjg4ODMtYThlMTc2ODU=, ActorId: [2:7491641321500690275:2497], ActorState: ExecuteState, TraceId: 01jrfph69ebnhy5jk4qh9w7f12, Create QueryResponse for error on request, msg: 2025-04-10T11:14:30.991403Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715673; 2025-04-10T11:14:30.991604Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744283671025 : 281474976715673] from 72075186224037921 at tablet 72075186224037921, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-10T11:14:30.991774Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715673; 2025-04-10T11:14:30.991842Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744283671025 : 281474976715673] from 72075186224037920 at tablet 72075186224037920, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> KqpImmediateEffects::ConflictingKeyRW1RR2 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT >> KqpInplaceUpdate::BigRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14891, MsgBus: 65512 2025-04-10T11:14:17.178426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641265254046081:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:17.179705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001213/r3tmp/tmp1BX4fG/pdisk_1.dat 2025-04-10T11:14:17.690318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:17.690407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:17.691641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:17.698047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14891, node 1 2025-04-10T11:14:17.818785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:17.818831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:17.818845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:17.818989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65512 TClient is connected to server localhost:65512 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:18.726214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:18.774519Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:18.793884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.007683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.210013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:19.292244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.312142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641282433917027:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.312390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:21.730554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.788599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.869040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.953248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:21.987806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.053228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.170354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641286728884844:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.170443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.174308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641286728884849:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.178241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641265254046081:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:22.178304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:22.188306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:22.215085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641286728884851:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:22.311600Z node 1 :TX_PROXY ERROR: Actor# [1:7491641286728884912:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:23.324643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.829357Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-10T11:14:23.837705Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:23.837842Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:23.838087Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641291023852680:2500], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7491641291023852500:2500]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7491641291023852680:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:23.838605Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641291023852664:2500], SessionActorId: [1:7491641291023852500:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641291023852500:2500]. isRollback=0 2025-04-10T11:14:23.838844Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdkZjUxOTQtYzRkN2MzNTAtZDMwMjA1MGItNzIxNzNlNmM=, ActorId: [1:7491641291023852500:2500], ActorState: ExecuteState, TraceId: 01jrfpgz45fwe8d0ryfdpt78gs, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641291023852674:2500] from: [1:7491641291023852664:2500] 2025-04-10T11:14:23.838926Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641291023852674:2500] TxId: 281474976710674. Ctx: { TraceId: 01jrfpgz45fwe8d0ryfdpt78gs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTdkZjUxOTQtYzRkN2MzNTAtZDMwMjA1MGItNzIxNzNlNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:23.839070Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdkZjUxOTQtYzRkN2MzNTAtZDMwMjA1MGItNzIxNzNlNmM=, ActorId: [1:7491641291023852500:2500], ActorState: ExecuteState, TraceId: 01jrfpgz45fwe8d0ryfdpt78gs, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23657, MsgBus: 6613 2025-04-10T11:14:24.928021Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641294389774378:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.928074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001213/r3tmp/tmp3zv8cQ/pdisk_1.dat 2025-04-10T11:14:25.188500Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23657, node 2 2025-04-10T11:14:25.278671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:25.278774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:25.280613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:25.371694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:25.371719Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:25.371727Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:25.371845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6613 TClient is connected to server localhost:6613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:26.001156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.018407Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:26.036646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.143319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.382017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.480118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.997272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641311569645325:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:28.997406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.047568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.092439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.134648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.182997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.224226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.287350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.381059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641315864613135:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.381189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.381407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641315864613141:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.384988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:29.400269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641315864613143:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:29.477011Z node 2 :TX_PROXY ERROR: Actor# [2:7491641315864613198:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:29.930375Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641294389774378:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.940374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:30.973144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.915036Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641324454548344:2522], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGI4ZTg1NGMtM2EwNTY5ZjMtMjJiNWJlNTAtOWQzZGFmOTU=. CustomerSuppliedId : . TraceId : 01jrfph6wkeef5kdjj4b74rj4d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:14:31.915266Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641324454548346:2523], TxId: 281474976710676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NGI4ZTg1NGMtM2EwNTY5ZjMtMjJiNWJlNTAtOWQzZGFmOTU=. TraceId : 01jrfph6wkeef5kdjj4b74rj4d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491641324454548341:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:31.915690Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGI4ZTg1NGMtM2EwNTY5ZjMtMjJiNWJlNTAtOWQzZGFmOTU=, ActorId: [2:7491641320159580753:2489], ActorState: ExecuteState, TraceId: 01jrfph6wkeef5kdjj4b74rj4d, Create QueryResponse for error on request, msg: >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> KqpImmediateEffects::ManyFlushes [GOOD] >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> TIncrHugeBasicTest::Recovery [GOOD] >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 22333, MsgBus: 5491 2025-04-10T11:14:19.434333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641274052519513:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:19.435594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001207/r3tmp/tmp31QrYQ/pdisk_1.dat 2025-04-10T11:14:19.914549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:19.932249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:19.932321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:19.934046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22333, node 1 2025-04-10T11:14:20.120301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:20.120322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:20.120333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:20.120466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5491 TClient is connected to server localhost:5491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:20.765877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:20.790582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:20.965861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.161692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.256182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.404995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641291232390350:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.405172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.839116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.885044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.925670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.001141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.046380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.160187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.252807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641295527358163:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.252905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.254178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641295527358168:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.259964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:24.278163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641295527358170:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:24.342832Z node 1 :TX_PROXY ERROR: Actor# [1:7491641295527358225:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:24.444930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641274052519513:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.445003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:25.424798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.820867Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641299822326071:2498] TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-10T11:14:25.820930Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-10T11:14:25.821478Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key sets: 1 2025-04-10T11:14:25.821692Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T11:14:25.821784Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641299822326071:2498] TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-10T11:14:25.822030Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-10T11:14:25.822799Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710673. Shard resolve complete, resolved shards: 1 2025-04-10T11:14:25.822856Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641299822326071:2498] TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-10T11:14:25.822921Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641299822326071:2498] TxId: 281474976710673. 
Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-04-10T11:14:25.822999Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1744283665677} 2025-04-10T11:14:25.823300Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:7491641299822326075:2498] 2025-04-10T11:14:25.823346Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jrfph1839dd4r62trg5hfqdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , P ... ActorId: [1:7491641304117293448:2498] TxId: 281474976710675. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491641304117293452:2498], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 794 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 197 FinishTimeMs: 1744283666033 OutputRows: 1 OutputBytes: 22 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 22 ComputeCpuTimeUs: 100 BuildCpuTimeUs: 97 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744283666032 CreateTimeMs: 1744283666031 } MaxMemoryUsage: 1048576 } 2025-04-10T11:14:26.033545Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710675. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491641304117293452:2498] 2025-04-10T11:14:26.033678Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641304117293448:2498] TxId: 281474976710675. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:14:26.033702Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641304117293448:2498] TxId: 281474976710675. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000794s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:14:26.034575Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710676. Resolved key sets: 0 2025-04-10T11:14:26.034643Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710676. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1744283665677} 2025-04-10T11:14:26.034665Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7491641304117293455:2498] TxId: 281474976710676. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:14:26.034708Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641304117293455:2498] TxId: 281474976710676. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[1:7491641304117293439:2498] 2025-04-10T11:14:26.034751Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641304117293455:2498] TxId: 281474976710676. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:14:26.043065Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491641304117293455:2498] TxId: 281474976710676. Ctx: { TraceId: 01jrfph1gb3nf9vb31b7bfn565, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4NzJmMWQtZDZhODVkYTYtODM0NDRiZTUtZWMzY2E0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 
Trying to start YDB, gRPC: 1523, MsgBus: 15593 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001207/r3tmp/tmpzx9KCj/pdisk_1.dat 2025-04-10T11:14:27.175634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:27.279806Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:27.302133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:27.302223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:27.304751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1523, node 2 2025-04-10T11:14:27.347250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:27.347274Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:27.347283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:27.347649Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15593 TClient is connected to server localhost:15593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:27.834685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:27.857177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:27.970610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.155061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.237841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:31.070774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641325074636036:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.070890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.192849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.258133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.330034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.375072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.448621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.504656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.575797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641325074636552:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.575917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.576802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641325074636557:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.581728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:31.604103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641325074636559:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:31.662644Z node 2 :TX_PROXY ERROR: Actor# [2:7491641325074636613:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:32.887249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] >> TIncrHugeBlobIdDict::Basic [GOOD] Test command err: Trying to start YDB, gRPC: 21632, MsgBus: 11289 2025-04-10T11:14:18.554534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641268681667762:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:18.554673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00120d/r3tmp/tmpKVBzbE/pdisk_1.dat 2025-04-10T11:14:19.132445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:19.132560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:19.136205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:19.171117Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21632, node 1 2025-04-10T11:14:19.350424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:19.350447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:19.350454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:19.358741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11289 TClient is connected to server localhost:11289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:20.029565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.069286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.294574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.506079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.590780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.743919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641285861538482:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:22.744019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.346603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.413481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.459068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.512401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.549104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.558922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641268681667762:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:23.558982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:23.606218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.684878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641290156506294:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.684970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.685423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641290156506299:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.689514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:23.707570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:14:23.707748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641290156506301:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:23.806336Z node 1 :TX_PROXY ERROR: Actor# [1:7491641290156506359:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:25.044069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21156, MsgBus: 12089 2025-04-10T11:14:26.588038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641301799075242:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:26.588094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00120d/r3tmp/tmpATE3Gb/pdisk_1.dat 2025-04-10T11:14:26.734674Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:26.736145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:26.736234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:26.737727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21156, node 2 2025-04-10T11:14:26.834899Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:26.834930Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:26.834937Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:26.835074Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12089 TClient is connected to server localhost:12089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:14:27.687507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:27.712171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:27.843427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.014134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.106134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.847475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641318978946222:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:30.847554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:30.926727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.001249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.044780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.089109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.159850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.241058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.336828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641323273914035:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.336962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.337366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641323273914041:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.341006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:31.366305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641323273914043:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:31.452369Z node 2 :TX_PROXY ERROR: Actor# [2:7491641323273914098:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:31.591255Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641301799075242:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:31.591408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.743247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 28236, MsgBus: 26276 2025-04-10T11:14:20.648475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641274770712507:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:20.649626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001205/r3tmp/tmp260l5a/pdisk_1.dat 2025-04-10T11:14:21.063998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:21.081964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:21.082110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:21.084037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28236, node 1 2025-04-10T11:14:21.181522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:21.181544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:21.181554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:21.181652Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26276 TClient is connected to server localhost:26276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:21.783418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.833853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:21.857590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.043112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.240550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.350902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:24.439184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641291950583393:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.439301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.822556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.865407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.911682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.991380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.052235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.129559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.234319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641296245551217:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.234399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.234721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641296245551222:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.239932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:25.253482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641296245551224:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:25.316557Z node 1 :TX_PROXY ERROR: Actor# [1:7491641296245551278:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:25.648419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641274770712507:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:25.648508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:26.286129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:27.142521Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcwOTRhMTEtNTk1NWY0YWEtZjQ5MTcyMi1jN2Y1NWZjMg==, ActorId: [1:7491641300540519141:2528], ActorState: ExecuteState, TraceId: 01jrfph2kg3z09026pr41s7y2d, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-10T11:14:27.160158Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcwOTRhMTEtNTk1NWY0YWEtZjQ5MTcyMi1jN2Y1NWZjMg==, ActorId: [1:7491641300540519141:2528], ActorState: ReadyState, TraceId: 01jrfph2pq6ezzq2wm0rg5y781, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 19074, MsgBus: 13424 2025-04-10T11:14:28.149979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641310711672592:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.150517Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001205/r3tmp/tmpjm7OAJ/pdisk_1.dat 2025-04-10T11:14:28.376428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:28.377255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:28.377320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:28.378389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19074, node 2 2025-04-10T11:14:28.482625Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:28.482647Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:28.482654Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:28.482768Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13424 TClient is connected to server localhost:13424 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:14:28.992014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.016854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.169738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.396840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.526858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.949317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641323596576240:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.949406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.008951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.049818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.095231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.145560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.208564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.292840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.377439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641327891544054:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.377545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.377889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641327891544059:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.382022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:32.400430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641327891544062:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:32.470425Z node 2 :TX_PROXY ERROR: Actor# [2:7491641327891544116:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:33.150452Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641310711672592:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.150534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.638960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBlobIdDict::Basic [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32062, MsgBus: 12275 2025-04-10T11:14:21.482826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641282972514618:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:21.485295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011fa/r3tmp/tmpRpanoX/pdisk_1.dat 2025-04-10T11:14:22.017640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:22.017721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:22.023024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:22.090463Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32062, node 1 2025-04-10T11:14:22.297696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:22.297721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:22.297729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:22.297846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12275 TClient is connected to server localhost:12275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:22.951142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.975663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:22.982172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.173472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.374233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:14:23.479495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.440331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641300152385465:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.440417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.757382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.808687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.858571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.946295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.007734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.059488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.160856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641304447353281:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.160962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.161319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641304447353286:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.165743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:26.181016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641304447353288:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:26.240840Z node 1 :TX_PROXY ERROR: Actor# [1:7491641304447353343:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:26.475622Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641282972514618:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:26.475738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29763, MsgBus: 14951 2025-04-10T11:14:28.630236Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641309611374375:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.632223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011fa/r3tmp/tmpWOuFyK/pdisk_1.dat 2025-04-10T11:14:28.794141Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:28.824909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:28.824986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29763, node 2 2025-04-10T11:14:28.826645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:29.014637Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.014657Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.014663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.014756Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14951 TClient is connected to server localhost:14951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:29.603437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.638451Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:29.656683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.746189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.918676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.069469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.092243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641331086212612:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.092342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.140430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.186079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.270374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.317079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.362960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.444401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.528532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641331086213129:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.528630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.534332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641331086213134:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.542072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.563886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641331086213137:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:33.633317Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641309611374375:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.633430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.640676Z node 2 :TX_PROXY ERROR: Actor# [2:7491641331086213191:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.166380Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641339676148135:2505], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YTk0YjNjZjktODdhMmEyMDAtYTEyNzE3NzItMjcwNjQ4YzU=. CustomerSuppliedId : . TraceId : 01jrfpha6c7gjtyjsgacmd37ck. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:14:35.167074Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641339676148136:2506], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfpha6c7gjtyjsgacmd37ck. SessionId : ydb://session/3?node_id=2&id=YTk0YjNjZjktODdhMmEyMDAtYTEyNzE3NzItMjcwNjQ4YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491641339676148132:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:35.167622Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTk0YjNjZjktODdhMmEyMDAtYTEyNzE3NzItMjcwNjQ4YzU=, ActorId: [2:7491641335381180752:2491], ActorState: ExecuteState, TraceId: 01jrfpha6c7gjtyjsgacmd37ck, Create QueryResponse for error on request, msg: >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3787, MsgBus: 16294 2025-04-10T11:14:19.314586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641271682031300:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:19.314901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00120b/r3tmp/tmppPyseU/pdisk_1.dat 2025-04-10T11:14:19.815609Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3787, node 1 2025-04-10T11:14:19.851440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:19.851538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:19.856916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:19.966987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:19.967012Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:19.967017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:19.967131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16294 TClient is connected to server localhost:16294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:20.718752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:20.734355Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:20.751120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:20.899577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.076147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:21.159874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.460863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641288861902023:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.461050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:23.872611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.918046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:23.987120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.041700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.094878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.159990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:24.244051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641293156869831:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.244119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.244361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641293156869836:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:24.249288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:24.267263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641293156869838:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:24.318537Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641271682031300:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.318742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:24.366690Z node 1 :TX_PROXY ERROR: Actor# [1:7491641293156869894:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:25.631139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22884, MsgBus: 11929 2025-04-10T11:14:27.374726Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641305617683494:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.374861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00120b/r3tmp/tmpWJDTE9/pdisk_1.dat 2025-04-10T11:14:27.687224Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22884, node 2 2025-04-10T11:14:27.743451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:27.743558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:27.745195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:27.805305Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:27.805346Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:27.805356Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:27.805488Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11929 TClient is connected to server localhost:11929 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:28.551495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.570238Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:28.592634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.685093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.865950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.944997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.027453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641327092521695:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.027527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.081364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.132497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.177247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.223770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.290585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.378881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.383258Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641305617683494:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.384099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.459171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641327092522217:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.459319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.459596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641327092522223:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.464326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:32.482327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641327092522225:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:32.549808Z node 2 :TX_PROXY ERROR: Actor# [2:7491641327092522278:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:34.632059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 5416, MsgBus: 19634 2025-04-10T11:14:21.161975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641282811472632:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:21.162088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ff/r3tmp/tmpRW096n/pdisk_1.dat 2025-04-10T11:14:21.648286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:21.648348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:21.651137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:21.654631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5416, node 1 2025-04-10T11:14:21.802185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:21.802208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:21.802214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:21.802325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19634 TClient is connected to server localhost:19634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:22.526012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.556556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:22.775710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:22.988631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.092811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:25.611494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641299991343602:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.611660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.978783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.023643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.101117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.143971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.161855Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641282811472632:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:26.161966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:26.218697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.267816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.326740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641304286311416:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.326832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.327069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641304286311421:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.330908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:26.343061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641304286311424:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:26.399261Z node 1 :TX_PROXY ERROR: Actor# [1:7491641304286311479:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:27.358306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1514, MsgBus: 62885 2025-04-10T11:14:28.912989Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641312037939785:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.913066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ff/r3tmp/tmpARRwxe/pdisk_1.dat 2025-04-10T11:14:29.119969Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:29.134073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.134198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.135457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1514, node 2 2025-04-10T11:14:29.269467Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.269489Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.269495Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.269614Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62885 TClient is connected to server localhost:62885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:29.875397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
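A note on the sequence that recurs throughout this run: `Resource pool default not found or you don't have access permissions` (NOT_FOUND), followed by TPoolCreatorActor's `Scheduled retry ... doublechecking` and TX_PROXY's `error: path exist, request accepts it`. This is the workload service lazily bootstrapping `.metadata/workload_manager/pools/default`: several actors race to create the pool on first use, and whoever loses the race finds the path already present and accepts that as success. Below is a generic, self-contained sketch of that idempotent create-if-missing pattern (illustrative only; the names and config dict are invented, this is not YDB's internal API):

```python
import threading

_lock = threading.Lock()
_pools = {}  # stands in for the scheme board: path -> pool config

class AlreadyExists(Exception):
    pass

def create_pool(path, config):
    with _lock:
        if path in _pools:
            raise AlreadyExists(path)
        _pools[path] = config

def ensure_default_pool(path="/Root/.metadata/workload_manager/pools/default"):
    # Racing creators are fine: an existing path is accepted as success,
    # mirroring the log's "error: path exist, request accepts it".
    try:
        create_pool(path, {"concurrent_query_limit": -1})
    except AlreadyExists:
        pass  # another actor won the race; double-check and carry on

threads = [threading.Thread(target=ensure_default_pool) for _ in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert len(_pools) == 1  # exactly one pool regardless of who won
```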
2025-04-10T11:14:29.882535Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:29.894416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.034594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.297751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.421309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.478711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641333512778057:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.478804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.511084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.582580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.637632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.691628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.747353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.840942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.913462Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641312037939785:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.913549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.926657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641333512778575:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.926777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.927017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641333512778580:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.936022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.972988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641333512778582:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:34.039098Z node 2 :TX_PROXY ERROR: Actor# [2:7491641337807745936:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.200587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:36.045020Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-04-10T11:14:36.045195Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:14:36.045362Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:14:36.045570Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641346397681218:2501], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641342102713532:2501]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641346397681218:2501].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-10T11:14:36.046084Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641342102713861:2501], SessionActorId: [2:7491641342102713532:2501], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7491641342102713532:2501]. isRollback=0 2025-04-10T11:14:36.046328Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWIzZjRmMTMtNTRjNjllZjktMWYxYmI1NzItMWE0OTc4NTI=, ActorId: [2:7491641342102713532:2501], ActorState: ExecuteState, TraceId: 01jrfphb884g47kpkjvvye1vg3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491641346397681212:2501] from: [2:7491641342102713861:2501] 2025-04-10T11:14:36.046420Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641346397681212:2501] TxId: 281474976715675. Ctx: { TraceId: 01jrfphb884g47kpkjvvye1vg3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZjRmMTMtNTRjNjllZjktMWYxYmI1NzItMWE0OTc4NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-10T11:14:36.046636Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWIzZjRmMTMtNTRjNjllZjktMWYxYmI1NzItMWE0OTc4NTI=, ActorId: [2:7491641342102713532:2501], ActorState: ExecuteState, TraceId: 01jrfphb884g47kpkjvvye1vg3, Create QueryResponse for error on request, msg: >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10573, MsgBus: 19662 2025-04-10T11:14:20.739689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641276943076401:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:20.739726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001200/r3tmp/tmppyofo6/pdisk_1.dat 2025-04-10T11:14:21.249600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:21.249738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:21.258977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:21.312099Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10573, node 1 2025-04-10T11:14:21.490712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:21.490743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:21.490751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:21.490882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19662 TClient is connected to server localhost:19662 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:22.409396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.443162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:22.462296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.755289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:22.993663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:23.110918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:25.252203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641298417914641:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.252342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:25.662548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.707649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.741516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641276943076401:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:25.741717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:25.746411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.779908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.817898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:25.913991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.003429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641302712882459:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.003544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.003867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641302712882464:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:26.009647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:26.035580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641302712882466:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:26.123160Z node 1 :TX_PROXY ERROR: Actor# [1:7491641302712882522:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:27.200761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25180, MsgBus: 20777 2025-04-10T11:14:28.913040Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641311541129074:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.913081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001200/r3tmp/tmpnci7yX/pdisk_1.dat 2025-04-10T11:14:29.231201Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:29.240448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.240538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.241665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25180, node 2 2025-04-10T11:14:29.354781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.354808Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.354815Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.354928Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20777 TClient is connected to server localhost:20777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:29.876529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
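The ABORTED chain logged earlier for `/Root/TestImmediateEffects` (GLOBAL `ev_write_error` with STATUS_LOCKS_BROKEN, then KQP_COMPUTE's `Transaction locks invalidated`, issue code 2001) is YDB's optimistic locking at work: an interactive transaction reads a key, a concurrent transaction commits a write to that key, and the first transaction is rejected at its own write/commit. A minimal reproduction sketch with the YDB Python SDK; the endpoint, database, and table schema (Key Uint64, Value String) are assumptions, not taken from this run:

```python
import ydb

# Assumed local endpoint/database; the table schema is assumed as well.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

s1 = driver.table_client.session().create()
s2 = driver.table_client.session().create()

# tx1 reads Key = 1u, which places an optimistic lock on that key.
tx1 = s1.transaction(ydb.SerializableReadWrite()).begin()
tx1.execute('SELECT Value FROM `/Root/TestImmediateEffects` WHERE Key = 1u;')

# A concurrent transaction commits a write to the same key, breaking tx1's lock.
s2.transaction(ydb.SerializableReadWrite()).execute(
    'UPSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "other");',
    commit_tx=True,
)

# tx1's own write is now rejected: ABORTED, "Transaction locks invalidated" (2001).
try:
    tx1.execute('UPSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "mine");')
    tx1.commit()
except ydb.issues.Aborted as err:
    print("ABORTED as expected:", err)
```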
2025-04-10T11:14:29.887855Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:29.902308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.031644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.246618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.472104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.606585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641333015967189:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.606662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.664985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.706009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.753705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.836699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.909427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.915271Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641311541129074:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.915654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.039654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.182867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641337310935007:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.182956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.183387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641337310935012:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.189048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:34.202026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641337310935014:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:34.296569Z node 2 :TX_PROXY ERROR: Actor# [2:7491641337310935071:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.871434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TTrackable::TString [GOOD] >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] |87.3%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> DSProxyStrategyTest::Restore_mirror3dc >> DSProxyStrategyTest::Restore_block42 |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 |87.3%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> DiscoveryConverterTest::FullLegacyNames [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] |87.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> TopicNameConverterTest::LegacyStyle [GOOD] >> TopicNameConverterTest::FirstClass [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyPath [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FirstClass [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21742, MsgBus: 6607 2025-04-10T11:14:24.342872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641294334826309:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.356326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f9/r3tmp/tmpAhtXgD/pdisk_1.dat 2025-04-10T11:14:24.897869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:24.897975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:24.902787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:24.906559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21742, node 1 2025-04-10T11:14:25.050011Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:25.050038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:25.050046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:25.050208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6607 TClient is connected to server localhost:6607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:25.805651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:25.845032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.073699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:26.254396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.352944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.425644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641311514697276:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:28.425945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:28.714566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:28.791277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:28.874656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:28.936732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.024694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.091003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.214328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641315809665101:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.214449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.214800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641315809665106:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.220091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:29.238992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641315809665108:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:29.343031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641294334826309:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.343140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:29.345727Z node 1 :TX_PROXY ERROR: Actor# [1:7491641315809665166:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:30.516040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:30.990237Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673; 2025-04-10T11:14:31.004873Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641320104632944:2500], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491641320104632759:2500]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491641320104632944:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:31.005373Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641320104632933:2500], SessionActorId: [1:7491641320104632759:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641320104632759:2500]. isRollback=0 2025-04-10T11:14:31.005628Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWFhYTM2N2MtMWViN2NhNjMtZjc3YzI3Ny0xYzE5NzFhOQ==, ActorId: [1:7491641320104632759:2500], ActorState: ExecuteState, TraceId: 01jrfph65p3shdrs080kkznk39, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641320104632934:2500] from: [1:7491641320104632933:2500] 2025-04-10T11:14:31.005710Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641320104632934:2500] TxId: 281474976710673. Ctx: { TraceId: 01jrfph65p3shdrs080kkznk39, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFhYTM2N2MtMWViN2NhNjMtZjc3YzI3Ny0xYzE5NzFhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:31.005883Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWFhYTM2N2MtMWViN2NhNjMtZjc3YzI3Ny0xYzE5NzFhOQ==, ActorId: [1:7491641320104632759:2500], ActorState: ExecuteState, TraceId: 01jrfph65p3shdrs080kkznk39, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8742, MsgBus: 5539 2025-04-10T11:14:32.512439Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641327777789068:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.512495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f9/r3tmp/tmpta4ATD/pdisk_1.dat 2025-04-10T11:14:32.763379Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8742, node 2 2025-04-10T11:14:32.854863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:32.854961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:32.858121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:32.981215Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:32.981239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:32.981247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:32.981382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5539 TClient is connected to server localhost:5539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:33.816017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.852287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:33.937439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.203436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.312702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.176932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641349252627261:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.177049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.229141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.290085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.346831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.386215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.428208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.495597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.536440Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641327777789068:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.536540Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:37.583666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641349252627777:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.583742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.583948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641349252627782:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.588637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:37.614710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641349252627784:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:37.677037Z node 2 :TX_PROXY ERROR: Actor# [2:7491641349252627839:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:39.531486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.033540Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641362137530210:2521], TxId: 281474976710675, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NmM3MzQ4My1iZjJiMGJlZS1lZWYwZjU4Ni1lNjk0OTdjNg==. TraceId : 01jrfphezx63dnft04gkj82f97. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:14:40.033852Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641362137530212:2522], TxId: 281474976710675, task: 2. Ctx: { TraceId : 01jrfphezx63dnft04gkj82f97. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NmM3MzQ4My1iZjJiMGJlZS1lZWYwZjU4Ni1lNjk0OTdjNg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491641362137530207:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:40.034361Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmM3MzQ4My1iZjJiMGJlZS1lZWYwZjU4Ni1lNjk0OTdjNg==, ActorId: [2:7491641357842562708:2492], ActorState: ExecuteState, TraceId: 01jrfphezx63dnft04gkj82f97, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26435, MsgBus: 17677 2025-04-10T11:14:24.901476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641294128301254:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:24.903518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f3/r3tmp/tmpNUm4qA/pdisk_1.dat 2025-04-10T11:14:25.444654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:25.466616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:25.466751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:25.469415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26435, node 1 2025-04-10T11:14:25.627190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:25.627221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:25.627230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:25.627352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17677 TClient is connected to server localhost:17677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
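Both 2012 failures above — `Duplicate keys have been found` on `/Root/TwoShard` and `Conflict with existing key` in InsertAbort_Select_Conflict — come from `INSERT` semantics: unlike the blind-write `UPSERT`, `INSERT` fails with PRECONDITION_FAILED when the primary key already exists, which is exactly what these tests assert. A hedged sketch with the YDB Python SDK (the endpoint and the `TwoShard` column shape are assumptions):

```python
import ydb

# Assumed endpoint/database; TwoShard is assumed to have Key Uint32, Value1 String.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
session = driver.table_client.session().create()

insert = 'INSERT INTO `/Root/TwoShard` (Key, Value1) VALUES (1u, "first");'
session.transaction(ydb.SerializableReadWrite()).execute(insert, commit_tx=True)

try:
    # Re-inserting the same primary key violates uniqueness: issue code 2012.
    session.transaction(ydb.SerializableReadWrite()).execute(insert, commit_tx=True)
except ydb.issues.PreconditionFailed as err:
    print("PRECONDITION_FAILED as expected:", err)

# UPSERT is a blind write, so it overwrites the row instead of failing.
session.transaction(ydb.SerializableReadWrite()).execute(
    'UPSERT INTO `/Root/TwoShard` (Key, Value1) VALUES (1u, "second");',
    commit_tx=True,
)
```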
2025-04-10T11:14:26.496856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:26.535321Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:26.554173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:26.719185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:26.936829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:27.049512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.104114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641315603139496:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.104219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.498508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.578775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.637306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.678609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.721831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.778701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.866641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641315603140010:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.866743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.867156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641315603140015:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:29.871986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:29.891799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641315603140017:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:29.898895Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641294128301254:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.898963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:29.994559Z node 1 :TX_PROXY ERROR: Actor# [1:7491641315603140076:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:31.224678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.772155Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-10T11:14:31.789243Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:31.789428Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:31.789590Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641324193075171:2501], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7491641324193074966:2501]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7491641324193075171:2501].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:31.790218Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641324193075155:2501], SessionActorId: [1:7491641324193074966:2501], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641324193074966:2501]. isRollback=0 2025-04-10T11:14:31.790500Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2EwMzQ2NC0zMjBmYzllNS1iY2ExODBkOC1iYjgzYzdkNA==, ActorId: [1:7491641324193074966:2501], ActorState: ExecuteState, TraceId: 01jrfph6tm32343d21x4yg4e95, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641324193075165:2501] from: [1:7491641324193075155:2501] 2025-04-10T11:14:31.790582Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641324193075165:2501] TxId: 281474976710674. Ctx: { TraceId: 01jrfph6tm32343d21x4yg4e95, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EwMzQ2NC0zMjBmYzllNS1iY2ExODBkOC1iYjgzYzdkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:31.790768Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2EwMzQ2NC0zMjBmYzllNS1iY2ExODBkOC1iYjgzYzdkNA==, ActorId: [1:7491641324193074966:2501], ActorState: ExecuteState, TraceId: 01jrfph6tm32343d21x4yg4e95, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14240, MsgBus: 7940 2025-04-10T11:14:32.887905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641330112445490:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.887963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f3/r3tmp/tmpAE2qWE/pdisk_1.dat 2025-04-10T11:14:33.249406Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:33.261951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:33.262021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:33.267270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14240, node 2 2025-04-10T11:14:33.462962Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:33.462987Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:33.462994Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:33.463152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7940 TClient is connected to server localhost:7940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:34.119714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.137524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:34.221180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.402314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.529888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.611476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641351587283602:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.611563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.665585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.729022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.807340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.882725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.888239Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641330112445490:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.888298Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:37.969479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.062965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.186008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641355882251422:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.186090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.186728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641355882251427:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.191402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:38.208403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641355882251429:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:38.308531Z node 2 :TX_PROXY ERROR: Actor# [2:7491641355882251487:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:39.961689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.794955Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641364472186611:2524], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWY5YmViOTYtYWFkMGQ0YTItYTIyOTZjM2YtODVhOGYxYWI=. CustomerSuppliedId : . TraceId : 01jrfphfjwe739kxjdw49mmd09. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:14:40.795596Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641364472186612:2525], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jrfphfjwe739kxjdw49mmd09. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWY5YmViOTYtYWFkMGQ0YTItYTIyOTZjM2YtODVhOGYxYWI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491641364472186608:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:40.795993Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWY5YmViOTYtYWFkMGQ0YTItYTIyOTZjM2YtODVhOGYxYWI=, ActorId: [2:7491641360177219063:2492], ActorState: ExecuteState, TraceId: 01jrfphfjwe739kxjdw49mmd09, Create QueryResponse for error on request, msg: |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] >> TYardTest::TestChunkReadRandomOffset |87.4%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool >> TYardTest::TestWholeLogRead >> TBlobStoragePDiskCrypto::TestMixedStreamCypher >> TPDiskRaces::KillOwnerWhileDeletingChunk >> PDiskCompatibilityInfo::OldCompatible >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> TYardTest::TestIncorrectRequests >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> TPDiskTest::TestPDiskOwnerRecreation >> TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher >> TYardTest::TestInit >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> TYardTest::TestWholeLogRead [GOOD] >> TYardTest::TestSysLogReordering ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 25066, MsgBus: 18913 2025-04-10T11:14:27.175050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641305587043392:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.175129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f0/r3tmp/tmpHmrCDk/pdisk_1.dat 2025-04-10T11:14:27.631103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:27.652257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:27.652346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:27.655110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25066, node 1 2025-04-10T11:14:27.898769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:27.898798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:27.898805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:27.898902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:18913 TClient is connected to server localhost:18913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:28.781751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.808534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.989219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.215609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.296852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.596587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641322766914206:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.596688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.905559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.946912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.990205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.029100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.086046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.123115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.127363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641305587043392:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.127506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.182577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641327061882016:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.182650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.183255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641327061882021:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.187599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:32.205490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641327061882023:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:32.295926Z node 1 :TX_PROXY ERROR: Actor# [1:7491641327061882081:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:33.490570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8886, MsgBus: 32061 2025-04-10T11:14:35.134367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641339343665464:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:35.134426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011f0/r3tmp/tmpiXyA3W/pdisk_1.dat 2025-04-10T11:14:35.407538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:35.407625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:35.413129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:35.445521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8886, node 2 2025-04-10T11:14:35.670731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:35.670755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:35.670762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:35.670868Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32061 TClient is connected to server localhost:32061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:36.347329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:36.366413Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:36.384245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.523862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.710620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.837168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.665255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641356523536402:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.665422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.710656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.753130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.792934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.828479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.871938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.969293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.062657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641360818504217:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.062740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.062991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641360818504222:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.066848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:40.078797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641360818504224:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:40.134566Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641339343665464:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:40.134650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:40.169833Z node 2 :TX_PROXY ERROR: Actor# [2:7491641360818504280:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:41.357623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> TPDiskTest::TestAbstractPDiskInterface >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestEmptyLogRead >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TPDiskTest::TestPDiskActorErrorState >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart >> TYardTest::TestInit [GOOD] >> TYardTest::TestInitOnIncompleteFormat >> PDiskCompatibilityInfo::Incompatible [GOOD] >> PDiskCompatibilityInfo::NewIncompatibleWithDefault >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpEffects::UpdateOn_Select [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> TPDiskUtil::TestBufferPool [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] >> TPDiskTest::TestChunkWriteRelease >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] >> PDiskCompatibilityInfo::Trunk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 14319, MsgBus: 5113 2025-04-10T11:14:27.998725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641304811922678:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.002877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011e0/r3tmp/tmpw6bmcr/pdisk_1.dat 2025-04-10T11:14:28.537781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:28.540248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:28.540374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T11:14:28.543362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14319, node 1 2025-04-10T11:14:28.633352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:28.633378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:28.633387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:28.633501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5113 TClient is connected to server localhost:5113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:29.381265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.428743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.641793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:29.842320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:29.917684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.361980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641326286760787:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.362239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.801089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.874286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.964821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.992061Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641304811922678:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.992116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.011501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.067041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.140066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.230298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641330581728600:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.230469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.233686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641330581728605:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.238135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.261109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641330581728607:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:33.343612Z node 1 :TX_PROXY ERROR: Actor# [1:7491641330581728664:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5243, MsgBus: 7925 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011e0/r3tmp/tmpq7xE1o/pdisk_1.dat 2025-04-10T11:14:36.742345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:36.805773Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:36.810692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:36.810772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:36.819195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5243, node 2 2025-04-10T11:14:37.034966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.034990Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.034998Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.035109Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7925 TClient is connected to server localhost:7925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:37.831906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.856567Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:37.868382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.975264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.393797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.600189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.151253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365718449267:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.151337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.218216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.258905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.344227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.388747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.431212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.504624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.623827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365718449785:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.623927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.624159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365718449790:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.629442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.652401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641365718449792:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:41.728904Z node 2 :TX_PROXY ERROR: Actor# [2:7491641365718449848:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpImmediateEffects::ReplaceExistingKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 20714, MsgBus: 12620 2025-04-10T11:14:27.252241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641305212003396:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.252353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011e6/r3tmp/tmpU8IXVY/pdisk_1.dat 2025-04-10T11:14:27.818402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:27.839369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:27.839491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:27.842418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20714, node 1 2025-04-10T11:14:28.039669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:28.039695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:28.039705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:28.039828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12620 TClient is connected to server localhost:12620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:28.785065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:28.800274Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:28.814069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.018233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.301585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.402593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.648896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641322391874348:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.649021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.047687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.133310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.177201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.227696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.262305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641305212003396:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.262424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.302557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.387282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.486710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641326686842176:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.486857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.494385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641326686842181:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.504273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:32.532591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641326686842183:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:32.604319Z node 1 :TX_PROXY ERROR: Actor# [1:7491641326686842238:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:33.886950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30776, MsgBus: 62034 2025-04-10T11:14:36.401823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641347481165085:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:36.426330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011e6/r3tmp/tmp3h8egq/pdisk_1.dat 2025-04-10T11:14:36.802511Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:36.826782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:36.826866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:36.834406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30776, node 2 2025-04-10T11:14:37.026703Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.026727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.026734Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.026863Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62034 TClient is connected to server localhost:62034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:37.839521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:37.860069Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:37.881046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.009517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.246682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.345915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.896873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641364661036030:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.896990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.957970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.004982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.055346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.110261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.169043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.249947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.359508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641368956003846:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.359569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.359864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641368956003851:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.364203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.385152Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641368956003853:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:41.405859Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641347481165085:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:41.405949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:41.449390Z node 2 :TX_PROXY ERROR: Actor# [2:7491641368956003908:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:42.757316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> PDiskCompatibilityInfo::Trunk [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> TYardTest::TestInitOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61237, MsgBus: 19839 2025-04-10T11:14:27.845017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641305275476189:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.845877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011dc/r3tmp/tmpobv3UU/pdisk_1.dat 2025-04-10T11:14:28.510502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:28.515793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:28.515919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:28.520612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61237, node 1 2025-04-10T11:14:28.706782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:28.706816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:28.706827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:28.706985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19839 TClient is connected to server localhost:19839 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:29.392090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.415975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:29.428680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.659373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:29.929908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.069178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.395375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641326750314463:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.395485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.852313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641305275476189:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.852401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.864266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.943279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.988238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.044531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.127198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.210644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.325726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331045282279:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.325812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.326197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331045282284:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.331270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.349570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641331045282286:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:33.409624Z node 1 :TX_PROXY ERROR: Actor# [1:7491641331045282343:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:34.664779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25718, MsgBus: 21824 2025-04-10T11:14:36.664093Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641344024654364:2140];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011dc/r3tmp/tmpGpOuPH/pdisk_1.dat 2025-04-10T11:14:36.846977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:14:36.991205Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:37.017860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:37.017950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:37.023661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25718, node 2 2025-04-10T11:14:37.251421Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.251444Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.251452Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.251576Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21824 TClient is connected to server localhost:21824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:37.960239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:38.001845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:38.100112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.339953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.477491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.312326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365499492515:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.312409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.379766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.436423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.489935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.530236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.603546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.663260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.667320Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641344024654364:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:41.667643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:41.766604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365499493033:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.766757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.767186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365499493040:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.777012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.795707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641365499493042:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:41.863457Z node 2 :TX_PROXY ERROR: Actor# [2:7491641365499493097:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.031995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5074, MsgBus: 14109 2025-04-10T11:14:28.385123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641310411590273:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.385212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d2/r3tmp/tmpRIaI7x/pdisk_1.dat 2025-04-10T11:14:29.066584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.066682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.069121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:29.098303Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5074, node 1 2025-04-10T11:14:29.171240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.171287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.171295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.171409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14109 TClient is connected to server localhost:14109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:30.166262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.203537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:30.225088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.452783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.766913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.869215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.996160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641327591461032:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.996280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.362869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.383037Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641310411590273:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.383115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.445455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.509775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.552107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.594830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.655779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.719710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331886428842:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.719805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.719983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331886428848:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.723844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.738743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641331886428850:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:33.835295Z node 1 :TX_PROXY ERROR: Actor# [1:7491641331886428905:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.103470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16024, MsgBus: 13508 2025-04-10T11:14:36.634644Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641347616246898:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:36.635448Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d2/r3tmp/tmp0SPIHu/pdisk_1.dat 2025-04-10T11:14:36.892513Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:36.944929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:36.945015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:36.947857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16024, node 2 2025-04-10T11:14:37.178936Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.178965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.178973Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.179121Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13508 TClient is connected to server localhost:13508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:38.075602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:38.091001Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:38.117556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.252076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.588596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.749338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.475215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369091085099:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.475295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.554483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.605019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.634870Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641347616246898:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:41.635044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:41.651507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.732375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.816675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.897163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.972515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369091085627:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.972614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.972826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369091085632:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.976669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.986763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641369091085634:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.041927Z node 2 :TX_PROXY ERROR: Actor# [2:7491641373386052983:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.142842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26112, MsgBus: 28495 2025-04-10T11:14:28.250910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641312863025448:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.250946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d6/r3tmp/tmpxqG1Cx/pdisk_1.dat 2025-04-10T11:14:28.863608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:28.863712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:28.881814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:28.943815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26112, node 1 2025-04-10T11:14:29.102635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.102657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.102669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.102769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28495 TClient is connected to server localhost:28495 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:29.979533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.029208Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:30.041385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.266699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.469925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.579175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.556283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641330042896207:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.556424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.120303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.185284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.253104Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641312863025448:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.253174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.268318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.312680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.393421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.443488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.531711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641334337864025:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.531836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.532352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641334337864030:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.537425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:33.557515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641334337864032:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:33.631057Z node 1 :TX_PROXY ERROR: Actor# [1:7491641334337864088:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:34.912322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8634, MsgBus: 17546 2025-04-10T11:14:37.070819Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641346293708773:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.090494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011d6/r3tmp/tmpLW2ocB/pdisk_1.dat 2025-04-10T11:14:37.291118Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:37.293772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:37.293862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:37.296751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8634, node 2 2025-04-10T11:14:37.466652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.466674Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.466681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.466784Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17546 TClient is connected to server localhost:17546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:14:38.275821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.286131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.370450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.664793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.870472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.610308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641367768547025:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.610418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.658392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.705407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.751893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.795780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.834968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.887334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.950419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641367768547535:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.950548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.950910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641367768547540:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.956188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.972796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641367768547542:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:42.024994Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641346293708773:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:42.025060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:42.037247Z node 2 :TX_PROXY ERROR: Actor# [2:7491641372063514891:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.187735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestLogWriteRead >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> PDiskCompatibilityInfo::Migration >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12192, MsgBus: 28278 2025-04-10T11:14:29.074736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641313976423828:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.075764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011c6/r3tmp/tmpm8BTph/pdisk_1.dat 2025-04-10T11:14:29.663576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:29.686494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.686633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.691331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12192, node 1 2025-04-10T11:14:29.866061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.866083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.866089Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.866238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28278 TClient is connected to server localhost:28278 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:30.693018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.722729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:30.745374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.994016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:31.380970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.488687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.516979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331156294788:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.517116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.873741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.964518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.013913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.060555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.077366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641313976423828:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.077444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.102788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.159395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.226998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335451262598:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.227095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.227405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335451262603:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.231371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:34.242916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641335451262605:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:34.331935Z node 1 :TX_PROXY ERROR: Actor# [1:7491641335451262662:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.674890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24891, MsgBus: 1745 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011c6/r3tmp/tmpaP2xcw/pdisk_1.dat 2025-04-10T11:14:37.786722Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:37.790412Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:37.874499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:37.874603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:37.877685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24891, node 2 2025-04-10T11:14:38.022560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:38.022585Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:38.022594Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:38.022716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1745 TClient is connected to server localhost:1745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:38.926269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:38.935210Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:38.944275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.043651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.232716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.318882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.838078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641365540385928:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.838195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.918692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.967460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.043932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.085699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.139295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.225062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.321706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369835353743:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.321814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.321996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369835353748:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.325740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:42.345827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641369835353750:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.444506Z node 2 :TX_PROXY ERROR: Actor# [2:7491641369835353805:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:44.206051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 23075, MsgBus: 1541 2025-04-10T11:14:29.815231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641314354052003:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.815643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001165/r3tmp/tmp6MYxSi/pdisk_1.dat 2025-04-10T11:14:30.494669Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:30.497024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:30.497129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:30.501831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23075, node 1 2025-04-10T11:14:30.726579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:30.726601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:30.726608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:30.726709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1541 TClient is connected to server localhost:1541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:14:31.553610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.571978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:31.594871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.783808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.987981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.066287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.263217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335828890117:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.263339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.633287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.703755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.780193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.802974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641314354052003:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.803019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.820693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.903810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.968809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.054750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340123857933:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.054831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.054874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340123857938:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.058742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.070957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641340123857940:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.171532Z node 1 :TX_PROXY ERROR: Actor# [1:7491641340123857996:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3153, MsgBus: 6884 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001165/r3tmp/tmp0LhfH8/pdisk_1.dat 2025-04-10T11:14:37.792050Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641348679536911:2293];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.792092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:14:37.907020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:37.907106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:37.915608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:37.924325Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3153, node 2 2025-04-10T11:14:38.174787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:38.174816Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:38.174824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:38.174953Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6884 TClient is connected to server localhost:6884 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:39.268108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:39.286803Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:39.300974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.423595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.669585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.767101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:42.498319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641370154374734:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.498415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.552309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.633565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.678655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.717786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.767379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.797497Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641348679536911:2293];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:42.797558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:42.815616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.870660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641370154375244:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.870764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.871790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641370154375249:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.880756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:42.904761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641370154375251:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.977255Z node 2 :TX_PROXY ERROR: Actor# [2:7491641370154375310:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14332, MsgBus: 27127 2025-04-10T11:14:29.139202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641315075424554:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.139875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011be/r3tmp/tmpXNuzPW/pdisk_1.dat 2025-04-10T11:14:29.778407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:29.818541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.818651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.831358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14332, node 1 2025-04-10T11:14:30.038485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:30.038511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:30.038526Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:30.038674Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27127 TClient is connected to server localhost:27127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:30.746871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:30.766976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:30.784013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.981605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.262118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.388095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.387926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641332255295481:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.388120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.752162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.810792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.897214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:33.936654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.011704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.115519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.145678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641315075424554:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.145793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.202414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336550263304:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.202502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.202965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336550263309:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.208100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:34.232907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641336550263311:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:34.304668Z node 1 :TX_PROXY ERROR: Actor# [1:7491641336550263366:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:35.790838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3313, MsgBus: 22851 2025-04-10T11:14:37.526626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641348369663343:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.579275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011be/r3tmp/tmpErlnSQ/pdisk_1.dat 2025-04-10T11:14:37.764740Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:37.797882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:37.798118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:37.800263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3313, node 2 2025-04-10T11:14:37.987277Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:37.987306Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:37.987315Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:37.987463Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22851 TClient is connected to server localhost:22851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:38.914786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:38.925890Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:38.944621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.080575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.486627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.606111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:42.102691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369844501442:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.102790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.158751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.207690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.252995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.300151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.343638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.446781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.504513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369844501954:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.504642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.504933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641369844501959:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.509066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:42.515266Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641348369663343:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:42.515387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:42.526363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641369844501961:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.605858Z node 2 :TX_PROXY ERROR: Actor# [2:7491641369844502014:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.764023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |87.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PDiskCompatibilityInfo::Migration [GOOD] >> ReadOnlyPDisk::SimpleRestartReadOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26919, MsgBus: 8802 2025-04-10T11:14:28.909921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641310042698020:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:28.909978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ca/r3tmp/tmppw2px1/pdisk_1.dat 2025-04-10T11:14:29.583778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:29.583885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:29.591255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:29.638610Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26919, node 1 2025-04-10T11:14:29.903841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:29.903863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:29.903870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:29.903985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8802 TClient is connected to server localhost:8802 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:31.000175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.016070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:31.034269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.282414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.522671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.635304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.929139Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641310042698020:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.929292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:33.961298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641331517536146:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.961423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.705091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.754993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.839940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.885942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.950328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.998207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.075564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340107471264:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.075622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.075851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340107471269:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.079173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.098356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:14:35.099370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641340107471271:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.183289Z node 1 :TX_PROXY ERROR: Actor# [1:7491641340107471327:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:36.595417Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:14:36.637662Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:36.637857Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:36.638031Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641344402438951:2503], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491641344402438929:2503]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491641344402438951:2503].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:36.638622Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641344402438944:2503], SessionActorId: [1:7491641344402438929:2503], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641344402438929:2503]. isRollback=0 2025-04-10T11:14:36.638900Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzY1NmExZmUtMzFiOGQxMjYtZDRjOTg0ODctY2MxZmMyMzY=, ActorId: [1:7491641344402438929:2503], ActorState: ExecuteState, TraceId: 01jrfphbvcfnghnwh608evr2fk, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641344402438945:2503] from: [1:7491641344402438944:2503] 2025-04-10T11:14:36.639050Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641344402438945:2503] TxId: 281474976710671. Ctx: { TraceId: 01jrfphbvcfnghnwh608evr2fk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY1NmExZmUtMzFiOGQxMjYtZDRjOTg0ODctY2MxZmMyMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:36.639259Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzY1NmExZmUtMzFiOGQxMjYtZDRjOTg0ODctY2MxZmMyMzY=, ActorId: [1:7491641344402438929:2503], ActorState: ExecuteState, TraceId: 01jrfphbvcfnghnwh608evr2fk, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9386, MsgBus: 19166 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ca/r3tmp/tmp2QARWt/pdisk_1.dat 2025-04-10T11:14:38.310293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:38.349171Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:38.373863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:38.373945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:38.375265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9386, node 2 2025-04-10T11:14:38.525100Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:38.525125Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:38.525132Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:38.525261Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19166 TClient is connected to server localhost:19166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:39.329782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.351209Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:39.365649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
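The PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION (code 2012) trace above comes from an INSERT that hits an already-existing primary key, which is the behavior the KqpEffects::InsertAbort_* and KqpImmediateEffects::ReplaceExistingKey/UpsertExistingKey tests exercise. A minimal YQL sketch of that semantics follows; the schema and values are hypothetical (loosely modeled on `/Root/TwoShard`, whose actual definition is not shown in this log):

-- Hypothetical repro sketch, not the test's actual schema.
CREATE TABLE TwoShard (
    Key Uint32,
    Value1 String,
    PRIMARY KEY (Key)
);

INSERT INTO TwoShard (Key, Value1) VALUES (1u, "One");

-- A second INSERT with the same key aborts the transaction with
-- "Duplicate keys have been found., code: 2012", as in the trace above:
INSERT INTO TwoShard (Key, Value1) VALUES (1u, "AnotherOne");

-- UPSERT (and likewise REPLACE) overwrites the existing row instead of
-- failing, which is what the UpsertExistingKey / ReplaceExistingKey
-- tests verify:
UPSERT INTO TwoShard (Key, Value1) VALUES (1u, "AnotherOne");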
2025-04-10T11:14:39.478373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.705132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:39.806772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.017438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641373238006842:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.017517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.084119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.147669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.198192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.269958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.326446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.369128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.465046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641373238007363:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.465123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.465353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641373238007368:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.469566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:42.487401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641373238007370:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.545800Z node 2 :TX_PROXY ERROR: Actor# [2:7491641373238007426:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:44.224570Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641381827942370:2503], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGNiNmJhY2ItMjc4MDc1MjYtNjU5OWM3ZTItMjgxMmQ0YTk=. TraceId : 01jrfphjwe6pfmm2ke6zb7eta7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:14:44.225429Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641381827942371:2504], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGNiNmJhY2ItMjc4MDc1MjYtNjU5OWM3ZTItMjgxMmQ0YTk=. TraceId : 01jrfphjwe6pfmm2ke6zb7eta7. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491641381827942367:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:44.225844Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNiNmJhY2ItMjc4MDc1MjYtNjU5OWM3ZTItMjgxMmQ0YTk=, ActorId: [2:7491641377532974981:2489], ActorState: ExecuteState, TraceId: 01jrfphjwe6pfmm2ke6zb7eta7, Create QueryResponse for error on request, msg: >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 12251, MsgBus: 12389 2025-04-10T11:14:29.961863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641314039010250:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.961912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00115e/r3tmp/tmpxjZhaq/pdisk_1.dat 2025-04-10T11:14:30.736618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:30.749772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:30.749920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:30.760599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12251, node 1 2025-04-10T11:14:31.142688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:31.142712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:31.142721Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:31.142870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12389 TClient is connected to server localhost:12389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:31.965197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:32.018401Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.042138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.291413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.566552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.677047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.764046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335513848503:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.764152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.964857Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641314039010250:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.964920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:35.238900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.288151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.323095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.361695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.410919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.502211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.570350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641339808816317:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.570476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.570929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641339808816322:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.574391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.585240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641339808816325:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.670115Z node 1 :TX_PROXY ERROR: Actor# [1:7491641339808816379:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:36.917953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12708, MsgBus: 1565 2025-04-10T11:14:38.790345Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641353327060175:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:38.790396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00115e/r3tmp/tmprFjgeB/pdisk_1.dat 2025-04-10T11:14:39.068342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:39.068425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:39.072430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:39.074562Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12708, node 2 2025-04-10T11:14:39.219018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:39.219043Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:39.219055Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:39.219173Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1565 TClient is connected to server localhost:1565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:39.885377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:39.894909Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:39.906898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.095970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.285063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.375232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:42.713681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641370506931101:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.713776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.773346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.848862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.887837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.930658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.967158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.035644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.111585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641374801898910:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.111696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.111936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641374801898915:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.116357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:43.134490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641374801898917:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:43.237087Z node 2 :TX_PROXY ERROR: Actor# [2:7491641374801898973:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.810593Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641353327060175:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:43.810884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:44.306291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 29581, MsgBus: 12249 2025-04-10T11:14:29.438725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641314394220722:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.439587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a6/r3tmp/tmpRYLI5Z/pdisk_1.dat 2025-04-10T11:14:30.168889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:30.168973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:30.178409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:30.179933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29581, node 1 2025-04-10T11:14:30.397525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:30.397551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:30.397566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:30.397702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12249 TClient is connected to server localhost:12249 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:31.499907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.533529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:31.549237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.756066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.997853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.089616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.082948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335869058935:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.083086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.439422Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641314394220722:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.439515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.487320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.578066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.620702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.660677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.707360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.764394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.859151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335869059448:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.859264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.859643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335869059453:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.863712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:34.877439Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:14:34.877612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641335869059455:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:34.973762Z node 1 :TX_PROXY ERROR: Actor# [1:7491641335869059511:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:36.554175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9244, MsgBus: 63024 2025-04-10T11:14:38.270281Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641355720749722:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:38.270324Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a6/r3tmp/tmpfFRAyF/pdisk_1.dat 2025-04-10T11:14:38.522260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9244, node 2 2025-04-10T11:14:38.595107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:38.595193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:38.647201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:38.868940Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:38.868964Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:38.868971Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:38.869071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63024 TClient is connected to server localhost:63024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:39.539344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:39.555164Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:39.566713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.693919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.916899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.019362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:42.151198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641372900620670:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.151274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.221840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.273055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.352674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.404432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.452270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.516907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.637835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641372900621193:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.638000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.642608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641372900621198:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:42.650470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:42.669005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641372900621200:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:42.730691Z node 2 :TX_PROXY ERROR: Actor# [2:7491641372900621255:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:43.272211Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641355720749722:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:43.272276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:43.850889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> ReadOnlyPDisk::SimpleRestartReadOnly [GOOD] >> ReadOnlyPDisk::StartReadOnlyUnformattedShouldFail >> ReadOnlyPDisk::StartReadOnlyUnformattedShouldFail [GOOD] >> ReadOnlyPDisk::StartReadOnlyZeroedShouldFail >> ReadOnlyPDisk::StartReadOnlyZeroedShouldFail [GOOD] >> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9338, MsgBus: 21860 2025-04-10T11:14:32.023467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641328854106253:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.023522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001156/r3tmp/tmpJ428jo/pdisk_1.dat 2025-04-10T11:14:32.785738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:32.822725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:32.822834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:32.835067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9338, node 1 2025-04-10T11:14:33.036115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:33.036135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:33.036144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:33.036260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21860 TClient is connected to server localhost:21860 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:33.998263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.015454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:34.023562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.202780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.421135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.538413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.919856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641346033977247:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:36.919944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.026241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641328854106253:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:37.026301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:37.279341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.316767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.354756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.428080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.461900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.504430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:37.570932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641350328945059:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.570986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.571166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641350328945064:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.575346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:37.595522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641350328945066:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:37.680766Z node 1 :TX_PROXY ERROR: Actor# [1:7491641350328945122:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15175, MsgBus: 26193 2025-04-10T11:14:39.882737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641356661268263:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:39.883750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001156/r3tmp/tmphd1xz4/pdisk_1.dat 2025-04-10T11:14:40.024793Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:40.068912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:40.069015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:40.072004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15175, node 2 2025-04-10T11:14:40.218954Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:40.218986Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:40.218997Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:40.219146Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26193 TClient is connected to server localhost:26193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:14:40.783584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.790130Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:40.801753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.889648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.102550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:41.185450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.731595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641373841139060:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.731745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.781680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.871750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.931889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.993820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:44.043423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:44.130780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:44.213311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641378136106873:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:44.213428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:44.213807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641378136106878:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:44.218590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:44.230507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641378136106880:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:14:44.288615Z node 2 :TX_PROXY ERROR: Actor# [2:7491641378136106932:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:44.814764Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641356661268263:2202];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:44.814855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD]
>> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector
>> TPDiskTest::TestPDiskOwnerRecreation [GOOD]
>> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner
>> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD]
>> TYardTest::TestLogWriteReadLarge
>> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk [GOOD]
>> ReadOnlyPDisk::ReadOnlyPDiskEvents
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD]
Test command err:
Trying to start YDB, gRPC: 1103, MsgBus: 13501
2025-04-10T11:14:29.335845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641314887311910:2267];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:29.336811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011b2/r3tmp/tmpU7qwJi/pdisk_1.dat
2025-04-10T11:14:29.898537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:29.898662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:29.900136Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:29.908286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1103, node 1
2025-04-10T11:14:30.022688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:30.022710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:30.022716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:30.022839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13501
TClient is connected to server localhost:13501
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:30.938861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:30.976581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.159552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.426566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.516207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:33.723301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641332067182647:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:33.723411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.232379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.278889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.337379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641314887311910:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.338248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.346938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.434259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.538001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.625380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.732830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336362150475:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.732924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.733185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336362150480:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.737213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:34.751899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641336362150482:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:34.821132Z node 1 :TX_PROXY ERROR: Actor# [1:7491641336362150537:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:36.506663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6829, MsgBus: 27895 2025-04-10T11:14:39.313831Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641356849369254:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011b2/r3tmp/tmpySvkyr/pdisk_1.dat 2025-04-10T11:14:39.411145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:14:39.530743Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:39.533049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:39.533120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:39.535279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6829, node 2 2025-04-10T11:14:39.694687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:39.694706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:39.694713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:39.694845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27895 TClient is connected to server localhost:27895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:40.241030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:40.264037Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:40.284880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.375922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.596053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:40.690616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.235704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641374029240034:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.235785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.285501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.367274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.418129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.470306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.523096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.606972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.719141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641374029240553:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.719229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.719550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641374029240558:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.724654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:43.737431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641374029240560:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:14:43.829470Z node 2 :TX_PROXY ERROR: Actor# [2:7491641374029240615:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:14:44.251517Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641356849369254:2209];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:44.251600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:14:45.395060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 2039, MsgBus: 30200
2025-04-10T11:14:29.845661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641313760063317:2069];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:29.861109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119a/r3tmp/tmpeCjLnQ/pdisk_1.dat
2025-04-10T11:14:30.552042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:30.552183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:30.561385Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:14:30.561864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2039, node 1
2025-04-10T11:14:30.732030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:30.732057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:30.732070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:30.732201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:30200
TClient is connected to server localhost:30200
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:31.630550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.651153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:31.661008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.877807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.094119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.196139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.784298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641335234901551:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.784404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.856546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641313760063317:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.856615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:35.375083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.432301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.501911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.594229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.669453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.718818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.802559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641339529869374:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.802689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.802980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641339529869379:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.807541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.846177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641339529869381:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.917194Z node 1 :TX_PROXY ERROR: Actor# [1:7491641339529869441:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:37.463042Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:14:37.488133Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:37.488396Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:14:37.488633Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641348119804392:2502], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7491641348119804341:2502]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7491641348119804392:2502].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:14:37.489290Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641348119804385:2502], SessionActorId: [1:7491641348119804341:2502], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491641348119804341:2502]. isRollback=0 2025-04-10T11:14:37.489539Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ1Y2Y5MzEtYTc4MTA0NzUtYTUxZTM0MmQtNWFjN2QzY2M=, ActorId: [1:7491641348119804341:2502], ActorState: ExecuteState, TraceId: 01jrfphcj221wm8v6v6xzf6256, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491641348119804386:2502] from: [1:7491641348119804385:2502] 2025-04-10T11:14:37.489616Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641348119804386:2502] TxId: 281474976710671. Ctx: { TraceId: 01jrfphcj221wm8v6v6xzf6256, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1Y2Y5MzEtYTc4MTA0NzUtYTUxZTM0MmQtNWFjN2QzY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:14:37.489798Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ1Y2Y5MzEtYTc4MTA0NzUtYTUxZTM0MmQtNWFjN2QzY2M=, ActorId: [1:7491641348119804341:2502], ActorState: ExecuteState, TraceId: 01jrfphcj221wm8v6v6xzf6256, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2314, MsgBus: 12727 2025-04-10T11:14:39.407121Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641360177813585:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:39.407159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119a/r3tmp/tmpX2sTzg/pdisk_1.dat 2025-04-10T11:14:39.688414Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:39.710875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:39.710966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:39.713358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2314, node 2 2025-04-10T11:14:39.878787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:39.878812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:39.878820Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:39.878941Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12727 TClient is connected to server localhost:12727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:40.496210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.514969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:40.605586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.848875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.939070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.415408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641377357684541:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.415507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.499833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.596981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.650840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.721215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.783967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.862521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.982768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641377357685057:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.982891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.983342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641377357685062:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.988453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:44.007145Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:14:44.007458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641377357685064:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:44.073548Z node 2 :TX_PROXY ERROR: Actor# [2:7491641381652652415:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:44.416831Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641360177813585:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:44.416943Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:45.735106Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641385947620051:2504], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZjdmMWYyNGYtNmE3ZDgyOGEtODYzMjU5N2MtMjE2ZTIzODU=. TraceId : 01jrfphmfre23pak8a8ef6b47e. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:14:45.735542Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641385947620052:2505], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZjdmMWYyNGYtNmE3ZDgyOGEtODYzMjU5N2MtMjE2ZTIzODU=. TraceId : 01jrfphmfre23pak8a8ef6b47e. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491641385947620048:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-04-10T11:14:45.735968Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjdmMWYyNGYtNmE3ZDgyOGEtODYzMjU5N2MtMjE2ZTIzODU=, ActorId: [2:7491641385947619970:2489], ActorState: ExecuteState, TraceId: 01jrfphmfre23pak8a8ef6b47e, Create QueryResponse for error on request, msg:
>> TYardTest::TestLogWriteReadLarge [GOOD]
>> TYardTest::TestLogWriteCutEqual
>> ReadOnlyPDisk::ReadOnlyPDiskEvents [GOOD]
>> ShredPDisk::EmptyShred
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 9080, MsgBus: 61593
2025-04-10T11:14:30.451432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641319358935253:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:14:30.459037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00115b/r3tmp/tmppHnLfb/pdisk_1.dat
2025-04-10T11:14:31.027338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:14:31.027470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:14:31.030710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:14:31.095196Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9080, node 1
2025-04-10T11:14:31.282374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:14:31.282397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:14:31.282412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:14:31.282536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:61593
TClient is connected to server localhost:61593
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:14:32.147887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:14:32.180573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:32.197237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.369990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.660328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:32.761977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.965764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336538806061:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.965852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.273990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.313012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.357373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.395302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.438674Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641319358935253:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:35.438732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:35.459572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.516165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.602074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340833773870:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.602172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.602497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340833773875:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.607175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.635454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641340833773877:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.687091Z node 1 :TX_PROXY ERROR: Actor# [1:7491641340833773934:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:37.166399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15227, MsgBus: 28330 2025-04-10T11:14:38.998210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641355204084853:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:38.998342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00115b/r3tmp/tmp5Uwgyd/pdisk_1.dat 2025-04-10T11:14:39.235041Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:39.253871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:39.253960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:39.256087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15227, node 2 2025-04-10T11:14:39.448066Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:39.448099Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:39.448107Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:39.448230Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28330 TClient is connected to server localhost:28330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:40.093989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:40.116900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.194669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:40.350070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.435398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.431393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641376678923106:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.431473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.491573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.559450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.645073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.696379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.781147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.891008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.969224Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641355204084853:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:43.969290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:43.982985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641376678923628:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.983095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.983457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641376678923633:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:43.988393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:44.021266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641376678923635:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:44.100465Z node 2 :TX_PROXY ERROR: Actor# [2:7491641380973890989:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:45.484641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.374066Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641389563826103:2521], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWIyNTE0ZTQtYjkyNWMyMmEtNDBkYWQ2NWUtNjExZGZkNTc=. TraceId : 01jrfphmtafnph4r9c3c4y8p3m. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:14:46.374957Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641389563826105:2522], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWIyNTE0ZTQtYjkyNWMyMmEtNDBkYWQ2NWUtNjExZGZkNTc=. TraceId : 01jrfphmtafnph4r9c3c4y8p3m. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491641389563826100:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:14:46.375462Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWIyNTE0ZTQtYjkyNWMyMmEtNDBkYWQ2NWUtNjExZGZkNTc=, ActorId: [2:7491641385268858559:2492], ActorState: ExecuteState, TraceId: 01jrfphmtafnph4r9c3c4y8p3m, Create QueryResponse for error on request, msg:
>> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD]
>> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD]
>> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD]
>> ShredPDisk::EmptyShred [GOOD]
>> ShredPDisk::SimpleShred
>> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty
>> DataShardTxOrder::ReadWriteReorder
>> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility
>> DataShardTxOrder::RandomPoints_ReproducerDelayData1
>> DataShardTxOrder::ImmediateBetweenOnline
>> DataShardOutOfOrder::UncommittedReadSetAck
>> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW
>> DataShardTxOrder::RandomPoints_ReproducerDelayRS1
>> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadPriority
>> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate
>> TxOrderInternals::OperationOrder [GOOD]
>> DataShardTxOrder::DelayData
>> DataShardTxOrder::ZigZag_oo
>> DataShardTxOrder::ImmediateBetweenOnline_Init
>> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink
>> DataShardTxOrder::ZigZag
>> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty
>> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply
>> DataShardOutOfOrder::TestPlannedTimeoutSplit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 4621, MsgBus: 64068
2025-04-10T11:14:33.123088Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641330945613461:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.123124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001150/r3tmp/tmpsxz8Vg/pdisk_1.dat 2025-04-10T11:14:33.890524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:33.915282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:33.915352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:33.930678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4621, node 1 2025-04-10T11:14:34.065883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:34.065909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:34.065921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:34.066046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64068 TClient is connected to server localhost:64068 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:34.763730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.807751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:34.829985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:35.133818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:35.345802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.433744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.963991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641348125484440:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:37.964141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.126266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641330945613461:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:38.147631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:38.493770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.588580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.652522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.739831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.802866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.905207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.006513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641352420452266:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.006623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.006898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641356715419567:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.015016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:39.035279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641356715419569:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:39.122124Z node 1 :TX_PROXY ERROR: Actor# [1:7491641356715419626:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:40.186882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 62417, MsgBus: 64226
2025-04-10T11:14:41.669608Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641365136858882:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:41.670123Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001150/r3tmp/tmpV6kv3T/pdisk_1.dat 2025-04-10T11:14:41.852638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:41.852715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:41.856858Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:41.873528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62417, node 2 2025-04-10T11:14:42.062772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:42.062801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:42.062808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:42.062952Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64226 TClient is connected to server localhost:64226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:42.823956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:42.853968Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:42.861836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:42.947511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:43.173807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.248617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:46.083332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641386611697118:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.083444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.132404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.187056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.229833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.304410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.348544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.428342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.533175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641386611697642:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.533282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.533657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641386611697647:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.539345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:46.560493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641386611697649:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:46.623064Z node 2 :TX_PROXY ERROR: Actor# [2:7491641386611697703:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:46.669869Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641365136858882:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:46.669963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:47.775858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 17928, MsgBus: 63603 2025-04-10T11:14:33.296767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641333449300365:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:33.296831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00114c/r3tmp/tmpL1YODl/pdisk_1.dat 2025-04-10T11:14:33.883307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:33.901234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:33.901348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:33.905866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17928, node 1 2025-04-10T11:14:34.173868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:34.173909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:34.173921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:34.174039Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63603 TClient is connected to server localhost:63603 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:35.222088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:35.258887Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:35.275788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:35.536898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:35.846037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:35.945742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.246063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641354924138427:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.246292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:38.300622Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641333449300365:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:38.300682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:38.707028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.815962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.884043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:38.934246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.013201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.115861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.243085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641359219106250:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.243170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.243563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641359219106255:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.248252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:39.279572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641359219106257:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:39.350993Z node 1 :TX_PROXY ERROR: Actor# [1:7491641359219106316:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:40.496008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20017, MsgBus: 20412 2025-04-10T11:14:41.974138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641366517164558:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:41.974341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00114c/r3tmp/tmpGbupL4/pdisk_1.dat 2025-04-10T11:14:42.153109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:42.153184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:42.154499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20017, node 2 2025-04-10T11:14:42.184564Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:42.260156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:42.260171Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:42.260176Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:42.260267Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20412 TClient is connected to server localhost:20412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:42.864494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:42.872174Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:14:42.888617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.065026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.260545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:43.343671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:46.147884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641387992002777:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.147987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.227032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.301388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.336680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.368464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.404029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.452418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:46.520401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641387992003292:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.520516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.520820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641387992003297:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:46.524721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:46.538118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641387992003299:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:46.605017Z node 2 :TX_PROXY ERROR: Actor# [2:7491641387992003352:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:46.973889Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641366517164558:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:46.973969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:47.761569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> TPDiskTest::TestChunkWriteRelease [GOOD]
>> TPDiskTest::TestPDiskManyOwnersInitiation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD]
Test command err:
Trying to start YDB, gRPC: 16635, MsgBus: 5772
2025-04-10T11:14:27.129641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641308578420342:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:27.129678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011eb/r3tmp/tmpj6t3qK/pdisk_1.dat 2025-04-10T11:14:27.569828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:27.569926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:27.576411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:27.604830Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16635, node 1 2025-04-10T11:14:27.754695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:27.754720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:27.754726Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:27.754857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5772 TClient is connected to server localhost:5772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:28.420761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.446703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:28.455288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.709287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:28.993265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:29.197946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.695244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641325758291297:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:31.695411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.141377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641308578420342:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:32.141447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:32.189430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.234954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.285178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.323218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.369973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.452891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:32.550731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641330053259119:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.550880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.550993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641330053259124:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:32.559546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:32.587653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641330053259126:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:32.687816Z node 1 :TX_PROXY ERROR: Actor# [1:7491641330053259185:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:33.983633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1614, MsgBus: 22660 2025-04-10T11:14:35.980549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641341056261512:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:35.980597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011eb/r3tmp/tmp3QjXw9/pdisk_1.dat 2025-04-10T11:14:36.379670Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:36.431544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:36.431639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:36.439699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1614, node 2 2025-04-10T11:14:36.666669Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:36.666690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:36.666699Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:36.666805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22660 TClient is connected to server localhost:22660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:37.419348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:14:37.437475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.556301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.845215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:38.012867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:40.811069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641362531099648:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.811157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.872091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.911413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.968046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.986245Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641341056261512:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:40.986312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:41.024938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.112285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.171444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:41.246558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641366826067461:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.246679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.247013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641366826067467:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:41.251234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:41.275223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641366826067469:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:41.326743Z node 2 :TX_PROXY ERROR: Actor# [2:7491641366826067522:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:42.382601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.489609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:14:42.569264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
>> DataShardOutOfOrder::TestReadTableImmediateWriteBlock
|87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD]
>> DataShardTxOrder::ForceOnlineBetweenOnline_oo8
>> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD]
>> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD]
>> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector
>> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD]
>> ShredPDisk::SimpleShred [GOOD]
>> ShredPDisk::KillVDiskWhilePreShredding
>> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD]
>> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector
>> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD]
>> TPDiskTest::TestVDiskMock
>> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD]
>> TPDiskTest::TestLogWriteReadWithRestarts
>> DataShardTxOrder::ZigZag_oo8_dirty
>> DataShardOutOfOrder::TestReadTableWriteConflict
>> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 28672, MsgBus: 61810
2025-04-10T11:14:34.443488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641334813770678:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.443530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001148/r3tmp/tmpcvdGpY/pdisk_1.dat 2025-04-10T11:14:35.120118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:35.123095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:35.123225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:35.130308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28672, node 1 2025-04-10T11:14:35.370717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:35.370758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:35.370768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:35.370880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61810 TClient is connected to server localhost:61810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:36.352878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.394400Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:36.414438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.693772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:36.982595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:37.074511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:39.436242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641356288608903:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.436380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:39.459973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641334813770678:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:39.460041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:39.813579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.860621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.911124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:39.971690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.017674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.095512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:40.211786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641360583576728:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.211877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.212103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641360583576733:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:40.220648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:40.232340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641360583576735:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:40.306359Z node 1 :TX_PROXY ERROR: Actor# [1:7491641360583576790:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:41.460764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17333, MsgBus: 28995 2025-04-10T11:14:42.951358Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641370472368667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:42.951398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001148/r3tmp/tmpt9HXsa/pdisk_1.dat 2025-04-10T11:14:43.113279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:43.113381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:43.115915Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:43.131748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17333, node 2 2025-04-10T11:14:43.230680Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:43.230703Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:43.230709Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:43.230814Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28995 TClient is connected to server localhost:28995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:44.065702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
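[Editor's note] The "Resource pool default not found or you don't have access permissions" warnings that recur at every test startup above, followed by the TPoolCreatorActor "Scheduled retry ... doublechecking" message and a TX_PROXY "path exist, request accepts it" error, are the expected lazy-bootstrap sequence of the workload manager: the first query triggers creation of the default resource pool under .metadata/workload_manager/pools, and concurrent creators race benignly on the same path. A minimal sketch of checking that path from a client once bootstrap has settled — this assumes a locally reachable single-node instance, and the endpoint/database values are placeholders, not taken from this log:

```python
import ydb

# Placeholder connection parameters; substitute your own endpoint/database.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

# The workload manager keeps pools under .metadata/workload_manager/pools;
# describe_path succeeds once the default pool has been auto-created.
entry = driver.scheme_client.describe_path(
    "/Root/.metadata/workload_manager/pools/default"
)
print(entry.name)
```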
2025-04-10T11:14:44.073470Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:44.081013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:44.183618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:44.363486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:44.538670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:47.954272Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641370472368667:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:47.972057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:48.083130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641396242174237:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.083224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.142846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.214024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.263013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.309005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.353391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.428411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.494662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641396242174756:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.494777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.494989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641396242174761:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.500277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:48.515705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641396242174763:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:48.609369Z node 2 :TX_PROXY ERROR: Actor# [2:7491641396242174819:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:49.851066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:50.673419Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2025-04-10T11:14:50.673649Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:14:50.673858Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:14:50.674060Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641404832110103:2501], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7491641400537142436:2501]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7491641404832110103:2501].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-10T11:14:50.674645Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491641404832110047:2501], SessionActorId: [2:7491641400537142436:2501], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7491641400537142436:2501]. isRollback=0 2025-04-10T11:14:50.674958Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjIyMmIwMTItNTgxZTBhYmYtMzdiMGQ0ZmMtNTU1NTliMGM=, ActorId: [2:7491641400537142436:2501], ActorState: ExecuteState, TraceId: 01jrfphsh3bzwatz7y8jedgv4g, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491641404832110097:2501] from: [2:7491641404832110047:2501] 2025-04-10T11:14:50.675055Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491641404832110097:2501] TxId: 281474976710676. Ctx: { TraceId: 01jrfphsh3bzwatz7y8jedgv4g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjIyMmIwMTItNTgxZTBhYmYtMzdiMGQ0ZmMtNTU1NTliMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-10T11:14:50.675314Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjIyMmIwMTItNTgxZTBhYmYtMzdiMGQ0ZmMtNTU1NTliMGM=, ActorId: [2:7491641400537142436:2501], ActorState: ExecuteState, TraceId: 01jrfphsh3bzwatz7y8jedgv4g, Create QueryResponse for error on request, msg: >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> ShredPDisk::KillVDiskWhilePreShredding [GOOD] >> ShredPDisk::KillVDiskWhileShredding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 7083, MsgBus: 19657 2025-04-10T11:14:29.587966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641314719897002:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:29.588352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00118e/r3tmp/tmpakHfsE/pdisk_1.dat 2025-04-10T11:14:30.243554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:30.243650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:30.245625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:30.286047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7083, node 1 2025-04-10T11:14:30.478737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:30.478761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:30.478770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:30.478900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19657 TClient is connected to server localhost:19657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
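[Editor's note] The STATUS_LOCKS_BROKEN / "Transaction locks invalidated" sequence above is the optimistic-lock conflict path that KqpImmediateEffects::ConflictingKeyRW1RWR2 provokes deliberately: a concurrent write invalidates the read locks of an interactive transaction, and the server aborts the whole transaction with issue code 2001. From a client's point of view, ABORTED with broken locks is retryable by rerunning the full transaction body. A minimal sketch of that retry pattern using the public ydb Python SDK — the table name is borrowed from the log, but its column layout here is an assumption for illustration:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def txn_body(session):
    # The whole read-modify-write runs in one serializable transaction; if the
    # server reports broken locks it aborts, and we rerun the body from scratch.
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute("UPSERT INTO TestImmediateEffects (Key, Value) VALUES (1u, 'v');")
    tx.commit()

# retry_operation_sync reruns the callee on retryable statuses such as ABORTED.
pool.retry_operation_sync(txn_body)
```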
2025-04-10T11:14:31.402178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.427006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:14:31.437504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:14:31.648523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:14:31.898723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:31.992805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:34.363937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641336194735130:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.364039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:34.582248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641314719897002:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:34.582325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:34.734493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.824210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.858771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:34.939406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.012596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.081989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:35.207329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340489702952:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.207427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.207795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641340489702957:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:35.216041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:35.252384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641340489702959:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:14:35.347085Z node 1 :TX_PROXY ERROR: Actor# [1:7491641340489703020:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:36.556110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:14:36.633379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:14:36.689870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25086, MsgBus: 23316 2025-04-10T11:14:43.213286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641375037420861:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:43.213341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00118e/r3tmp/tmpZx6q1F/pdisk_1.dat 2025-04-10T11:14:43.622957Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:43.698486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:43.698584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:43.700980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25086, node 2 2025-04-10T11:14:43.906086Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:14:43.906110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:14:43.906118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:14:43.906296Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23316 TClient is connected to server localhost:23316 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:14:44.743578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:44.769701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:44.871308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:45.091539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:45.184754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:14:47.882581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641392217291819:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:47.882691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:47.949691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:14:47.994883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.072331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.157852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.208131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.213804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641375037420861:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:14:48.213890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:14:48.289683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:14:48.407042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641396512259649:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.407146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.409289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641396512259654:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:48.415089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:14:48.437994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641396512259656:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:14:48.498455Z node 2 :TX_PROXY ERROR: Actor# [2:7491641396512259711:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:49.602892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> TBlockDeviceTest::WriteReadRestart [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::Simple [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> TPDiskRaces::Decommit >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-04-10T11:14:51.222002Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.240962Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.246358Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:14:51.250370Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.291522Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.429853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.429928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.444040Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.444586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.446170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.446248Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.446313Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.446712Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.446980Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.447060Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:14:51.540518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.592522Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.593063Z 
node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.593511Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:14:51.593645Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.593792Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.593948Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.594816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.595804Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.596485Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.596694Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.596859Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.596964Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.597049Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.597178Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.597268Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.597374Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.597563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.597740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.597802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.597897Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:14:51.600564Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.600613Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.600758Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.600983Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.601064Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.601131Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.601212Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.601303Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.601403Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.601502Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.601914Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.601962Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.602031Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.602100Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.602204Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.602296Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.602371Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.602425Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.602475Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.620222Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.620322Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.620377Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.620436Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.620524Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.621189Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.621267Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.621344Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:14:51.621535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.621578Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.621726Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.621842Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.621887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.621960Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.629495Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.629597Z node 
1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.629899Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.629954Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.630030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.630103Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.630194Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.630244Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.630282Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.630332Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.630376Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.630427Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.630486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.630704Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.630747Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.630774Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.630800Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.630825Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.630900Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.630932Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.630981Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.631030Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.631098Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.631136Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.631173Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.631247Z node 1 :TX_DATASH ... 
CE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-04-10T11:14:52.673083Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-04-10T11:14:52.673127Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-04-10T11:14:52.673312Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-04-10T11:14:52.673361Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-04-10T11:14:52.673397Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-04-10T11:14:52.673434Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-04-10T11:14:52.673474Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-10T11:14:52.673509Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-04-10T11:14:52.673538Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2025-04-10T11:14:52.673578Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:52.673610Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:14:52.673664Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:14:52.673704Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:14:52.673866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:237:2230]: {TEvPlanStep step# 1000005 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:52.673912Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:52.674016Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit WaitForPlan 2025-04-10T11:14:52.674049Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.674076Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:52.674105Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:52.674262Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 12 at step 1000005 at tablet 9437184 { Transactions { TxId: 12 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000005 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:52.674294Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.674490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:237:2230], Recipient [1:237:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:52.674545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:52.674591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:52.674626Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:52.674667Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:52.674719Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in 
PlanQueue unit at 9437184 2025-04-10T11:14:52.674751Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2025-04-10T11:14:52.674780Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.674809Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:52.674836Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:52.674883Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:52.675605Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2025-04-10T11:14:52.675656Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.675683Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:52.675709Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-10T11:14:52.675752Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2025-04-10T11:14:52.675790Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.675813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-10T11:14:52.675836Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:52.675858Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:52.675920Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2025-04-10T11:14:52.675955Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2025-04-10T11:14:52.676013Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2025-04-10T11:14:52.676058Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676087Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T11:14:52.676111Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2025-04-10T11:14:52.676133Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2025-04-10T11:14:52.676184Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676204Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2025-04-10T11:14:52.676238Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2025-04-10T11:14:52.676255Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2025-04-10T11:14:52.676274Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676289Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2025-04-10T11:14:52.676302Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2025-04-10T11:14:52.676316Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2025-04-10T11:14:52.676334Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676351Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2025-04-10T11:14:52.676364Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2025-04-10T11:14:52.676378Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:14:52.676400Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676421Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:14:52.676446Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:14:52.676463Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-04-10T11:14:52.676749Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-04-10T11:14:52.676784Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:14:52.676814Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.676829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:14:52.676844Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-04-10T11:14:52.676858Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.676960Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-04-10T11:14:52.676977Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-04-10T11:14:52.676994Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2025-04-10T11:14:52.677013Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-04-10T11:14:52.677032Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-10T11:14:52.677099Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-04-10T11:14:52.677119Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-04-10T11:14:52.677147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:52.677168Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:52.677186Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:52.677204Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:52.694131Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-04-10T11:14:52.694243Z node 1 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-04-10T11:14:52.694306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:14:52.694383Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-04-10T11:14:52.694441Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:52.694500Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-10T11:14:52.694885Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-04-10T11:14:52.694933Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-10T11:14:52.694971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.694998Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.695037Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:52.695068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-04-10T11:14:51.351113Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.351832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.351897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.366743Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.367224Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.367558Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.422719Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.448201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.448639Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.450240Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.450359Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.450427Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.450734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.450866Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.450910Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:51.543285Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.604203Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.604816Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing 
processing params 2025-04-10T11:14:51.605237Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:14:51.605414Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.605584Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.605735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.606581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.606802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.607443Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.607723Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.607948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.608073Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.608190Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.608365Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.608517Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.608625Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.608746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.608996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.609078Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.609285Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:51.613816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.613972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.614187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.614580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.614798Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.614917Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.615071Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.615206Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 
2025-04-10T11:14:51.615295Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.615411Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.616030Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.616099Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.616210Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.616313Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.616396Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.616457Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.616512Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.616586Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.616643Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.630546Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.630666Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.630715Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.630787Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.630880Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.631758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.631891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.631960Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:51.632136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.632174Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.632366Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.632444Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.632490Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.632558Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.646719Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.646842Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-10T11:14:51.647159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.647208Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.647320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.647396Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.647460Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.647536Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.647604Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.647686Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.647729Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.647802Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.647857Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.648091Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.648131Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.648156Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.648187Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.648210Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.648298Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.648326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.648372Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.648421Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.648507Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.648575Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.648618Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.648685Z node 1 :TX_DATA ... 
2025-04-10T11:14:52.807065Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.807120Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.807186Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.807259Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T11:14:52.807316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.807471Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807513Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.807539Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807556Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807576Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.807601Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.807659Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.807707Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:14:52.807733Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.807879Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807908Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807946Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.807967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.808006Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.808043Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.808085Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:14:52.808109Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.808253Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.808284Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.808322Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.808384Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:14:52.808408Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.808517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.808540Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.808573Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.808625Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:14:52.808659Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.808775Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.808819Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-04-10T11:14:52.808883Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-10T11:14:52.808959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.809100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.809127Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.809159Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.809198Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:14:52.809226Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.809335Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.809357Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.809406Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.809458Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:14:52.809480Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.809610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.809635Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.809679Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:14:52.809712Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.809794Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:14:52.809816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:52.809836Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-04-10T11:14:52.809864Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:52.809897Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:14:52.809918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:52.810168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T11:14:52.810219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.810264Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-04-10T11:14:52.810339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:14:52.810370Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.810401Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-10T11:14:52.810461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:14:52.810482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.810501Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-10T11:14:52.810564Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:14:52.810594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.810615Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-10T11:14:52.810687Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:14:52.810743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.810768Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-10T11:14:52.810841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:14:52.810871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-10T11:14:52.810907Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-10T11:14:52.810978Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:14:52.811000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.811021Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-10T11:14:52.811068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:14:52.811088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:52.811109Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector |87.5%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector >> ShredPDisk::KillVDiskWhileShredding [GOOD] >> ShredPDisk::InitVDiskAfterShredding >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> DataShardOutOfOrder::UncommittedReads >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump >> DataShardTxOrder::RandomPoints_DelayRS >> TPDiskTest::TestRealFile [GOOD] >> TPDiskTest::TestSIGSEGVInTUndelivered >> ShredPDisk::InitVDiskAfterShredding [GOOD] >> ShredPDisk::ReinitVDiskWhilePreShredding >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TPDiskTest::WrongPDiskKey >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TYardTest::TestChunkWriteRead >> TPDiskTest::WrongPDiskKey [GOOD] >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] >> TPDiskUtil::Light >> TPDiskUtil::Light [GOOD] >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> ShredPDisk::ReinitVDiskWhilePreShredding [GOOD] >> ShredPDisk::ReinitVDiskWhileShredding >> TYardTest::TestChunkWriteRead [GOOD] >> TYardTest::TestChunkWrite20Read02 >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkContinuity2 >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> 
DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> TYardTest::TestChunkContinuity2 [GOOD] >> TYardTest::TestChunkContinuity3000 >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> TYardTest::TestChunkContinuity3000 [GOOD] >> TYardTest::TestChunkContinuity9000 >> ShredPDisk::ReinitVDiskWhileShredding [GOOD] >> ShredPDisk::RetryPreShredCompactError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-04-10T11:14:51.350946Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.351827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.351871Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.371600Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.372113Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.372387Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.421503Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.437168Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.445259Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.446926Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.446997Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.447064Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.447441Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.447651Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.447722Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:51.546836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.583084Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.585015Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.585246Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 
2025-04-10T11:14:51.585309Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.585386Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.585452Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.585868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.586480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.587460Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.587594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.587706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.587748Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.587810Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.587849Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.587901Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.587946Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.587987Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.588087Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.588136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.588192Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:51.594305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.594395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.594523Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.594701Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.594758Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.594840Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.594918Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.594972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.595018Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.595075Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.595387Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.595429Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.595469Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.595505Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.595551Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.595583Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.595617Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.595670Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.595707Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.615051Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.615143Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.615192Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.615240Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.615365Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.615975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.616063Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.616116Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:51.616247Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.616281Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.616462Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.616519Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.616560Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.616624Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.622704Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.622802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.623075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.623119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.623193Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.623239Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.623274Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.623320Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.623358Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.623404Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.623442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.623484Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.623537Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.623718Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.623750Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.623770Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.623793Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.623814Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.623895Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.623924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.623959Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.623996Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.624056Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.624088Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.624127Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.624184Z node 1 :TX_DATA ... 
6 Flags# 0 Seqno# 44} 2025-04-10T11:14:57.428107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.428201Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.428226Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.428259Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-10T11:14:57.428298Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:57.428323Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.428416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.428442Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.428499Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.428562Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:57.428595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.428695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.428721Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.428755Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.428781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.428864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.428891Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.428922Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.428956Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.429081Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.429113Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.429146Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.429176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.429256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.429281Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.429344Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at 
tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.429396Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.429690Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-10T11:14:57.429745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.429789Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-10T11:14:57.429891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-10T11:14:57.429921Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.429946Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-10T11:14:57.430032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-10T11:14:57.430060Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.430088Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-10T11:14:57.430805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-10T11:14:57.430870Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.430904Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-10T11:14:57.431006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:14:57.431032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431061Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-10T11:14:57.431177Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-10T11:14:57.431208Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431236Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-10T11:14:57.431330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 47} 2025-04-10T11:14:57.431356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431381Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:14:57.431479Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:14:57.431514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431537Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:14:57.431608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:14:57.431635Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431659Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:14:57.431789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:57.431818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431842Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-10T11:14:57.431912Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-10T11:14:57.431940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.431965Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-10T11:14:57.432025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:14:57.432049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.432079Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-10T11:14:57.432172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:14:57.432195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.432218Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:14:57.432317Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:14:57.432359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.432382Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:14:57.432453Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:57.432478Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.432501Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:14:57.432581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:57.432611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.432648Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2025-04-10T11:14:51.222072Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.240826Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.246328Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:14:51.250370Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.291654Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.434996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.435046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.449742Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.450828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.453279Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.453363Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.453416Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.453781Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.454017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.454081Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:14:51.536662Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.611555Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.611735Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
2025-04-10T11:14:51.611832Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:14:51.611867Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.611901Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.611942Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.612175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.612241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.612532Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.612656Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.612720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.612771Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.612819Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.612856Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.612903Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.612944Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.612996Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.613094Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.613134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.613188Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:14:51.616347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.616412Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.616519Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.616694Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.616745Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.616804Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.616875Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.616917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.616950Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.617003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.617295Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.617372Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.617427Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.617463Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.617522Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.617560Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.617595Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.617628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.617656Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.630677Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.630769Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.630814Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.630869Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.630950Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.631627Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.631698Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.631767Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:14:51.631942Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.631995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.632134Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.632178Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.632225Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.632337Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.636668Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.636778Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.637126Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.637182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.637254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.637304Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.637373Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.637419Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.637474Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.637542Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.637590Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.637634Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.637692Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.637906Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.637945Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.637973Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.638000Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.638027Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.638122Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.638338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.638388Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.638431Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.638502Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.638544Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.638582Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.638648Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.573131Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.573215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.573243Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.573296Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.573350Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.573451Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.573478Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.573519Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.573562Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.573654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.573702Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.573745Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.573773Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.573862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.573888Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.573938Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:57.573968Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.574080Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.574121Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:57.574526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-04-10T11:14:57.574576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.574622Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-04-10T11:14:57.574793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-10T11:14:57.574827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.574856Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-10T11:14:57.574938Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-10T11:14:57.574987Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575016Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-10T11:14:57.575129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-04-10T11:14:57.575160Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575188Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-04-10T11:14:57.575295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-10T11:14:57.575325Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575354Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-10T11:14:57.575420Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-10T11:14:57.575449Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575476Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-10T11:14:57.575563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-10T11:14:57.575609Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575641Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-10T11:14:57.575783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-10T11:14:57.575817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575844Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-10T11:14:57.575904Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:14:57.575939Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.575965Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-04-10T11:14:57.576084Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:14:57.576119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576149Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:14:57.576257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:14:57.576314Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576340Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:14:57.576452Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:57.576482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576513Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:14:57.576578Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:57.576611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576642Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-10T11:14:57.576724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:14:57.576750Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576774Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:14:57.576875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:14:57.576918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.576952Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:14:57.577095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:14:57.577125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.577169Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:14:57.598029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.598093Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.598181Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-10T11:14:57.598257Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:57.598333Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.598660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:57.598715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.598752Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-04-10T11:14:51.355222Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.356169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.356212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.371700Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.372146Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.372388Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.421567Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.436881Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.441919Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.443520Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.443598Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.443645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.444098Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.444310Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.444381Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:51.546833Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.605358Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
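The long run of TEvReadSetAck events above is the tail of cross-shard transactions (source tablet 9437184, destination 9437186) being retired: the source shard has to keep every read set it has sent, keyed by Seqno, until the matching ack arrives, or it would re-deliver them after a restart. A minimal C++ sketch of that bookkeeping under assumed names (TReadSetTracker and TOutgoingReadSet are illustrative, not the real TDataShard structures):

#include <cstdint>
#include <iostream>
#include <map>
#include <utility>

// Hypothetical bookkeeping for outgoing read sets, keyed by Seqno as in the
// TEvReadSetAck traces above. The invariant the log demonstrates: every sent
// read set is remembered until it is acked exactly once.
struct TOutgoingReadSet {
    uint64_t Step;
    uint64_t TxId;
    uint64_t TargetTablet;
};

class TReadSetTracker {
public:
    void OnSent(uint64_t seqno, TOutgoingReadSet rs) {
        InFlight.emplace(seqno, std::move(rs));
    }

    // Mirrors "Receive RS Ack at <source> ... txId <N>": drop the entry so
    // the read set is not re-delivered after a tablet restart.
    bool OnAck(uint64_t seqno) {
        return InFlight.erase(seqno) > 0;
    }

    size_t Pending() const { return InFlight.size(); }

private:
    std::map<uint64_t, TOutgoingReadSet> InFlight;
};

int main() {
    TReadSetTracker tracker;
    tracker.OnSent(49, {1000005, 149, 9437186});
    tracker.OnSent(50, {1000005, 152, 9437186});
    tracker.OnAck(49);
    std::cout << "pending read sets: " << tracker.Pending() << "\n"; // 1
}

The "Send delayed Ack RS Ack ... Seqno# 50" entry just above appears to show the destination side of the same contract: the ack for txId 152 is emitted only after its CompleteOperation has finished.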
2025-04-10T11:14:51.605601Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.605760Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:14:51.605819Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.605905Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.605966Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.606794Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.606899Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.607259Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.607396Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.607510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.607569Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.607631Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.607678Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.607745Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.607812Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.607864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.607997Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.608039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.608108Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:51.611059Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.611127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.611256Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.611447Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.611514Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.611598Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.611677Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
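Each "Trying to execute ... / Execution status ... / Advance execution plan ..." triplet in these traces is one turn of the datashard's execution-unit pipeline: an operation is pushed through units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, ...) until a unit reports it is not ready, at which point the operation parks and waits for an external event. A compact sketch of that dispatch loop, with hypothetical names (EStatus, TUnit) standing in for the real YDB types in ydb/core/tx/datashard:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Simplified execution statuses; the real pipeline distinguishes more cases
// (e.g. DelayCompleteNoMoreRestarts), but the control flow is the same.
enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

void RunPipeline(const std::vector<TUnit>& plan) {
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        EStatus status = unit.Execute();
        if (status == EStatus::NotReady) {
            // Matches "is not ready to execute on unit WaitForPlan": the
            // operation parks here until an external event (e.g. TEvPlanStep).
            std::cout << "parked on " << unit.Name << "\n";
            return;
        }
        // Matches "Advance execution plan ... executing on unit <Name>".
        std::cout << "advancing past " << unit.Name << "\n";
    }
}

int main() {
    RunPipeline({
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    });
}

The later "Complete execution for [0:1] ... on unit StoreSchemeTx / FinishPropose" entries correspond to the deferred Complete() half of the DelayComplete statuses, which the sketch above omits.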
2025-04-10T11:14:51.611733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.611779Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.611837Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.612195Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.612245Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.612286Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.612327Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.612381Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.612425Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.612474Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.612517Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.612547Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.625640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.625722Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.625768Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.625817Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.625897Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.626488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.626560Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.626608Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:51.626749Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.626796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.626936Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.626979Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.627026Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.627077Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.631215Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 
} 2025-04-10T11:14:51.631304Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.631538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.631580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.631639Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.631689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.631725Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.631764Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.631798Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.631844Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.631878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.631931Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.631983Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.632132Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.632166Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.632188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.632208Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.632228Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.632387Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.632423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.632454Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.632488Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.632543Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.632581Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.632614Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.632671Z node 1 :TX_DATA ... 
dy candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-10T11:14:57.520914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-10T11:14:57.520952Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-10T11:14:57.520998Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-10T11:14:57.521044Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-10T11:14:57.521078Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-10T11:14:57.521681Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-10T11:14:57.521743Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:14:57.521793Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-10T11:14:57.521825Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-10T11:14:57.521857Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-10T11:14:57.521891Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.522374Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-10T11:14:57.522431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-10T11:14:57.522473Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-10T11:14:57.522503Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-10T11:14:57.522551Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-10T11:14:57.522586Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-10T11:14:57.522616Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-10T11:14:57.522650Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:57.522696Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-10T11:14:57.522728Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-10T11:14:57.522777Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-10T11:14:57.523103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-10T11:14:57.523167Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.523208Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-10T11:14:57.523301Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-10T11:14:57.523330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.523375Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-10T11:14:57.523528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-10T11:14:57.523562Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.523588Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-10T11:14:57.523673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-10T11:14:57.523708Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.523735Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-10T11:14:57.523842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-10T11:14:57.523867Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.523897Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-10T11:14:57.523972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-10T11:14:57.524002Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524043Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-10T11:14:57.524151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-10T11:14:57.524177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524200Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-10T11:14:57.524328Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:14:57.524385Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524415Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-04-10T11:14:57.524539Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:14:57.524570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524604Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:14:57.524697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:14:57.524725Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524749Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:14:57.524818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:57.524843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.524866Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:14:57.525000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:57.525029Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.525056Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-10T11:14:57.525114Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:14:57.525147Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.525173Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:14:57.525272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:14:57.525302Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.525348Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:14:57.525442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:14:57.525470Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.525495Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:14:57.542843Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:57.542912Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:14:57.542984Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:14:57.543105Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:57.543153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:57.543494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:57.543554Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.543597Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TYardTest::TestChunkContinuity9000 [GOOD] >> TYardTest::TestChunkLock >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestChunkUnlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-04-10T11:14:54.050484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:54.050780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:54.051014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b6e/r3tmp/tmpDcheUL/pdisk_1.dat 2025-04-10T11:14:54.520564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.577671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.631455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.631574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.643027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.728741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.773231Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.774861Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.775425Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:54.775729Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.824562Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.825390Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.825542Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.827383Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:54.827493Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:54.827561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:54.827961Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.828092Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.828174Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:14:54.842278Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:54.875263Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:54.875466Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:54.875607Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:14:54.875645Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:54.875682Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:54.875719Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.875960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.876018Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.876410Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:54.876524Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:54.876605Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:54.876655Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:54.876710Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:54.876746Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:54.876778Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:54.876810Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:54.876855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:54.877020Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.877065Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.877109Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:14:54.877519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:14:54.877629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:54.877746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:54.877983Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:54.879590Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:54.879765Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:54.879833Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:54.879874Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:54.879908Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
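The propose pipeline that continues below ends with FinishPropose replying PREPARED and the operation parked in WaitForPlan; it becomes runnable only when a TEvPlanStep ("Planned transaction txId ... at step ...") assigns it a global (step, txId) position, and the PlanQueue unit then releases operations in that order. A small sketch of such a plan queue; the class is illustrative, not YDB's actual TPlanQueueUnit:

#include <cstdint>
#include <iostream>
#include <optional>
#include <set>
#include <utility>

// Orders planned operations by (step, txId), the total order the mediator's
// plan steps impose across all transactions on a shard.
class TPlanQueue {
public:
    void Plan(uint64_t step, uint64_t txId) {
        Queue.emplace(step, txId);
    }

    // Mirrors "Found ready operation [step:txId] in PlanQueue unit": pop
    // operations strictly in global (step, txId) order.
    std::optional<std::pair<uint64_t, uint64_t>> Next() {
        if (Queue.empty()) {
            return std::nullopt; // "TPlanQueueUnit ... has no attached operations"
        }
        auto op = *Queue.begin();
        Queue.erase(Queue.begin());
        return op;
    }

private:
    std::set<std::pair<uint64_t, uint64_t>> Queue;
};

int main() {
    TPlanQueue q;
    q.Plan(1000, 281474976715657ULL);
    if (auto op = q.Next()) {
        std::cout << "executing [" << op->first << ":" << op->second << "]\n";
    }
}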
2025-04-10T11:14:54.879967Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.880333Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:54.880375Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:54.880423Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:54.880461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.880523Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:54.880561Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:54.880602Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:54.880652Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.880677Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:54.882169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:54.882221Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:54.894486Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:54.894546Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.894584Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.894629Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:54.894687Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:55.048180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.048228Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.048265Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:14:55.048684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:14:55.048724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:55.048834Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:55.048874Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:14:55.048910Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:14:55.048942Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:14:55.060025Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:14:55.060101Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:55.061503Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.061552Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.061616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:5 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:57.428587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-10T11:14:57.428725Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-10T11:14:57.442908Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:14:57.443049Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:666:2570], Recipient [1:755:2634]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-10T11:14:57.443087Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.443136Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-10T11:14:57.443220Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:14:57.443273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:755:2634], Recipient [1:666:2570]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-10T11:14:57.443290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:57.443305Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-04-10T11:14:58.166178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj0j5abh6nbbk00bsw9z1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJhMjgwODMtYzZmZjg0ZmEtNTQzODI3ZGUtZjY2MzU5YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:14:58.170557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1108:2878], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-10T11:14:58.170790Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:14:58.170874Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-10T11:14:58.170992Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:14:58.171033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:14:58.171074Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:58.171124Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:14:58.171177Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-10T11:14:58.171218Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:14:58.171265Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:14:58.171301Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:14:58.171332Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:14:58.171470Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:14:58.171726Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:14:58.171787Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-10T11:14:58.171842Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1108:2878], 0} after executionsCount# 1 2025-04-10T11:14:58.171918Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1108:2878], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:14:58.172015Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1108:2878], 0} finished in read 2025-04-10T11:14:58.172099Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:14:58.172133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:14:58.172166Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:14:58.172200Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:14:58.172240Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:14:58.172263Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:14:58.172297Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-10T11:14:58.172335Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:14:58.172425Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:14:58.172602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1110:2879], Recipient [1:755:2634]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-10T11:14:58.172997Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-10T11:14:58.173057Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-10T11:14:58.173120Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-04-10T11:14:58.173202Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-10T11:14:58.173228Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-04-10T11:14:58.173252Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:58.173276Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:14:58.173314Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-04-10T11:14:58.173358Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-10T11:14:58.173386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:14:58.173407Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-04-10T11:14:58.173427Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-04-10T11:14:58.173542Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:14:58.173743Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-10T11:14:58.173777Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to 
v4000/18446744073709551615 2025-04-10T11:14:58.173820Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1110:2879], 0} after executionsCount# 1 2025-04-10T11:14:58.173868Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1110:2879], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:14:58.173935Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1110:2879], 0} finished in read 2025-04-10T11:14:58.174001Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-10T11:14:58.174030Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:14:58.174057Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:14:58.174081Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:14:58.174116Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-10T11:14:58.174137Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:14:58.174944Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-04-10T11:14:58.174990Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:14:58.175069Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:14:58.175231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:755:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-10T11:14:58.176496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1108:2878], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:14:58.176583Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:14:58.178041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1110:2879], Recipient [1:755:2634]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:14:58.178089Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> DataShardTxOrder::ZigZag [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-04-10T11:14:51.593455Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.594429Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.594476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.605432Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.605939Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.606287Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.652173Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.659641Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.660258Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.661903Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.661963Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.662015Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.662665Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.662854Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.662935Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:51.723128Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.754674Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.754852Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.754960Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:14:51.755004Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.755037Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.755068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.755293Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.755358Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.755562Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.755656Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.755735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.755768Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.755800Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.755824Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.755859Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.755886Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.755917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 
2025-04-10T11:14:51.755985Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.756011Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.756049Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:51.757920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.757958Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.758020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.758132Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.758192Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.758294Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.758340Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.758378Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.758410Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.758446Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.758712Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.758738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.758762Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.758786Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.758854Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.758881Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.758910Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.758940Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.758965Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.771027Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.771111Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.771151Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.771191Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, 
propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.771256Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.772298Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.772373Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.772425Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:51.772575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.772604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.772767Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.772805Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.772840Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.772885Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.776643Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.776735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.776984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.777024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.777096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.777135Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.777168Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.777207Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.777242Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.777290Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.777343Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.777384Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.777440Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.777630Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.777659Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.777682Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.777704Z node 1 
:TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.777758Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.777840Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.777864Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.777895Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.777932Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.778002Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.778034Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.778066Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.778121Z node 1 :TX_DATA ... [1000005:154] at 9437184 on unit CompleteOperation 2025-04-10T11:14:58.927138Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.927163Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:14:58.927180Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:58.927303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-10T11:14:58.927333Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.927357Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-10T11:14:58.927494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-10T11:14:58.927540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.927572Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-10T11:14:58.927668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.927698Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.927755Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.927803Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:14:58.927832Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.927936Z node 1 :TX_DATASHARD DEBUG: 
TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.927962Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.927989Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.928014Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.928048Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.928100Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:58.928130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.928235Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.928271Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.928300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.928324Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.928361Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.928401Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:58.928424Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.928540Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.928581Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:14:58.928607Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.928630Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.928675Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.928720Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:14:58.928755Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.928855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.928895Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.928929Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.928980Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:14:58.929011Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.929098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete 
at 9437186 2025-04-10T11:14:58.929120Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.929155Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.929188Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:14:58.929211Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.929290Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:14:58.929350Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:14:58.929402Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:14:58.929440Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:58.929464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:14:58.929731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:14:58.929780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.929812Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:14:58.929876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-10T11:14:58.929900Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.929933Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-10T11:14:58.930040Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:14:58.930069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930092Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:14:58.930179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:14:58.930211Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930237Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-10T11:14:58.930341Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 
269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:14:58.930377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930408Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-10T11:14:58.930510Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:14:58.930540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930588Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:14:58.930673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:14:58.930697Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930718Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:14:58.930814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:14:58.930840Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.930867Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:14:58.930927Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:14:58.930948Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:14:58.931076Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkUnlockRestart >> ShredPDisk::RetryPreShredCompactError [GOOD] >> ShredPDisk::RetryShredError >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> TYardTest::TestBadDeviceInit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-04-10T11:14:51.356558Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 
2025-04-10T11:14:51.357500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.357543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.372911Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.373382Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.373661Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.441242Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.449669Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.450269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.452222Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.452286Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.452465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.452913Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.453122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.453184Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:51.565695Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.626771Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.627036Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.627176Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:14:51.627221Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.627263Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.627309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.627627Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.627713Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.628057Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.628169Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.628284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.628332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.628402Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.628442Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.628497Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.628543Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.628595Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.628721Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.628778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.628846Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:51.631718Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.631782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.631865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.632013Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.632074Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.632127Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.632174Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.632221Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.632273Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.632317Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.632623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.632671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.632716Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.632751Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.632794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.632818Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.632850Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.632897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.632941Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.646079Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.646180Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.646304Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.646359Z node 1 :TX_DATASHARD TRACE: Propose transaction 
complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.646425Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.647051Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.647122Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.647178Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:51.647343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.647380Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.647525Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.647572Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.647634Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.647710Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.652478Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.652571Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.652859Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.652917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.653005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.653059Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.653100Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.653172Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.653222Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.653272Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.653319Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.653382Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.653443Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.653660Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.653705Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.653734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 
executing on unit LoadTxDetails 2025-04-10T11:14:51.653760Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.653794Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.653870Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.653918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.653964Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.654006Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.654069Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.654112Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.654297Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.654392Z node 1 :TX_DATA ... aitInRS 2025-04-10T11:14:59.660146Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:14:59.660161Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-10T11:14:59.660175Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-10T11:14:59.660203Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-10T11:14:59.660665Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-10T11:14:59.660764Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:14:59.660842Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:14:59.660875Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-10T11:14:59.660901Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-10T11:14:59.660920Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:14:59.661121Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-10T11:14:59.661147Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-10T11:14:59.661178Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-10T11:14:59.661218Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-10T11:14:59.661247Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:14:59.661264Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-10T11:14:59.661289Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 
2025-04-10T11:14:59.661343Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:59.661385Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:14:59.661424Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:14:59.661456Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:14:59.661734Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:232:2225], Recipient [2:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:59.661767Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:59.661813Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:59.661845Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:59.661873Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:59.661920Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-04-10T11:14:59.661958Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-10T11:14:59.661993Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.662014Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:59.662034Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:59.662056Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:59.662976Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-10T11:14:59.663012Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663034Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:59.663057Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-10T11:14:59.663095Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-10T11:14:59.663130Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663147Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-10T11:14:59.663164Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:59.663181Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:59.663215Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-10T11:14:59.663254Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-10T11:14:59.663284Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-10T11:14:59.663314Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663334Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on 
unit BuildAndWaitDependencies 2025-04-10T11:14:59.663362Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-10T11:14:59.663384Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-10T11:14:59.663424Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663441Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-10T11:14:59.663456Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-10T11:14:59.663470Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-10T11:14:59.663486Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663500Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-10T11:14:59.663517Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-10T11:14:59.663531Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-10T11:14:59.663555Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663583Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-10T11:14:59.663606Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-10T11:14:59.663629Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:14:59.663647Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.663661Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:14:59.663676Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:14:59.663692Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-10T11:14:59.663994Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-10T11:14:59.664037Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:14:59.664078Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.664097Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:14:59.664120Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-10T11:14:59.664144Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:14:59.664298Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-10T11:14:59.664320Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-10T11:14:59.664343Z node 2 :TX_DATASHARD TRACE: Add 
[1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-10T11:14:59.664370Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-10T11:14:59.664401Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:14:59.664421Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-10T11:14:59.664440Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-10T11:14:59.664463Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:59.664564Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:59.664601Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:59.664633Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:59.681732Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-10T11:14:59.681821Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-10T11:14:59.681939Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:59.681998Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:14:59.682078Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:14:59.682195Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:59.683521Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-10T11:14:59.683568Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-10T11:14:59.683617Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:14:59.683651Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:14:59.683701Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:14:59.683743Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardTxOrder::RandomDotRanges_DelayRS >> TYardTest::TestChunkUnlockRestart [GOOD] >> TYardTest::TestChunkReserve >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::Test3AsyncLog >> DataShardOutOfOrder::TestOutOfOrderLockLost >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> DataShardScan::ScanFollowedByUpdate >> TYardTest::TestChunkReserve [GOOD] >> TYardTest::TestCheckSpace |87.6%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |87.6%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestBootingState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-04-10T11:14:53.259836Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:53.261153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:53.261203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:53.274033Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:53.274902Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:53.275190Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:53.327608Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:53.342831Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:53.343766Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:53.345819Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:53.345926Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:53.345981Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:53.346474Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:53.346655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:53.346729Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:14:53.420751Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:53.468478Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:53.468654Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:53.468761Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:14:53.468804Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:53.468839Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:53.468873Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:53.469107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:53.469172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:53.469465Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:53.469554Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:53.469618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 
2025-04-10T11:14:53.469654Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:53.469705Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:53.469741Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:53.469780Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:53.469819Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:53.469860Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:53.469947Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:53.469993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:53.470046Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:14:53.472644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:53.472705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:53.472796Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:53.472999Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:53.473055Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:53.473125Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:53.473180Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:53.473214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:53.473269Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:53.473302Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:53.473671Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:53.473710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:53.473747Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:53.473782Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:53.473837Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:53.473863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:53.473911Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:53.473953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
WaitForPlan 2025-04-10T11:14:53.473986Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:53.491163Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:53.491225Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:53.491260Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:53.491295Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:53.491366Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:53.491846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:53.491911Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:53.492013Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:14:53.492123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:53.492159Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:53.492266Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:53.492308Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:53.492363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:53.492401Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:53.497459Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:53.497541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:53.497739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:53.497775Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:53.497829Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:53.497869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:53.497902Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:53.497949Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:53.497987Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:53.498039Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:53.498111Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:53.498184Z node 
1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:53.498225Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:53.498438Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:53.498490Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:53.498525Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:53.498556Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:53.498579Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:53.498663Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:53.498688Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:53.498739Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:53.498786Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:53.498856Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:53.498903Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:53.498936Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:53.498980Z node 1 :TX_DATA ... CE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:15:00.580596Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-10T11:15:00.580619Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:15:00.580818Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-10T11:15:00.580847Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-10T11:15:00.580902Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-10T11:15:00.580939Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-10T11:15:00.580973Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:00.580994Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-10T11:15:00.581023Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-10T11:15:00.581067Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:00.581108Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:00.581154Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:00.581190Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:00.581375Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:99:2134], Recipient [2:343:2311]: 
{TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437185} 2025-04-10T11:15:00.581408Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:00.581509Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit WaitForPlan 2025-04-10T11:15:00.581549Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.581583Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit WaitForPlan 2025-04-10T11:15:00.581622Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PlanQueue 2025-04-10T11:15:00.581735Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437185 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 8589936726 } } Step: 1000016 MediatorID: 0 TabletID: 9437185 } 2025-04-10T11:15:00.581763Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-10T11:15:00.581935Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:343:2311], Recipient [2:343:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.581966Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.582002Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-10T11:15:00.582032Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:00.582054Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:15:00.582086Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-04-10T11:15:00.582110Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-10T11:15:00.582136Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.586288Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-10T11:15:00.586330Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-10T11:15:00.586360Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-10T11:15:00.587015Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-10T11:15:00.587058Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587084Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-10T11:15:00.587109Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-10T11:15:00.587134Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-10T11:15:00.587162Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587181Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-10T11:15:00.587220Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:00.587246Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-10T11:15:00.587308Z node 2 
:TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-10T11:15:00.587338Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-10T11:15:00.587365Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-04-10T11:15:00.587400Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587436Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:00.587462Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-04-10T11:15:00.587486Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-10T11:15:00.587526Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587548Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-10T11:15:00.587570Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-10T11:15:00.587591Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-10T11:15:00.587612Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587632Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-10T11:15:00.587651Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-10T11:15:00.587671Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-10T11:15:00.587695Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587718Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-10T11:15:00.587739Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-10T11:15:00.587763Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-10T11:15:00.587794Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.587829Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-10T11:15:00.587852Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-10T11:15:00.587877Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-10T11:15:00.588291Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-10T11:15:00.588341Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:00.588400Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.588426Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 
2025-04-10T11:15:00.588449Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-10T11:15:00.588475Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:15:00.588667Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-10T11:15:00.588698Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-10T11:15:00.588725Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-10T11:15:00.588751Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-10T11:15:00.588780Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:00.588816Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-10T11:15:00.588853Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-10T11:15:00.588882Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:00.588906Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:15:00.588931Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:15:00.588954Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:15:00.604809Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-10T11:15:00.604882Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-10T11:15:00.604948Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:15:00.605011Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:15:00.605079Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:00.605127Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-10T11:15:00.605434Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-10T11:15:00.605475Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-10T11:15:00.605512Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:00.605538Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:15:00.605575Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:00.605605Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::Test3HugeAsyncLog >> DataShardTxOrder::RandomPointsAndRanges >> ShredPDisk::RetryShredError [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> DataShardOutOfOrder::UncommittedReads [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> 
DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::RetryShredError [GOOD] Test command err: /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: 2025-04-10T11:14:54.148220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:54.148468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:54.148694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b9d/r3tmp/tmpPVyXgv/pdisk_1.dat 2025-04-10T11:14:54.593112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.602099Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-10T11:14:54.602186Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-10T11:14:54.637179Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.680888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.680999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.692773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.770515Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-10T11:14:54.770585Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T11:14:54.770767Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-10T11:14:54.771061Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-10T11:14:54.771107Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-04-10T11:14:54.772788Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-10T11:14:54.772858Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-10T11:14:54.772885Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-10T11:14:54.772912Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-10T11:14:54.777941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.816751Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.817728Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.818059Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:665:2570] 2025-04-10T11:14:54.818291Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.859834Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.860412Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.860487Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.862311Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:54.862405Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:54.862466Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:54.862842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.862962Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.863048Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:681:2570] in generation 1 2025-04-10T11:14:54.863372Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:54.895547Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:54.895725Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:54.895831Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2580] 2025-04-10T11:14:54.895885Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:54.895925Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:54.895973Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.896192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:665:2570], Recipient [1:665:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.896244Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.896907Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:54.897015Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:54.897108Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:54.897153Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:54.897191Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:54.897225Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:54.897301Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:54.897357Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:54.897405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:54.897521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2574], Recipient [1:665:2570]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.897555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.897600Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2568], serverId# [1:672:2574], sessionId# [0:0:0] 2025-04-10T11:14:54.898003Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2574] 2025-04-10T11:14:54.898051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:54.898188Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:54.898398Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:54.898451Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:54.898545Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:54.898592Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:54.898629Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:54.898678Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:14:54.898714Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.899020Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:54.899058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:54.899092Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:54.899125Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.899200Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:54.899243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:54.899291Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:54.899323Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.899361Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:54.900129Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:54.900175Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.900204Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.900242Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec 
latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:54.900311Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:54.902176Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2025-04-10T11:14:54.902244Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 1000 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-04-10T11:14:54.902572Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:684:2581], Recipient [1:665:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:54.902619Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:55.062465Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2025-04-10T11:14:55.062546Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2025-04-10T11:14:55.062585Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for ... 000 2025-04-10T11:15:00.959966Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2025-04-10T11:15:00.960053Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [1:24:2071], Recipient [1:1377:3052]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 4000} 2025-04-10T11:15:00.960094Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-10T11:15:00.960133Z node 1 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 4000 at tablet 72075186224037888 2025-04-10T11:15:00.960187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:00.960498Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 1 SubscriptionId: 2 LatestStep: 4000 2025-04-10T11:15:00.961005Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 1 TimeBarrier# 4000} 2025-04-10T11:15:01.110422Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfpj3qa8ejfp3cc01b3gdag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ExYTc1MDQtY2FhMTkyZDktMzQ3M2ExMzYtOTgyNmJhZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:01.112530Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1440:3091], Recipient [1:1377:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:15:01.112704Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:01.112776Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:01.112865Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:01.112911Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:01.112948Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.112979Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.113122Z node 1 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-10T11:15:01.113162Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:01.113185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:01.113207Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:01.113228Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:01.113378Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:15:01.113663Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715682, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:15:01.113712Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-04-10T11:15:01.113758Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1440:3091], 0} after executionsCount# 1 2025-04-10T11:15:01.113809Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1440:3091], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:01.113891Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1440:3091], 0} finished in read 2025-04-10T11:15:01.113957Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:01.113987Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:01.114013Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 
2025-04-10T11:15:01.114037Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.114104Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:01.114128Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.114176Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:15:01.114215Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:01.114315Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:15:01.115328Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1440:3091], Recipient [1:1377:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:01.115419Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-04-10T11:15:01.254882Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-04-10T11:15:01.254973Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-04-10T11:15:01.256171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jrfpj3w1axecdc57p2ssea97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU1MmQ2M2UtMzM3YjE1YzItNDU3MmQ0MzgtYWNmNjU3ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:01.258113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1464:3108], Recipient [1:1377:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:15:01.260339Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:01.260436Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:01.260533Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:01.260576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:01.260612Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.260652Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.260768Z node 1 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-10T11:15:01.260819Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:01.260849Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:01.260873Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:01.260897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:01.261055Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:15:01.261384Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715685, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:15:01.261443Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-10T11:15:01.261493Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1464:3108], 0} after executionsCount# 1 2025-04-10T11:15:01.261547Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1464:3108], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:01.261625Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1464:3108], 0} finished in read 2025-04-10T11:15:01.261730Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:01.261759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:01.261787Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:01.261811Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.261861Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:01.261884Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.261913Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-10T11:15:01.261951Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:01.262052Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:15:01.262383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:1377:3052]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715685 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-10T11:15:01.263347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1464:3108], Recipient [1:1377:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:01.263418Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-04-10T11:14:55.460766Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:55.462059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:55.462110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:55.475334Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:55.475729Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:55.475920Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:55.523281Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:55.556511Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:55.558652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:55.560324Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:55.560395Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:55.560441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:55.561006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:55.561160Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:55.561313Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in 
generation 2 2025-04-10T11:14:55.663087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:55.711492Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:55.711708Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:55.711838Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:14:55.711875Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:55.711908Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:55.711940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:55.712184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.712277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.712595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:55.712729Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:55.712796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:55.712841Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:55.712921Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:55.712989Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:55.713028Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:55.713070Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:55.713122Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:55.713268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.713315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.713395Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:14:55.716181Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:55.716237Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:55.716355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:55.716578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:55.716641Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:55.716721Z 
node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:55.716794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:55.716835Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:55.716896Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:55.716941Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:55.717296Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:55.717359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:55.717403Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:55.717438Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:55.717489Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:55.717522Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:55.717550Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:55.717574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:55.717608Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:55.730945Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:55.731016Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:55.731044Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:55.731098Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:55.731163Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:55.731705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.731757Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.731797Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:14:55.731925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:55.731948Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:55.732078Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:55.732122Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T11:14:55.732159Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:55.732196Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:55.736935Z node 1 :TX_DATASHARD DEBUG: 
Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:55.737016Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:55.737264Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.737308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.737397Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:55.737442Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:55.737484Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:55.737526Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-10T11:14:55.737564Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:55.737612Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T11:14:55.737682Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:55.737725Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:55.737777Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:55.737944Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-10T11:14:55.737973Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T11:14:55.738016Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:55.738034Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:55.738050Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:55.738112Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:55.738164Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:55.738208Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:55.738244Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:55.738303Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-10T11:14:55.738341Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:55.738400Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-10T11:14:55.738453Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-10T11:14:55.738478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1 ... 
10T11:15:02.202944Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:15:02.202964Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203049Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2025-04-10T11:15:02.203071Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:15:02.203095Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:15:02.203107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203165Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203182Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2025-04-10T11:15:02.203247Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:02.203271Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:15:02.203292Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203367Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203391Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2025-04-10T11:15:02.203418Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:02.203443Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203558Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2025-04-10T11:15:02.203590Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:02.203613Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203724Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2025-04-10T11:15:02.203769Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:02.203796Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.203881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:02.203925Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit 
CompleteOperation 2025-04-10T11:15:02.203973Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:02.204014Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:02.204361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-10T11:15:02.204427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.204468Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-10T11:15:02.204562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:344:2311]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-10T11:15:02.204597Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.204622Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-10T11:15:02.204716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-10T11:15:02.204749Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.204776Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-10T11:15:02.204839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:344:2311]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-10T11:15:02.204866Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.204889Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-10T11:15:02.204975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-10T11:15:02.205009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205033Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-10T11:15:02.205090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:344:2311]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:15:02.205121Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205143Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-10T11:15:02.205267Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 143 TabletSource# 
9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:15:02.205300Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205341Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:15:02.205464Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:15:02.205508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205534Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:15:02.205603Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-10T11:15:02.205629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205665Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-10T11:15:02.205774Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:15:02.205801Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205831Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:15:02.205899Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:15:02.205927Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.205949Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-10T11:15:02.206033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:15:02.206061Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.206084Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-10T11:15:02.206191Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:15:02.206217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:02.206240Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:15:02.206312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 134 
TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44}
2025-04-10T11:15:02.206425Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-10T11:15:02.206462Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134
2025-04-10T11:15:02.206549Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45}
2025-04-10T11:15:02.206580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-10T11:15:02.206601Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137
2025-04-10T11:15:02.206706Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46}
2025-04-10T11:15:02.206735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-10T11:15:02.206758Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140
>> TYardTest::Test3HugeAsyncLog [GOOD]
>> TYardTest::TestAllocateAllChunks
>> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD]
>> TYardTest::TestChunkWriteReadWhole
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD]
Test command err:
2025-04-10T11:14:59.284916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:59.285118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:59.285289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b3e/r3tmp/tmpTR9DjS/pdisk_1.dat 2025-04-10T11:14:59.735559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:59.776320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:59.820904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:59.821021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:59.832684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:59.929646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:59.994587Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:59.995813Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:59.996292Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:59.996560Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:00.084123Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:00.084882Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:00.085012Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:00.087112Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:15:00.087205Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:15:00.087268Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:15:00.087668Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:00.087804Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:00.087887Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:15:00.088329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:00.117536Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:15:00.117745Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:00.117884Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:15:00.117923Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:15:00.117962Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:15:00.117998Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:00.118299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.118374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.118802Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:15:00.118965Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:15:00.119043Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:00.119114Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:00.119159Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:15:00.119204Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:15:00.119245Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:15:00.119283Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:15:00.119335Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:15:00.119490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:00.119538Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:00.119596Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:15:00.120032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:15:00.120106Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:00.120236Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:15:00.120493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:15:00.120553Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:15:00.120669Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:15:00.120725Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:00.120772Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:15:00.120812Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:15:00.120856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:00.121245Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:00.121303Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:15:00.121381Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:15:00.121437Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:00.121513Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:15:00.121549Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:15:00.121587Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:15:00.121640Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:00.121696Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:00.122737Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:15:00.122801Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:00.122844Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:00.122889Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:15:00.122955Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:00.125472Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:15:00.125525Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:15:00.296413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:00.296497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:00.296540Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:15:00.296962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:15:00.297014Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:00.297144Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:00.297199Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:15:00.297241Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:15:00.297282Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:15:00.301696Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:15:00.302130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:00.311333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.311389Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:00.311464Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:0 ... 0T11:15:01.580255Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:15:01.580300Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:01.580355Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:15:01.580431Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... waiting for blocked commit 2025-04-10T11:15:01.646876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpj4akf5pthcsf1rjqmmg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjViNDJhYjYtZTkyZTYwNzctNGUzYTE3MTMtNGY2ZGNlYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:01.649261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [1:961:2749], Recipient [1:666:2570]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-10T11:15:01.649316Z node 1 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:15:01.649461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:15:01.649508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:15:01.649575Z node 1 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-10T11:15:01.649689Z node 1 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-10T11:15:01.649756Z node 1 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-10T11:15:01.649810Z node 1 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-10T11:15:01.649905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-10T11:15:01.649955Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:15:01.649994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-10T11:15:01.650028Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.650056Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.650114Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-10T11:15:01.650484Z node 1 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-10T11:15:01.650532Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:15:01.650579Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:01.650609Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:15:01.650634Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:15:01.650664Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-10T11:15:01.650727Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-10T11:15:01.650879Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:6] at 
72075186224037888, row count=1 2025-04-10T11:15:01.650927Z node 1 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:15:01.650993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:01.651021Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:15:01.651062Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:15:01.651109Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:01.651143Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-10T11:15:01.651168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:15:01.651222Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:01.651251Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.651312Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:15:01.651335Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.651363Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2025-04-10T11:15:01.837149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj4ck2v198ndcsdj03fpx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIyNzdiNGYtZTUyNjQ0MS02MjNiNjZhYS0zNmM0N2VhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:01.838808Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:15:01.839024Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:01.839096Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-10T11:15:01.839137Z node 1 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-04-10T11:15:01.839197Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:01.839305Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.839347Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:01.839396Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.839431Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.839470Z node 1 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-10T11:15:01.839508Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.839531Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:01.839552Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:01.839608Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:01.839704Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:15:01.839921Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-04-10T11:15:01.839957Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:01.839994Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:01.840023Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.840059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.840080Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.840105Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:15:01.840141Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2025-04-10T11:15:01.917315Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000
2025-04-10T11:15:01.917448Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000}
2025-04-10T11:15:02.069130Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888
2025-04-10T11:15:02.069198Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite
2025-04-10T11:15:02.069253Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED
2025-04-10T11:15:02.069386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T11:15:02.069492Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
2025-04-10T11:15:02.069527Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead
2025-04-10T11:15:02.069570Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:979:2773], 0} after executionsCount# 1
2025-04-10T11:15:02.069620Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-04-10T11:15:02.069739Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} finished in read
2025-04-10T11:15:02.071758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0
2025-04-10T11:15:02.071824Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 }
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } }
>> TYardTest::TestAllocateAllChunks [GOOD]
>> TYardTest::TestBootingState [GOOD]
>> TYardTest::TestChunkRecommit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD]
Test command err:
2025-04-10T11:14:54.085132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:54.085344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:54.085502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b57/r3tmp/tmp64Dteg/pdisk_1.dat 2025-04-10T11:14:54.527066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.579381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.621201Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:14:54.622556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.622702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.622908Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:14:54.634590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.719337Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:14:54.719408Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:14:54.719680Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:14:54.859317Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:14:54.859406Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:14:54.859892Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:14:54.859980Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:14:54.860304Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:14:54.860477Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:14:54.860567Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:14:54.862592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.863046Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:14:54.863697Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:14:54.863752Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:14:54.900045Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.901188Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.902006Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.902508Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-10T11:14:54.902901Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.950765Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.951238Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.951749Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-10T11:14:54.952004Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.961064Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.962655Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.962849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.964794Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:54.964873Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:54.964953Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:54.965450Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.965733Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.965835Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-10T11:14:54.966268Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.966396Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.967952Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:14:54.968016Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-10T11:14:54.968070Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:14:54.968435Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.968521Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.968604Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-10T11:14:54.979515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:55.027989Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:55.028219Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:55.028398Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-10T11:14:55.028436Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:55.028470Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:55.028500Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:55.028697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.028747Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.028843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:55.028890Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:14:55.028986Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:55.029056Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-10T11:14:55.029082Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:14:55.029115Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:14:55.029158Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:14:55.029556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.029601Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.029799Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:55.029911Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:55.030096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:55.031081Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:55.031185Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:55.031234Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-10T11:14:55.031270Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:55.031333Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:55.031401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:55.031468Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:14:55.031539Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:14:55.032069Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2582], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.032112Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.032180Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2569], serverId# [1:687:2582], sessionId# [0:0:0] 2025-04-10T11:14:55.032250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... an for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-04-10T11:15:02.496291Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:02.496364Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:02.496414Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:15:02.496482Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-10T11:15:02.496522Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:02.496561Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:02.496588Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:15:02.496611Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:15:02.496639Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:15:02.496691Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:15:02.496815Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-04-10T11:15:02.496872Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:15:02.496927Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:02.496958Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:15:02.496992Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:15:02.497029Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:02.497113Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is 
DelayCompleteNoMoreRestarts 2025-04-10T11:15:02.497143Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:15:02.497196Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:02.497231Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:02.497273Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:02.497297Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:02.497345Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:15:02.508141Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:15:02.508223Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:02.508276Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:15:02.508395Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:02.510528Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-10T11:15:02.510602Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T11:15:02.510709Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:861:2691] DataReq marker# P0 2025-04-10T11:15:02.510854Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-04-10T11:15:02.511169Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-10T11:15:02.511399Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-10T11:15:02.511491Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-04-10T11:15:02.511752Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 861 RawX2: 8589937283 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t]\003\000\000\000\000\000\000\021\203\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-04-10T11:15:02.511814Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:02.511929Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:15:02.512149Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-10T11:15:02.512439Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-10T11:15:02.512504Z node 2 :TX_DATASHARD TRACE: Execution status for 
[0:281474976715661] at 72075186224037888 is Executed 2025-04-10T11:15:02.512576Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-10T11:15:02.512628Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:02.512685Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:02.512736Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-10T11:15:02.512819Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-10T11:15:02.512868Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-10T11:15:02.512898Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:02.512923Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-10T11:15:02.512949Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-04-10T11:15:02.512979Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-10T11:15:02.513021Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-10T11:15:02.513053Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-10T11:15:02.513078Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-10T11:15:02.513132Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:861:2691] for [0:281474976715661] at 72075186224037888 2025-04-10T11:15:02.513169Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-10T11:15:02.513228Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:15:02.513359Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-04-10T11:15:02.513452Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-04-10T11:15:02.513499Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-04-10T11:15:02.513661Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-04-10T11:15:02.513720Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-10T11:15:02.513885Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:861:2691], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-10T11:15:02.513928Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-10T11:15:02.514020Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender 
[2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.514055Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.514139Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:02.514210Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:15:02.514268Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:15:02.514308Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-10T11:15:02.514354Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-04-10T11:15:02.514395Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-10T11:15:02.514456Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-10T11:15:02.514508Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-04-10T11:15:02.514557Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-04-10T11:15:02.514844Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-10T11:15:02.514880Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:15:02.514925Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:15:02.514970Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:15:02.515010Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:15:02.515082Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:15:02.515707Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:867:2696], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T11:15:02.515753Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> TYardTest::TestChunkRecommit [GOOD] >> TYardTest::TestChunkRestartRecommit >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> TBlobStorageHullFresh::AppendixPerf >> DataShardScan::ScanFollowedByUpdate [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-04-10T11:14:57.361050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:57.361705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:57.361777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:14:57.363466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:57.363759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:57.364061Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b83/r3tmp/tmp7u19Az/pdisk_1.dat 2025-04-10T11:14:57.831163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:58.064435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:58.165302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:58.165481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:58.171981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:58.172093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:58.186906Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:14:58.187517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:58.187921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:58.479508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:58.564478Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:58.569595Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:58.570041Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-04-10T11:14:58.570291Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:58.619669Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:58.625222Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:58.625928Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:58.628435Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:58.628525Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:58.628600Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:58.629083Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:58.629210Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:58.629295Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1305:2390] in generation 1 2025-04-10T11:14:58.633413Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:58.662688Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:58.662879Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:58.663025Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-04-10T11:14:58.663065Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:58.663113Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:58.663154Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:58.663434Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:58.663496Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:58.664012Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:58.664122Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:58.664204Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:58.664243Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:58.664278Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:58.664312Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:58.664349Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:58.664416Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:58.664474Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:58.735283Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:58.735349Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:58.735411Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1265:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-04-10T11:14:58.735732Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:840:2467], Recipient [2:1314:2408] 2025-04-10T11:14:58.735782Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:58.735906Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:58.736145Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:58.736224Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:58.736322Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:58.736376Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:58.736415Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:58.736458Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:14:58.736499Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:58.736797Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:58.736849Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:58.736896Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:58.737006Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:58.737077Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:58.737111Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:58.737148Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:58.737185Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:58.737209Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:58.747802Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:58.747866Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:58.747913Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:58.747963Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:58.748028Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:58.749380Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1316:2410], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:58.749434Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:59.145207Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1348:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.145591Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.145646Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-04-10T11:14:59.151734Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1348:2418] 2025-04-10T11:14:59.151806Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:59.151980Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:59.152024Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... :2454], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:01.711811Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1718:2454], 0} finished in read 2025-04-10T11:15:01.711875Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:15:01.711921Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:01.711957Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:01.711984Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.712026Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:15:01.712048Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.712077Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-10T11:15:01.712127Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:01.712223Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:15:01.713288Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1718:2454], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:01.713382Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-10T11:15:01.862995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj4enca6am8a6aypq818m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgzZjMzYTctYzljYmUyODYtNjZmZjYxM2ItZDEzZWQzZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:01.865518Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1736:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:15:01.865865Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:01.865950Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:01.866061Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.866114Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:01.866189Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.866231Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.866275Z node 2 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-10T11:15:01.866320Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.866350Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:01.866374Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:01.866398Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:01.866529Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:15:01.866798Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:15:01.866864Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-10T11:15:01.866923Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1736:2455], 0} after executionsCount# 1 2025-04-10T11:15:01.866981Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1736:2455], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:01.867074Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1736:2455], 0} finished in read 2025-04-10T11:15:01.867140Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.867172Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:01.867199Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 
2025-04-10T11:15:01.867227Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:01.867270Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:15:01.867293Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:01.867326Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:15:01.867372Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:01.867484Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:15:01.868584Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1736:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:01.868643Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-10T11:15:02.055400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpj4kz0h4bqq44wrnvy3mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE3YjY5YzAtY2I4OWFhNTYtNTg3NDk2OWItYzVjOTY2NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:02.060864Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1760:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:15:02.061222Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:02.061312Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:02.061455Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:15:02.061497Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:02.061538Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:02.061574Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:02.061624Z node 2 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-10T11:15:02.061671Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:15:02.061728Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:02.061760Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:02.061784Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:02.061934Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } 
LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:15:02.062275Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:15:02.062338Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-10T11:15:02.062391Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1760:2456], 0} after executionsCount# 1 2025-04-10T11:15:02.062444Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1760:2456], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:02.062541Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1760:2456], 0} finished in read 2025-04-10T11:15:02.062627Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:15:02.062658Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:02.062686Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:02.062733Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:02.062784Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:15:02.062822Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:02.062863Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-10T11:15:02.062904Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:02.063012Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:15:02.064206Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1760:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:02.064275Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:15:02.064976Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:240:2131], Recipient [2:1282:2390]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestAllocateAllChunks [GOOD] >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestChunkDelete >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardTxOrder::DelayData [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-04-10T11:15:01.966526Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:01.967716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:01.967760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:01.979644Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:01.980109Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:01.980318Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:02.050095Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:02.063376Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:02.064036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:02.065807Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:02.065879Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:02.065926Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:02.066316Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:02.066509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:02.066583Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:15:02.130838Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:02.168360Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:02.168561Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:02.168672Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:15:02.168710Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:02.168750Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:02.168789Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:02.169004Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.169063Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.169374Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:02.169480Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:02.169575Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:02.169619Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:02.169671Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:15:02.169716Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:02.169758Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:02.169792Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:02.169831Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:02.169917Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.169976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.170036Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:15:02.177417Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:02.177483Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:02.177586Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:02.177794Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:02.177844Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:02.177917Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:02.177965Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:02.178005Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:02.178057Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:02.178090Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:02.179574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:02.179630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:02.179663Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:15:02.179700Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:02.179752Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:02.179789Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:02.179830Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:02.179868Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:02.179901Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:02.193028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:02.193100Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:02.193134Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:02.193178Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:02.193252Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:02.193834Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.193890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.193934Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:15:02.194066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:02.194102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:02.194248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:02.194289Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.194360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:02.194399Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:02.198295Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:02.198362Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:02.198562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.198599Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.198649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:02.198689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:02.198723Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:02.198768Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:02.198802Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:02.198846Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.198884Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] 
at 9437184 executing on unit PlanQueue 2025-04-10T11:15:02.198935Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:02.198970Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:02.199151Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:02.199195Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.199229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:02.199257Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:02.199326Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:02.199400Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:02.199425Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:02.199463Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:02.199509Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:02.199576Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:02.199625Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:02.199659Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:02.199700Z node 1 :TX_D ... 
essageQuota: 9 2025-04-10T11:15:04.349584Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437186, TxId: 36, MessageQuota: 10 2025-04-10T11:15:04.349754Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437186, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-10T11:15:04.349898Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437184, TxId: 36, MessageQuota: 10 2025-04-10T11:15:04.350065Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437184, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-10T11:15:04.350189Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437185, TxId: 36, PendingAcks: 0 2025-04-10T11:15:04.350261Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437185, TxId: 36, MessageQuota: 9 2025-04-10T11:15:04.350606Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437186, TxId: 36, PendingAcks: 0 2025-04-10T11:15:04.350645Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437186, TxId: 36, MessageQuota: 9 2025-04-10T11:15:04.350906Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437184, TxId: 36, PendingAcks: 0 2025-04-10T11:15:04.350947Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437184, TxId: 36, MessageQuota: 9 2025-04-10T11:15:04.351302Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437185 2025-04-10T11:15:04.351360Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437185 2025-04-10T11:15:04.351450Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437186 2025-04-10T11:15:04.351475Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437186 2025-04-10T11:15:04.351515Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437184 2025-04-10T11:15:04.351533Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437184 2025-04-10T11:15:04.351691Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:454:2396], Recipient [1:454:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.351740Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.351809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-10T11:15:04.351840Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:15:04.351873Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-04-10T11:15:04.351900Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-04-10T11:15:04.351937Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-04-10T11:15:04.351982Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-10T11:15:04.352009Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-04-10T11:15:04.352035Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-04-10T11:15:04.352062Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-04-10T11:15:04.352311Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-04-10T11:15:04.352355Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-04-10T11:15:04.352405Z node 1 :TX_DATASHARD 
TRACE: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-04-10T11:15:04.352465Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-04-10T11:15:04.352507Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-10T11:15:04.352529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-04-10T11:15:04.352555Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437186 has finished 2025-04-10T11:15:04.352594Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:04.352617Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-10T11:15:04.352653Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-10T11:15:04.352686Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-10T11:15:04.352880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:235:2228], Recipient [1:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.352917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.352961Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:04.352988Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:15:04.353053Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-04-10T11:15:04.353091Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-04-10T11:15:04.353127Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-04-10T11:15:04.353157Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-10T11:15:04.353180Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-04-10T11:15:04.353214Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-04-10T11:15:04.353243Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.353414Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-04-10T11:15:04.353443Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-04-10T11:15:04.353466Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-04-10T11:15:04.353498Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-04-10T11:15:04.353557Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-10T11:15:04.353589Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-04-10T11:15:04.353611Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437184 has finished 2025-04-10T11:15:04.353636Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:04.353656Z node 1 :TX_DATASHARD TRACE: Check candidate 
unit PlanQueue at 9437184 2025-04-10T11:15:04.353679Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:04.353702Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:04.353891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:345:2312], Recipient [1:345:2312]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.353925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.353990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-10T11:15:04.354023Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:15:04.354053Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-04-10T11:15:04.354091Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-04-10T11:15:04.354126Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-04-10T11:15:04.354316Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-10T11:15:04.354349Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-04-10T11:15:04.354374Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-04-10T11:15:04.354398Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-04-10T11:15:04.354587Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-04-10T11:15:04.354621Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-04-10T11:15:04.354645Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-04-10T11:15:04.354696Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-04-10T11:15:04.354732Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-10T11:15:04.354754Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-04-10T11:15:04.354788Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437185 has finished 2025-04-10T11:15:04.354812Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:04.354853Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:15:04.354878Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:15:04.354900Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:15:04.377508Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:15:04.377582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:15:04.377613Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-04-10T11:15:04.377701Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-04-10T11:15:04.377761Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-10T11:15:04.378393Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:04.378435Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:04.378461Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-04-10T11:15:04.378503Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-04-10T11:15:04.378534Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:04.378649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:04.378692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:04.378723Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.378756Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-04-10T11:15:04.378829Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> TBlobStorageHullFresh::Perf >> TYardTest::TestHttpInfo >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> TYardTest::TestChunkForget [GOOD] >> TYardTest::TestChunkFlushReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-04-10T11:14:53.787753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:53.787963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:53.788170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ba0/r3tmp/tmp4POGlv/pdisk_1.dat 2025-04-10T11:14:54.213509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.291969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.344074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.344205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.361459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.465820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.541284Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.546466Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.546985Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:54.547229Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.626011Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.626790Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.626900Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.628666Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:54.628770Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:54.628828Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:54.629231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.629387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.629458Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:14:54.640166Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:54.686495Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:54.686662Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:54.686779Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:14:54.686815Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:54.686844Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:54.686878Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.687103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.687163Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.687483Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:54.687571Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:54.687667Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:54.687702Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:54.687740Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:54.687772Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:54.687801Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:54.687830Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:54.687866Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:54.687980Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.688009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.688039Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:14:54.688419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:14:54.688467Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:54.688592Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:54.688811Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:54.688864Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:54.688949Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:54.688994Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:54.689025Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:54.689059Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:14:54.689091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.689399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:54.689435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:54.689469Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:54.689501Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.689560Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:54.689587Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:54.689615Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:54.689662Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.689701Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:54.691163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:54.691221Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:54.701852Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:54.701933Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.701968Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.702012Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:54.702077Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:54.864299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.864348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.864381Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:14:54.864750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:14:54.864783Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:54.864883Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.864921Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:14:54.864969Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:14:54.865008Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:14:54.876290Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:14:54.876362Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.877450Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.877506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.877559Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:5 ... because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-04-10T11:15:04.148660Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1031:2738] TxId: 281474976715671. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-10T11:15:04.148692Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1031:2738] TxId: 281474976715671. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-10T11:15:04.148742Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1031:2738] TxId: 281474976715671. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.150377Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfpj5wfbt6ra052msgtvkvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNzM4YmYtOGJhOWExYTMtN2YwMWMyN2YtNzI0MDQ2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.150419Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01jrfpj5wfbt6ra052msgtvkvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNzM4YmYtOGJhOWExYTMtN2YwMWMyN2YtNzI0MDQ2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:04.150454Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1144:2723] TxId: 281474976715677. Ctx: { TraceId: 01jrfpj5wfbt6ra052msgtvkvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNzM4YmYtOGJhOWExYTMtN2YwMWMyN2YtNzI0MDQ2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:04.150510Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1144:2723] TxId: 281474976715677. Ctx: { TraceId: 01jrfpj5wfbt6ra052msgtvkvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNzM4YmYtOGJhOWExYTMtN2YwMWMyN2YtNzI0MDQ2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.150545Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1144:2723] TxId: 281474976715677. Ctx: { TraceId: 01jrfpj5wfbt6ra052msgtvkvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjllNzM4YmYtOGJhOWExYTMtN2YwMWMyN2YtNzI0MDQ2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:04.150731Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, ActorId: [2:903:2738], ActorState: ExecuteState, TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Create QueryResponse for error on request, msg: 2025-04-10T11:15:04.151473Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Resolved key sets: 0 2025-04-10T11:15:04.152023Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfpj5wj6hra1az87s9fj99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNlMDlmMjgtYWVmODZkY2QtMzA1ZGYxNmEtNzA1YWQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.152070Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Ctx: { TraceId: 01jrfpj5wj6hra1az87s9fj99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNlMDlmMjgtYWVmODZkY2QtMzA1ZGYxNmEtNzA1YWQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:04.152105Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1151:2725] TxId: 281474976715678. Ctx: { TraceId: 01jrfpj5wj6hra1az87s9fj99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNlMDlmMjgtYWVmODZkY2QtMzA1ZGYxNmEtNzA1YWQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:04.152156Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1151:2725] TxId: 281474976715678. Ctx: { TraceId: 01jrfpj5wj6hra1az87s9fj99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNlMDlmMjgtYWVmODZkY2QtMzA1ZGYxNmEtNzA1YWQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.152207Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1151:2725] TxId: 281474976715678. Ctx: { TraceId: 01jrfpj5wj6hra1az87s9fj99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNlMDlmMjgtYWVmODZkY2QtMzA1ZGYxNmEtNzA1YWQxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:04.152865Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Resolved key sets: 0 2025-04-10T11:15:04.153366Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfpj5wk1gdtkmcw6p8aq5wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBhODBiZjMtYjdiZTJkZmMtN2I2MzVjODUtNjJiYzRjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.153403Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Ctx: { TraceId: 01jrfpj5wk1gdtkmcw6p8aq5wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBhODBiZjMtYjdiZTJkZmMtN2I2MzVjODUtNjJiYzRjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:04.153445Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1156:2729] TxId: 281474976715679. Ctx: { TraceId: 01jrfpj5wk1gdtkmcw6p8aq5wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBhODBiZjMtYjdiZTJkZmMtN2I2MzVjODUtNjJiYzRjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:04.153493Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2729] TxId: 281474976715679. Ctx: { TraceId: 01jrfpj5wk1gdtkmcw6p8aq5wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBhODBiZjMtYjdiZTJkZmMtN2I2MzVjODUtNjJiYzRjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.153528Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2729] TxId: 281474976715679. Ctx: { TraceId: 01jrfpj5wk1gdtkmcw6p8aq5wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTBhODBiZjMtYjdiZTJkZmMtN2I2MzVjODUtNjJiYzRjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:04.153560Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Resolved key sets: 0 2025-04-10T11:15:04.153801Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfpj5wkayqqjn74h74yn8nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGNmYzVlZTQtZjNmYWNhNTItNmE5ZjdhNWItYWRlY2ZjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.153836Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Ctx: { TraceId: 01jrfpj5wkayqqjn74h74yn8nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGNmYzVlZTQtZjNmYWNhNTItNmE5ZjdhNWItYWRlY2ZjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:04.153869Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1157:2731] TxId: 281474976715680. Ctx: { TraceId: 01jrfpj5wkayqqjn74h74yn8nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGNmYzVlZTQtZjNmYWNhNTItNmE5ZjdhNWItYWRlY2ZjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:04.153918Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1157:2731] TxId: 281474976715680. 
Ctx: { TraceId: 01jrfpj5wkayqqjn74h74yn8nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGNmYzVlZTQtZjNmYWNhNTItNmE5ZjdhNWItYWRlY2ZjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.153961Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1157:2731] TxId: 281474976715680. Ctx: { TraceId: 01jrfpj5wkayqqjn74h74yn8nj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGNmYzVlZTQtZjNmYWNhNTItNmE5ZjdhNWItYWRlY2ZjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:04.154502Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Resolved key sets: 0 2025-04-10T11:15:04.154607Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.154638Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:04.154670Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1166:2738] TxId: 281474976715681. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:04.154718Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1166:2738] TxId: 281474976715681. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:04.154760Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1166:2738] TxId: 281474976715681. Ctx: { TraceId: 01jrfpj5wn8xmn1w3qwfmez084, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGMwNWE0Y2QtMmViZjJiYzYtYzQxNDZhNzgtMjg4MzU3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-04-10T11:14:51.207782Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.240970Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.246332Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:14:51.250362Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.294993Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.429257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.429313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.437626Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.438075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.439430Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.439486Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.439528Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.440195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.440442Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.440510Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:14:51.540162Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.587410Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.587652Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.587803Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:14:51.587862Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.587920Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.587982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.588319Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.588415Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.588740Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.588890Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.588971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.589023Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.589083Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.589162Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.589229Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.589285Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.589361Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.589490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.589526Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.589591Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:14:51.594236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.594305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.594401Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.594717Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.594779Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.594834Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.594904Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.594973Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.595025Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.595104Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.595406Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.595451Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.595491Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.595532Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.595579Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 
9437184 is DelayComplete 2025-04-10T11:14:51.595611Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.595649Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.595687Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.595711Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.618900Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.618966Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.619005Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.619051Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.619106Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.619528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.619576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.619618Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:14:51.619739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.619760Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.619855Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.619888Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.619936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.619980Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.624108Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.624183Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.624428Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.624468Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.624529Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.624585Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.624626Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.624664Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.624702Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.624744Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.624799Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.624843Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.624895Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.625055Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.625087Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.625115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.625137Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.625164Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.625218Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.625247Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.625311Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.625366Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.625428Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.625468Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.625503Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.625563Z node 1 :TX_D ... 
e 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-04-10T11:15:04.850070Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-04-10T11:15:04.850097Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:15:04.850119Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-10T11:15:04.850325Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:15:04.850355Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:15:04.850379Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-04-10T11:15:04.850857Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-04-10T11:15:04.850941Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:04.850997Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-10T11:15:04.851026Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:15:04.851052Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-04-10T11:15:04.851081Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.851330Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2025-04-10T11:15:04.851377Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-04-10T11:15:04.851431Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-04-10T11:15:04.851486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-04-10T11:15:04.851524Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-10T11:15:04.851548Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-04-10T11:15:04.851585Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-04-10T11:15:04.851637Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:04.851679Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:04.851728Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-04-10T11:15:04.852235Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:237:2230], Recipient [1:237:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.852281Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.852352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:04.852393Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:04.852440Z node 1 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-04-10T11:15:04.852480Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-04-10T11:15:04.852512Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.852539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:04.852583Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:04.852613Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:04.854345Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-04-10T11:15:04.854406Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.854440Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:04.854470Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-10T11:15:04.854502Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-04-10T11:15:04.854540Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.854568Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-10T11:15:04.854591Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:04.854609Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:04.854669Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-04-10T11:15:04.854701Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-04-10T11:15:04.854746Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-04-10T11:15:04.854786Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.854818Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:04.854845Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-04-10T11:15:04.854868Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-04-10T11:15:04.854936Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.854958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-04-10T11:15:04.854978Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-04-10T11:15:04.855001Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-04-10T11:15:04.855027Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.855073Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2025-04-10T11:15:04.855114Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-04-10T11:15:04.855140Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-04-10T11:15:04.855171Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.855195Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-04-10T11:15:04.855217Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-04-10T11:15:04.855235Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:15:04.855255Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.855286Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:15:04.855312Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:15:04.855338Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-04-10T11:15:04.855778Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-04-10T11:15:04.855831Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:04.855885Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:04.855935Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:15:04.855985Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-04-10T11:15:04.856032Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.856218Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-04-10T11:15:04.856249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-04-10T11:15:04.856278Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-04-10T11:15:04.856312Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-04-10T11:15:04.856344Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-10T11:15:04.856367Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-04-10T11:15:04.856392Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-04-10T11:15:04.856437Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:04.856469Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:04.856510Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:04.856566Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2025-04-10T11:15:04.879744Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-04-10T11:15:04.879840Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-10T11:15:04.879912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:04.879963Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.880040Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:04.880132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:04.880338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:04.880374Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-10T11:15:04.880420Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:04.880454Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-04-10T11:14:53.893478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:53.893625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:53.893765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b75/r3tmp/tmpS4YzdE/pdisk_1.dat 2025-04-10T11:14:54.273511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.311326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.349907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.350005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.361473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.472574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.921281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:55.207377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:926:2736], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.207519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:936:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.207608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.219336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:14:55.395929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:940:2744], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:14:55.460319Z node 1 :TX_PROXY ERROR: Actor# [1:1001:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:56.460336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfphy31e1s7rhypr9w7m9kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc3M2ZhMGUtMzU5OTY0MzEtNmViYTNjMTQtZDE5OWRhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:14:56.613999Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfphzcvfafsjhs8z2szc8py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY3ZTYxNTItZjA5MjMyMDctZTQ2NmNlZDEtOGM2YThkYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:14:57.258066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfphzge8173qxft68614yxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA4MjNjMDktYzA0NjFkMTItMTU4OWU5MDMtZjhjMWI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-10T11:14:57.417475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpj04f2tgmzykkqk4708sp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA4MjNjMDktYzA0NjFkMTItMTU4OWU5MDMtZjhjMWI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-10T11:14:57.526610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj08y60pg5qa6mk4es48v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyZjA2MzgtNDllYWM1NDAtOGYxY2YyMjYtOGFiYTViODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-10T11:14:57.567477Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-04-10T11:14:57.589106Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1198:2840], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1079:2840]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1198:2840]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-04-10T11:14:57.589825Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1191:2840], SessionActorId: [1:1079:2840], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1079:2840]. isRollback=0 2025-04-10T11:14:57.597774Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyZjA2MzgtNDllYWM1NDAtOGYxY2YyMjYtOGFiYTViODE=, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01jrfpj08y60pg5qa6mk4es48v, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1192:2840] from: [1:1191:2840] 2025-04-10T11:14:57.601037Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1192:2840] TxId: 281474976715665. Ctx: { TraceId: 01jrfpj08y60pg5qa6mk4es48v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyZjA2MzgtNDllYWM1NDAtOGYxY2YyMjYtOGFiYTViODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-04-10T11:14:57.601847Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyZjA2MzgtNDllYWM1NDAtOGYxY2YyMjYtOGFiYTViODE=, ActorId: [1:1079:2840], ActorState: ExecuteState, TraceId: 01jrfpj08y60pg5qa6mk4es48v, Create QueryResponse for error on request, msg: 2025-04-10T11:14:57.602473Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1164:2842], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1081:2842]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-04-10T11:14:57.602611Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1154:2842], SessionActorId: [1:1081:2842], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1081:2842]. isRollback=0 2025-04-10T11:14:57.603348Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA4MjNjMDktYzA0NjFkMTItMTU4OWU5MDMtZjhjMWI4ZjE=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01jrfpj04f2tgmzykkqk4708sp, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1155:2842] from: [1:1154:2842] 2025-04-10T11:14:57.604648Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1155:2842] TxId: 281474976715664. Ctx: { TraceId: 01jrfpj04f2tgmzykkqk4708sp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA4MjNjMDktYzA0NjFkMTItMTU4OWU5MDMtZjhjMWI4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-04-10T11:14:57.605247Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA4MjNjMDktYzA0NjFkMTItMTU4OWU5MDMtZjhjMWI4ZjE=, ActorId: [1:1081:2842], ActorState: ExecuteState, TraceId: 01jrfpj04f2tgmzykkqk4708sp, Create QueryResponse for error on request, msg: 2025-04-10T11:14:58.044699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj0m22amhzcqec0x1w6k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzZjNlZWQtOGQ3NWQyYzEtZTE5ZjEwZGMtNDMwNzRhOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-10T11:15:02.390175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:02.390417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:15:02.390662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b75/r3tmp/tmpZk0Xby/pdisk_1.dat 2025-04-10T11:15:02.704872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:02.734274Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:02.771145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:02.771318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:02.783521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:02.866529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:03.154505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:03.438059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.438167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.438232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.443107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:03.613218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:846:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:03.652842Z node 2 :TX_PROXY ERROR: Actor# [2:906:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:03.748286Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpj64ce80t6seq8fcsk1x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGU2NGFiNDgtYzEyYmRhMmQtOWIyYTU5YTUtNzkwYzJmZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:03.837406Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpj6esdta25ctmkj7g3cb1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDVhMDY4MmQtNzM4OTA2OWYtZjU1NTYxNDgtMjhmNmJjOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-04-10T11:15:04.504498Z node 2 :KQP_COMPUTE WARN: SelfId: [2:993:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [2:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-04-10T11:15:04.504671Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:983:2772], SessionActorId: [2:968:2772], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:968:2772]. isRollback=0 2025-04-10T11:15:04.505056Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1020:2812]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-04-10T11:15:04.505387Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzliMTAyYTMtNzVjZmQwZTMtODRhYjExNDMtZTM4YzMwOGQ=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01jrfpj6hkd3fgwd9by1a9kwfr, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:984:2772] from: [2:983:2772] 2025-04-10T11:15:04.505911Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:984:2772] TxId: 281474976715663. Ctx: { TraceId: 01jrfpj6hkd3fgwd9by1a9kwfr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzliMTAyYTMtNzVjZmQwZTMtODRhYjExNDMtZTM4YzMwOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-04-10T11:15:04.506940Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzliMTAyYTMtNzVjZmQwZTMtODRhYjExNDMtZTM4YzMwOGQ=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01jrfpj6hkd3fgwd9by1a9kwfr, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] >> TYardTest::TestChunkFlushReboot [GOOD] >> TYardTest::TestChunkDeletionWhileWriting |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> TYardTest::TestFirstRecordToKeep [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] Test command err: 2025-04-10T11:14:54.136858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:54.137068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:54.137266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b63/r3tmp/tmpKL5Tbv/pdisk_1.dat 2025-04-10T11:14:54.552018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.598130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.639086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.639222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.650781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.739325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:55.098907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:14:55.459655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.459785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.459884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:14:55.475267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:14:55.664635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:14:55.760668Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:14:56.460436Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfphyawcdwtsbehhkcvsh4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA1YTVkYmItYjM3MDQ1NGItOGIzMWMwNDgtMjBkYjBmOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:14:56.601451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfphzct9rhjbn6w27nk1vva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAxM2RmM2EtZjJhOTg0Ni1hZWU3NjA2My1mNTcyODdlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-10T11:14:57.607851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj093fz7symhqd9w7ebh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRlOTM0MDMtNjQ1ZjllMmQtMTM3ZTQwZDQtYjhlYmE3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:14:57.708705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpj0ef49m9mfvjhh7cf0tf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI1YmYxZjQtZjFlY2U1ZWUtZmQ3MWQwZmEtYzcwYzBiODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-10T11:15:02.148889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:02.149139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:15:02.149411Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b63/r3tmp/tmpKIQkbo/pdisk_1.dat 2025-04-10T11:15:02.465861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:02.499662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:02.537315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:02.537469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:02.549129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:02.637037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:02.913031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:03.185010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.185109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.185178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:03.191806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:03.355353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:03.392174Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:03.457879Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpj5wf6y8sgm7w8ny1s13r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTM2MjU5ZGEtMWNmMDAyYzAtYTZjZjFmYWYtMjQ4ZmY0NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:03.547225Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpj65se999zbv78vyewde0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjZjNzI2OWUtMWRkNWJmNDktMTU3N2ExOTgtOThhMGIzMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.252599Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfpj6em95y829e03j58vxq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzgxZTllMi0xZjA3MDM2MS1jYjFmN2U0YS03MjliMGYxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-10T11:15:04.613889Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpj77612qn0tenmzbfqasm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDA1OTdjYWItNDA2ZGYzNWMtMmM3ZTMwYWMtZTlhYmRiNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.729891Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpj79q8qe1499330wcf6gb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzgxZTllMi0xZjA3MDM2MS1jYjFmN2U0YS03MjliMGYxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.829350Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpj7cy4rfrczrwhs0r1z3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzgxZTllMi0xZjA3MDM2MS1jYjFmN2U0YS03MjliMGYxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:04.895652Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzgxZTllMi0xZjA3MDM2MS1jYjFmN2U0YS03MjliMGYxMg==, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01jrfpj7g14q7hyh9mswmbj8q5, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-04-10T11:14:57.533847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:57.543164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:57.543260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:14:57.545294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:57.545645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:57.545977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b6f/r3tmp/tmp8Zgalp/pdisk_1.dat 2025-04-10T11:14:58.037976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:58.270215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:58.400718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:58.400858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:58.404655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:58.404756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:58.422247Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:14:58.422818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:58.423263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:58.734073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:58.861566Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:58.872486Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:58.873251Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-04-10T11:14:58.873589Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:58.948009Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:58.970405Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:58.971418Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:58.973665Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:58.973754Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:58.973830Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:58.974303Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:58.974435Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:58.974528Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1305:2390] in generation 1 2025-04-10T11:14:58.983980Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:59.030226Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:59.030465Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:59.030625Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-04-10T11:14:59.030674Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:59.030737Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:59.030780Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:59.031066Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:59.031137Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:59.031424Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:59.031550Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:59.031653Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:59.031698Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:59.031743Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:59.031782Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:59.031819Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:59.031870Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:59.031934Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:59.115457Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.115533Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.115600Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1265:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-04-10T11:14:59.115945Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:840:2467], Recipient [2:1314:2408] 2025-04-10T11:14:59.115996Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:59.116146Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:59.116385Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:59.116463Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:59.116585Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:59.116641Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:59.116696Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:59.116739Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:14:59.116790Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:59.117107Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:59.117161Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:59.117219Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:59.117268Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:59.117347Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:59.117392Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:59.117432Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:59.117468Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:59.117567Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:59.129135Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:59.129205Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:59.129243Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:59.129306Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:59.129409Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:59.130839Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1316:2410], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:59.130898Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:59.571579Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1348:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.571646Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:59.571711Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-04-10T11:14:59.579385Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1348:2418] 2025-04-10T11:14:59.579487Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:59.579668Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:59.579717Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... RD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:04.100522Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715669 2025-04-10T11:15:04.148887Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2335:2585], Recipient [2:2206:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:04.148953Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:04.149002Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:2200:3328], serverId# [2:2335:2585], sessionId# [0:0:0] 2025-04-10T11:15:04.149865Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2336:2586], Recipient [2:2206:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:04.149914Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:04.149955Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [2:2198:2542], serverId# [2:2336:2586], sessionId# [0:0:0] 2025-04-10T11:15:04.150103Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2078:2514], Recipient [2:2206:2544]: {TEvReadSet step# 2521 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:15:04.150210Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.150256Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715667 2025-04-10T11:15:04.150344Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2521 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:15:04.150941Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:15:04.151302Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2078:2514], Recipient [2:2206:2544]: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:15:04.151353Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.151385Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-10T11:15:04.151444Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 
SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:15:04.151647Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2078:2514], Recipient [2:2206:2544]: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-10T11:15:04.151685Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.151715Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-10T11:15:04.151767Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-10T11:15:04.152284Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2138:3281], Recipient [2:2335:2585] 2025-04-10T11:15:04.152329Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.152372Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715668 2025-04-10T11:15:04.152431Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2522 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:15:04.153262Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:15:04.153690Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2138:3281], Recipient [2:2335:2585] 2025-04-10T11:15:04.153742Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.153782Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-10T11:15:04.153833Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:15:04.153982Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2138:3281], Recipient [2:2335:2585] 2025-04-10T11:15:04.154009Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.154042Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-10T11:15:04.154094Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-10T11:15:04.154326Z node 2 :TX_DATASHARD DEBUG: Complete volatile write [2671 : 281474976715669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2291:3346] 2025-04-10T11:15:04.154912Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:15:04.169548Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.170005Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2206:2544], Recipient [2:2078:2514]: {TEvReadSet step# 2521 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 1} 2025-04-10T11:15:04.170061Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:04.170101Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715667 2025-04-10T11:15:04.171030Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.171108Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:15:04.171166Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.171301Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2206:2544], Recipient [2:2078:2514]: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-10T11:15:04.171340Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.171382Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715669 2025-04-10T11:15:04.171448Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-10T11:15:04.171496Z node 2 :TX_DATASHARD NOTICE: Outdated readset for 2671:281474976715669 at 72075186224037888 2025-04-10T11:15:04.171654Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:15:04.174372Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.174531Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.174582Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:15:04.174785Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2206:2544], Recipient [1:2138:3281] 2025-04-10T11:15:04.174822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:04.174862Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715668 2025-04-10T11:15:04.174938Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:15:04.175058Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2206:2544], Recipient [2:2078:2514]: {TEvReadSet step# 2671 txid# 
281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-04-10T11:15:04.175089Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:04.175114Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 2025-04-10T11:15:04.175328Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2206:2544], Recipient [1:2138:3281] 2025-04-10T11:15:04.175361Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:04.175400Z node 1 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715669 2025-04-10T11:15:04.175463Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2671 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-10T11:15:04.175498Z node 1 :TX_DATASHARD NOTICE: Outdated readset for 2671:281474976715669 at 72075186224037889 2025-04-10T11:15:04.175544Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:15:04.175733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2206:2544], Recipient [1:2138:3281] 2025-04-10T11:15:04.175764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:04.175797Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: 2025-04-10T11:14:53.787754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:53.787959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:53.788144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b58/r3tmp/tmpSPuyn9/pdisk_1.dat 2025-04-10T11:14:54.211773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.295524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:54.345929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:54.346036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:54.362417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:54.465960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:54.543460Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:54.544515Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:54.544936Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:54.545192Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:54.629701Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:54.633947Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:54.634108Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:54.635900Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:54.635997Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:54.636060Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:54.636490Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:54.636666Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:54.636746Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:14:54.647341Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:54.669946Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:54.670131Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:54.670264Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:14:54.670300Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:54.670334Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:54.670367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.670599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.670700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.671026Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:54.671116Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:54.671172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:54.671234Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:54.671279Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:54.671316Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:54.671349Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:54.671401Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:54.671455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:54.671583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.671615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.671660Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:14:54.672248Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:14:54.672296Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:54.672404Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:54.672628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:54.672686Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:54.672780Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:54.672836Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:54.672887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:54.672920Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:14:54.672972Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.673337Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:54.673385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:54.673418Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:54.673451Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.673529Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:54.673563Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:54.673597Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:54.673645Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.673671Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:54.675267Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:54.675336Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:54.686045Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:54.686110Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:54.686163Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:54.686221Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:54.686297Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:54.847561Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.847612Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:54.847648Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:14:54.848092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:14:54.848132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:54.848271Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:54.848322Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:14:54.848362Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:14:54.848415Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:14:54.863791Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:14:54.863877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:54.864936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.864987Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:54.865060Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:5 ... actorId: [2:1186:2938] 2025-04-10T11:15:04.411836Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-10T11:15:04.411904Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-10T11:15:04.411942Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:15:04.412150Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1026:2823], Recipient [2:1026:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.412190Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.412353Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1026:2823], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-04-10T11:15:04.412399Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-04-10T11:15:04.412797Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1181:2933], Recipient [2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1181:2933] ServerId: [2:1183:2935] } 2025-04-10T11:15:04.412851Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:15:04.413280Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1026:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-04-10T11:15:04.413314Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:15:04.413377Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-10T11:15:04.413421Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:15:04.413538Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:15:04.413589Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:15:04.413645Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-04-10T11:15:04.413692Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-10T11:15:04.413728Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-10T11:15:04.413766Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-10T11:15:04.413809Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:15:04.413983Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1183:2935], Recipient [2:1026:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:04.414031Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:04.414090Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1181:2933], serverId# [2:1183:2935], sessionId# [0:0:0] 2025-04-10T11:15:04.414442Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1026:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-10T11:15:04.414486Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:15:04.414538Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-04-10T11:15:04.414602Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:15:04.414674Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-10T11:15:04.425986Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-10T11:15:04.426167Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-10T11:15:04.426295Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:15:04.426387Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-10T11:15:04.426455Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1190:2942] 2025-04-10T11:15:04.426491Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-10T11:15:04.426536Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-10T11:15:04.426575Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-10T11:15:04.426799Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1031:2825], Recipient [2:1031:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.426850Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:04.427056Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1031:2825], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 
2025-04-10T11:15:04.427112Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-10T11:15:04.427559Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1182:2934], Recipient [2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1182:2934] ServerId: [2:1184:2936] } 2025-04-10T11:15:04.427605Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:15:04.428017Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1031:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-10T11:15:04.428062Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:15:04.428106Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-10T11:15:04.428144Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-10T11:15:04.428296Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:15:04.428345Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:04.428387Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-10T11:15:04.428426Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-10T11:15:04.428462Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-10T11:15:04.428496Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-10T11:15:04.428547Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:15:04.428632Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1184:2936], Recipient [2:1031:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:04.428669Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:04.428715Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1182:2934], serverId# [2:1184:2936], sessionId# [0:0:0] 2025-04-10T11:15:04.429092Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1031:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-10T11:15:04.429132Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:15:04.429171Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-10T11:15:04.429234Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:15:04.429297Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-10T11:15:04.442815Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-10T11:15:04.447238Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender 
[2:409:2404], Recipient [2:763:2638] 2025-04-10T11:15:04.447335Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-10T11:15:04.450126Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-10T11:15:04.450441Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:15:04.451176Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:755:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:15:05.017566Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:977:2681], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937273 } TxBody: " \0008\000`\200\200\200\005j\324\006\010\001\022\225\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-04-10T11:15:05.017672Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:05.017819Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-10T11:15:05.018401Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-10T11:15:05.018917Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> TBlobStorageHullFresh::Perf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-04-10T11:14:55.969303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:55.969523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:55.969715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b4e/r3tmp/tmpC7eCye/pdisk_1.dat 2025-04-10T11:14:56.367440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:56.414045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:56.459083Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:14:56.460260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:56.460365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:56.460499Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:14:56.474122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:56.559535Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:14:56.559614Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:14:56.559772Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:14:56.703613Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:14:56.703704Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:14:56.704283Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:14:56.704401Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:14:56.704789Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:14:56.704957Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:14:56.705047Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:14:56.706860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:56.707270Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:14:56.707881Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:14:56.707960Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:14:56.775218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:56.776392Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:56.777210Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:56.777711Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-04-10T11:14:56.777983Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:56.829689Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:56.830067Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:56.830517Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-04-10T11:14:56.830726Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:56.838964Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:56.840173Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:56.840335Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:56.841911Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:56.841983Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:56.842030Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:56.842550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:56.842748Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:56.842819Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-04-10T11:14:56.843124Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:56.843185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:56.844433Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:14:56.844482Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-10T11:14:56.844531Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:14:56.844769Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:56.844837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:56.844885Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-04-10T11:14:56.856475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:56.892920Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:56.893103Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:56.893242Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-04-10T11:14:56.893288Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:56.893337Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:56.893373Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:56.893619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.893724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.893826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:56.893870Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:14:56.893963Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:56.894025Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-04-10T11:14:56.894051Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:14:56.894081Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:14:56.894124Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:14:56.894433Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.894467Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.894643Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:56.894725Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:56.894877Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:56.894916Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:56.894974Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:56.895013Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-10T11:14:56.895062Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:56.895099Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:56.895143Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:56.895210Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:14:56.895285Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:14:56.895737Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2582], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.895777Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.895832Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2569], serverId# [1:687:2582], sessionId# [0:0:0] 2025-04-10T11:14:56.895878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... /3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-10T11:15:05.179845Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [2:976:2764] 2025-04-10T11:15:05.179899Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:976:2764], channels: 0 2025-04-10T11:15:05.179987Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:976:2764], 2025-04-10T11:15:05.180067Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:976:2764], 2025-04-10T11:15:05.180126Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T11:15:05.181355Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. 
Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:976:2764], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T11:15:05.181440Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:976:2764], 2025-04-10T11:15:05.181519Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:976:2764], 2025-04-10T11:15:05.182409Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:976:2764], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1035 Tasks { TaskId: 1 CpuTimeUs: 420 FinishTimeMs: 1744283705182 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 21 BuildCpuTimeUs: 399 HostName: "ghrun-x5vqckr7qy" NodeId: 2 CreateTimeMs: 1744283705180 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:05.182546Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:976:2764] 2025-04-10T11:15:05.182656Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[2:972:2764] 2025-04-10T11:15:05.182758Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001035s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:05.183425Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:981:2780], Recipient [2:931:2747]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.183497Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.183553Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:980:2779], serverId# [2:981:2780], sessionId# [0:0:0] 2025-04-10T11:15:05.183808Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:979:2764], Recipient [2:931:2747]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-10T11:15:05.183849Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:15:05.184003Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:931:2747], Recipient [2:931:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:15:05.184041Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:15:05.184133Z node 2 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-10T11:15:05.184252Z node 2 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-04-10T11:15:05.184317Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-10T11:15:05.184364Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-10T11:15:05.184477Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckWrite 2025-04-10T11:15:05.184543Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:05.184589Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-04-10T11:15:05.184632Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:05.184676Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:05.184731Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-10T11:15:05.184799Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-10T11:15:05.184849Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:05.184876Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:05.184904Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:15:05.184933Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:15:05.184976Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:15:05.185035Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-10T11:15:05.185183Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-04-10T11:15:05.185242Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:15:05.185334Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:05.185379Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:15:05.185430Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:15:05.185472Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:05.185562Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:05.185595Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:15:05.185643Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:05.185684Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:15:05.185734Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:15:05.185761Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:05.185799Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:15:05.196643Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:15:05.196710Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:15:05.196763Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:15:05.196870Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:05.197414Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:05.197477Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:973:2764] TxId: 281474976715662. Ctx: { TraceId: 01jrfpj7rd0jwp0gah0dn2f157, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGUzMWQ4ZDMtOTIxNTZmNi0yMDBlZGFhZi1kOWUzMGJmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] >> TYardTest::TestChunkDeletionWhileWriting [GOOD] >> TYardTest::TestChunkPriorityBlock >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2025-04-10T11:14:51.213578Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.240997Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.246347Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:14:51.250405Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.294875Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.435247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.435309Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.452916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.453498Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.457440Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.457531Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.457594Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.457985Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.458309Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.458387Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:14:51.549161Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.594595Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.594961Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:51.595197Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:14:51.595307Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.595394Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.595492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.595929Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.596042Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.596575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.596778Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 9437184 2025-04-10T11:14:51.596921Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.597048Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.597143Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.597235Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.597386Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.597455Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.597530Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.597700Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.597767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.597877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:14:51.601183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.601280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.601449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.601654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.601774Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.601845Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.601959Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.602055Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.602131Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.602270Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.602666Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.602722Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.602779Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.602831Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.602894Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.602936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.602984Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 
to execution unit WaitForPlan 2025-04-10T11:14:51.603049Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.603094Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.617569Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.617674Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.617735Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.617861Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.617964Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.618763Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.618856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.618943Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:14:51.619171Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.619217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.620506Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.620585Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.620643Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.620710Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.625456Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.625541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.625846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.625912Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.625981Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.626055Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.626094Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.626425Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.626477Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.626543Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.626614Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.626671Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.626724Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.626967Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.627013Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.627043Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.627069Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.627094Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.627154Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.627206Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.627243Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.627293Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.627349Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.627387Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.627443Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.627513Z node 1 :TX_D ... 
eady operations at 9437184 2025-04-10T11:15:05.419635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.419707Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.419782Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-10T11:15:05.419904Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T11:15:05.419970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.420170Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420222Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.420254Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.420317Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.420386Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:15:05.420417Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.420540Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420565Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.420617Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.420656Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.420695Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:15:05.420724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.420824Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420847Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420870Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.420913Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.420963Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.421017Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.421072Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:15:05.421103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-10T11:15:05.421223Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:15:05.421268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.421296Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.421351Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.421392Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:15:05.421420Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.421520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.421545Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.421596Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.421658Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:15:05.421688Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.421799Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.421826Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.421881Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.421925Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:15:05.421954Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.422071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.422107Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-04-10T11:15:05.422219Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-10T11:15:05.422311Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.422467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.422515Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.422558Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-04-10T11:15:05.422611Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:15:05.422641Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.422773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:05.422802Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-04-10T11:15:05.422845Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:05.422886Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:05.423079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-10T11:15:05.423119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.423178Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-04-10T11:15:05.423599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:15:05.423641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.423676Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-04-10T11:15:05.423842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:15:05.423894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.423925Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-04-10T11:15:05.424050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:15:05.424080Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.424107Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-04-10T11:15:05.424224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:15:05.424254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.424281Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-04-10T11:15:05.424446Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:15:05.424478Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-10T11:15:05.424505Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-10T11:15:05.424626Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:15:05.424659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.424707Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-10T11:15:05.424850Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:237:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:15:05.424905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:05.424966Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> TYardTest::TestChunkPriorityBlock [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] >> TopTest::Test2 [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TopTest::Test1 [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> THugeHeapCtxTests::Basic [GOOD] >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> TChainLayoutBuilder::TestProdConf [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> 
TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: 2025-04-10T11:14:56.379540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:56.379760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:56.379932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b4b/r3tmp/tmp5c8JqC/pdisk_1.dat 2025-04-10T11:14:56.830005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:56.884660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:56.930362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:56.930457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:56.943453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:57.043031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:57.090692Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:57.091770Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:57.092210Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:57.092507Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:57.141372Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:57.142142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:57.142275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:57.143988Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:57.144085Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:57.144141Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:57.144542Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:57.144672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:57.144753Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:14:57.155475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:57.192889Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:57.193108Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:57.193255Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:14:57.193296Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:57.193353Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:57.193396Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:57.193638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.193694Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.194002Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:57.194093Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:57.194207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:57.194259Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:57.194312Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:57.194351Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:57.194388Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:57.194420Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:57.194464Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:57.194595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.194642Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.194689Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:14:57.195117Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:14:57.195179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:57.195301Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:57.195526Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:57.195596Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:57.195719Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:57.195776Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:57.195820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:57.195858Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
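This trace shows the datashard execution-unit pipeline: proposal [0:281474976715657] is advanced through named units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), and each unit returns a status — Executed, DelayComplete(NoMoreRestarts), or not ready — that determines whether the plan advances or the operation parks. The following compilable sketch reproduces that loop; TUnit, EStatus, and RunPlan are stand-ins for illustration, not the real machinery.

    #include <cstdio>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        const char* Name;
        EStatus (*Execute)();   // captureless lambdas convert to this
    };

    bool RunPlan(const std::vector<TUnit>& plan) {
        for (const TUnit& unit : plan) {
            std::printf("Trying to execute on unit %s\n", unit.Name);
            switch (unit.Execute()) {
            case EStatus::NotReady:
                // e.g. WaitForPlan: park until the plan step arrives
                return false;
            case EStatus::DelayComplete:
                // side effects deferred to the Complete phase
                [[fallthrough]];
            case EStatus::Executed:
                std::printf("Advance execution plan on unit %s\n", unit.Name);
                break;
            }
        }
        return true;
    }

    int main() {
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };
        RunPlan(plan);   // stops at WaitForPlan, mirroring the trace
    }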
2025-04-10T11:14:57.195897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:57.196249Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:57.196293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:57.196325Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:57.196364Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:57.196423Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:57.196455Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:57.196502Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:57.196540Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:57.196614Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:57.198124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:57.198231Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:57.208959Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:57.209040Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:57.209078Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:57.209148Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:57.209217Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:57.364561Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.364615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.364649Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:14:57.365050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:14:57.365088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:57.365198Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:57.365236Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:14:57.365293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:14:57.365347Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:14:57.386920Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:14:57.387007Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:57.388088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.388143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.388195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:5 ... raceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1086:2878], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 646 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 321 FinishTimeMs: 1744283706688 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 258 BuildCpuTimeUs: 63 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283706687 CreateTimeMs: 1744283706679 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.690770Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1086:2878] 2025-04-10T11:15:06.690838Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1087:2879], CA [2:1088:2880], CA [2:1085:2877], CA [2:1089:2881], 2025-04-10T11:15:06.690893Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1087:2879], CA [2:1088:2880], CA [2:1085:2877], CA [2:1089:2881], 2025-04-10T11:15:06.691601Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1085:2877], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 639 DurationUs: 2000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 179 FinishTimeMs: 1744283706689 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 97 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283706687 CreateTimeMs: 1744283706678 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.691703Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1085:2877] 2025-04-10T11:15:06.691783Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1087:2879], CA [2:1088:2880], CA [2:1089:2881], 2025-04-10T11:15:06.691832Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1087:2879], CA [2:1088:2880], CA [2:1089:2881], 2025-04-10T11:15:06.692559Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1087:2879], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 494 DurationUs: 3000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 212 FinishTimeMs: 1744283706690 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 174 BuildCpuTimeUs: 38 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283706687 CreateTimeMs: 1744283706679 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.692634Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1087:2879] 2025-04-10T11:15:06.692690Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1088:2880], CA [2:1089:2881], 2025-04-10T11:15:06.692740Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. 
Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1088:2880], CA [2:1089:2881], 2025-04-10T11:15:06.692880Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1088:2880], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1038 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 457 FinishTimeMs: 1744283706691 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 399 BuildCpuTimeUs: 58 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283706689 CreateTimeMs: 1744283706679 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.692953Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1088:2880] 2025-04-10T11:15:06.693004Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1089:2881], 2025-04-10T11:15:06.693047Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1089:2881], 2025-04-10T11:15:06.693485Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2881], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 498 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 205 FinishTimeMs: 1744283706692 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 137 BuildCpuTimeUs: 68 HostName: "ghrun-x5vqckr7qy" NodeId: 2 CreateTimeMs: 1744283706679 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.693561Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1089:2881] 2025-04-10T11:15:06.693609Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. 
Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], 2025-04-10T11:15:06.693648Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1090:2882], 2025-04-10T11:15:06.693784Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1090:2882], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 450 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 189 FinishTimeMs: 1744283706693 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 148 BuildCpuTimeUs: 41 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283706692 CreateTimeMs: 1744283706679 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:06.693840Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1090:2882] 2025-04-10T11:15:06.694055Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:06.694133Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jrfpj8rc4hxnk2pj40q35r0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjgwYmM0YmUtYTc5MjU3ZTQtNzlmYjQ4NmYtZDA4Yjc4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.004979s ReadRows: 2 ReadBytes: 16 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] Test command err: 2025-04-10T11:14:56.059815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:14:56.059977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:14:56.060099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b4c/r3tmp/tmpuavkEz/pdisk_1.dat 2025-04-10T11:14:56.518695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:14:56.561825Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:56.603492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:14:56.603615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:14:56.615137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:14:56.699338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:14:56.740644Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:56.741852Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:56.744082Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:14:56.749461Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:56.813143Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:56.814025Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:56.814141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:56.816193Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:14:56.816299Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:14:56.816363Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:14:56.816768Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:56.816896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:56.816974Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:14:56.828111Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:56.881361Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:14:56.881514Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:56.881607Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:14:56.881633Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:14:56.881669Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:14:56.881700Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:56.881856Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.881899Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.882120Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:14:56.887877Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:14:56.888031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:56.888069Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:56.888100Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:14:56.888145Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:14:56.888182Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:14:56.888216Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:14:56.888251Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:14:56.888366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.888394Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.888435Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:14:56.888765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:14:56.888799Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:56.888918Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:14:56.889073Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:14:56.889122Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:14:56.889527Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:14:56.889570Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:14:56.889603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:14:56.889685Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
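The DelayComplete / DelayCompleteNoMoreRestarts statuses in these traces mark side effects that must not run during the Execute phase: the reply is queued while TTxProposeTransactionBase::Execute runs inside the local database transaction and is only sent from Complete, once the commit is durable — note that "Propose transaction complete ... status: PREPARED" appears only after TTxProposeTransactionBase::Complete. A minimal sketch of that two-phase pattern, assuming an illustrative std::function queue rather than YDB's real side-effect mechanism:

    #include <functional>
    #include <vector>

    // Execute() may only queue effects; Complete() runs them after the
    // underlying commit is durable. Names are illustrative, not YDB's API.
    class TTxProposeTransactionBase {
    public:
        void Execute() {
            // ... validate and persist the proposed tx (StoreSchemeTx) ...
            Delayed.push_back([] {
                // send "status: PREPARED" to the client -- not before commit
            });
        }
        void Complete() {
            for (auto& effect : Delayed)
                effect();              // replies go out only now
            Delayed.clear();
        }
    private:
        std::vector<std::function<void()>> Delayed;
    };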
2025-04-10T11:14:56.889710Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:56.890063Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:56.890096Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:14:56.890121Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:14:56.890163Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:56.890218Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:14:56.890249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:14:56.890295Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:14:56.890332Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:56.890371Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:56.891811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:14:56.891876Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:14:56.902635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:14:56.902692Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:14:56.902727Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:14:56.902771Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:14:56.902841Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:57.057700Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.057741Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:57.057774Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:14:57.058082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:14:57.058109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:57.058221Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:14:57.058268Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:14:57.058305Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:14:57.058338Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:14:57.067440Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:14:57.067524Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:14:57.068409Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.068448Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:57.068503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:14:5 ... 86224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:15:06.476909Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-10T11:15:06.477004Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1199:2936] 2025-04-10T11:15:06.477051Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-10T11:15:06.477119Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-10T11:15:06.477172Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:15:06.477419Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1045:2823], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-04-10T11:15:06.477489Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-04-10T11:15:06.477866Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1045:2823], Recipient [2:1045:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:06.477910Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:06.478178Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1194:2931], Recipient [2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1194:2931] ServerId: [2:1196:2933] } 2025-04-10T11:15:06.478228Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:15:06.478784Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1045:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-04-10T11:15:06.478828Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:15:06.478883Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-10T11:15:06.478931Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 72075186224037892 2025-04-10T11:15:06.479078Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:15:06.479149Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:06.479198Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-04-10T11:15:06.479247Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-10T11:15:06.479300Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-10T11:15:06.479344Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-10T11:15:06.479407Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:15:06.479523Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1196:2933], Recipient [2:1045:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:06.479569Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:06.479633Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1194:2931], serverId# [2:1196:2933], sessionId# [0:0:0] 2025-04-10T11:15:06.480023Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1045:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-10T11:15:06.480064Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:15:06.480115Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-04-10T11:15:06.480180Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:15:06.480265Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-10T11:15:06.491729Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-10T11:15:06.491842Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-10T11:15:06.491925Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:15:06.491982Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-10T11:15:06.492022Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1203:2940] 2025-04-10T11:15:06.492043Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-10T11:15:06.492070Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-10T11:15:06.492091Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-10T11:15:06.492276Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1049:2825], Recipient [2:755:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-10T11:15:06.492339Z node 2 :TX_DATASHARD DEBUG: 
72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-10T11:15:06.492638Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1049:2825], Recipient [2:1049:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:06.492680Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:06.492944Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1195:2932], Recipient [2:755:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1195:2932] ServerId: [2:1197:2934] } 2025-04-10T11:15:06.492972Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:15:06.493312Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1049:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-10T11:15:06.493369Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:15:06.493400Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-10T11:15:06.493435Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-10T11:15:06.493508Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:15:06.493539Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:06.493563Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-10T11:15:06.493591Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-10T11:15:06.493620Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-10T11:15:06.493649Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-10T11:15:06.493699Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:15:06.493837Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1197:2934], Recipient [2:1049:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:06.493870Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:15:06.493905Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1195:2932], serverId# [2:1197:2934], sessionId# [0:0:0] 2025-04-10T11:15:06.498347Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1049:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-10T11:15:06.498408Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:15:06.498450Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-10T11:15:06.498503Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:15:06.498570Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting 
UnprotectedReadEdge to v2000/18446744073709551615 2025-04-10T11:15:06.510807Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-10T11:15:06.514026Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:763:2638] 2025-04-10T11:15:06.514119Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-10T11:15:06.516815Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-10T11:15:06.516909Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:15:06.517598Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:755:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:15:07.114766Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:995:2681], Recipient [2:755:2634]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 281474976715663 TxMode: MODE_VOLATILE_PREPARE Locks { SendingShards: 72075186224037888 SendingShards: 72075186224037889 ReceivingShards: 72075186224037888 ReceivingShards: 72075186224037889 Op: Commit } 2025-04-10T11:15:07.114839Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037889 2025-04-10T11:15:07.114954Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976715663; 2025-04-10T11:15:07.115021Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-10T11:15:07.115486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-10T11:15:07.115979Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseAnd [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumBasicMirror3_4_2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> TBlobStorageIngress::Ingress [GOOD] >> TBlobStorageIngress::IngressCacheMirror3 [GOOD] >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys >> TBlobStorageIngress::IngressPartsWeMustHaveLocally [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressPrintDistribution [GOOD] >> TBlobStorageIngressMatrix::MatrixTest [GOOD] >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] |87.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-04-10T11:15:01.507600Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:01.508865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:01.508915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:01.517947Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:01.518579Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:01.518937Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:01.567087Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:01.596132Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:01.601990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:01.603844Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:01.603925Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:01.603995Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:01.604475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:01.604703Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:01.604783Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:15:01.706304Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:01.750539Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:01.750754Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:01.750868Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:15:01.750910Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:01.750952Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:01.750996Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:01.751254Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.751326Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.751637Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:01.751733Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:01.751807Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:01.751851Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:01.751903Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 
9437184 2025-04-10T11:15:01.751944Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:01.751994Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:01.752040Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:01.752090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:01.752200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.752248Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.752311Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:15:01.755177Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:01.755258Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:01.755361Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:01.755578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:01.755646Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:01.755724Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:01.755785Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:01.755829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:01.755918Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:01.755961Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:01.756290Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:01.756338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:01.756376Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:15:01.756414Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:01.756466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:01.756504Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:01.756555Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:01.756609Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:01.756637Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:01.778866Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:01.778954Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:01.779000Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:01.779069Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:01.779150Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:01.779759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.779826Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.779874Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:15:01.780029Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:01.780067Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:01.780217Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:01.780270Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.780334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:01.780375Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:01.801789Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:01.801904Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:01.806274Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.806384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.806479Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:01.806538Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:01.806587Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:01.806648Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:01.806692Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:01.806751Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.806802Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:01.806874Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:01.806922Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit 
LoadTxDetails 2025-04-10T11:15:01.807108Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:01.807202Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.807233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:01.807263Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:01.807290Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:01.807379Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:01.807420Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:01.807483Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.807536Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.807609Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:01.807674Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:01.807719Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:01.807771Z node 1 :TX_DATA ... 1000005:149] at 9437186 on unit CompleteOperation 2025-04-10T11:15:08.820874Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:08.820918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:08.821062Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:08.821352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:15:08.821398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.821435Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:15:08.821536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:345:2312]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-10T11:15:08.821571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.821594Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-10T11:15:08.821656Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:235:2228], Recipient [1:454:2396]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-10T11:15:08.821683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:15:08.821713Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 
9437184 txId 152 2025-04-10T11:15:08.821775Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-10T11:15:08.821822Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-04-10T11:15:08.821921Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:15:08.822028Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:15:08.822054Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.822079Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-10T11:15:08.822138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:345:2312]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-10T11:15:08.822192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.822236Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-10T11:15:08.822336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:454:2396], Recipient [1:454:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:08.822368Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:08.822417Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-10T11:15:08.822486Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:15:08.822538Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-10T11:15:08.822571Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-10T11:15:08.822621Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-10T11:15:08.822654Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-10T11:15:08.822686Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-10T11:15:08.822727Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-10T11:15:08.823192Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-10T11:15:08.823265Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:08.823349Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-10T11:15:08.823380Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 
2025-04-10T11:15:08.823409Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-10T11:15:08.823441Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:15:08.823706Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-10T11:15:08.823741Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-10T11:15:08.823783Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-10T11:15:08.823818Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-10T11:15:08.823851Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-10T11:15:08.823873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-10T11:15:08.823900Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-10T11:15:08.823929Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:08.823985Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-10T11:15:08.824018Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-10T11:15:08.824049Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-10T11:15:08.824335Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:15:08.824375Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.824405Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:15:08.824469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:345:2312]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:15:08.824493Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.824515Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-10T11:15:08.824621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:15:08.824662Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.824694Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:15:08.824811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:15:08.824839Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.824863Z node 1 
:TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-10T11:15:08.824999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:15:08.825041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.825087Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:15:08.825173Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:15:08.825195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.825217Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:15:08.825268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:15:08.825288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.825314Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:15:08.840845Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:08.840902Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:15:08.840959Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:15:08.841042Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:15:08.841090Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:08.841360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:15:08.841415Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:08.841448Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |87.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] >> TPDiskTest::SmallDisk10Gb >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] >> TYardTest::TestDamageAtTheBoundary >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2025-04-10T11:15:03.661057Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:03.662105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:03.662175Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:03.673212Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:03.673681Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:03.673961Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:03.721063Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:03.730780Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:03.731384Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:03.733631Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:03.733770Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:03.733867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:03.734325Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:03.734541Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:03.734606Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:15:03.804698Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:03.835245Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:03.835439Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:03.835555Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:15:03.835599Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:03.835640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:03.835688Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:03.835944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, 
Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.836011Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.836261Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:03.836350Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:03.836429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:03.836471Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:03.836507Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:15:03.836544Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:03.836603Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:03.836655Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:03.836711Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:03.837063Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.837104Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.837163Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:15:03.839952Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:03.840013Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:03.840109Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:03.840278Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:03.840332Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:03.840415Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:03.840503Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:03.840552Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:03.840593Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:03.840660Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:03.841025Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:03.841071Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:03.841110Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 
2025-04-10T11:15:03.841145Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:03.841224Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:03.841260Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:03.841295Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:03.841359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:03.841394Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:03.854428Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:03.854509Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:03.854676Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:03.854719Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:03.854787Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:03.855427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.855516Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.855570Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:15:03.855736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:03.855781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:03.855920Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:03.855969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.856010Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:03.856070Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:03.860278Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:03.860388Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:03.860656Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.860705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.860772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:03.860817Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:03.860861Z node 1 :TX_DATASHARD 
TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:03.860910Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:03.860955Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:03.861003Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.861045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:03.861090Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:03.861146Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:03.861340Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:03.861378Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.861405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:03.861428Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:03.861453Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:03.861536Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:03.861566Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:03.861610Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:03.861654Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:03.861724Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:03.861766Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:03.861805Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:03.861878Z node 1 :TX_DATA ... 
6 2025-04-10T11:15:09.663752Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.663809Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:15:09.663861Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:15:09.663908Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.664018Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.664046Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.664079Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-04-10T11:15:09.664118Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:15:09.664156Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.664253Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.664278Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.664321Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:15:09.664360Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:15:09.664394Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.664496Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.664520Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.664591Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:15:09.664626Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.664714Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.664739Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.664770Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:15:09.664794Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.664878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.664905Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.664938Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 
send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:15:09.664983Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.665090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:15:09.665116Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-10T11:15:09.665171Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:15:09.665202Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:15:09.665467Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-10T11:15:09.665516Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.665547Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-10T11:15:09.665684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-10T11:15:09.665722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.665752Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-10T11:15:09.665828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:342:2310]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-10T11:15:09.665852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.665910Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-10T11:15:09.666020Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-10T11:15:09.666052Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666077Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-10T11:15:09.666139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-10T11:15:09.666210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666238Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-10T11:15:09.666327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 41} 2025-04-10T11:15:09.666356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666383Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-10T11:15:09.666464Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-10T11:15:09.666502Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666552Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-10T11:15:09.666670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-10T11:15:09.666700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666725Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-10T11:15:09.666810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-10T11:15:09.666838Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.666862Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-10T11:15:09.666966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-10T11:15:09.666998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667023Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-10T11:15:09.667089Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-10T11:15:09.667112Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667151Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-10T11:15:09.667214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-10T11:15:09.667259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667282Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-10T11:15:09.667382Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-10T11:15:09.667421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667481Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-10T11:15:09.667581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-10T11:15:09.667608Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667635Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-10T11:15:09.667686Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:452:2394], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-10T11:15:09.667711Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:15:09.667754Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 >> TBlobStorageCompStrat::Test1 |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |87.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TPDiskTest::SmallDisk10Gb [GOOD] >> TPDiskTest::SuprisinglySmallDisk >> test_dispatch.py::TestMapping::test_idle [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] >> TBlobStorageCompStrat::Test1 [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] |87.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPDiskTest::SuprisinglySmallDisk [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob |87.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |88.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::Replace-QueryService-UseSink |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> TPDiskRaces::Decommit [GOOD] >> ShredPDisk::SimpleShredRepeat >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> KqpQueryPerf::MultiRead-QueryService >> ShredPDisk::SimpleShredRepeat [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart >> DataShardTxOrder::ZigZag_oo [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> VDiskBalancing::TestRandom_Mirror3dc >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TYardTest::TestDestroySystem >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart [GOOD] >> ShredPDisk::SimpleShredDirtyChunks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-04-10T11:14:51.209266Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.240832Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.246330Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:14:51.250394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.291461Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.426377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.426428Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.434379Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.434819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.438080Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.438176Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.438229Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.440228Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.440482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.440566Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:14:51.547059Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.594749Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.594962Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-04-10T11:14:51.595113Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:14:51.595177Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.595237Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.595294Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.595613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.595691Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.596043Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.596190Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.596274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.596340Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.596421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.596491Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.596557Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.596614Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.596674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.596789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.596825Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.596891Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:14:51.599816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.599882Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.599979Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.600174Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.600235Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.600300Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.600375Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.600437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit 
CheckSchemeTx 2025-04-10T11:14:51.600482Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.600600Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.600941Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.600982Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.601025Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.601064Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.601120Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.601149Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.601185Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.601220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.601245Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.619070Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.619138Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.619178Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.619224Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.619299Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.619869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.619933Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.619978Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:14:51.620169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.620204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.620329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.620374Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.620410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.620471Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.624462Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.624548Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-10T11:14:51.624754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.624796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.624854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.624918Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.624958Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.624994Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.625033Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.625094Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.625132Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.625168Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.625214Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.625394Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.625431Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.625455Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.625476Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.625497Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.625549Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.625577Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.625636Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.625675Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.625719Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.625752Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.625783Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.625842Z node 1 :TX_DATA ... 
aitInRS 2025-04-10T11:15:14.583788Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:14.583817Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-10T11:15:14.583850Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-10T11:15:14.583881Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-10T11:15:14.584451Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-10T11:15:14.584540Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:14.584605Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:14.584636Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-10T11:15:14.584666Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-10T11:15:14.584698Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:15:14.584946Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-10T11:15:14.585016Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-10T11:15:14.585064Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-10T11:15:14.585104Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-10T11:15:14.585146Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-10T11:15:14.585173Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-10T11:15:14.585209Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-10T11:15:14.585268Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:14.585335Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:15:14.585386Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:15:14.585438Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:15:14.585759Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:233:2226], Recipient [6:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:14.585802Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:14.585859Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:14.585898Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:14.585929Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:14.585968Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2025-04-10T11:15:14.585999Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-10T11:15:14.586033Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.586063Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:14.586094Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:14.586127Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:14.586912Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-10T11:15:14.586964Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.586996Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:14.587025Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-10T11:15:14.587056Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-10T11:15:14.587100Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587128Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-10T11:15:14.587156Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:14.587185Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:14.587233Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-10T11:15:14.587272Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-10T11:15:14.587310Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-10T11:15:14.587359Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587391Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:14.587421Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-10T11:15:14.587453Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-10T11:15:14.587508Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587539Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-10T11:15:14.587564Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-04-10T11:15:14.587593Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-10T11:15:14.587627Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587653Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-10T11:15:14.587680Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-10T11:15:14.587708Z node 6 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-10T11:15:14.587741Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587767Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-10T11:15:14.587802Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-10T11:15:14.587830Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:15:14.587861Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.587885Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:15:14.587911Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:15:14.587943Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-10T11:15:14.588344Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-10T11:15:14.588399Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:15:14.588448Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.588478Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:15:14.588506Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-10T11:15:14.588538Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:15:14.588765Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-10T11:15:14.588801Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-10T11:15:14.588835Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-10T11:15:14.588874Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-10T11:15:14.588913Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-10T11:15:14.588941Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-10T11:15:14.588974Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-10T11:15:14.589010Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:14.589046Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:14.589084Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:14.589121Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:14.602965Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-10T11:15:14.603064Z node 6 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-10T11:15:14.603145Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:14.603216Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-10T11:15:14.603307Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:14.603370Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:14.603717Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-10T11:15:14.603762Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-10T11:15:14.603811Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-10T11:15:14.603846Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-10T11:15:14.603899Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:15:14.603936Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-04-10T11:15:04.629277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:04.629532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:04.629749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b34/r3tmp/tmpgeiXc7/pdisk_1.dat 2025-04-10T11:15:05.030697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:05.076164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:05.114782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:05.114893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:05.126473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:05.222908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:05.259091Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:05.260311Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:05.260793Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:15:05.261063Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:05.307992Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:05.309179Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:05.309304Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:05.311190Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:15:05.311306Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:15:05.311373Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:15:05.311772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:05.311903Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:05.311982Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:15:05.322717Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:05.355200Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:15:05.355356Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:05.355450Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:15:05.355479Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:15:05.355524Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:15:05.355560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:05.355756Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.355817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.356053Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:15:05.356114Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:15:05.356189Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:05.356221Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:05.356255Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:15:05.356285Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:15:05.356310Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:15:05.356334Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:15:05.356365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:15:05.356447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.356488Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.356527Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:15:05.356851Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:15:05.356902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:05.357060Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:15:05.357308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:15:05.357405Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:15:05.357526Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:15:05.357583Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:05.357619Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:15:05.357654Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:15:05.357693Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:05.358037Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:05.358091Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:15:05.358131Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:15:05.358214Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:05.358271Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:15:05.358296Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:15:05.358325Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:15:05.358361Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:05.358394Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:05.359609Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:15:05.359662Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:15:05.370350Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:15:05.370431Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:05.370470Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:05.370523Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:15:05.370596Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:05.526860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.526926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.526974Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:15:05.527462Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:15:05.527505Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:05.527637Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:05.527688Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:15:05.527749Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:15:05.527797Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:15:05.541850Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:15:05.541956Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:05.546726Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.546805Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.546886Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:0 ... TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2844], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 468 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 113 FinishTimeMs: 1744283714549 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 58 BuildCpuTimeUs: 55 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283714548 CreateTimeMs: 1744283714541 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.551409Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2844] 2025-04-10T11:15:14.551465Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], CA [2:1049:2845], 2025-04-10T11:15:14.551512Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], CA [2:1049:2845], 2025-04-10T11:15:14.552028Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. 
Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2845], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 705 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 418 FinishTimeMs: 1744283714549 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 364 BuildCpuTimeUs: 54 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283714548 CreateTimeMs: 1744283714541 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.552100Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2845] 2025-04-10T11:15:14.552162Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], 2025-04-10T11:15:14.552203Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1050:2846], CA [2:1051:2847], CA [2:1052:2848], 2025-04-10T11:15:14.552470Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2846], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 545 DurationUs: 2000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 312 FinishTimeMs: 1744283714550 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 260 BuildCpuTimeUs: 52 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283714548 CreateTimeMs: 1744283714541 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.552527Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2846] 2025-04-10T11:15:14.552566Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1053:2849], CA [2:1051:2847], CA [2:1052:2848], 2025-04-10T11:15:14.552591Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1051:2847], CA [2:1052:2848], 2025-04-10T11:15:14.552667Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2847], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 641 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 355 FinishTimeMs: 1744283714551 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 292 BuildCpuTimeUs: 63 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283714549 CreateTimeMs: 1744283714541 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.552703Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2847] 2025-04-10T11:15:14.552726Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], CA [2:1052:2848], 2025-04-10T11:15:14.552750Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1053:2849], CA [2:1052:2848], 2025-04-10T11:15:14.553035Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2848], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 445 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 188 FinishTimeMs: 1744283714552 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 123 BuildCpuTimeUs: 65 HostName: "ghrun-x5vqckr7qy" NodeId: 2 CreateTimeMs: 1744283714541 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.553091Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [2:1052:2848] 2025-04-10T11:15:14.553145Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1053:2849], 2025-04-10T11:15:14.553182Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1053:2849], 2025-04-10T11:15:14.553291Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1053:2849], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 447 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 177 FinishTimeMs: 1744283714552 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 129 BuildCpuTimeUs: 48 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283714551 CreateTimeMs: 1744283714542 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:14.553364Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1053:2849] 2025-04-10T11:15:14.553539Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:14.553598Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1041:2825] TxId: 281474976715665. Ctx: { TraceId: 01jrfpjggechwwwj1380eyxk3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzMGIwOTItYzY2MzkyOWUtMzMzODVlM2MtN2JjMzMxNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.004291s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TDatabaseResolverTests::Greenplum_MasterNode >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> TYardTest::TestDestroySystem [GOOD] >> TYardTest::TestCutMultipleLogChunks >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> ShredPDisk::SimpleShredDirtyChunks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 1001070283226956749 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2025-04-10T11:15:14.612432Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-10T11:15:14.612653Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:223:17] ServerId# [1:301:63] TabletId# 72057594037932033 PipeClientId# [8:223:17] 2025-04-10T11:15:14.612766Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-10T11:15:14.612871Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:202:17] ServerId# [1:298:60] TabletId# 72057594037932033 PipeClientId# [5:202:17] 2025-04-10T11:15:14.613022Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-10T11:15:14.613124Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-10T11:15:14.613221Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> 
DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] Test command err: 2025-04-10T11:15:05.738610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:05.738827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:05.738999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b21/r3tmp/tmpFL8sLe/pdisk_1.dat 2025-04-10T11:15:06.142921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:06.189107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:06.236282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:06.236415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:06.252559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:06.346039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:06.678487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-10T11:15:06.964468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:06.964580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:06.964654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:06.972690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:07.142281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:07.208998Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:07.521071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpj9jjfp8pmfqm9chgkqwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE3YmE4YzEtNzJmYWU4ZGItZGU3ODk5MzUtYWVmMzZjMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:07.603251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpja4t9ms224rvajmqbbc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjU3ZDA1MjgtZGE0ZGRiNS1hYWI2MDAxZS02NWIzODU4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-10T11:15:08.061939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfpja8s80akg5nztzh5rhwd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZmYzk4NjQtMjMwYjQzMzYtOTk0NjcxMmUtNmRmZjg4ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-10T11:15:08.157888Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjanf71ax2eyfe0aaedrg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZmYzk4NjQtMjMwYjQzMzYtOTk0NjcxMmUtNmRmZjg4ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet 2025-04-10T11:15:08.343059Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1032:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT 2025-04-10T11:15:08.645463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjb210xvn4c7cweztc6p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY4YTAwY2YtNTdjYjc2My02ZmIwMmU0Mi04OWYxMDBiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-04-10T11:15:13.011493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:13.011773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:15:13.011986Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b21/r3tmp/tmpH61Qu9/pdisk_1.dat 2025-04-10T11:15:13.325913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:13.359868Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:13.396760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:13.396873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:13.408325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:13.506073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:13.807556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-10T11:15:14.103419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.103544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.103641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.109698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:14.283628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:14.326689Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:14.471577Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpjghn038yr3qjqpt1d6kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDE3MGVkYmUtY2ZiNmUyMGUtZGQ5ZDU4OS04M2Q5MDMzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:14.584978Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpjgy238tm2a6vzz43y6ev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGJiYjE4ZjEtZGZlNGYzMS1iY2I4YTZlMi1lNzJkMjZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-10T11:15:15.000373Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfpjh1g3pns990yvna98gx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjIyZmUyNDUtZWI2YzI0ODktZDgxZWM4ZS1iNTcyNTc1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-10T11:15:15.117216Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjhe64gzc3dg1d919dh3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjIyZmUyNDUtZWI2YzI0ODktZDgxZWM4ZS1iNTcyNTc1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-04-10T11:15:15.593416Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjhvm6gckpjpjgpz3746j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzA4MWEwM2UtOTg5N2M2NzgtNWU4YWY5MmYtMzU1MGFmMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestDestructionWhileWritingChunk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 1789520437154270402 SEND TEvPut with key [1:1:1:0:0:100:0] 2025-04-10T11:15:14.998555Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:15:14.999122Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-10T11:15:15.077966Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 1574669750478664915 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-04-10T11:15:14.792654Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-04-10T11:15:17.153229Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
>> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TYardTest::TestDestructionWhileWritingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingChunk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::SimpleShredDirtyChunks [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:368 >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingLog ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-04-10T11:15:06.142623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:06.142894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:06.143112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b27/r3tmp/tmp8rNDGK/pdisk_1.dat 2025-04-10T11:15:06.563629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:06.620109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:06.659876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:06.660026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:06.671708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:06.765032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:07.101934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:07.387140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:07.387223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:07.387277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:07.392983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:07.559084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:07.633266Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:07.938137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpj9zr0d72z7n49s0bhtqe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBkNjgyYzMtMzBhZDkwYzEtODJiY2Q4ZWQtNjg4NWY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:08.038855Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpjahv2vc9p7yvmmy20wzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE0MzhjYjYtMWQ4YTY0ZTgtNDcyNDU2NWUtMjYyN2IzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:08.728527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfpjav9041g3e29jecm42aq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjViNjQzYTAtN2Q1MTNhMzItNWVmMDM1ZC01N2Q2NGViZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-10T11:15:09.105092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjbk5ccjvcspe57g0p5hh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI3MWZiOWMtZWZmMjBiN2UtZTZlYjc5M2UtYzgzZTVkYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:09.209538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjbp00c8wbkhna6pa9hkv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjViNjQzYTAtN2Q1MTNhMzItNWVmMDM1ZC01N2Q2NGViZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:09.321888Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpjbsm2cyjmr4zepbqk4vx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjViNjQzYTAtN2Q1MTNhMzItNWVmMDM1ZC01N2Q2NGViZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:09.401554Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjViNjQzYTAtN2Q1MTNhMzItNWVmMDM1ZC01N2Q2NGViZQ==, ActorId: [1:967:2781], ActorState: ExecuteState, TraceId: 01jrfpjbwd8k4va61aq2bxrqxg, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-10T11:15:09.413486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfpjbwd8k4va61aq2bxrqxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjViNjQzYTAtN2Q1MTNhMzItNWVmMDM1ZC01N2Q2NGViZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:13.492128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:13.492340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:15:13.492580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b27/r3tmp/tmpu2kMXF/pdisk_1.dat 2025-04-10T11:15:13.782377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:13.818869Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:13.861024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:13.861189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:13.872958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:13.959744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:14.261133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:14.552486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.552595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:840:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.552671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:14.558706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:15:14.720740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:15:14.758738Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:14.819116Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfpjgzpdbtypdfe996ymt8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk4MzU0MzYtM2I3OTlmYTctNjcwZmJkZS1mMGQ3OWIwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:14.898453Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpjh8wdbwdrfys92xtenkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmNjZTEzMGQtNmI5MDc1NmUtNDkyMTJmNGMtZjkwNTgxMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-04-10T11:15:15.532506Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfpjhhq6e17ka8dgasgkkzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjVmYjg5MmMtNmYyYjRjYWUtNDhmYTQ5YzAtM2Q1OGM4NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-10T11:15:15.908099Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjhyz9cws73kqq5q3qj4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZlNTYwZTQtY2ZmNGI1ZjgtODM4ZGU4MmQtYmVhOWU0ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-04-10T11:15:16.007386Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfpjjaf15h08d7rem2wrzqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZlNTYwZTQtY2ZmNGI1ZjgtODM4ZGU4MmQtYmVhOWU0ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-04-10T11:15:16.449389Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpjjqs0v0j1e91ss1pq1cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDg2MGFiMjQtMjM5ZDM2NTAtNzEzNDMzNTgtN2E2MzRjYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-04-10T11:15:16.552591Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfpjjvj1fp19c90w75pz0dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjVmYjg5MmMtNmYyYjRjYWUtNDhmYTQ5YzAtM2Q1OGM4NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-04-10T11:15:16.647754Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfpjjycfwfewa91qzkvrk03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjVmYjg5MmMtNmYyYjRjYWUtNDhmYTQ5YzAtM2Q1OGM4NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-04-10T11:15:16.709221Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjVmYjg5MmMtNmYyYjRjYWUtNDhmYTQ5YzAtM2Q1OGM4NGU=, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01jrfpjk1c48r7vyq6zec1pevk, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-04-10T11:15:17.814289Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:15:17.817613Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-10T11:15:17.832373Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:15:17.832454Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-10T11:15:17.845430Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:15:17.845552Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-04-10T11:15:17.845211Z ErrorReason# |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: 2025-04-10T11:15:04.606412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:04.606735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:04.606953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b35/r3tmp/tmpUnpMuC/pdisk_1.dat 2025-04-10T11:15:05.026119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:05.069606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:05.109755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:05.109887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:05.121566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:05.204000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:05.265051Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:05.266377Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:05.266905Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:15:05.267201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:05.316278Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:05.317182Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:05.317334Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:05.319446Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:15:05.319562Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:15:05.319635Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:15:05.320091Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:05.320255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:05.321226Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:15:05.335142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:05.379523Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:15:05.379805Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:05.379972Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:15:05.380021Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:15:05.380070Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:15:05.380122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:05.380435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.380508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.380917Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:15:05.381021Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:15:05.381138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:05.381201Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:05.381273Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:15:05.381340Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:15:05.381383Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:15:05.381451Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:15:05.381519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:15:05.381671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.381716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.381764Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:15:05.382386Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:15:05.382464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:05.382611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:15:05.382855Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:15:05.382941Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:15:05.383050Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:15:05.383112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:05.383157Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:15:05.383199Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:15:05.383242Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:05.383609Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:05.383659Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:15:05.383704Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:15:05.383746Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:05.383823Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:15:05.383859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:15:05.383899Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:15:05.383975Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:05.384030Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:05.385939Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:15:05.386005Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:15:05.397088Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:15:05.397178Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:05.397221Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:05.397297Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:15:05.397418Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:05.560456Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.560519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:05.560563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:15:05.560974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:15:05.561014Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:05.561140Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:15:05.561190Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:15:05.561238Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:15:05.561278Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:15:05.569413Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:15:05.569516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:05.570872Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.570934Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:05.570999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:0 ... 1jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-10T11:15:16.436034Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-10T11:15:16.436272Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715670. Shard resolve complete, resolved shards: 1 2025-04-10T11:15:16.436361Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-10T11:15:16.436450Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-04-10T11:15:16.436539Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:16.436609Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:16.436980Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Collect channels updates for task: 1 at actor [2:1277:2993] 2025-04-10T11:15:16.437076Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:1277:2993], channels: 1 2025-04-10T11:15:16.437159Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:16.437229Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1277:2993], 2025-04-10T11:15:16.437299Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1277:2993], 2025-04-10T11:15:16.437378Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T11:15:16.438311Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1277:2993], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T11:15:16.438396Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1277:2993], 2025-04-10T11:15:16.438473Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1277:2993], 2025-04-10T11:15:16.438838Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1279:2993], Recipient [2:1203:2949]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-10T11:15:16.439021Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:15:16.439100Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4014/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v4014/18446744073709551615 2025-04-10T11:15:16.439159Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-04-10T11:15:16.439234Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-10T11:15:16.439331Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:16.439381Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:15:16.439430Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:16.439475Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:15:16.439537Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-10T11:15:16.439586Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:16.439615Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:15:16.439641Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:15:16.439667Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:15:16.439783Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-10T11:15:16.440017Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1279:2993], 0} after executionsCount# 1 2025-04-10T11:15:16.440095Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1279:2993], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:15:16.440192Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1279:2993], 0} finished in read 2025-04-10T11:15:16.440268Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:16.440301Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:15:16.440332Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:15:16.440362Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-04-10T11:15:16.440408Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:15:16.440437Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:15:16.440472Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-10T11:15:16.440521Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:15:16.441294Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1279:2993], Recipient [2:1203:2949]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:15:16.441382Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:15:16.442103Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1277:2993], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1144 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 220 FinishTimeMs: 1744283716441 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 135 HostName: "ghrun-x5vqckr7qy" NodeId: 2 StartTimeMs: 1744283716440 CreateTimeMs: 1744283716437 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:16.442290Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1277:2993] 2025-04-10T11:15:16.442473Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:16.442555Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1273:2993] TxId: 281474976715670. Ctx: { TraceId: 01jrfpjjnwe48he66b037hv4fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWNlM2UtNzM5ZTgwYTAtZWU3ZDM0ZmQtNWI2MmE3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001144s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } >> KqpQueryPerf::Insert+QueryService-UseSink >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TYardTest::TestFormatInfo >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TSchemeShardSubDomainTest::CopyRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 10507218933394988525 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-10T11:15:15.531912Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6332:830] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TSchemeShardSubDomainTest::Restart >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> KqpQueryPerf::Upsert+QueryService-UseSink >> TSchemeShardSubDomainTest::CreateDropNbs >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKeyValueTest::TestBasicWriteRead >> KqpQueryPerf::Replace-QueryService+UseSink >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> KqpQueryPerf::MultiRead-QueryService [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29055, MsgBus: 27908 2025-04-10T11:15:13.583914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641503123421943:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:13.584002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016be/r3tmp/tmp93G0NI/pdisk_1.dat 2025-04-10T11:15:14.055943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:14.070304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:14.070441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:14.075761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29055, node 1 2025-04-10T11:15:14.164045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:14.164069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:14.164092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:14.164236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27908 TClient is connected to server localhost:27908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:14.821548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:15:14.851669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:15.037098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.228315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:15:15.323761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:15:16.933462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641516008325628:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:16.933603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.330181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.372632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.420181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.473498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.510228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.594831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.702350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641520303293446:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.702466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.702614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641520303293452:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.707056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:17.718589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641520303293454:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:17.792931Z node 1 :TX_PROXY ERROR: Actor# [1:7491641520303293508:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:18.586678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641503123421943:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:18.586751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.044539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.044663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.044739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.044797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.044853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.044888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.044977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.045109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.045609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.138588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.138672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.162511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.162856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute
2025-04-10T11:15:20.163044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:20.172418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:20.172945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:20.177895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.179538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:20.188215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.199942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.200214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.202382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:20.202477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.202558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:20.202758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.215165Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:20.381490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.381778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.382036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:20.382315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:20.382375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.387897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.388058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:20.388279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.388361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:20.388416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:20.388455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:20.391455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.391528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:15:20.391566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:15:20.395425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.395482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.395530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.395612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.398910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:20.401831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:15:20.402034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:15:20.403197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.403336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.403386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.403790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:15:20.403881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.404089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.404273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:15:20.407225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.407281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.407503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.407554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:15:20.407810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.407861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:15:20.407968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.408004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.408071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.408105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.408145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:15:20.408189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.408221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:15:20.408247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:15:20.408307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:20.408348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:15:20.408387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:15:20.410358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.410463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.410525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-10T11:15:20.410624Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-10T11:15:20.410662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.410753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-10T11:15:20.413148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-10T11:15:20.413646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 100
2025-04-10T11:15:20.416119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.416317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.416397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944
2025-04-10T11:15:20.420094Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap
2025-04-10T11:15:20.441295Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265])
2025-04-10T11:15:20.444629Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-10T11:15:20.448154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.448324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1
2025-04-10T11:15:20.449649Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 100, wait until txId: 100
TestModificationResults wait txId: 101
2025-04-10T11:15:20.452719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.453013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.453118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944
2025-04-10T11:15:20.455804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.455964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 100
2025-04-10T11:15:20.456295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-04-10T11:15:20.456373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
TestWaitNotification wait txId: 101
2025-04-10T11:15:20.459528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-10T11:15:20.459575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-04-10T11:15:20.460237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-04-10T11:15:20.460365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-04-10T11:15:20.460406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:288:2279]
2025-04-10T11:15:20.460662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-10T11:15:20.460719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-10T11:15:20.460742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:288:2279]
TestWaitNotification: OK eventTxId 100
TestWaitNotification: OK eventTxId 101
2025-04-10T11:15:20.461236Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.461448Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 237us result status StatusPathDoesNotExist
2025-04-10T11:15:20.464390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:15:20.466869Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.467107Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 254us result status StatusPathDoesNotExist
2025-04-10T11:15:20.467268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:15:20.467820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.468024Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 164us result status StatusSuccess
2025-04-10T11:15:20.474854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::RedefineErrors
>> TSchemeShardSubDomainTest::CopyRejects [GOOD]
>> TSchemeShardSubDomainTest::ConsistentCopyRejects
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:20.038497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:20.038622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.038685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:20.038737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:20.038789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:20.038818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:20.038892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.038989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:20.042739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:20.140306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:20.140364Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:20.159017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:20.159341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:20.159555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:20.169029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:20.172366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:20.177884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.179662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:20.187419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.199972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.200085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.202358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:20.202471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.202572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:20.202842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.210583Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:20.350196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.352363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.353384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:20.358396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:20.358501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.366054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.366590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:20.366825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.366889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:20.366931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:20.366975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:20.369465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.369574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:15:20.369613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:15:20.371658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.371735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.371781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.371862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.377231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:20.379256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:15:20.379514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:15:20.381939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.382083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.382129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.383580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:15:20.383643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.386397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.386572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:15:20.390681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.390745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.390968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.391015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:15:20.391310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.391415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:15:20.391536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.391581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.391642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.391675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.391713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:15:20.391756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.391793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:15:20.391823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:15:20.391897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:20.391938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:15:20.391976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:15:20.397799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.397939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.397997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7594046316545
2025-04-10T11:15:20.456747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-04-10T11:15:20.456860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-04-10T11:15:20.457183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.457308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.457377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-04-10T11:15:20.457606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-04-10T11:15:20.457664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-04-10T11:15:20.457831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.457916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-10T11:15:20.457971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-04-10T11:15:20.460132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.460174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.460338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-10T11:15:20.460456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.460514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-04-10T11:15:20.460579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-04-10T11:15:20.460964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.461011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-04-10T11:15:20.461116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-04-10T11:15:20.461153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-10T11:15:20.461191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-04-10T11:15:20.461227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-10T11:15:20.461266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-04-10T11:15:20.461303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-04-10T11:15:20.461354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-04-10T11:15:20.461387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-04-10T11:15:20.461449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-10T11:15:20.461490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-04-10T11:15:20.461550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-04-10T11:15:20.461598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-04-10T11:15:20.462133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-04-10T11:15:20.462236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-04-10T11:15:20.462273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-04-10T11:15:20.462313Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-04-10T11:15:20.462405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:20.463024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-04-10T11:15:20.463102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-04-10T11:15:20.463133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-04-10T11:15:20.463157Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-04-10T11:15:20.463188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-10T11:15:20.463260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-04-10T11:15:20.466373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-04-10T11:15:20.466456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestWaitNotification wait txId: 100
2025-04-10T11:15:20.466717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-04-10T11:15:20.466782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
2025-04-10T11:15:20.467329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-04-10T11:15:20.467436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-04-10T11:15:20.467485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298]
TestWaitNotification: OK eventTxId 100
2025-04-10T11:15:20.467961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.468149Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusSuccess
2025-04-10T11:15:20.474848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.477922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.478276Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 300us result status StatusSuccess
2025-04-10T11:15:20.478834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateDropNbs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:20.166045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:20.166133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.166218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:20.166261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:20.166301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:20.166330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:20.166400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.166501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:20.166857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:20.250945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:20.251000Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:20.266382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:20.266634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:20.266781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:20.276202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:20.276517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:20.277357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.277753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:20.290835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.292316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.292427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.292656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:20.292707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.292775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:20.292934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.302941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:20.474906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.475235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.475549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:20.475855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:20.475936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.479843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.479990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:20.480177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.480244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:20.480280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:20.480330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:20.486111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.486208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:15:20.486247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:15:20.490852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.490956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.491018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.491090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.503480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:20.506029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:15:20.506300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:15:20.507661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.507861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.507923Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.508299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:15:20.508370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:20.508563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.508678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:15:20.511413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.511475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.511718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.511783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:15:20.512096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.512151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:15:20.512302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.512343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.512419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:20.512470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.512514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:15:20.512565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:20.512605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:15:20.512644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:15:20.512729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:20.512782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:15:20.512824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:15:20.515818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.515951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:20.516000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ureStatus> execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551
2025-04-10T11:15:20.758508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944
2025-04-10T11:15:20.758543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944
2025-04-10T11:15:20.758575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128
2025-04-10T11:15:20.760886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.762965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.763076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.763223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.763277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.763323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944
2025-04-10T11:15:20.763371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1
2025-04-10T11:15:20.763492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:20.765061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816
2025-04-10T11:15:20.765184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 101 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002
2025-04-10T11:15:20.765510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.765608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:20.765659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944
2025-04-10T11:15:20.765972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240
2025-04-10T11:15:20.766024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944
2025-04-10T11:15:20.766229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:20.766302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8
2025-04-10T11:15:20.766355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 101
2025-04-10T11:15:20.768359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.768428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.768596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-10T11:15:20.768714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.768753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-04-10T11:15:20.768792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-04-10T11:15:20.769107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.769153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState
2025-04-10T11:15:20.769270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-10T11:15:20.769304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:20.769358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-10T11:15:20.769413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:20.769452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-04-10T11:15:20.769504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:20.769546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-04-10T11:15:20.769577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-04-10T11:15:20.769771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9
2025-04-10T11:15:20.769802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1
2025-04-10T11:15:20.769826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-04-10T11:15:20.769845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-04-10T11:15:20.770460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:20.770534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:20.770566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-04-10T11:15:20.770605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-04-10T11:15:20.770635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:20.771300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:20.771393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:20.771430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-04-10T11:15:20.771453Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-04-10T11:15:20.771478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8
2025-04-10T11:15:20.771544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1
2025-04-10T11:15:20.771588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:567:2476]
2025-04-10T11:15:20.778179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-10T11:15:20.778616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-10T11:15:20.778690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-10T11:15:20.778722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:568:2477]
TestWaitNotification: OK eventTxId 101
2025-04-10T11:15:20.779256Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:20.779482Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 242us result status StatusSuccess
2025-04-10T11:15:20.780018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:20.150007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:20.150099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.150205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:20.150250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:20.150307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:20.150337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:20.150410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:20.150495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:20.150840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:20.245025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:20.245089Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:20.270298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:20.270653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:20.270833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:20.290985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:20.291231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:20.291973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.292326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:20.294343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.295598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:20.295663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:20.295856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:20.295898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:20.295950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:20.296090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.309018Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:20.496727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:20.496994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.497230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:20.497497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:20.497558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.500194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:20.500323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:20.500500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:20.500569Z node 1 :FLAT_TX_SCHEMESHARD INFO:
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.500601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.500635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.502589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.502658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.502696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.504387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.504433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.504477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.504534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.508470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.510487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.510713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.511748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.511892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.511944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.512376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.512443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.512644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.512760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.519092Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.519140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.519321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.519361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.519594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.519638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.519775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.519814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.519880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.519914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.519951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.520006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.520045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.520084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.520149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.520190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.520220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.522559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.522686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.522721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-04-10T11:15:20.678380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:463:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:466:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:467:2058] recipient: [1:465:2417] Leader for TabletID 72057594046678944 is [1:468:2418] sender: [1:469:2058] recipient: [1:465:2417] 2025-04-10T11:15:20.740746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.740873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.740915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.740950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.740985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.741013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.741093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.741176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.741517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.761410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.762706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.762912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.763029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.763069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.763213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.764059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:20.764144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:20.764233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.764753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.765207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:15:20.765491Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.765603Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:15:20.765801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.765909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.766092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:20.766132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:20.766206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:20.766251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:20.766373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.766462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.766665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-10T11:15:20.767009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.767134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.769069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.769159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.771721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.771868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.772023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.772212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.772938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.773143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.773441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.773645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.773725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.773784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 
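The state codes that recur throughout these traces ("Change state for txid ... 2 -> 3", "3 -> 128", "128 -> 240") mark the suboperation phases visible above: TCreateParts, then TConfigureParts, then TPropose waiting on the coordinator's plan step, then TDone; the TEvUpdateAck handling that follows counts the in-flight publications down to zero before TEvNotifyTxCompletionResult is sent to the waiting subscriber. Below is a minimal standalone C++ sketch of that progression as it can be inferred from the log alone; every name in it is a hypothetical stand-in, not the actual YDB schemeshard types or API.

// Illustrative model of the suboperation lifecycle seen in these traces.
// Hypothetical names throughout; not the real YDB schemeshard implementation.
#include <cstdio>

// Numeric states exactly as they appear in "Change state for txid" entries.
enum class ESubOpState : int {
    CreateParts    = 2,   // TCreateParts: create shards, or skip if there are none
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // NSubDomainState::TPropose: wait for the coordinator plan step
    Done           = 240, // TDone: finish once every published path is acked
};

// One suboperation, e.g. the "101:0" in the trace above.
struct TSubOperation {
    ESubOpState State = ESubOpState::CreateParts;
    int PublicationsInFlight = 0; // paths still waiting for TEvUpdateAck

    void Progress() {
        switch (State) {
        case ESubOpState::CreateParts:
            // "TCreateParts ... ProgressState no shards to create, do next state"
            printf("Change state 2 -> 3\n");
            State = ESubOpState::ConfigureParts;
            break;
        case ESubOpState::ConfigureParts:
            printf("Change state 3 -> 128\n");
            State = ESubOpState::Propose;
            break;
        case ESubOpState::Propose:
            // "HandleReply TEvOperationPlan": the coordinator assigned a plan step
            printf("Change state 128 -> 240\n");
            State = ESubOpState::Done;
            break;
        case ESubOpState::Done:
            break;
        }
    }

    // "Handle TEvUpdateAck": one published path version acknowledged.
    void AckPublish() {
        if (PublicationsInFlight > 0 && --PublicationsInFlight == 0) {
            printf("Publication complete, notify & remove\n");
        } else {
            printf("Publication in-flight, count: %d\n", PublicationsInFlight);
        }
    }
};

int main() {
    TSubOperation op;
    op.Progress();               // 2 -> 3
    op.Progress();               // 3 -> 128
    op.Progress();               // 128 -> 240 (plan step received)
    op.PublicationsInFlight = 2; // two paths published, as for txId 101 above
    op.AckPublish();             // count: 1
    op.AckPublish();             // complete -> TEvNotifyTxCompletionResult
    return 0;
}

Compiled on its own (e.g. g++ -std=c++17), this prints the same 2 -> 3 -> 128 -> 240 progression and two-ack publication countdown that the traces above record for txId 101; it is only a reading aid for the log, under the stated assumptions.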
2025-04-10T11:15:20.783522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.783605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.784406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.784488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.784535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.785842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:468:2418] sender: [1:528:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.841119Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:20.841395Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 276us result status StatusSuccess 2025-04-10T11:15:20.841769Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.842390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:20.842539Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 211us result status StatusSuccess 2025-04-10T11:15:20.842899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.038907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.039007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.039070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.039114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.039157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.039185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.039253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.039359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.039755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.153509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.153567Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T11:15:20.172091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.172339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.172517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.180477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.180796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.181547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.182110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.187241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.200804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.200887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.201035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.201097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.201199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.201349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.219494Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.394488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.394772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.395028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.395325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.395387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.398901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.399039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.399226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.399287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.399321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.399353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.407082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.407154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.407229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.409437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.409485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.409525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.409584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.413407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.415866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.416064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.417205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.417353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.417398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.417704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.417760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.417943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.418040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.420415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.420530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.420700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.420743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.421155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.421206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.421308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.421363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.421425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.421459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.421496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.421535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.421570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.421598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.421664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.421723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.421757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.423712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.423860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.423900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tion IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:15:20.837296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:20.837361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:15:20.837407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:15:20.837634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 9 2025-04-10T11:15:20.837683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:15:20.837713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-04-10T11:15:20.837738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-04-10T11:15:20.838588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:20.838689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:20.838749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:20.838795Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-10T11:15:20.838832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:20.839576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:20.839670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:20.839701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:20.839728Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:15:20.839769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 8 2025-04-10T11:15:20.839865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:15:20.842796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:20.843784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-04-10T11:15:20.844049Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:20.844094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:20.844192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:20.844214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:15:20.844269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:20.844303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:20.844880Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:20.844976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:20.845026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:20.845076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:938:2772] 2025-04-10T11:15:20.845263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:20.845309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:20.845351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:938:2772] 2025-04-10T11:15:20.845456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:20.845477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:938:2772] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:20.845978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:20.846212Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 239us result status StatusSuccess 2025-04-10T11:15:20.846677Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 
72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.847247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:20.847475Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 205us result status StatusSuccess 2025-04-10T11:15:20.847826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.848368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:20.848521Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 170us result status StatusSuccess 2025-04-10T11:15:20.848850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.179120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.179231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.179308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.179355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.179395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.179424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.179494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.179565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.179987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.275269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.275341Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-10T11:15:20.296514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.296705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.296847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.307176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.307406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.308037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.308362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.310196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.311661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.311727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.311946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.312018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.312083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.312278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.319617Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.436113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.436381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.436604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.436812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.436865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.443523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.443642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.443786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.443844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.443879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.443912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.445894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.445947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.445982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.447623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.447664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.447721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.447770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.457148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.459277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.459456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.460596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.460723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.460767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.461076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.461129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.461298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.461413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.463335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.463382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.463542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.463583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.463880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.463931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.464015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.464070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.464130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.464162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.464197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.464238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.464270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.464302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.464368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.464410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.464442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.466710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.466838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.466890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rd: 72057594046678944, cookie: 103 2025-04-10T11:15:21.050917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:21.051066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:21.051391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:21.051447Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:15:21.051528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:21.051554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:21.051592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:21.051620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:21.051671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:15:21.051731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2443] message: TxId: 103 2025-04-10T11:15:21.051768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:21.051812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:15:21.051841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:21.051932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:21.053858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:21.053902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:488:2444] TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:21.054516Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.054728Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 230us result status StatusSuccess 2025-04-10T11:15:21.055224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:21.055860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.056126Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 227us result status StatusSuccess 2025-04-10T11:15:21.056564Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:21.057117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.057289Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 178us result status StatusSuccess 2025-04-10T11:15:21.057629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:21.058353Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.058565Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 376us result status StatusSuccess 2025-04-10T11:15:21.058935Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-04-10T11:15:10.068173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:10.068427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:10.068652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000afe/r3tmp/tmpu9HvjK/pdisk_1.dat 2025-04-10T11:15:10.550408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:10.644327Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-04-10T11:15:10.644456Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:15:10.644514Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-04-10T11:15:10.644660Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-04-10T11:15:10.644776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:10.688396Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:15:10.689657Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:15:10.689897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:10.690032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:10.703799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:10.793831Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:15:10.793898Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:15:10.794092Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:642:2550] 2025-04-10T11:15:10.934381Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:15:10.934506Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:15:10.935222Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:15:10.935388Z node 1 :TX_PROXY 
DEBUG: Actor# [1:642:2550] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:15:10.935822Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:15:10.936039Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:15:10.936174Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:15:10.938274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:10.938880Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:15:10.939442Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:15:10.939519Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:15:10.975475Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:10.976882Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:10.977520Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:667:2571] 2025-04-10T11:15:10.977832Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:11.030957Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:11.031593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:11.031685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:11.033245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:15:11.033344Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:15:11.033414Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:15:11.033828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:11.033953Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:11.034024Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:683:2571] in generation 1 2025-04-10T11:15:11.045172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:11.079903Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:15:11.080116Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:11.080226Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2581] 2025-04-10T11:15:11.080256Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:15:11.080288Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:15:11.080319Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:15:11.080514Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:667:2571], Recipient [1:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:11.080571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:11.080876Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:15:11.080959Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:15:11.081016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:15:11.081068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:11.081102Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:15:11.081136Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:15:11.081183Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:15:11.081213Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:15:11.081250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:15:11.081678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:674:2575], Recipient [1:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:11.081720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:11.081764Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:674:2575], sessionId# [0:0:0] 2025-04-10T11:15:11.081819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:674:2575] 2025-04-10T11:15:11.081859Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:11.081982Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:15:11.082216Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:15:11.082280Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:15:11.082413Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:15:11.082469Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:15:11.082521Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:15:11.082560Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:15:11.082587Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:15:11.082875Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:11.082922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:15:11.082954Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:15:11.082997Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:15:11.083055Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:15:11.083082Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:15:11.083116Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:15:11.083150Z node 1 :TX_DATASH ... ocessing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:15:20.175056Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-04-10T11:15:20.175127Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-04-10T11:15:20.176686Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:890:2720], Recipient [2:666:2571]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-04-10T11:15:20.176828Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-04-10T11:15:20.176988Z node 2 :KQP_COMPUTE WARN: SelfId: [2:890:2720]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:666:2571] 2025-04-10T11:15:20.177046Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Enqueue for resolve 72075186224037888 2025-04-10T11:15:20.177111Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-04-10T11:15:20.177173Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-04-10T11:15:20.177285Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-04-10T11:15:20.177511Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-04-10T11:15:20.177554Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:890:2720]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-04-10T11:15:20.177648Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:167 :TEvTerminateFromFetcher: [2:890:2720]/[2:888:2718] 2025-04-10T11:15:20.177739Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=. TraceId : 01jrfpjn2rb1syqz0pvccjth0p. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-04-10T11:15:20.177913Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-04-10T11:15:20.178050Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-10T11:15:20.180677Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:15:20.180822Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:101;event=TEvTerminateFromCompute;sender=[2:888:2718];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-10T11:15:20.180883Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:289;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-04-10T11:15:20.181064Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-04-10T11:15:20.181302Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:888:2718], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 222917 Tasks { TaskId: 1 CpuTimeUs: 220724 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 16 BuildCpuTimeUs: 220708 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-x5vqckr7qy" NodeId: 2 CreateTimeMs: 1744283719393 } MaxMemoryUsage: 1048576 } 2025-04-10T11:15:20.181416Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:888:2718] 2025-04-10T11:15:20.181508Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-10T11:15:20.181581Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:889:2719], task: 2 2025-04-10T11:15:20.181739Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:20.181852Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=. TraceId : 01jrfpjn2rb1syqz0pvccjth0p. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2025-04-10T11:15:20.181958Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=. TraceId : 01jrfpjn2rb1syqz0pvccjth0p. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [2:883:2692], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-04-10T11:15:20.182075Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-10T11:15:20.182205Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-10T11:15:20.185425Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:15:20.185681Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, ActorId: [2:857:2692], ActorState: ExecuteState, TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Create QueryResponse for error on request, msg: 2025-04-10T11:15:20.186890Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T11:15:20.186964Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-10T11:15:20.187070Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-10T11:15:20.187153Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-10T11:15:20.187201Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-04-10T11:15:20.187314Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:925:2751] SnapshotReq marker# P0 2025-04-10T11:15:20.187826Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-10T11:15:20.188017Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-04-10T11:15:20.188109Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-10T11:15:20.188205Z node 2 :TX_PROXY DEBUG: Actor# [2:925:2751] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-10T11:15:20.188409Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:20.188465Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T11:15:20.188524Z node 2 :KQP_EXECUTER INFO: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:15:20.188623Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:15:20.188690Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. 
Ctx: { TraceId: 01jrfpjn2rb1syqz0pvccjth0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:15:20.188837Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:925:2751], Recipient [2:666:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-10T11:15:20.189189Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:594:2519], selfId: [2:57:2104], source: [2:857:2692] 2025-04-10T11:15:20.189826Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZWJhYzk1MmQtNTc5NzRiOTktZWNlMTU4ZTItMTFmN2IzYzA=, workerId: [2:857:2692], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.564331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.564408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.564457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.564497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.564533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.564557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.564598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.564666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.565016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.669584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.669641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.701430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:15:20.701817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.702098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.719608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.719979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.720744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.721168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.723631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.725295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.725399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.725641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.725698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.725761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.726035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.734481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.876645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.876888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.877141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.877369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.877427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.880021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.880183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.880366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:15:20.880419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.880463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.880501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.883032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.883110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.883167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.885312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.885376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.885422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.885487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.889649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.892270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.892542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.893540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.893697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.893745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.894090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.894142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.894365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.894463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-10T11:15:20.896723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.896774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.896984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.897026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.897334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.897389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.897496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.897533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.897591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.897627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.897663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.897703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.897751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.897790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.897876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.897918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.897953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.900568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.900724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.900769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-10T11:15:21.215541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:21.215584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:21.215622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:21.215662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:21.217551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:21.217671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:21.217701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:21.217745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:21.217784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:21.217865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:15:21.220156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:21.220236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:21.220266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:21.220316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:21.220816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:21.222003Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T11:15:21.224431Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-10T11:15:21.224989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:21.225346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 
2025-04-10T11:15:21.225812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:21.226113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:21.226531Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:21.226684Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-04-10T11:15:21.227736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:21.227960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-04-10T11:15:21.228242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:21.228356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:21.229850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:21.229925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:21.230033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:21.231530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:21.231595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:21.231754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:21.232321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:21.232629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:21.232908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:21.232971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:21.235640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:21.235708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:21.236285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-04-10T11:15:21.236326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:21.236750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:21.236810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:21.238391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:21.238565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:21.238614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:21.238712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:21.239016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:21.240639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:15:21.240948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:21.240996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:21.241524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:21.241637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:21.241674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:540:2495] TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:21.259175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.259436Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 275us result status StatusPathDoesNotExist 2025-04-10T11:15:21.259623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:21.260382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:21.260559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 195us result status StatusPathDoesNotExist 2025-04-10T11:15:21.260710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17269, MsgBus: 61375 2025-04-10T11:15:14.450232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641506575769011:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:14.450578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b4/r3tmp/tmpKpFmkb/pdisk_1.dat 2025-04-10T11:15:14.857100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:14.857195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:14.857340Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:14.864013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17269, node 1 2025-04-10T11:15:14.976590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:14.976616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:14.976626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:14.976746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61375 TClient is connected to server localhost:61375 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:15.605733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.646672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:15.676848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.856506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:16.038638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:16.122097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:17.892458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641519460672681:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.892583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.269001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.352632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.395130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.435431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.488146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.544039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.617810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641523755640494:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.617886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.618311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641523755640499:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.623516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:18.636927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641523755640501:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:18.735389Z node 1 :TX_PROXY ERROR: Actor# [1:7491641523755640557:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:19.450316Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641506575769011:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:19.450391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27223, MsgBus: 16310 2025-04-10T11:15:13.915502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641505726881287:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:13.916011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b6/r3tmp/tmpNAZGOd/pdisk_1.dat 2025-04-10T11:15:14.391253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:14.406408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:14.406541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:14.411611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27223, node 1 2025-04-10T11:15:14.512104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:14.512235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:14.512242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:14.512387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16310 TClient is connected to server localhost:16310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:15.198763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:15:15.226824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:15.439808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:15:15.615308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:15:15.718520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:17.435920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641522906752115:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.436034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:17.791631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.827841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.870356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.911082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:17.981856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.056280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.114373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641527201719931:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.114470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.114574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641527201719936:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.119540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:18.129838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641527201719938:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:18.201919Z node 1 :TX_PROXY ERROR: Actor# [1:7491641527201719992:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:18.917362Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641505726881287:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:18.917432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::RangeLimitRead+QueryService >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> TSchemeShardSubDomainTest::CreateAndWait ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:22.024064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:22.024162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:22.024229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:22.024277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:22.024318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:22.024349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:22.024416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:22.024501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:22.024887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:22.114818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:22.114878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:22.130649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:22.130953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:22.131115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:22.139512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:22.139762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:22.140328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.140574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:22.142084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:22.143212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:22.143257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:22.143458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:22.143507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:22.143544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:22.143701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.150119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:22.274226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:22.274490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.274732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:22.274938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:22.274986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.277061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.277200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:22.277395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.277450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:22.277489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:22.277536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:22.279562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.279615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:22.279649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:22.281269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.281308Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.281369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:22.281422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.284202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:22.285861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:22.286044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:22.286991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.287109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:22.287154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:22.287442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:22.287493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:15:22.287673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:22.287768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:22.295361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:22.295422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:22.295606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:22.295649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:22.295864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.295904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:22.296009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:22.296055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.296097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:22.296123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.296171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:22.296209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.296236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:22.296260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:22.296333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:22.296367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:22.296393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:22.304275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:22.304409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:22.304445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:573:2527] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-10T11:15:22.575665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:22.575874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.576035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:22.576212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:22.576255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.578394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:22.578519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-10T11:15:22.578671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.578726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:22.578762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-04-10T11:15:22.578820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-04-10T11:15:22.581239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.581287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:22.581334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-04-10T11:15:22.583157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.583220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.583262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-04-10T11:15:22.583312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-10T11:15:22.583435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:22.585126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-10T11:15:22.585238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-04-10T11:15:22.585591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.585714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:22.585755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-10T11:15:22.586009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-04-10T11:15:22.586087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-10T11:15:22.586276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:22.586350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:22.588632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:22.588678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:22.588880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:22.588924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 2 FAKE_COORDINATOR: Erasing txId 108 2025-04-10T11:15:22.589272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.589352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-10T11:15:22.589449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T11:15:22.589480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:22.589517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T11:15:22.589563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:22.589596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 
2025-04-10T11:15:22.589632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:22.589661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-10T11:15:22.589680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-10T11:15:22.589735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:22.589772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-04-10T11:15:22.589798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-04-10T11:15:22.590228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:22.590322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:22.590352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-04-10T11:15:22.590378Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-10T11:15:22.590407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:22.590470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-04-10T11:15:22.592974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-10T11:15:22.593184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-10T11:15:22.593213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-10T11:15:22.593693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-10T11:15:22.593758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T11:15:22.593783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:598:2552] TestWaitNotification: OK eventTxId 108 2025-04-10T11:15:22.594370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:22.594543Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 181us result status StatusSuccess 2025-04-10T11:15:22.594902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> KqpQueryPerf::Replace+QueryService-UseSink >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.045769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.045881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.045939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.045988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.046035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.046064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.046171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.046257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T11:15:20.046673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.140547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.140598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.162400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.162680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.162856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.173253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.173527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.178235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.180447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.191514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.199991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.200151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.202392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.202513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.202646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.202911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.210835Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.388944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.389217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.389461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.389697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.389757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.392033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:15:20.392158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.392316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.392373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.392420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.392486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.394664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.394729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.394766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.401356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.401425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.401467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.401545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.416117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.423502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.423715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.424704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.424827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.424872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.425169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.425220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.425409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.425498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.427811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.427881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.428075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.428113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.428358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.428399Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.428498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.428544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.428599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.428630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.428673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.428715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.428750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.428779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.428854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.428894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.428942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.433468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.433618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.433659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
46678944 2025-04-10T11:15:22.831992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 436 RawX2: 8589936991 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-10T11:15:22.832039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.832070Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.832112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-10T11:15:22.832148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:15:22.832169Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-04-10T11:15:22.834366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.834956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.835050Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-04-10T11:15:22.835123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:22.835175Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-04-10T11:15:22.835280Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-10T11:15:22.835327Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2025-04-10T11:15:22.837764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.837832Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-10T11:15:22.837964Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:15:22.838011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:15:22.838062Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:15:22.838098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:15:22.838207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-10T11:15:22.838313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:646:2568] message: TxId: 106 2025-04-10T11:15:22.838374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:15:22.838424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 106:0 2025-04-10T11:15:22.838463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-10T11:15:22.838646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T11:15:22.838693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:22.841113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:15:22.841187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:822:2720] TestWaitNotification: OK eventTxId 106 2025-04-10T11:15:22.842126Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:22.842430Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 352us result status StatusSuccess 2025-04-10T11:15:22.842905Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:22.843698Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:22.843919Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 254us result status StatusSuccess 2025-04-10T11:15:22.844295Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:22.845106Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:22.845305Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusSuccess 2025-04-10T11:15:22.845825Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 
SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SchemeQuotas
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD]
>> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk
>> KqpQueryPerf::RangeRead+QueryService
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:23.283120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:23.283261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.283355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:23.283417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:23.283473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:23.283528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:23.283606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.283734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-04-10T11:15:23.284249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:23.368218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:23.368295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:23.396778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:23.397080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:23.397230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:23.435241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:23.435557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:23.436399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.437031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:23.439933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.441596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:23.441674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.441897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:23.441942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:23.441982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:23.442141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.449634Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:23.575845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:23.576095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.576316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:23.576510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:23.576567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.579274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:15:23.579462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:23.579714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.579807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:23.579854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:23.579898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:23.583148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.583227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:23.583336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:23.587109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.587177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.587236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.587315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.591571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:23.599130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:23.599365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:23.600526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.600699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:23.600756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.601132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:23.601195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.601430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:23.601534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:23.616586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:23.616665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:23.616909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.616960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:23.617258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.617390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:23.617537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:23.617583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.617651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:23.617700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.617748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:23.617798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.617843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:23.617879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:23.617964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:23.618011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:23.618050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:23.621015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:23.621176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:23.621222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:15:23.621272Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:15:23.621360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
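The "Change state for txid" entries above walk a single sub-operation through schemeshard's numbered states, 2 -> 3 -> 128 -> 240. A minimal Python sketch of that progression follows; the state names are inferred from the neighbouring ProgressState messages in this log, not taken from the schemeshard sources, so treat the mapping as illustrative:

import re

# State numbers observed in this log, with names guessed from adjacent
# ProgressState lines (e.g. "TCreateParts opId# 1:0 ProgressState" before 2 -> 3).
OBSERVED_STATES = {
    2: "TCreateParts",
    3: "TConfigureParts",
    128: "TPropose",
    240: "TDone",
}

CHANGE = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

def explain_transition(line: str):
    """Render a 'Change state for txid 1:0 2 -> 3' entry with readable names."""
    m = CHANGE.search(line)
    if not m:
        return None
    txid, src, dst = m[1], int(m[2]), int(m[3])
    name = lambda s: OBSERVED_STATES.get(s, f"state{s}")  # e.g. 129 stays "state129"
    return f"txid {txid}: {name(src)} -> {name(dst)}"

Applied to the entries above, this yields lines such as "txid 1:0: TCreateParts -> TConfigureParts".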
2025-04-10T11:15:23.621520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:15:23.639509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:15:23.640312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-10T11:15:23.644649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:23.645017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.645154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.645706Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:15:23.669225Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:15:23.670229Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:15:23.675334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:23.675550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-10T11:15:23.677134Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T11:15:23.677483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:23.677547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-10T11:15:23.678097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:23.678261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:23.678309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:23.678854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:23.679147Z node 1 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 294us result status StatusPathDoesNotExist 2025-04-10T11:15:23.679358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD]
>> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD]
>> TSchemeShardSubDomainTest::CreateAndWait [GOOD]
>> TSchemeShardSubDomainTest::LS
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:23.449479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:23.449581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.449643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:23.449748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:23.449796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:23.449825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:23.449892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.450010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:23.450437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:23.540884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:23.540963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:23.564918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:15:23.565316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:23.565548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:23.581074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:23.581432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:23.582463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.582930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:23.585103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.587107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:23.587182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.587451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:23.587507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:23.587556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:23.587772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.595507Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:23.741631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:23.741936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.742283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:23.742563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:23.742627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.745711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.745872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:23.746069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
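Every timestamped entry in this output has the same shape: ISO-8601 timestamp, node id, component tag, severity, free-form message. A small Python sketch for splitting concatenated entries back apart; the field layout is inferred from the lines themselves (untimestamped lines such as "FAKE_COORDINATOR: ..." are deliberately skipped, and severities beyond those visible here would need to be added):

import re

# Shape inferred from this log:
#   "2025-04-10T11:15:23.746133Z node 1 :FLAT_TX_SCHEMESHARD INFO: <message>"
ENTRY = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT): "
    # Lazy match up to the next timestamp copes with entries run together
    # on one physical line, as in this capture.
    r"(?P<message>.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :|$)",
    re.S,
)

def split_entries(text: str):
    """Yield one dict (ts/node/component/level/message) per entry found."""
    for m in ENTRY.finditer(text):
        yield m.groupdict()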
2025-04-10T11:15:23.746133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:23.746208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:23.746258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:23.748592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.748658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:23.748697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:23.750869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.750925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.750973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.751044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.754863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:23.757787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:23.758012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:23.759149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.759306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:23.759357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.759708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:23.759780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:23.759953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:23.760051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
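The publish/ack handshake recorded above ("Publication still in progress, tx: N, publications: K", per-path "AckPublish", then "Publication complete, notify & remove") can be checked mechanically. A hedged sketch whose patterns are taken only from the message variants visible in this log:

import re

STILL = re.compile(r"Publication still in progress, tx: (\d+), publications: (\d+)")
ACK = re.compile(r"AckPublish, at schemeshard: \d+, txId: (\d+),")
DONE = re.compile(r"Publication complete, notify & remove, at schemeshard: \d+, txId: (\d+)")

def check_publications(text: str) -> dict[int, bool]:
    """True per txId when a 'complete' entry exists and acks cover publications."""
    expected, acked, done = {}, {}, set()
    for m in STILL.finditer(text):
        expected[int(m[1])] = int(m[2])
    for m in ACK.finditer(text):
        acked[int(m[1])] = acked.get(int(m[1]), 0) + 1
    for m in DONE.finditer(text):
        done.add(int(m[1]))
    return {tx: tx in done and acked.get(tx, 0) >= n for tx, n in expected.items()}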
2025-04-10T11:15:23.762767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:23.762819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:23.763038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.763082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:23.763358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.763406Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:23.763512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:23.763554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.763624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:23.763663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.763704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:23.763780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:23.763817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:23.763846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:23.763917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:23.763954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:23.763993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:23.766536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:23.766659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:23.766699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
txid 100:0 3 -> 128 2025-04-10T11:15:23.818701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:23.818929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:23.820324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.820385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.820429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:23.820483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-10T11:15:23.820676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:23.837412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:15:23.837593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:15:23.838086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:23.838233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:23.838285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:23.838577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:15:23.838654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:23.838869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:23.838930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:23.838984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:23.842759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:23.842812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:15:23.842986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:23.843119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:23.843166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:15:23.843207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:15:23.843563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:23.843604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:15:23.843730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:23.843769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:23.843826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:23.843860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:23.843914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:15:23.843958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:23.843994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:15:23.844039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:15:23.844131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:23.844174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T11:15:23.844213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:23.844244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:23.845224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:23.845348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:23.845417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:23.845476Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:23.845531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:23.846344Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:23.846471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:23.846502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:23.846528Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:23.846571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:23.846640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T11:15:23.856686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:23.857399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-10T11:15:23.857791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:23.857847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:23.857956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:23.857981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:23.858577Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:23.858709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:23.858756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:311:2302] 2025-04-10T11:15:23.859016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:23.859137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:23.859178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:23.859708Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:23.859973Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 284us result status StatusSuccess 
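The SCHEMESHARD_DESCRIBE entries here time each path description ("took 294us result status StatusPathDoesNotExist", "took 284us result status StatusSuccess"). A short sketch for pulling those figures out of a raw capture, again assuming only the format visible in these lines:

import re

TOOK = re.compile(
    r'describe path "(?P<path>[^"]+)" took (?P<us>\d+)us result status (?P<status>\w+)'
)

def describe_latencies(text: str):
    """Collect (path, microseconds, status) triples from raw log text."""
    return [(m["path"], int(m["us"]), m["status"]) for m in TOOK.finditer(text)]

def slowest(text: str, n: int = 5):
    """The n slowest describes; handy for spotting outliers across a long run."""
    return sorted(describe_latencies(text), key=lambda t: t[1], reverse=True)[:n]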
2025-04-10T11:15:23.860526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD]
>> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:23.995125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:23.995216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.995281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:23.995333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:23.995388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:23.995418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:23.995482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.995557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:23.995941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:24.081423Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-10T11:15:24.081479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:24.097431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:24.097678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:24.097831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:24.105313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:24.105586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:24.106222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.106578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:24.108552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.109892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.109971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.110280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:24.110351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.110408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:24.110634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.117477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:24.254743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:24.254996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.255222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:24.255449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:24.255526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.257860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.258011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-10T11:15:24.258181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.258239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:24.258278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:24.258309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:24.261621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.261745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:24.261799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:24.265374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.265450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.265507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.265573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.270126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:24.273379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:24.273618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:24.274941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.275122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.275218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.275618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:24.275699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.275907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:24.276037Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:24.278944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.279003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.279237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.279287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:24.279549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.279605Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:24.279733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.279795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.279861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.279914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.279960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:24.280023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.280074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:24.280123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:24.280200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:24.280246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:24.280284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:24.283375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.283508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.283547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.357311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:24.357737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:15:24.357844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:24.358029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:24.358090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:24.358180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:24.361537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.361592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:24.361733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:24.361855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.361907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:24.361972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:15:24.362085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.362165Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:15:24.362263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:24.362300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:24.362352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:24.362384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:24.362424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:24.362469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:24.362505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:24.362539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:24.362619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:24.362655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:24.362685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:15:24.362714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T11:15:24.364177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:24.364272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:24.364320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:24.364359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:15:24.364392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:24.366926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:24.367052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:24.367092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:24.367129Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:15:24.367161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:24.367255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:24.370291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:24.371546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-10T11:15:24.371847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:24.371899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:24.372045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:24.372071Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:24.372591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:24.372705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:24.372776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:24.372818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:331:2322] 2025-04-10T11:15:24.372884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:24.372915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:24.373508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:24.373814Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 310us result status StatusSuccess 2025-04-10T11:15:24.374393Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.375020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:24.375188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 190us result status StatusSuccess 2025-04-10T11:15:24.375555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:23.983345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:23.983456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.983533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:23.983583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:23.983648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:23.983687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:23.983754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:23.983858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:23.984398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:24.062053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:24.062117Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-10T11:15:24.079071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:24.079340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:24.079542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:24.087221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:24.087528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:24.088227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.088580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:24.090539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.091986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.092047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.092261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:24.092308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.092348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:24.092500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.099585Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:24.236019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:24.236334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.236579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:24.236818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:24.236874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.240378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.240517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:24.240734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.240791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:24.240826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:24.240860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:24.244798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.244873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:24.244918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:24.247487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.247545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.247588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.247657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.250825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:24.253205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:24.253410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:24.254485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.254619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.254685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.255037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:24.255103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.255275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:24.255391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:24.257879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.257946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.258182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.258230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:24.258483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.258531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:24.258630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.258671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.258733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.258768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.258803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:24.258841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.258875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:24.258905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:24.258968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:24.259007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:24.259051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:24.261140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.261250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.261300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:24.323509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:24.323536Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:24.323564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:24.324240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:24.324321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:24.324347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:24.324373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:24.324399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-10T11:15:24.324449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-04-10T11:15:24.324476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:276:2267] 2025-04-10T11:15:24.326393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.326439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.326465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.326489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.326512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.326536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:24.328025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:24.328141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:24.328186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:24.328214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:277:2268] 2025-04-10T11:15:24.328378Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 
2025-04-10T11:15:24.329269Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-10T11:15:24.329398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:24.329629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:15:24.329820Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-10T11:15:24.329992Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-04-10T11:15:24.330076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.330272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:24.330423Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-10T11:15:24.330541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:24.330655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:24.330789Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-10T11:15:24.331025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-10T11:15:24.331146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:24.331449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:24.331576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:24.331846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:24.331972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:24.332218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:24.332255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-04-10T11:15:24.332351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:24.332935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:24.332980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:24.333072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:24.333874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:24.336022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:24.336162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:24.336241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:24.338320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:24.338435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:24.338553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:24.338671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:24.339352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:24.339542Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 204us result status StatusPathDoesNotExist 2025-04-10T11:15:24.339736Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:24.340132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:24.340286Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2025-04-10T11:15:24.340642Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.037459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.037565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.037633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.037677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.038740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.038811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.038892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.038975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.042279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.138164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.138228Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.155881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.156182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.156371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.184539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.184811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.185563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.185957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.195090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.200223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.200302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.202364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.202451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.202516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.202721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.210335Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.354544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.354844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.355108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.355343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.355425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.364607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.364751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.364926Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.364982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.365018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.365054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.367498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.367619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.367670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.373458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.373515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.373558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.373631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.377127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.383876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.384088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.385202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.385370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.385443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.385772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.385841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.386423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.386551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.391082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.391128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.391314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.391365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.391609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.391661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.391749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.391782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.391840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.391876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.391911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.391944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.391976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.392003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.392059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.392090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.392117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.401222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.401381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.401424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
sage: Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:24.124129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:24.124492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:24.124753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-10T11:15:24.125771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.125950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-10T11:15:24.126063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:24.126213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-10T11:15:24.126489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-10T11:15:24.126812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-10T11:15:24.127214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-10T11:15:24.127440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:24.132021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.132771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.133177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:24.133342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:24.133725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-10T11:15:24.134111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:24.134228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-10T11:15:24.134353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-10T11:15:24.134818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.134924Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-10T11:15:24.135086Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.135181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-10T11:15:24.135238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-10T11:15:24.136648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:24.136829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:24.136919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:24.136982Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-10T11:15:24.137038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-10T11:15:24.138542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:24.138669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:24.138701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:24.138736Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:24.138772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-10T11:15:24.138861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:15:24.146441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:24.146556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-10T11:15:24.147032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:24.147352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:24.147401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:24.147469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:24.147522Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:24.147589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T11:15:24.147752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-04-10T11:15:24.147823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:24.147867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:24.147925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:24.148069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-10T11:15:24.148735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:24.148783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:24.149348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:24.151834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:24.153516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:24.153592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-10T11:15:24.153747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:15:24.153837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:824:2742] 2025-04-10T11:15:24.154855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-10T11:15:24.156943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-10T11:15:24.157222Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 370us result status StatusSuccess 2025-04-10T11:15:24.157876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink >> TSchemeShardSubDomainTest::Redefine >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> TSchemeShardSubDomainTest::LS [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:25.260356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:25.260480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:25.260554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:25.260609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:25.260660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:25.260708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:25.260785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:25.260906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:25.261355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:25.358397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:25.358464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:25.376647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:25.376952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:25.377150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:25.392318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:25.392543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:25.393206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:25.393636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:25.395817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.397016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:25.397067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.397497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:25.397548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:25.397595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:25.397844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.407519Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:25.559002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:25.559307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.559633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:25.559903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:25.559971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.562992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:25.563202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:25.563455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.563523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:25.563566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:25.563605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:25.566064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.566133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:25.566211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:25.568352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.568411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.568450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:25.568504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:25.572307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:25.574435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:25.574599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:25.575464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:25.575596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:25.575641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:25.575930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:25.575976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:15:25.576136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:25.576215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:25.578238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:25.578281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:25.578448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.578483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:25.578690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.578725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:25.578815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:25.578854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:25.578913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:25.578942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:25.578976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:25.579011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:25.579053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:25.579078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:25.579136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:25.579170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:25.579200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:25.589497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:25.589714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:25.589769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
trongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:15:25.724922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:15:25.725307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:25.725486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:25.725540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:25.725939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:15:25.726011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:25.726215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:25.726278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:25.726353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:15:25.728684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:25.728791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:25.728988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:25.729168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.729215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:15:25.729256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T11:15:25.729347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.729386Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:15:25.729519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:25.729573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:25.729618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#100:0 progress is 1/1 2025-04-10T11:15:25.729651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:25.729692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:15:25.729767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:25.729818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:15:25.729859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:15:25.730075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:25.730132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T11:15:25.730191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:25.730245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:25.731254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:25.731353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:25.731398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:25.731441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:25.731479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:25.732328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:25.732558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:25.732591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:25.732633Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:25.732665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:25.732733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T11:15:25.736255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:25.737087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T11:15:25.737353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:25.737406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-10T11:15:25.737839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:25.737934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:25.737972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:457:2411] TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:25.738404Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:25.738625Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 232us result status StatusSuccess 2025-04-10T11:15:25.739025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:25.739528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:25.739703Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2025-04-10T11:15:25.740033Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> TSchemeShardSubDomainTest::Redefine [GOOD] |88.2%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> KqpQueryPerf::RangeLimitRead-QueryService >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62100, MsgBus: 1227 2025-04-10T11:15:20.108469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641535696007741:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:20.108496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a0/r3tmp/tmpQTdyIc/pdisk_1.dat 2025-04-10T11:15:20.569944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:20.577345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:20.577449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:20.581189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62100, node 1 2025-04-10T11:15:20.722240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:20.722260Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:20.722268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:20.722387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1227 TClient is connected to server localhost:1227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:21.262094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.297092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:15:21.445276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.594087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.686200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:23.267089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641548580911399:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.267214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.633845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.670848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.699184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.732104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.801415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.851724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.908054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641548580911916:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.908177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.908549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641548580911921:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.912882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:23.927919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641548580911923:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:23.993835Z node 1 :TX_PROXY ERROR: Actor# [1:7491641548580911975:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:25.108589Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641535696007741:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:25.108675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:26.218651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:26.218744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:26.218806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:26.218853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:26.218894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:26.218924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:26.218984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:26.219120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:26.219545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:26.305155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:26.305227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:26.323409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:26.323701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:26.323868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:26.331908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:26.332188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:26.332868Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:26.333221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:26.335106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:26.336387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:26.336483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:26.336671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:26.336715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:26.336765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:26.336917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.344064Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:26.473611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:26.473862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.474080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:26.474394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:26.474463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.476985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:26.477154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:26.477400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.477482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:26.477528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:26.477601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:26.479942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.480012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:26.480060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:26.481880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.481929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.481978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.482037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.486277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:26.488618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:26.488852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:26.490022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:26.490211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:26.490274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.490608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:26.490673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.490865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:26.490969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:26.493337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:26.493392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:26.493616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-10T11:15:26.493677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:26.493966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.494019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:26.494140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:26.494198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.494272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:26.494311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.494352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:26.494418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.494466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:26.494506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:26.494579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:26.494623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:26.494663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:26.497391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:26.497512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:26.497552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:26.758300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:15:26.758335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:26.758368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:26.758401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:26.758574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:26.758610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T11:15:26.758645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:26.758679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:15:26.759387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:26.759470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:26.759508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:26.759544Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:26.759580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:26.760500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:26.760588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:26.760615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:26.760646Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:26.760671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:26.760734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:15:26.762556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:26.762630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2025-04-10T11:15:26.762659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:26.763991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:15:26.764877Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T11:15:26.767593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:26.767964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:26.768652Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:26.769293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:26.769534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:26.770058Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-10T11:15:26.771154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:26.771338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-10T11:15:26.772341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:26.772396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:26.772535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:26.773005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:15:26.773369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:26.773415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:26.773493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:26.779310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:26.779383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:26.782846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:26.782915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:26.783056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:26.783094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:26.783309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:26.783390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:15:26.783726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:15:26.783778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:15:26.784284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:15:26.784409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:15:26.784457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:572:2526] TestWaitNotification: OK eventTxId 104 2025-04-10T11:15:26.785108Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:26.785299Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 208us result status StatusPathDoesNotExist 2025-04-10T11:15:26.785506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:26.786231Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:26.786421Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 200us result status StatusSuccess 2025-04-10T11:15:26.786765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:26.891342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:26.891496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:26.891567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:26.891626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:26.891683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:26.891716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:26.891798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:26.892000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:26.892455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:26.980505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:26.980574Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T11:15:27.006371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:27.006761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:27.006979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:27.017727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:27.017953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:27.018712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.019072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:27.021100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.022387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.022444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.022635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:27.022688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:27.022723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:27.022845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.031337Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:27.206122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:27.206462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.206737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:27.206981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:27.207039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.209953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.210233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:27.210456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.210529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:27.210586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:27.210642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:27.213132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.213206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:27.213257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:27.215467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.215532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.215582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:27.215677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:27.220082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:27.222762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:27.222991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:27.224147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.224308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:27.224376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:27.224737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:27.224811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:27.225072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:27.225191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:27.228279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.228336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:27.228543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.228594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:27.228888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.228940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:27.229098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:27.229142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:27.229212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:27.229250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:27.229295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:27.229393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:27.229444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:27.229480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:27.229661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:27.229709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:27.229755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:27.233156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:27.233312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:27.233370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:27.280663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:27.280695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:15:27.280728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:27.280816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-10T11:15:27.286732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.286812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:27.286868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-10T11:15:27.288024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:27.288200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:27.289990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.290048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.290102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:27.290201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-10T11:15:27.290369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:27.292548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:15:27.292686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:15:27.293087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.293229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:27.293286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
100:0, at tablet# 72057594046678944 2025-04-10T11:15:27.293601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:15:27.293663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:27.293846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:27.293930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:27.294006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:15:27.296409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.296463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:27.296616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:27.296762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.296822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:15:27.296884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T11:15:27.296975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.297035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:15:27.297154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:27.297201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:27.297250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:27.297305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:27.297369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:15:27.297416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:27.297458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:15:27.297508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:15:27.297590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:27.297637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-10T11:15:27.297675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 
2025-04-10T11:15:27.297707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:27.298844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:27.298969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:27.299008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:27.299053Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:27.299106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:27.299793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:27.299889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:27.299924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:27.299954Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:27.299984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:27.300051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-10T11:15:27.300104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:275:2266] 2025-04-10T11:15:27.303355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:27.304470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:27.304578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:27.304617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:276:2267] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:27.305167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:27.305419Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 256us result status 
StatusSuccess 2025-04-10T11:15:27.305876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19681, MsgBus: 12601 2025-04-10T11:15:20.909793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641534786586614:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:20.920359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00169c/r3tmp/tmpS0GWmz/pdisk_1.dat 2025-04-10T11:15:21.376545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:21.388209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:21.388346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:21.416053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19681, node 1 2025-04-10T11:15:21.469879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:21.469911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:21.469919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:21.470106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12601 TClient is connected to server localhost:12601 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:22.117241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:22.155486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:22.331305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:22.503711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:15:22.581439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.320270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641551966457436:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.320446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.606945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.651639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.696408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.733839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.771916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.853713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.938615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641551966457952:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.938711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.938907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641551966457957:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.942313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:24.955745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641551966457959:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:25.042549Z node 1 :TX_PROXY ERROR: Actor# [1:7491641556261425312:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:25.914002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641534786586614:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:25.914073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:24.562676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:24.562804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:24.562872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:24.562923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:24.562986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:24.563043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:24.563131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:24.563229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:24.563629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:24.654416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:24.654482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:24.671613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:24.671915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:24.672088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:24.680201Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:24.680541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:24.681309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.681729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:24.683843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.685345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.685459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.685688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:24.685746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.685795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:24.685951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.693705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:24.820389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:24.820653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.821127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:24.821363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:24.821439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.825892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.826011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:24.826199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.826258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:24.826299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-10T11:15:24.826333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:24.828826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.828899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:24.828943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:24.831089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.831146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.831193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.831264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.835254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:24.837676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:24.837875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:24.838943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.839091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.839147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.839450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:24.839525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.839714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:24.839816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:24.842042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.842106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.842313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.842362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:24.842733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.842787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:24.842894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.842931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.842994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.843042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.843085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:24.843131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.843173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:24.843205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:24.843275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:24.843318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:24.843354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:24.845849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.845973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.846029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
bletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:27.038054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.038185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2025-04-10T11:15:27.038729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-04-10T11:15:27.038814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-04-10T11:15:27.038873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-04-10T11:15:27.038999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-04-10T11:15:27.039118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2025-04-10T11:15:27.039816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:27.039886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.040050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-04-10T11:15:27.040161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-04-10T11:15:27.043029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-04-10T11:15:27.043228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-10T11:15:27.043537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.043601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:27.043864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-04-10T11:15:27.043961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.044004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1039:2901], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-04-10T11:15:27.044050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:1039:2901], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-04-10T11:15:27.044350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.044427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-04-10T11:15:27.044685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-10T11:15:27.045889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-10T11:15:27.046006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-10T11:15:27.046043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-10T11:15:27.046093Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-04-10T11:15:27.046141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-04-10T11:15:27.047226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-10T11:15:27.047352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-10T11:15:27.047381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-10T11:15:27.047429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-04-10T11:15:27.047475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-10T11:15:27.047551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-04-10T11:15:27.050768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-04-10T11:15:27.050933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-04-10T11:15:27.051019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-10T11:15:27.052385Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-10T11:15:27.054255Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-04-10T11:15:27.054610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-10T11:15:27.054665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-10T11:15:27.054823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-10T11:15:27.054881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-10T11:15:27.054963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-10T11:15:27.055081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2025-04-10T11:15:27.056588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-10T11:15:27.059311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-10T11:15:27.061426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.062014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.062085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-04-10T11:15:27.062200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-04-10T11:15:27.062686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 988 RawX2: 4294970157 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2
2025-04-10T11:15:27.067473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568
2025-04-10T11:15:27.067683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555
TestModificationResult got TxId: 137, wait until txId: 137
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD]
>> TSchemeShardSubDomainTest::DiskSpaceUsage
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:27.704068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:27.704194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:27.704832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:27.704905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:27.704954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:27.704987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:27.705069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:27.705194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup#
false 2025-04-10T11:15:27.705681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:27.805825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:27.805907Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:27.825700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:27.826082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:27.826308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:27.834641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:27.835026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:27.835854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.836487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:27.839444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.841138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.841250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.841408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:27.841468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:27.841541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:27.841691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.851637Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:27.990779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:27.991092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.991376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:27.991702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:27.991786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.994624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:15:27.994779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:27.995026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.995109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:27.995155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:27.995198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:27.998223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.998286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:27.998346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:28.000766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.000824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.000872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.000939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.004926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:28.007416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:28.007621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:28.008808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:28.008968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:28.009033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.009377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:28.009444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.009649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:28.009760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:28.012305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:28.012357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:28.012597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:28.012656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:28.013022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.013078Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:28.013204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:28.013270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.013378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:28.013424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.013470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:28.013520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.013564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:28.013600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:28.013675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:28.013720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:28.013763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:28.015980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:28.016135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:28.016183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:15:28.016226Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:15:28.016274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-10T11:15:28.016414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:15:28.020418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:15:28.021027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-10T11:15:28.021751Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:15:28.045491Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:15:28.048879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:28.049224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.049355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.050352Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:15:28.053526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:28.053721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-10T11:15:28.055099Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T11:15:28.055362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:28.055408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-10T11:15:28.055923Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:28.056034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:28.056073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:28.056726Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:28.056991Z node 1 :SCHEMESHARD_DESCRIBE 
INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 268us result status StatusPathDoesNotExist
2025-04-10T11:15:28.057197Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:27.834118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:27.834271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:27.834349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:27.834409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:27.834466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:27.834498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:27.834578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:27.834701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:27.835172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:27.935296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:27.935355Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:27.954290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:27.954583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:27.954793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:27.967356Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:27.967655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:27.968432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:27.968869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:27.971013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.972623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:27.972683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:27.972910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:27.972962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:27.973004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:27.973174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:27.981590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:28.109723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:28.110035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.110392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:28.110586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:28.110634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.113401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:28.113521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:28.113739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.113883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:28.113941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2025-04-10T11:15:28.113980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:28.118086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.118169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:28.118206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:28.120101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.120139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.120170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.120253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.129921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:28.132517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:28.132717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:28.134060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:28.134246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:28.134305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.134659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:28.134726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:28.134922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:28.135032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:28.137491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:28.137546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:15:28.137754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:28.137797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:28.138051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.138087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:28.138203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:28.138232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.138272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:28.138300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.138327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:28.138361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:28.138389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:28.138411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:28.138485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:28.138528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:28.138561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:28.141232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:28.141390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:28.141457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:28.220747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:28.220791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:28.220967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:28.221088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:28.221129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:28.221184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:15:28.221265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:28.221307Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:15:28.221454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:28.221498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:28.221590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:28.221624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:28.221666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:28.221720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:28.221761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:28.221796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:28.221878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:28.221918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:28.221985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:15:28.222040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T11:15:28.223214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:28.223308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:28.223354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 101 2025-04-10T11:15:28.223402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:15:28.223444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:28.224279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:28.224364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:28.224408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:28.224469Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:15:28.224504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:28.224590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:28.229391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:28.229984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:15:28.230293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:28.230361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:28.230924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:28.231041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:28.231109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:333:2324] TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:28.231654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:28.232130Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 466us result status StatusSuccess 2025-04-10T11:15:28.232681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:28.233352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:28.233558Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 220us result status StatusSuccess 2025-04-10T11:15:28.233949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:28.234699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:28.234904Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 200us result status StatusSuccess
2025-04-10T11:15:28.235181Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD]
Test command err:
Trying to start YDB, gRPC: 4173, MsgBus: 23716
2025-04-10T11:15:22.579915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641542782591806:2259];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:15:22.579979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001695/r3tmp/tmpdGH8Vw/pdisk_1.dat
2025-04-10T11:15:22.906055Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4173, node 1
2025-04-10T11:15:22.967070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:15:22.967216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:15:22.969240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:15:22.984171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:15:22.984199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:15:22.984208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:15:22.984358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23716
TClient is connected to server localhost:23716
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:15:23.629314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:23.643086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:23.662950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:23.811242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T11:15:23.978477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:24.062665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:25.802798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641555667495288:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:25.803149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:26.108648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.144987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.193663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.233986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.268379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.309577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:15:26.395082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641559962463098:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:26.395296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:26.395346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641559962463103:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:26.398845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:15:26.412449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641559962463105:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:15:26.497822Z node 1 :TX_PROXY ERROR: Actor# [1:7491641559962463160:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:15:27.576806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641542782591806:2259];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:15:27.576927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryPerf::Replace+QueryService-UseSink [GOOD]
>> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD]
>> KqpQueryPerf::DeleteOn+QueryService+UseSink
>> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
>> KqpQueryPerf::RangeRead+QueryService [GOOD]
>> KqpQueryPerf::Upsert-QueryService-UseSink
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TPDiskUtil::DriveEstimator [GOOD]
>> TPDiskUtil::OffsetParsingCorrectness
>> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:29.318817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:29.318919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:29.318970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:29.319011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:29.319060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:29.319086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:29.319141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:29.319217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:29.319563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:29.399750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:29.399815Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:29.414340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:29.414630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:29.414828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:29.423610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:29.423884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:29.424577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:29.424974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:29.430939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:29.432280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:29.432338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:29.432540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:29.432600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:29.432643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:29.432808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:29.440073Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:29.576682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:29.577000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:29.577236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:29.577482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:29.577535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:29.579949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:29.580092Z node
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:29.580343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.580399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:29.580432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:29.580463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:29.582463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.582513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:29.582552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:29.584192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.584261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.584314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.584377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.588138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:29.590192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:29.590583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:29.591685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:29.591841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:29.591897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.592257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:29.592334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.592548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:29.592669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:29.594990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:29.595043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:29.595199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:29.595235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:29.595494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.595546Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:29.595664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:29.595708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.595765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:29.595817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.595867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:29.595935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.595982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:29.596018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:29.596102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:29.596137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:29.596166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:29.598795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:29.598932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:29.598978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:15:29.637729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-10T11:15:29.638112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:29.638292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:29.638347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:29.638559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:15:29.638618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:29.638800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:29.638867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:29.638943Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:29.639664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:29.641694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:29.641746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:29.641919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:29.642059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:29.642103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:15:29.642169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:29.642547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.642604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:15:29.642725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:29.642769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:29.642818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:29.642858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:29.642900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:29.642951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:29.642998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:29.643036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:29.643131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:29.643187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:29.643230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:29.643302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:29.644024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:29.644140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:29.644197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:29.644253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:29.644305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:29.645101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:29.645180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:29.645212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:29.645245Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:29.645280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:29.645370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:29.648561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:29.649812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:15:29.652942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:29.653203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-04-10T11:15:29.653254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-04-10T11:15:29.653432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-10T11:15:29.653483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-10T11:15:29.656241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:29.656460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-10T11:15:29.656838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:29.656913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:15:29.657039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:29.657065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:29.657612Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:29.657732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:29.657779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2298] 2025-04-10T11:15:29.658103Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:29.658200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:29.658230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TPDiskUtil::OffsetParsingCorrectness [GOOD] >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TPDiskUtil::SectorPrint [GOOD] >> TPDiskUtil::SectorMap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13065, MsgBus: 12510 2025-04-10T11:15:15.627910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641512196345951:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:15.627944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ab/r3tmp/tmpq2KFKG/pdisk_1.dat 2025-04-10T11:15:16.061358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:16.062042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:16.062131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:16.077502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13065, node 1 2025-04-10T11:15:16.230103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:16.230130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:16.230137Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:16.230292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12510 TClient is connected to server localhost:12510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:16.800129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:16.824074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:16.985132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:17.245943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:17.345944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:19.390847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641529376216916:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:19.391012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:19.722920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:19.756783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:19.802473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:19.835081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:19.912928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:19.960536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:20.037058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641533671184727:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:20.037202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:20.037628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641533671184732:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:20.042098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:20.061185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641533671184734:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:20.142999Z node 1 :TX_PROXY ERROR: Actor# [1:7491641533671184789:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:20.628120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641512196345951:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:20.628333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31042, MsgBus: 28194 2025-04-10T11:15:22.912303Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641545089424598:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:22.912644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ab/r3tmp/tmpqy4Pr7/pdisk_1.dat 2025-04-10T11:15:23.031537Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:23.063549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:23.063643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:23.067409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31042, node 2 2025-04-10T11:15:23.113288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:23.113312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:23.113330Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:23.113428Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28194 TClient is connected to server localhost:28194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:23.597456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:23.617054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:23.693485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:15:23.867003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.990184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.403257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562269295532:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:26.403376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:26.454373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.497779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.531932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.575013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.654983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.731614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:26.817904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562269296058:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:26.817970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:26.820610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562269296063:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:26.830525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:26.841393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641562269296065:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:26.941355Z node 2 :TX_PROXY ERROR: Actor# [2:7491641562269296121:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:27.913599Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641545089424598:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:27.913688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:24.544531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:24.544633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:24.544701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:24.544751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:24.544796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:24.544824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:24.544879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:24.544962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:24.545317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:24.634312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:24.634375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:24.650605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:24.650937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:24.651132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:24.667006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:24.667239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-04-10T11:15:24.667779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.668123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:24.669904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.671475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.671540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:24.671737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:24.671790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.671846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:24.672035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.682044Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:24.829920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:24.830216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.830484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:24.830724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:24.830772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.833533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.833759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:24.833937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.833995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:24.834029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:24.834063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:24.837198Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.837259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:24.837296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:24.839463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.839508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.839561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.839648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.848878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:24.851302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:24.851551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:24.852542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:24.852689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:24.852734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.853015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:24.853064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:24.853243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:24.853362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:24.855904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:24.855957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:24.856136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-10T11:15:24.856178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:24.856436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:24.856492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:24.856590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.856623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.856669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:24.856700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.856735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:24.856783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:24.856826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:24.856859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:24.856936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:24.856981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:24.857012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:24.859623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.859737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:24.859773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
ed, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:29.790475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:29.790622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:29.790691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-10T11:15:29.791113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:29.791192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-10T11:15:29.791258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:29.791303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-10T11:15:29.791402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-10T11:15:29.791534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-10T11:15:29.791676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-10T11:15:29.791787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:29.794590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:29.796014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:29.796287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:29.796344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:29.796558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-10T11:15:29.796781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:29.796844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-10T11:15:29.796907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-10T11:15:29.797000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, 
at schemeshard: 72075186233409546 2025-04-10T11:15:29.797050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-10T11:15:29.797153Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:29.797200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-10T11:15:29.797243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-10T11:15:29.798968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:29.799091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:29.799135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:29.799182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-10T11:15:29.799237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-10T11:15:29.800326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:29.800473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:29.800509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:29.800541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:29.800574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-10T11:15:29.800648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:15:29.805118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:29.805195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-10T11:15:29.805716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:29.805947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:29.805991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:29.806047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 
progress is 1/1 2025-04-10T11:15:29.806088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:29.806131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T11:15:29.806252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-04-10T11:15:29.806311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:29.806362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:29.806406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:29.806535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-10T11:15:29.806941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:29.806994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:29.808235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:29.808690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:29.810442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:29.810517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-10T11:15:29.810623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:15:29.810664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:754:2671] 2025-04-10T11:15:29.811565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-10T11:15:29.812731Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-10T11:15:29.812966Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 268us result status StatusSuccess 2025-04-10T11:15:29.813493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::FormatSectorMap >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> TPDiskUtil::FormatSectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8911, MsgBus: 21316 2025-04-10T11:15:23.961250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641546763424511:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:23.961443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001690/r3tmp/tmpKD47Yv/pdisk_1.dat 2025-04-10T11:15:24.324392Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8911, node 1 2025-04-10T11:15:24.338087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:24.341966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:24.376225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:24.405014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:24.405044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:24.405051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:24.405177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21316 TClient is connected to server localhost:21316 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:24.999799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.017952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.153226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.349362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.435131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.099080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641563943295484:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.099191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.407304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.478169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.515903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.558811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.601742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.665164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.724026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641563943295998:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.724133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.724442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641563943296003:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.729571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:27.748192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641563943296005:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:27.815123Z node 1 :TX_PROXY ERROR: Actor# [1:7491641563943296058:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:28.965967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641546763424511:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:28.966060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> KqpQueryPerf::UpdateOn+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1908, MsgBus: 8365 2025-04-10T11:15:24.528388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641549953354917:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:24.528449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001689/r3tmp/tmpx7IMHg/pdisk_1.dat 2025-04-10T11:15:24.999810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:25.029813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:25.029945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1908, node 1 2025-04-10T11:15:25.031782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:25.096980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:25.097009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:25.097023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:25.097147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8365 TClient is connected to server localhost:8365 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:25.689740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.706250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:25.716402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:25.877106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.069412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.156287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.895086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641562838258588:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.895201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.211955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.244852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.279382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.316655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.347741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.385020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.423022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641567133226394:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.423101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641567133226399:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.423102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.426988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:28.437030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641567133226401:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:28.539934Z node 1 :TX_PROXY ERROR: Actor# [1:7491641567133226455:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:29.528134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641549953354917:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:29.528511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] Test command err: Path# /home/runner/.ya/build/build_root/2gt4/000d76/r3tmp/tmplyNJcU//pdisk/data.bin >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17243, MsgBus: 7426 2025-04-10T11:15:18.823508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641527677361097:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:18.837713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016aa/r3tmp/tmpIWpJhF/pdisk_1.dat 2025-04-10T11:15:19.363954Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:19.371871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:19.371988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:19.375544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17243, node 1 2025-04-10T11:15:19.510850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:19.510893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:19.510902Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:19.511052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7426 TClient is connected to server localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:20.280632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:20.318870Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:20.328644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:20.505417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:20.753066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:20.871286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:22.475885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641544857232080:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:22.476024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:22.808739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:22.845858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:22.876329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:22.913544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:22.996638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.060151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.117368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641549152199892:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.117452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.117653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641549152199897:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.121844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:23.135017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641549152199899:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:23.228547Z node 1 :TX_PROXY ERROR: Actor# [1:7491641549152199955:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:23.817292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641527677361097:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:23.817363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63705, MsgBus: 7778 2025-04-10T11:15:25.535019Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641556618733214:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:25.535082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016aa/r3tmp/tmp6TMLC1/pdisk_1.dat 2025-04-10T11:15:25.637219Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:25.655881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:25.655963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:25.657835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63705, node 2 2025-04-10T11:15:25.702537Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:25.702563Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:25.702573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:25.702703Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7778 TClient is connected to server localhost:7778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:26.192184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.225607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.308765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.478324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:26.550811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.753875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641569503636882:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.753978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:28.834262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.868015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.898683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.931588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.965117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.006697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.086358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641573798604692:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.086453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.086462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641573798604697:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.089314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:29.099233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641573798604699:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:29.152185Z node 2 :TX_PROXY ERROR: Actor# [2:7491641573798604752:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4314, MsgBus: 25321 2025-04-10T11:15:25.857529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641554327339480:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:25.857600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001688/r3tmp/tmpQ5eBrM/pdisk_1.dat 2025-04-10T11:15:26.330634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:26.333194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:15:26.333287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:26.337586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4314, node 1 2025-04-10T11:15:26.416782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:26.416814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:26.416823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:26.416969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25321 TClient is connected to server localhost:25321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:27.002168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.045712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.204775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.378186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.453359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:29.119201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641571507210443:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.119340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.465616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.496787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.528103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.559212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.589193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.629649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:29.673010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641571507210954:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.673116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.673760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641571507210959:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:29.677824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:29.690876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641571507210961:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:29.767458Z node 1 :TX_PROXY ERROR: Actor# [1:7491641571507211015:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:30.857573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641554327339480:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:30.857637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:31.019476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:31.019638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.019719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:31.019778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:31.019836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:31.019880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:31.019953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.020083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:31.020496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:31.111899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:31.111968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:31.129415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:31.129793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:31.129994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:31.139831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:31.140170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-10T11:15:31.140957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.141420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:31.143788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.145488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.145563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.145811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:31.145866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.145921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:31.146114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.154194Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:31.303896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:31.304214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.304525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:31.304834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:31.304898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.308342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.308518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:31.308759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.308832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:31.308874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:31.308912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:31.311532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.311609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:31.311685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:31.313860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.313915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.313980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.314050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.318218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:31.320698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:31.320903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:31.322075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.322265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:31.322321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.322666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:31.322737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.322931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:31.323032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:31.325640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.325694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.325923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-10T11:15:31.325973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:31.326308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.326365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:31.326487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.326528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.326612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.326653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.326697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:31.326742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.326781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:31.326817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:31.326898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:31.326940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:31.326979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:31.335751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.335938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.335986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
10T11:15:31.956624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:15:31.956669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:31.956707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:31.958533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:15:31.958656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:15:31.958691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:15:31.958722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:31.958779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:31.958859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-10T11:15:31.960594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:31.960649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:31.960678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:31.960714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:31.960759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:31.961608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:15:31.963185Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-04-10T11:15:31.963473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:31.963895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2025-04-10T11:15:31.964266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:15:31.967987Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:31.969874Z node 1 
:HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:31.970956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.971285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-10T11:15:31.974329Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:31.974627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:31.974933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-04-10T11:15:31.975904Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-04-10T11:15:31.977042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:31.977354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-04-10T11:15:31.978034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:31.978399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:31.979929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:31.979985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:31.980065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:31.980732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:15:31.981016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:31.981072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:31.981189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:31.982637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 
2025-04-10T11:15:31.982713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:31.982841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:31.982891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:31.986310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:31.986384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:31.986564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:31.986591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:31.986654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:31.986707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:31.987094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:31.987247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:31.987304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:31.987410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:31.987683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:31.989258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:15:31.989639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-10T11:15:31.989698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T11:15:31.990348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-10T11:15:31.990460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:15:31.990510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:920:2823] TestWaitNotification: OK eventTxId 106 2025-04-10T11:15:31.991333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:31.991556Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 236us result status StatusSuccess 2025-04-10T11:15:31.991958Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:31.484684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:31.484781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.484852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:31.484902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:31.484947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:31.484977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:31.485048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.485131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:31.485533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:31.571633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-04-10T11:15:31.571698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:31.587754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:31.588058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:31.588285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:31.595561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:31.595798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:31.596459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.596831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:31.598859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.600362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.600423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.600643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:31.600699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.600741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:31.600925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.608106Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:31.758041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:31.758343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.758622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:31.758874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:31.758929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.761418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.761589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T11:15:31.761795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.761856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:31.761899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:31.761934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:31.764146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.764216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:31.764253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:31.766219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.766272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.766313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.766387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.770373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:31.772230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:31.772444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:31.773450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.773626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:31.773682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.774005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:31.774061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.774265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:31.774359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:31.776400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.776451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.776628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.776668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:31.776898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.776940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:31.777038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.777074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.777154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.777202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.777238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:31.777277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.777313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:31.777379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:31.777447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:31.777485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:31.777517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:31.779927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.780049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.780086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:15:32.217660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2025-04-10T11:15:32.217944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:32.218024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:32.218058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-10T11:15:32.218379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:15:32.218428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-10T11:15:32.218555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:32.218616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-10T11:15:32.218662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:15:32.220477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:32.220506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:32.220670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:32.220752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:32.220779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:15:32.220803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T11:15:32.220847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:32.220871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:15:32.220996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:32.221034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:32.221067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:32.221102Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:32.221131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:15:32.221163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:32.221193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:15:32.221220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:15:32.221388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-10T11:15:32.221424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:15:32.221447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T11:15:32.221466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-10T11:15:32.222075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:32.222130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:32.222178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:32.222208Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T11:15:32.222239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:32.222934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:32.222989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:32.223006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:32.223024Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:15:32.223041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-10T11:15:32.223083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:15:32.225205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:32.226007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:15:32.226196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:32.226223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:32.226531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:32.226603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:32.226656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:978:2801] TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:32.227184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:32.227368Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusSuccess 2025-04-10T11:15:32.227657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:32.228082Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:32.228211Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 139us result status StatusSuccess 2025-04-10T11:15:32.228447Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11498, MsgBus: 19633 2025-04-10T11:15:14.415745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641510115178851:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:14.415897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b5/r3tmp/tmpIyyOSp/pdisk_1.dat 2025-04-10T11:15:14.797948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:14.817675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:14.817743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11498, node 1 2025-04-10T11:15:14.819328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:14.878631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:14.878649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:14.878656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:14.878761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19633 TClient is connected to server localhost:19633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:15.541775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.565088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:15.579047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.735111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.913516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:15.992312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:18.147411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641527295049825:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.147572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.459387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.498536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.540330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.633041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.680343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.740917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:18.839554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641527295050346:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.839685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.840006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641527295050351:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:18.844447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:18.858427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641527295050353:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:18.939842Z node 1 :TX_PROXY ERROR: Actor# [1:7491641527295050408:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:19.418376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641510115178851:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:19.418452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:15:20.142009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:20.188830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:20.233515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7415, MsgBus: 17412 2025-04-10T11:15:23.549673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641545268612225:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:23.550053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b5/r3tmp/tmpQHlCDH/pdisk_1.dat 2025-04-10T11:15:23.713514Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:23.732434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:23.732523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:23.734312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7415, node 2 2025-04-10T11:15:23.798770Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:23.798798Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:23.798808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:23.798938Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17412 TClient is connected to server localhost:17412 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:15:24.310572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.337596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:24.416201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:24.660065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:24.787488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.248689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562448483185:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.248785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.304222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.341113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.415810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.463988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.503775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.592939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:27.659979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562448483705:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.660087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.660304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641562448483710:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:27.664847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:27.677191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641562448483712:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:27.741008Z node 2 :TX_PROXY ERROR: Actor# [2:7491641562448483765:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:28.549986Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641545268612225:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:28.550071Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:15:28.731602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.812885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:28.859337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::IdxLookupJoin+QueryService >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TSchemeShardSubDomainTest::ForceDropTwice >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4519, MsgBus: 22827 2025-04-10T11:15:27.423858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641563166342713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:27.423915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001684/r3tmp/tmpziKydX/pdisk_1.dat 2025-04-10T11:15:27.864545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:27.864649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:27.867696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:27.910521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4519, node 1 2025-04-10T11:15:27.972017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:27.972052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:27.972066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:27.972232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22827 TClient is connected to server localhost:22827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:28.567169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.596128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.744631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.898686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.965689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:30.635674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641576051246373:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.635779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.962301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.993426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.019748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.047748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.076716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.149277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.193345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641580346214182:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:31.193428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:31.193628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641580346214187:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:31.197709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:31.208732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641580346214189:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:31.266466Z node 1 :TX_PROXY ERROR: Actor# [1:7491641580346214242:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TKeyValueTest::TestObtainLockNewApi >> TSchemeShardSubDomainTest::DeleteAndRestart >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13662, MsgBus: 24956 2025-04-10T11:15:19.901088Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641529793855451:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:19.901179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a1/r3tmp/tmpZHbRuR/pdisk_1.dat 2025-04-10T11:15:20.417565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13662, node 1 2025-04-10T11:15:20.454483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:20.454602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:20.458794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:20.578272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:20.578299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:20.578308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:20.578458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24956 TClient is connected to server localhost:24956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:21.202272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.231193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.384039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.562757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:21.669767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:23.543329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641546973726439:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.543453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:23.858339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.901975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:23.943093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.019508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.055716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.092117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:24.193674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641551268694255:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.193752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.193969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641551268694260:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:24.199259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:24.212286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641551268694262:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:24.299017Z node 1 :TX_PROXY ERROR: Actor# [1:7491641551268694318:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:24.903231Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641529793855451:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:24.903302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32622, MsgBus: 10920 2025-04-10T11:15:26.947840Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641561766926041:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:26.947897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a1/r3tmp/tmp3ycWnR/pdisk_1.dat 2025-04-10T11:15:27.105173Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:27.119801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:27.119890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:27.122316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32622, node 2 2025-04-10T11:15:27.186786Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:27.186813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:27.186824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:27.186956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10920 TClient is connected to server localhost:10920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:27.718444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.752821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:27.841638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.037409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:28.111092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:30.523621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641578946796992:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.523686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.596727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.630225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.674471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.715792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.753843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.794319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:30.840635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641578946797504:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.840746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.840831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641578946797509:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:30.844438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:30.854887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641578946797511:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:30.925058Z node 2 :TX_PROXY ERROR: Actor# [2:7491641578946797564:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:31.948157Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641561766926041:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:31.948248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:33.691946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:33.692040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:33.692105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:33.692155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:33.692204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:33.692232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:33.692301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:33.692400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T11:15:33.692808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:33.765745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:33.765796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:33.779688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:33.779980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:33.780173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:33.787091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:33.787354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:33.788046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.788398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:33.790443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.791856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.791906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.792097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:33.792147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.792204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:33.792367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.799247Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:33.936997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:33.937288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.937603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:33.937817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:33.937866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.940047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:15:33.940204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:33.940404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.940498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:33.940558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:33.940605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:33.945828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.945883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:33.945910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:33.947592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.947641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.947685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.947742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.950755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:33.952566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:33.952752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:33.953545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.953660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:33.953712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.954073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:33.954118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.954286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:33.954359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:33.956456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.956502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.956692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.956737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:33.957015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.957065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:33.957176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.957215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.957268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.957302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.957352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:33.957404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.957438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:33.957468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:33.957522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:33.957557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:33.957611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:33.959987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.960128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.960174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:34.018269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:34.018308Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:34.018352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:34.018438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:15:34.020237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:15:34.020378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-10T11:15:34.021065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:34.021168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:34.021220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:15:34.021296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:34.021349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:34.021485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-10T11:15:34.021623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:34.021683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:34.022539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:34.023822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:34.025336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:34.025375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:34.025537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:34.025666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:34.025700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:15:34.025732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:34.026115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.026185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-10T11:15:34.026238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:34.026270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:34.026315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:34.026363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:34.026407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:34.026439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:34.026484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:34.026528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:34.026606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:34.026645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:34.026678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:34.026708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:15:34.027214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:34.027291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:34.027326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:34.027365Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:34.027408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:34.028105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:34.028207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:34.028238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:34.028268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:34.028298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:34.028372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:34.028603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:34.028648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:34.028729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:34.029029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:34.029068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:34.029148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:34.031900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:34.033733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:34.033837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:34.033914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:15:34.034137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:34.034201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:34.034672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:34.034755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:34.034790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy 
waiter [1:338:2329] TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:34.035296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:34.035489Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 204us result status StatusPathDoesNotExist 2025-04-10T11:15:34.035670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TIncrHugeBasicTest::Defrag [GOOD] >> KqpQueryPerf::Upsert-QueryService+UseSink >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::SetSchemeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:31.674953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:31.675078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.675133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:31.675181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:31.675230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:31.675256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:31.675322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:31.675398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:31.675808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-10T11:15:31.751879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:31.751923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:31.766544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:31.766811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:31.766950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:31.774246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:31.774497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:31.775012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.775379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:31.777129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.778261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.778318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.778449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:31.778485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.778524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:31.778663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.784682Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:31.918681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:31.918980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.919253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:31.919507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:31.919578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.922083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.922263Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:31.922459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.922536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:31.922577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:31.922634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:31.924907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.924973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:31.925017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:31.927126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.927193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.927251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.927328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.931477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:31.933949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:31.934245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:31.935472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:31.935625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:31.935688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.936035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:31.936106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:31.936319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-10T11:15:31.936471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:31.939089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:31.939140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:31.939318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:31.939357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:31.939628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:31.939674Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:31.939783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.939833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.939875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:31.939907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.939943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:31.939986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:31.940018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:31.940046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:31.940106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:31.940140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:31.940169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:31.942521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.942643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:31.942705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4-10T11:15:33.824933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.825066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.825107Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:33.825169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-10T11:15:33.825307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:33.826851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-10T11:15:33.826969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-04-10T11:15:33.827332Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.827443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:33.827495Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-04-10T11:15:33.827638Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-04-10T11:15:33.827813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:33.827884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:33.828652Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:212;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-10T11:15:33.830507Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.830607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.830808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-10T11:15:33.830972Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.831021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:337:2313], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-10T11:15:33.831068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:337:2313], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-04-10T11:15:33.831156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.831209Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:33.831283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-04-10T11:15:33.832305Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:33.832486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:33.832544Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-10T11:15:33.832590Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-10T11:15:33.832635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:15:33.833456Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:33.833535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-10T11:15:33.833587Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-10T11:15:33.833620Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-04-10T11:15:33.833665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:15:33.833734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-10T11:15:33.837090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-04-10T11:15:33.838628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-10T11:15:33.838881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 
2025-04-10T11:15:33.851163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-10T11:15:33.851222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:33.851363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-10T11:15:33.851439Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-10T11:15:33.853403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.853593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.853636Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-10T11:15:33.853746Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T11:15:33.853800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:33.853864Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-10T11:15:33.853900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:33.853948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-10T11:15:33.854019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:495:2445] message: TxId: 108 2025-04-10T11:15:33.854068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-10T11:15:33.854109Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-10T11:15:33.854166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-10T11:15:33.854291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:33.856201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T11:15:33.856275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:884:2798] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-04-10T11:15:33.859877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:33.860142Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.860553Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. 
Limit: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:33.863246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:33.863415Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-10T11:15:33.863858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-10T11:15:33.863907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-10T11:15:33.864500Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-10T11:15:33.864625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-10T11:15:33.864677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:919:2833] TestWaitNotification: OK eventTxId 109
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:20.036882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.037030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.037105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.037156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.038059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.038118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.038301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.038424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.039773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.139995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:20.140047Z node 1 :IMPORT WARN:
Table profiles were not loaded 2025-04-10T11:15:20.174991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.175321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.175469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.195374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.195682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.196579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.197000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.200532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.201906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.201972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.202336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.202388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.202429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.202558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.210810Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.355061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.355355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.355591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.355791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.355854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.363032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.363180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.363402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.363471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.363507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.363540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.368841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.368916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.368956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.371515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.371568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.371608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.371670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.377242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.379470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.379639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.381942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.382098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.382171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.383582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.383730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.386700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.386858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.395570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.395658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.395884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.395938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.396234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.396301Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.396434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.396475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.396546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.396600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.396646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:20.396693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.396734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:20.396771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:20.396852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:20.396896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:20.396934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:20.399299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.399409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:20.399442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
4046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 768 RawX2: 4294970000 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:33.829100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:33.829217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 768 RawX2: 4294970000 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:33.829267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-10T11:15:33.829956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.830030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-10T11:15:33.830080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:33.830118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-10T11:15:33.830242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T11:15:33.830384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-10T11:15:33.830520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:33.830584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:15:33.832530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:33.834382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:15:33.834477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-10T11:15:33.834553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-04-10T11:15:33.834779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 
72057594046678944 2025-04-10T11:15:33.834815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:33.834902Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:15:33.834979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-10T11:15:33.835912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-10T11:15:33.836011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-10T11:15:33.836046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-10T11:15:33.836085Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-04-10T11:15:33.836131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:33.836806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-10T11:15:33.836888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-10T11:15:33.836916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-10T11:15:33.836943Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:15:33.836981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:15:33.837044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-10T11:15:33.839647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.839708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:33.840059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:15:33.840220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-10T11:15:33.840255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:15:33.840294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 
1/1 2025-04-10T11:15:33.840331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:15:33.840368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-10T11:15:33.840418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:15:33.840459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-10T11:15:33.840491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-10T11:15:33.840591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:15:33.841031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.841065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:33.841819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:33.842177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:33.843296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.843355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-10T11:15:33.843999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-04-10T11:15:33.844541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-10T11:15:33.844595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-10T11:15:33.845152Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-10T11:15:33.845239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T11:15:33.845274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1002:2929] TestWaitNotification: OK eventTxId 107 2025-04-10T11:15:33.845926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:33.846169Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusSuccess 2025-04-10T11:15:33.846553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic
>> TSchemeShardSubDomainTest::ForceDropTwice [GOOD]
>> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag [GOOD]
Test command err:
2025-04-10T11:14:39.110659Z :BS_INCRHUGE DEBUG: BlockSize# 8128 BlocksInChunk# 2304 BlocksInMinBlob# 65 MaxBlobsPerChunk# 35 BlocksInDataSection# 2303 BlocksInIndexSection# 1 2025-04-10T11:14:39.110787Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] starting ReadLog 2025-04-10T11:14:39.112075Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] finished ReadLog 2025-04-10T11:14:39.112113Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Recovery] ApplyReadLog Chunks# [] Deletes# [] Owners# {} CurrentSerNum# 0 NextLsn# 1 2025-04-10T11:14:39.112225Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] ready 2025-04-10T11:14:39.112249Z :TEST DEBUG: finished Init Reference# [] Enumerated# [] InFlightDeletes# [] 2025-04-10T11:14:39.112273Z :TEST DEBUG: ActionsTaken# 1 2025-04-10T11:14:39.112286Z :TEST DEBUG: GetNumRequestsInFlight# 0 InFlightWritesSize# 0 2025-04-10T11:14:39.116166Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 NumReq# 0 2025-04-10T11:14:39.117870Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 HandleWrite Lsn# 0 DataSize# 811717 WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-04-10T11:14:39.117900Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-04-10T11:14:39.117920Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-04-10T11:14:39.118221Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-04-10T11:14:39.119281Z :TEST DEBUG: GetNumRequestsInFlight# 1 InFlightWritesSize# 1 2025-04-10T11:14:39.123056Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 NumReq# 1 2025-04-10T11:14:39.126255Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 HandleWrite Lsn# 1 DataSize# 1745495 WriteQueueSize# 2
WriteInProgressItemsSize# 0 2025-04-10T11:14:39.126276Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-04-10T11:14:39.126321Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-04-10T11:14:39.126345Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-04-10T11:14:39.127179Z :TEST DEBUG: GetNumRequestsInFlight# 2 InFlightWritesSize# 2 2025-04-10T11:14:39.128030Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:602037:2:0] Lsn# 2 NumReq# 2 2025-04-10T11:14:39.129129Z :TEST DEBUG: GetNumRequestsInFlight# 3 InFlightWritesSize# 3 2025-04-10T11:14:39.131422Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1 Status# OK 2025-04-10T11:14:39.131489Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 2 ChunkSerNum# 1000 2025-04-10T11:14:39.131517Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 3 ChunkSerNum# 1001 2025-04-10T11:14:39.131529Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 4 ChunkSerNum# 1002 2025-04-10T11:14:39.131559Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 5 ChunkSerNum# 1003 2025-04-10T11:14:39.131572Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 6 ChunkSerNum# 1004 2025-04-10T11:14:39.131585Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 7 ChunkSerNum# 1005 2025-04-10T11:14:39.131596Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 8 ChunkSerNum# 1006 2025-04-10T11:14:39.131607Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 9 ChunkSerNum# 1007 2025-04-10T11:14:39.131625Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-04-10T11:14:39.131648Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-04-10T11:14:39.132352Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 100 SizeInBytes# 812800 Offset# 0 Size# 812800 End# 812800 Id# 0000000000000000 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.132581Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem entry 2025-04-10T11:14:39.132931Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem OffsetInBlocks# 100 IndexInsideChunk# 1 SizeInBlocks# 215 SizeInBytes# 1747520 Offset# 812800 Size# 1747520 End# 2560320 Id# 0000000000000001 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.132993Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 HandleWrite Lsn# 2 DataSize# 602037 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.133007Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.133032Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem entry 2025-04-10T11:14:39.133203Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem OffsetInBlocks# 315 IndexInsideChunk# 2 SizeInBlocks# 75 SizeInBytes# 609600 Offset# 2560320 Size# 609600 End# 3169920 Id# 0000000000000002 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.140012Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1287465:3:0] Lsn# 3 NumReq# 3 2025-04-10T11:14:39.142287Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 HandleWrite Lsn# 3 DataSize# 1287465 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-04-10T11:14:39.142307Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] 
WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-04-10T11:14:39.142329Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem entry 2025-04-10T11:14:39.142678Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem OffsetInBlocks# 390 IndexInsideChunk# 3 SizeInBlocks# 159 SizeInBytes# 1292352 Offset# 3169920 Size# 1292352 End# 4462272 Id# 0000000000000003 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.142954Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.143099Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 3 2025-04-10T11:14:39.143126Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.030 2025-04-10T11:14:39.148172Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.148490Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.148521Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.030 2025-04-10T11:14:39.148573Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.148679Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.148692Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.030 2025-04-10T11:14:39.149294Z :TEST DEBUG: GetNumRequestsInFlight# 4 InFlightWritesSize# 4 2025-04-10T11:14:39.151836Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 HandleWrite Lsn# 4 DataSize# 1501676 WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.151853Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.151869Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem entry 2025-04-10T11:14:39.152190Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem OffsetInBlocks# 549 IndexInsideChunk# 4 SizeInBlocks# 185 SizeInBytes# 1503680 Offset# 4462272 Size# 1503680 End# 5965952 Id# 0000000000000004 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.156824Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.157076Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.157102Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.040 2025-04-10T11:14:39.157318Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1501676:4:0] Lsn# 4 NumReq# 4 2025-04-10T11:14:39.160209Z :TEST DEBUG: GetNumRequestsInFlight# 5 InFlightWritesSize# 5 2025-04-10T11:14:39.161533Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:687721:5:0] Lsn# 5 NumReq# 5 2025-04-10T11:14:39.162865Z :TEST DEBUG: GetNumRequestsInFlight# 6 InFlightWritesSize# 6 2025-04-10T11:14:39.165380Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1957662:6:0] Lsn# 6 NumReq# 6 2025-04-10T11:14:39.166297Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 HandleWrite Lsn# 5 DataSize# 687721 WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.166315Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-04-10T11:14:39.166332Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem entry 2025-04-10T11:14:39.166529Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem 
OffsetInBlocks# 734 IndexInsideChunk# 5 SizeInBlocks# 85 SizeInBytes# 690880 Offset# 5965952 Size# 690880 End# 6656832 Id# 0000000000000005 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.166570Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 HandleWrite Lsn# 6 DataSize# 1957662 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.166581Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.166595Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 ProcessWriteItem entry 2025-04-10T11:14:39.166992Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 ProcessWriteItem OffsetInBlocks# 819 IndexInsideChunk# 6 SizeInBlocks# 241 SizeInBytes# 1958848 Offset# 6656832 Size# 1958848 End# 8615680 Id# 0000000000000006 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.170777Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.171391Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.171420Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.058 2025-04-10T11:14:39.176396Z :TEST DEBUG: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-04-10T11:14:39.179545Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1824284:7:0] Lsn# 7 NumReq# 7 2025-04-10T11:14:39.180360Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 HandleWrite Lsn# 7 DataSize# 1824284 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.180386Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.180401Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 ProcessWriteItem entry 2025-04-10T11:14:39.180769Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 ProcessWriteItem OffsetInBlocks# 1060 IndexInsideChunk# 7 SizeInBlocks# 225 SizeInBytes# 1828800 Offset# 8615680 Size# 1828800 End# 10444480 Id# 0000000000000007 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-04-10T11:14:39.182339Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 ApplyBlobWrite Status# OK 2025-04-10T11:14:39.182552Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 2 2025-04-10T11:14:39.182602Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.070 2025-04-10T11:14:39.186564Z :TEST DEBUG: finished Write Id# 0000000000000000 LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 2025-04-10T11:14:39.186620Z :TEST INFO: BytesWritten# 0 MB ElapsedTime# 0.156048s Speed# 0.00 MB/s 2025-04-10T11:14:39.186636Z :TEST DEBUG: ActionsTaken# 2 2025-04-10T11:14:39.186653Z :TEST DEBUG: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-04-10T11:14:39.186673Z :TEST DEBUG: sent Delete Id# 0000000000000000 NumReq# 7 2025-04-10T11:14:39.186710Z :TEST DEBUG: finished Write Id# 0000000000000001 LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 2025-04-10T11:14:39.186732Z :TEST IN ... 
zeInBytes# 1958848 Offset# 10338816 Size# 1958848 End# 12297664 Id# 0000000000000007 ChunkIdx# 34 ChunkSerNum# 1348 Defrag# false 2025-04-10T11:15:34.454564Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 557 HandleWrite Lsn# 1191 DataSize# 1138840 WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.454583Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.455097Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 545 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.455167Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] generating virtual log record deleteLocator# {ChunkIdx# 32 ChunkSerNum# 1346 Id# 0000000000000000 IndexInsideChunk# 5 SizeInBlocks# 212} 2025-04-10T11:15:34.455221Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogVirtualBlobDeletes ChunkIdx# 32 ChunkSerNum# 1346 Id# 0000000000000000 IndexInsideChunk# 5 SizeInBlocks# 212 Lsn# 1380 2025-04-10T11:15:34.455251Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 1380 Virtual# true 2025-04-10T11:15:34.455305Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] sending TEvChunkRead ChunkIdx# 32 OffsetInBlocks# 2052 sizeInBlocks# 196 2025-04-10T11:15:34.456212Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.456243Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 550 ProcessWriteItem entry 2025-04-10T11:15:34.456439Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 550 ProcessWriteItem OffsetInBlocks# 1513 IndexInsideChunk# 10 SizeInBlocks# 89 SizeInBytes# 723392 Offset# 12297664 Size# 723392 End# 13021056 Id# 000000000000000d ChunkIdx# 34 ChunkSerNum# 1348 Defrag# false 2025-04-10T11:15:34.456495Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] ApplyRead offsetInBlocks# 1656 index# 9 Status# OK 2025-04-10T11:15:34.456630Z :TEST DEBUG: GetNumRequestsInFlight# 44 InFlightWritesSize# 35 2025-04-10T11:15:34.457564Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] EnqueueDefragWrite chunkIdx# 32 index# 9 Id# 000000000000001a 2025-04-10T11:15:34.457614Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.458188Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 546 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.459184Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.459212Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 551 ProcessWriteItem entry 2025-04-10T11:15:34.459459Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 551 ProcessWriteItem OffsetInBlocks# 1602 IndexInsideChunk# 11 SizeInBlocks# 133 SizeInBytes# 1081024 Offset# 13021056 Size# 1081024 End# 14102080 Id# 0000000000000002 ChunkIdx# 34 ChunkSerNum# 1348 Defrag# false 2025-04-10T11:15:34.459737Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] ApplyRead offsetInBlocks# 1974 index# 11 Status# OK 2025-04-10T11:15:34.460232Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1859449:1192:0] Lsn# 1192 NumReq# 44 2025-04-10T11:15:34.460693Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] EnqueueDefragWrite chunkIdx# 32 index# 11 Id# 000000000000001b 2025-04-10T11:15:34.460732Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.461082Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 560 HandleWrite Lsn# 1192 DataSize# 1859449 WriteQueueSize# 9 
WriteInProgressItemsSize# 5 2025-04-10T11:15:34.461109Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 9 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.461160Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 547 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.461788Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 9 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.461821Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 552 ProcessWriteItem entry 2025-04-10T11:15:34.462021Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 552 ProcessWriteItem OffsetInBlocks# 1735 IndexInsideChunk# 12 SizeInBlocks# 94 SizeInBytes# 764032 Offset# 14102080 Size# 764032 End# 14866112 Id# 0000000000000013 ChunkIdx# 34 ChunkSerNum# 1348 Defrag# false 2025-04-10T11:15:34.462489Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 548 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.462993Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.463048Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 553 ProcessWriteItem entry 2025-04-10T11:15:34.463071Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 553 DeleteInProgress# false 2025-04-10T11:15:34.463505Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 553 ProcessWriteItem OffsetInBlocks# 1829 IndexInsideChunk# 13 SizeInBlocks# 232 SizeInBytes# 1885696 Offset# 14866112 Size# 1885696 End# 16751808 Id# 000000000000002f ChunkIdx# 34 ChunkSerNum# 1348 Defrag# true 2025-04-10T11:15:34.464080Z :TEST DEBUG: GetNumRequestsInFlight# 45 InFlightWritesSize# 36 2025-04-10T11:15:34.465421Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:834833:1193:0] Lsn# 1193 NumReq# 45 2025-04-10T11:15:34.465471Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 561 HandleWrite Lsn# 1193 DataSize# 834833 WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.465493Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.467131Z :TEST DEBUG: GetNumRequestsInFlight# 46 InFlightWritesSize# 37 2025-04-10T11:15:34.467311Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] ApplyRead offsetInBlocks# 2052 index# 12 Status# OK 2025-04-10T11:15:34.467985Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:694023:1194:0] Lsn# 1194 NumReq# 46 2025-04-10T11:15:34.469160Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] EnqueueDefragWrite chunkIdx# 32 index# 12 Id# 000000000000001f 2025-04-10T11:15:34.469204Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 9 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.469455Z :TEST DEBUG: GetNumRequestsInFlight# 47 InFlightWritesSize# 38 2025-04-10T11:15:34.471921Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1224587:1195:0] Lsn# 1195 NumReq# 47 2025-04-10T11:15:34.474382Z :TEST DEBUG: GetNumRequestsInFlight# 48 InFlightWritesSize# 39 2025-04-10T11:15:34.477031Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1354391:1196:0] Lsn# 1196 NumReq# 48 2025-04-10T11:15:34.479784Z :TEST DEBUG: GetNumRequestsInFlight# 49 InFlightWritesSize# 40 2025-04-10T11:15:34.480914Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 549 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.481660Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 9 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.481688Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 554 ProcessWriteItem entry 2025-04-10T11:15:34.481729Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] IndexWrite chunkIdx# 34 
offset# 16751808 size# 1975104 end# 18726912 2025-04-10T11:15:34.482191Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 554 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 250 SizeInBytes# 2032000 Offset# 0 Size# 2032000 End# 2032000 Id# 0000000000000023 ChunkIdx# 35 ChunkSerNum# 1349 Defrag# false 2025-04-10T11:15:34.482311Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 550 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.482688Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.482715Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 555 ProcessWriteItem entry 2025-04-10T11:15:34.482738Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 555 DeleteInProgress# false 2025-04-10T11:15:34.483086Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 555 ProcessWriteItem OffsetInBlocks# 250 IndexInsideChunk# 1 SizeInBlocks# 206 SizeInBytes# 1674368 Offset# 2032000 Size# 1674368 End# 3706368 Id# 000000000000000a ChunkIdx# 35 ChunkSerNum# 1349 Defrag# true 2025-04-10T11:15:34.483139Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 563 HandleWrite Lsn# 1194 DataSize# 694023 WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.483169Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.483191Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 551 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.483346Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1578629:1197:0] Lsn# 1197 NumReq# 49 2025-04-10T11:15:34.483802Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.483827Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 556 ProcessWriteItem entry 2025-04-10T11:15:34.484012Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 556 ProcessWriteItem OffsetInBlocks# 456 IndexInsideChunk# 2 SizeInBlocks# 88 SizeInBytes# 715264 Offset# 3706368 Size# 715264 End# 4421632 Id# 0000000000000022 ChunkIdx# 35 ChunkSerNum# 1349 Defrag# false 2025-04-10T11:15:34.484054Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 564 HandleWrite Lsn# 1195 DataSize# 1224587 WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.484072Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.484092Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 552 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.484467Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.484491Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 557 ProcessWriteItem entry 2025-04-10T11:15:34.484752Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 557 ProcessWriteItem OffsetInBlocks# 544 IndexInsideChunk# 3 SizeInBlocks# 141 SizeInBytes# 1146048 Offset# 4421632 Size# 1146048 End# 5567680 Id# 000000000000002b ChunkIdx# 35 ChunkSerNum# 1349 Defrag# false 2025-04-10T11:15:34.484787Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 553 ApplyBlobWrite Status# OK 2025-04-10T11:15:34.484839Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] generating virtual log record deleteLocator# {ChunkIdx# 32 ChunkSerNum# 1346 Id# 0000000000000000 IndexInsideChunk# 6 SizeInBlocks# 232} 2025-04-10T11:15:34.484886Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogVirtualBlobDeletes ChunkIdx# 32 ChunkSerNum# 1346 Id# 0000000000000000 IndexInsideChunk# 6 SizeInBlocks# 232 
Lsn# 1380 2025-04-10T11:15:34.484912Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 1380 Virtual# true 2025-04-10T11:15:34.485750Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 7 WriteInProgressItemsSize# 4 2025-04-10T11:15:34.485778Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 558 ProcessWriteItem entry 2025-04-10T11:15:34.485800Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 558 DeleteInProgress# false 2025-04-10T11:15:34.486001Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 558 ProcessWriteItem OffsetInBlocks# 685 IndexInsideChunk# 4 SizeInBlocks# 103 SizeInBytes# 837184 Offset# 5567680 Size# 837184 End# 6404864 Id# 000000000000001a ChunkIdx# 35 ChunkSerNum# 1349 Defrag# true 2025-04-10T11:15:34.486061Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 565 HandleWrite Lsn# 1196 DataSize# 1354391 WriteQueueSize# 7 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.486079Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 7 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.486101Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 566 HandleWrite Lsn# 1197 DataSize# 1578629 WriteQueueSize# 8 WriteInProgressItemsSize# 5 2025-04-10T11:15:34.486114Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 8 WriteInProgressItemsSize# 5 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TYardTest::TestSysLogReordering [GOOD] >> TYardTest::TestStartingPoints ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:34.423055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:34.423155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:34.423232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:34.423284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:34.423326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:34.423353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:34.423405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:34.423500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:34.423886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:34.513436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:34.513498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:34.530529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:34.530882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:34.531072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:34.539579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:34.539856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:34.540613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:34.541011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:34.543184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:34.544710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:34.544786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:34.545021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:34.545075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:34.545128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:34.545310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.552460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:34.671078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:34.671383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.671631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:34.671878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:34.671943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.674633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:34.674779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:34.674970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.675032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:34.675068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:34.675104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:34.677512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.677592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:34.677645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:34.679811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.679872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.679918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:34.679988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:34.684186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:34.686677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:34.686893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:34.687973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:34.688122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:34.688173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:34.688504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:34.688567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:34.688749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:34.688863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:34.691234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:34.691302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:34.691517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:34.691590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:34.691855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:34.691906Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:34.692021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:34.692062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:34.692126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:34.692171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:34.692211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:34.692256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:34.692297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:34.692329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:34.692403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:34.692450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:34.692485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:34.695106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:34.695261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:34.695311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944 2025-04-10T11:15:34.959916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:34.959958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:34.959996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:34.960933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:34.961946Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-10T11:15:34.962199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:34.962532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:15:34.962874Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:34.963085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:34.963289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2025-04-10T11:15:34.964525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:34.965088Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:34.965818Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-10T11:15:34.966043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:34.966338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-04-10T11:15:34.968478Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:34.968671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-10T11:15:34.968873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-04-10T11:15:34.970451Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 
TabletID: 72075186233409549 2025-04-10T11:15:34.970650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:34.970837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-10T11:15:34.972581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:34.972771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:34.974099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:34.974206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:34.974352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:34.974815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:34.974868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:34.974930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:34.975379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:34.975430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:34.977677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:34.977748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:34.977842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:34.977877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:34.980277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:34.980337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:34.980410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:34.980440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:34.980505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:34.980558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:34.982442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, 
at schemeshard: 72057594046678944 2025-04-10T11:15:34.982537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-04-10T11:15:34.982807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:34.982852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-10T11:15:34.982953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:15:34.982975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:15:34.983468Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:34.985120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:34.985192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:673:2573] 2025-04-10T11:15:34.985384Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:15:34.985464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:34.985492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:673:2573] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:34.985983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:34.986196Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist 2025-04-10T11:15:34.986398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:34.986831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:34.987021Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 162us result status StatusSuccess 2025-04-10T11:15:34.987471Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] |88.3%| [TA] $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::KvRead+QueryService >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] |88.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:21.817954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:21.818045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:21.818087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:21.818138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:21.818210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:21.818235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:21.818292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:21.818384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:21.818781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:21.907985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:21.908048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:21.929383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:21.929693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:21.929893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:21.938099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:21.938437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:21.939166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:21.939591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:21.941816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:21.943301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:21.943358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:21.943535Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:21.943580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:21.943619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:21.943804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:21.952481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:22.087193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:22.087456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.087709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:22.087970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:22.088033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.090279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.090432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:22.090614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.090676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:22.090731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:22.090769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:22.092933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.093002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:22.093047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:22.094976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.095043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.095096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:15:22.095167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.099348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:22.101298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:22.101491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:22.102538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:22.102684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:22.102735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:22.103063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:22.103154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:22.103338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:22.103429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:22.105744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:22.105800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:22.105992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:22.106033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:22.106352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:22.106421Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:22.106532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:22.106568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.106617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T11:15:22.106656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.106698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:22.106737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:22.106772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:22.106801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:22.106872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:22.106929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:22.106963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:22.109451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:22.109594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:22.109640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... : Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:34.946421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:34.946567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 524 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:15:34.946635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-10T11:15:34.948733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.948794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-10T11:15:34.948837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:34.948879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-10T11:15:34.948963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-10T11:15:34.949074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-10T11:15:34.949220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-10T11:15:34.949272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:34.952490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.952796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.954694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:34.954755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:34.954932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-10T11:15:34.955154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:34.955219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-10T11:15:34.955275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-10T11:15:34.955631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.955693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-10T11:15:34.955793Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.955836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-10T11:15:34.955879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-10T11:15:34.957139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:34.957271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:34.957336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:34.957380Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-04-10T11:15:34.957424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-10T11:15:34.958740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:34.958826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-10T11:15:34.958857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-10T11:15:34.958908Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:34.958945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-10T11:15:34.959020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:15:34.961675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-10T11:15:34.961736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-10T11:15:34.962115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-10T11:15:34.962335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:34.962375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:34.962420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:34.962453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:34.962492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T11:15:34.962557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-04-10T11:15:34.962603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:34.962644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:34.962679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:34.962805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-10T11:15:34.963904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:15:34.963943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:15:34.964628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:34.966554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-10T11:15:34.966765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:15:34.966818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-10T11:15:34.967269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 
2025-04-10T11:15:34.967330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1431:3340] 2025-04-10T11:15:34.967892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-10T11:15:34.972846Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-10T11:15:34.973080Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 262us result status StatusSuccess 2025-04-10T11:15:34.973628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard >> TSchemeShardSubDomainTest::RestartAtInFly >> KqpQueryPerf::Update+QueryService+UseSink >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: 
[1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:35.289583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:35.289693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.289762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:35.289812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:35.289861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:35.289894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:35.289968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.290058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:35.290499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:35.370607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:35.370654Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:35.384509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:35.384780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:35.384955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:35.393130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:35.393409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:35.394197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.394562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:35.396581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.398072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.398168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.398404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:35.398459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.398510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:35.398690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.405792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:35.532240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:35.532529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.532781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:35.533007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:35.533067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.535386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.535523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:35.535673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.535722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:35.535754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:35.535784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:35.537678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.537726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:15:35.537758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:15:35.539428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.539467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.539502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.539565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.550182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:35.553138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:15:35.553377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:15:35.554571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.554715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:35.554782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.555131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:15:35.555194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.555384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:35.555492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:15:35.557820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.557866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.558077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.558166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:15:35.558441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.558499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:15:35.558605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:35.558635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.558683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:35.558727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.558771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:15:35.558822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.558865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:15:35.558898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:15:35.558983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:35.559030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:15:35.559071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:15:35.561694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:35.561818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:35.561855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944
2025-04-10T11:15:35.639420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.639466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-10T11:15:35.639580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130
2025-04-10T11:15:35.639763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:35.639810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-10T11:15:35.640530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-10T11:15:35.641856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
FAKE_COORDINATOR: Erasing txId 101
2025-04-10T11:15:35.642810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.642843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.642936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-10T11:15:35.643065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.643095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-04-10T11:15:35.643134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-04-10T11:15:35.643383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.643426Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState
2025-04-10T11:15:35.643522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-10T11:15:35.643561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:35.643603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-10T11:15:35.643634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:35.643666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-04-10T11:15:35.643697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-10T11:15:35.643725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-04-10T11:15:35.643767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-04-10T11:15:35.643865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-10T11:15:35.643897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-04-10T11:15:35.643936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-04-10T11:15:35.643968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-04-10T11:15:35.644327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:35.644378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:35.644401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-04-10T11:15:35.644432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-10T11:15:35.644474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:35.644950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:35.645006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-04-10T11:15:35.645026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-04-10T11:15:35.645045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-10T11:15:35.645065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-10T11:15:35.645124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-04-10T11:15:35.645349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T11:15:35.645392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-10T11:15:35.645458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-10T11:15:35.645742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-10T11:15:35.645786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-10T11:15:35.645845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:35.648649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-10T11:15:35.648891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-10T11:15:35.650195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-10T11:15:35.650302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-04-10T11:15:35.650578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-10T11:15:35.650626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-04-10T11:15:35.651091Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-10T11:15:35.651209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-10T11:15:35.651261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330]
TestWaitNotification: OK eventTxId 101
2025-04-10T11:15:35.651760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:35.652019Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 229us result status StatusPathDoesNotExist
2025-04-10T11:15:35.652224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:15:35.652769Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:35.652945Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 190us result status StatusSuccess
2025-04-10T11:15:35.653387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:35.013872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:35.013965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.014031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:35.014088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:35.014132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:35.014185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:35.014249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.014341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:35.014722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:35.105347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:35.105402Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:35.121938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:35.122269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:35.122445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:35.130567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:35.130835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:35.131515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.131883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:35.133985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.135448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.135516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.135731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:35.135782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.135835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:35.136019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.143147Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:35.292725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:35.292987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.293224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:35.293470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:35.293527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.295682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.295818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:35.296007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.296075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:35.296112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:35.296163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:35.298304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.298368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:15:35.298406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:15:35.300016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.300071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.300112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.300186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.304320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:15:35.306482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:15:35.306687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:15:35.307691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.307824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:15:35.307876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.308180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:15:35.308234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:15:35.308433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:15:35.308592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:15:35.310871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.310918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.311084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.311124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:15:35.311353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.311399Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:15:35.311526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:35.311562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.311639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:15:35.311677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.311717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:15:35.311763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:15:35.311804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:15:35.311833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:15:35.311897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:15:35.311934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:15:35.311967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:15:35.320261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:35.320437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:15:35.320483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 678944 is [1:547:2482] sender: [1:608:2058] recipient: [1:15:2062]
2025-04-10T11:15:35.632879Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-10T11:15:35.633013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-10T11:15:35.633081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:606:2528]
TestWaitNotification: OK eventTxId 101
2025-04-10T11:15:35.633677Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:35.633880Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist
2025-04-10T11:15:35.634079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:15:35.635345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
Leader for TabletID 72057594046678944 is [1:547:2482] sender: [1:612:2058] recipient: [1:102:2137]
Leader for TabletID 72057594046678944 is [1:547:2482] sender: [1:615:2058] recipient: [1:15:2062]
Leader for TabletID 72057594046678944 is [1:547:2482] sender: [1:616:2058] recipient: [1:614:2533]
Leader for TabletID 72057594046678944 is [1:617:2534] sender: [1:618:2058] recipient: [1:614:2533]
2025-04-10T11:15:35.675870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:35.675966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.676012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:35.676053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:35.676109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:35.676142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:35.676208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.676276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:35.676584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:35.693610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:35.694695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:35.694922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:35.695020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:35.695057Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:35.695318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:35.695855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.695949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696402Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0
2025-04-10T11:15:35.696573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.696894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.697946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.698952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.699196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.699253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.699304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.706561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.706660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.707276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:35.707342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.707384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:35.708773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594046678944 is [1:617:2534] sender: [1:676:2058] recipient: [1:15:2062]
2025-04-10T11:15:35.741366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:35.741608Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 288us result status StatusPathDoesNotExist
2025-04-10T11:15:35.741793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:15:35.742537Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:15:35.742712Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess
2025-04-10T11:15:35.743137Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR }
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 13128, MsgBus: 18119
2025-04-10T11:15:23.569291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641549140047671:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:15:23.569379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001694/r3tmp/tmpqiqYA0/pdisk_1.dat
2025-04-10T11:15:24.005095Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:24.007050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:15:24.007171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:15:24.038131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13128, node 1
2025-04-10T11:15:24.098220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:15:24.098244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:15:24.098261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:15:24.098448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18119
TClient is connected to server localhost:18119
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:15:24.799596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:24.827412Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:15:24.855583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:25.014788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:25.221360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:25.309691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:27.124655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641566319918624:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:27.124809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:27.575195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.619025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.659885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.691269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.728113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.766900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:15:27.834738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641566319919139:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:27.834861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:27.834945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641566319919144:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:27.839285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:15:27.856580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641566319919146:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:15:27.959208Z node 1 :TX_PROXY ERROR: Actor# [1:7491641566319919202:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:15:28.568392Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641549140047671:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:15:28.568539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 18845, MsgBus: 3921
2025-04-10T11:15:29.993467Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641572873569290:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:15:29.993568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001694/r3tmp/tmp3JFoUA/pdisk_1.dat
2025-04-10T11:15:30.119869Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:30.149035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:15:30.149126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:15:30.150478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18845, node 2
2025-04-10T11:15:30.195617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:15:30.195651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:15:30.195659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:15:30.195805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3921
TClient is connected to server localhost:3921
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:15:30.628741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:30.647236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:30.724202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:30.866298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:30.929539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:15:33.306690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641590053440265:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:33.306764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:33.346630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.376264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.403673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.434833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.504363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.573938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:15:33.657828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641590053440785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:33.657965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:33.658048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641590053440790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:15:33.662046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:15:33.673195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641590053440792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:15:33.740080Z node 2 :TX_PROXY ERROR: Actor# [2:7491641590053440848:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:35.282967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:35.283076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.283145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:35.283201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:35.283252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:35.283286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:35.283378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:35.283490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:35.283926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:35.358473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:35.358529Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:35.373172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:35.373491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:35.373695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:35.381883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:35.382112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:15:35.382781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.383115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:15:35.385385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.386943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:15:35.387035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:15:35.387250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:15:35.387304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:15:35.387351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:15:35.387586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.395097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:15:35.523349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:15:35.523600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.523814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:15:35.524015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:15:35.524062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.527194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:15:35.527359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:15:35.527559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.527626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:15:35.527675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:15:35.527705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:15:35.529835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:15:35.529886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-10T11:15:35.529916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:35.531472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:35.531512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:35.531548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:35.531595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:35.534272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:35.535804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:35.535945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:35.536681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:35.536797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:35.536837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:35.537075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:35.537120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:35.537264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:35.537363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:35.539054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:35.539094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:35.539247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:35.539281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:35.539477Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:35.539515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:35.539608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:35.539634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:35.539677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:35.539709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:35.539752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:35.539784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:35.539814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:35.539838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:35.539935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:35.539966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:35.539991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:35.541734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:35.541820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:35.541850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:15:35.769763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:15:35.770584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:35.770731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:35.770787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:35.771178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:15:35.771248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:35.771511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:35.771596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:35.771659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:15:35.773948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:35.774007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:35.774254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:35.774376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:35.774431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:15:35.774478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T11:15:35.774831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:35.774877Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:15:35.774996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:35.775038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:35.775087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#100:0 progress is 1/1 2025-04-10T11:15:35.775123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:35.775166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:15:35.775214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:35.775269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:15:35.775321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:15:35.775523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:35.775575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T11:15:35.775615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:35.775652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:35.776445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:35.776541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:35.776576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:35.776629Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:35.776678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:35.777749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:35.777841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:35.777879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:35.777923Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:35.777955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:35.778046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T11:15:35.780736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:35.781641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T11:15:35.781881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:35.781926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-10T11:15:35.782336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:35.782448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:35.782488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:481:2430] TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:35.782924Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:35.783188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 222us result status StatusSuccess 2025-04-10T11:15:35.783574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:35.784035Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:35.784184Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 159us result status StatusSuccess 2025-04-10T11:15:35.784454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7452, MsgBus: 5544 2025-04-10T11:15:30.402997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641576771023170:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:30.403074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00167a/r3tmp/tmpvYYtUe/pdisk_1.dat 2025-04-10T11:15:30.705200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7452, node 1 2025-04-10T11:15:30.771488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:30.771602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:30.773638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:30.773695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:30.773704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:30.773837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:30.775520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5544 TClient is connected to server localhost:5544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:31.292871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:31.329151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:31.479772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:31.651236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:31.715495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:33.439673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641589655926835:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:33.439797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:33.711505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.737448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.805363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.834715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.862369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.891896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:33.977750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641589655927350:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:33.977844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:33.977887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641589655927355:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:33.981722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:34.009588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641589655927357:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:34.073564Z node 1 :TX_PROXY ERROR: Actor# [1:7491641593950894708:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Update+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:33.071617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:33.071689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:33.071735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:33.071773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:33.071808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:33.071839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:33.071903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:33.071988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:33.072310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:33.154502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:33.154569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:33.168367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:33.168673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:33.168880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:33.176278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:33.176552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:33.177154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.177534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:33.179385Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.180748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.180803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.181011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:33.181053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.181094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:33.181312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.186702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:33.292134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:33.292418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.292689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:33.292959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:33.293078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.295392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.295541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:33.295774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.295891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:33.296131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:33.296168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:33.298307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.298367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:33.298404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:33.300498Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.300550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.300596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.300660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.304635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:33.307015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:33.307214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:33.308365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.308518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:33.308574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.308903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:33.308962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.309150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:33.309267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:33.311949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.311998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.312269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.312322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:33.312616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.312667Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:33.312772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.312810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.312871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.312910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.312998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:33.313052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.313090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:33.313123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:33.313201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:33.313241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:33.313307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:33.316118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.316248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.316286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-04-10T11:15:36.267306Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-04-10T11:15:36.267330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-04-10T11:15:36.267983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-10T11:15:36.268071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-10T11:15:36.268099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-04-10T11:15:36.268146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:36.268181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:36.268265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-04-10T11:15:36.269537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:36.269584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:36.269633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:36.269658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:36.269678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:36.271203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-10T11:15:36.272252Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:36.273497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:36.273781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:36.274051Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 Forgetting tablet 72075186233409546 2025-04-10T11:15:36.275384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 15, at schemeshard: 72057594046678944 2025-04-10T11:15:36.275616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 2025-04-10T11:15:36.275843Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2025-04-10T11:15:36.277683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-04-10T11:15:36.277862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 Forgetting tablet 72075186233409556 2025-04-10T11:15:36.278556Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409555 2025-04-10T11:15:36.279555Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 2025-04-10T11:15:36.280836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:36.281017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409557 2025-04-10T11:15:36.282074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-04-10T11:15:36.282288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-10T11:15:36.283229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-10T11:15:36.283383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-10T11:15:36.283465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:36.283501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-10T11:15:36.283565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:36.283869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:36.283921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:36.284039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:36.284770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:1 2025-04-10T11:15:36.284828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:36.284928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-10T11:15:36.284957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-04-10T11:15:36.287986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-04-10T11:15:36.288035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-04-10T11:15:36.288127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:36.288155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:36.288259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-10T11:15:36.288301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-04-10T11:15:36.288376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:36.288627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:36.288701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:36.288798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:36.289080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:36.290906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-04-10T11:15:36.291778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-04-10T11:15:36.291823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-04-10T11:15:36.292746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-04-10T11:15:36.292878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-04-10T11:15:36.292929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2266:4038] TestWaitNotification: OK eventTxId 139 2025-04-10T11:15:36.294333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:36.294536Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 231us result status StatusSuccess 2025-04-10T11:15:36.294903Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardLogLatency >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:36.343995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:36.344099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.344168Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:36.344223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:36.344279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:36.344330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:36.344421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.344506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:36.344855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:36.437995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:36.438066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:36.453055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:36.453354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:36.453518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:36.462250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:36.462570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:36.463322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:36.463728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:36.466069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:36.467568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:36.467638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:36.467916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:36.467971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:36.468019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:36.468198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.475022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:36.594401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-04-10T11:15:36.594698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.595006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:36.595285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:36.595355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.598218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:36.598367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:36.598595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.598659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:36.598701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:36.598741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:36.601027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.601096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:36.601153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:36.602882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.602919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.602950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:36.603001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:36.605610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:36.607099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:36.607250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:36.608013Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:36.608118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:36.608155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:36.608369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:36.608412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:36.608554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:36.608655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:36.610359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:36.610404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:36.610551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:36.610588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:36.610766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.610803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:36.610894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:36.610940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:36.610997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:36.611024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:36.611056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:36.611088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:36.611116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:36.611139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:36.611184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:36.611214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:36.611240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:36.612928Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:36.613011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:36.613048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 37.261430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.261563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.261620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-04-10T11:15:37.261736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-10T11:15:37.262300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-10T11:15:37.262476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-10T11:15:37.262559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-10T11:15:37.262606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-10T11:15:37.262664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-10T11:15:37.263016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-10T11:15:37.263117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-10T11:15:37.263167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-10T11:15:37.263197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-10T11:15:37.263226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:15:37.269630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.269875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 
72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:15:37.361737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-10T11:15:37.361912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-10T11:15:37.362000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-10T11:15:37.362047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.362092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-10T11:15:37.362135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T11:15:37.362492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-10T11:15:37.362572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-10T11:15:37.362610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-10T11:15:37.362640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.362661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T11:15:37.362811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T11:15:37.362957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:37.363058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T11:15:37.366563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.367070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.367408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.367461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.367567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:37.367730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.367766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T11:15:37.367810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T11:15:37.368220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.368274Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:15:37.368360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:37.368391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:37.368423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:37.368452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:37.368491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:15:37.368539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:37.368573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:37.368597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:37.368716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-10T11:15:37.368752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T11:15:37.368792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:37.368815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:15:37.369410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:37.369494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:37.369523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:37.369555Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:37.369593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:15:37.369992Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:37.370038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:37.370055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:37.370086Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:15:37.370108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-10T11:15:37.370176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:15:37.380557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:15:37.380769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:36.768680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:36.768778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.768844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:36.768892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:36.768936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:36.768966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:36.769035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.769121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:36.769593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:36.858842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-10T11:15:36.858906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:36.880868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:36.881198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:36.881424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:36.900839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:36.901127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:36.901885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:36.902329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:36.904477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:36.906087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:36.906163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:36.906398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:36.906446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:36.906485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:36.906658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:36.913787Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:37.053228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:37.053551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.053817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:37.054068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:37.054137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.056999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.057147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T11:15:37.057382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.057462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:37.057512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:37.057562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:37.059887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.059953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:37.059995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:37.061797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.061835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.061872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.061937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.065533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:37.067671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:37.067880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:37.068946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.069071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.069112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.069427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:37.069495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.069711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:37.069823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:37.071918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.071980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.072125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.072166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:37.072373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.072424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:37.072530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.072560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.072607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.072638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.072677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:37.072719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.072757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:37.072782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:37.072840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:37.072879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:37.072916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:37.074988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.075107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.075147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Seconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:37.223943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.223994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:37.224032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:37.224084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:37.224115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:37.224196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.224279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:37.224595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:37.241524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:37.242756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:37.242923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:37.243028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:37.243070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:37.243183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:37.243719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:37.243799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:37.243876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.243940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.244174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:15:37.244504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.244630Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:15:37.244815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.244891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.245028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:37.245076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:37.245105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:37.245126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:37.245305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.245411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.245614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-10T11:15:37.245939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.246867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.247645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.254658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.254742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.254899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:37.254946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.254984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:37.255082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-04-10T11:15:37.307683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:37.307807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:462:2412] sender: [1:522:2058] recipient: [1:15:2062] 2025-04-10T11:15:37.308517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:37.308648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:37.308684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:520:2457] TestWaitNotification: OK eventTxId 100 2025-04-10T11:15:37.309175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.309437Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 264us result status StatusSuccess 2025-04-10T11:15:37.309929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.310502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.310737Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 236us result status StatusSuccess 2025-04-10T11:15:37.311090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15355, MsgBus: 31130 2025-04-10T11:15:31.341934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641582914583679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:31.342168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001678/r3tmp/tmp8ohAK4/pdisk_1.dat 2025-04-10T11:15:31.661339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15355, node 1 2025-04-10T11:15:31.711558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:31.711876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:31.720690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:31.744056Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:31.744086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:31.744122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:31.744309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31130 TClient is connected to server localhost:31130 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:32.301584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:32.326866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:32.483566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:32.632288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:32.710258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.304518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641595799487346:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:34.304633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:34.591187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.615739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.643628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.672308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.701534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.733434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:34.809964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641595799487863:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:34.810037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:34.810071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641595799487868:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:34.813259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:34.821867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641595799487870:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:34.919373Z node 1 :TX_PROXY ERROR: Actor# [1:7491641595799487925:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:36.342222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641582914583679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:36.342322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Delete+QueryService-UseSink >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:37.041153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:37.041251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.041350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:37.041418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:37.041468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:37.041497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:37.041576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.041686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:37.042084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:37.124066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:37.124131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:37.142995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:37.143234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:37.143402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:37.151200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:37.151469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:37.152155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.152526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:37.155014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.156484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.156548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.156795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:37.156862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.156909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:37.157090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.164697Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:37.282987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:37.283289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.283543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:37.283785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:37.283844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.286510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.286642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:37.286821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.286905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:37.286950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-10T11:15:37.286986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:37.288988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.289045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:37.289083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:37.290703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.290753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.290793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.290853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.294547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:37.296466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:37.296672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:37.297735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.297877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.297947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.298323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:37.298385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.298587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:37.298693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:37.300827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.300884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.301049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.301094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:37.301338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.301400Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:37.301514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.301551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.301608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.301642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.301692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:37.301733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.301768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:37.301798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:37.301862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:37.301903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:37.301937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:37.312456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.312608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.312655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:15:37.466738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.466805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.466931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:37.467053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.467090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:15:37.467126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:15:37.467395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.467446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-10T11:15:37.467503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:37.467539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.467582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:37.467618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.467671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:15:37.467723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.467762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:15:37.467795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:15:37.468003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:37.468052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:15:37.468086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:37.468117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:15:37.468797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:37.468890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:37.468924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at 
schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:37.468966Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:37.469007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:37.469697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:37.469786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:37.469820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:37.469868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:37.469903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:37.469974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:15:37.471703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:37.471786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:37.471813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:37.473181Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:37.473411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.473715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:37.474081Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:37.474863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:37.475089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-04-10T11:15:37.475812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:37.476041Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 
72075186233409547 Forgetting tablet 72075186233409548 2025-04-10T11:15:37.476367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:37.476598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:37.477222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409547 2025-04-10T11:15:37.477576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:37.477619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:37.477718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:37.478114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:37.478173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:37.478225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:37.480726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:37.480790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:37.480871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:37.480889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:37.481149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:37.481184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:37.482168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:37.482274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:15:37.482632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:37.482696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:37.483180Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:37.483286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:37.483331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
102: satisfy waiter [1:516:2470] TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:37.483758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.483916Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 182us result status StatusPathDoesNotExist 2025-04-10T11:15:37.484083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:36.972012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:36.972153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.972226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:36.972292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:36.972346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:36.972385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:36.972463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:36.972572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-10T11:15:36.973050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:37.070568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:37.070633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:37.087804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:37.088121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:37.088376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:37.096472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:37.096705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:37.097279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.097605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:37.099361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.100570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.100617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.100779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:37.100815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.100855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:37.101030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.106831Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:37.216785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:37.217089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.217382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:37.217648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:37.217727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.220324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:15:37.220469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:37.220673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.220744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:37.220791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:37.220830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:37.222946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.223002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:37.223090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:37.224877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.224921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.224966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.225018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.234896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:37.237674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:37.237927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:37.239117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:37.239284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.239354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.239699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:37.239765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:37.239981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:37.240089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:37.242533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.242576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:37.242732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.242766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:37.243035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.243106Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:37.243226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.243267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.243314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:37.243360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.243401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:37.243640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:37.243677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:37.243708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:37.243765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:37.243798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:37.243828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:37.253042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.253207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:37.253256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:37.718376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.719452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.719855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.719915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:15:37.720046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:37.720093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.720217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:37.720261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.720303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:15:37.720411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:626:2560] message: TxId: 102 2025-04-10T11:15:37.720474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:37.720531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:15:37.720568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:15:37.720705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:37.722768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:37.722806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:627:2561] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-04-10T11:15:37.725636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:37.725847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.725970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:15:37.728484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-04-10T11:15:37.728647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-04-10T11:15:37.729150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.729460Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 262us result status StatusSuccess 2025-04-10T11:15:37.729929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.730535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.730717Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 173us result status StatusSuccess 2025-04-10T11:15:37.731074Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 
1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.731738Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.731853Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 125us result status StatusSuccess 2025-04-10T11:15:37.732092Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:37.732560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.732732Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 138us result status StatusSuccess 2025-04-10T11:15:37.732991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { 
Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:32.809602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:32.809711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:32.809764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:32.809819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:32.809857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:32.809877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:32.809949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:32.810044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:32.810423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:32.890613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:32.890691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:32.904196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:15:32.904485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:32.904717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:32.916164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:32.916450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:32.916989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:32.917419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:32.919954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:32.921479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:32.921541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:32.921902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:32.921957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:32.922002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:32.922203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:32.929760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:33.053092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:33.053378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.053621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:33.053869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:33.053932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.059121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.059304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:33.059498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:15:33.059564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:33.059603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:33.059637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:33.061918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.061978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:33.062015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:33.063996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.064046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.064105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.064173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.068194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:33.070891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:33.071132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:33.072373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:33.072557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:33.072620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.073007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:33.073082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:33.073289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:33.073425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-10T11:15:33.076244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:33.076303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:33.076497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:33.076542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:33.076815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:33.076888Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:33.077007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.077048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.077113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:33.077156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.077197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:33.077245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:33.077312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:33.077365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:33.077469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:33.077533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:33.077576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:33.080688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.080829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:33.080876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
hemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:37.695739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-10T11:15:37.695839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:37.695880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-10T11:15:37.696273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.696326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-10T11:15:37.696363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:37.696413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-10T11:15:37.696502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T11:15:37.696628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-10T11:15:37.696749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:37.696848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:37.700290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.700955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.701183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.701215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:37.701445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:37.701583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.701628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:15:37.701684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:15:37.701955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-04-10T11:15:37.702027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:37.702139Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.702225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:15:37.702269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-10T11:15:37.703447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:37.703546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:37.703578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:37.703613Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-10T11:15:37.703716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:37.705468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:37.705555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:37.705582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:37.705614Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:37.705647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:37.705772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:15:37.706950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:37.707000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:37.707445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:37.707672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:37.707709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:37.707760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 
2025-04-10T11:15:37.707793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:37.707829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:15:37.707915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-10T11:15:37.707974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:37.708013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:15:37.708040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:37.708130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:37.709035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:37.709092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:37.710805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:37.711308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:37.712555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:37.712612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-10T11:15:37.712701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:37.712753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:667:2601] 2025-04-10T11:15:37.713543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:37.714522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:37.714754Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 243us result status StatusSuccess 2025-04-10T11:15:37.715219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD]
>> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService
|88.4%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD]
Test command err:
Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR }
2025-04-10T11:15:37.129353Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo
2025-04-10T11:15:37.141373Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:37.141844Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:15:37.141912Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.141955Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:15:37.142006Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.142059Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.142239Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:15:37.143119Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2025-04-10T11:15:37.143201Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:37.166324Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.169720Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.169872Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.173874Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.174042Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.175733Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.176943Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:267:2257] 2025-04-10T11:15:37.179705Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-10T11:15:37.180651Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2257] 2025-04-10T11:15:37.180722Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:37.180794Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:37.185709Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:270:2259], now have 1 active actors on pipe 2025-04-10T11:15:37.238992Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:37.242701Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:37.243000Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:37.243047Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.243089Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:15:37.243146Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.243210Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.243302Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:37.244040Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-10T11:15:37.244174Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:37.244390Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.246915Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.247067Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.247847Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.247955Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.248287Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.248531Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:412:2365] 2025-04-10T11:15:37.250845Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:15:37.250917Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:412:2365] 2025-04-10T11:15:37.250981Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:37.251035Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:37.251886Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-10T11:15:37.253657Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:423:2370], now have 1 active actors on pipe 2025-04-10T11:15:37.253734Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:425:2371], now have 1 active actors on pipe 2025-04-10T11:15:37.254075Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:423:2370] destroyed 2025-04-10T11:15:37.254475Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:425:2371] destroyed 2025-04-10T11:15:37.662639Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:37.666833Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:37.667194Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:15:37.667242Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.667282Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:15:37.667333Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.667384Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.667445Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:15:37.668090Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:262:2254], now have 1 active actors on pipe 2025-04-10T11:15:37.668251Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:37.668449Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.670827Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.670972Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.671684Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:37.671820Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.672168Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.672371Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:270:2260] 2025-04-10T11:15:37.674965Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing c ... 
xId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.763172Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:15:37.763222Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.763271Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.763329Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:15:37.764016Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:468:2408], now have 1 active actors on pipe 2025-04-10T11:15:37.764157Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:37.764364Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 5(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:37.766898Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:37.767027Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.767855Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:37.767963Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.768325Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.768551Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:476:2414] 2025-04-10T11:15:37.770928Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:15:37.771003Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:476:2414] 2025-04-10T11:15:37.771058Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:37.771118Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:15:37.771990Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:479:2416], now have 1 active actors on pipe 2025-04-10T11:15:37.789295Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:37.793827Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:37.794211Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:37.794259Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.794300Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:15:37.794338Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.794385Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.794449Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:37.795165Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:528:2453], now have 1 active actors on pipe 2025-04-10T11:15:37.795243Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:37.795437Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.797950Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.798240Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.799035Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:37.799164Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.799525Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.799755Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:536:2459] 2025-04-10T11:15:37.801787Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-10T11:15:37.801872Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:536:2459] 2025-04-10T11:15:37.801947Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:37.801991Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:37.802775Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:539:2461], now have 1 active actors on pipe 2025-04-10T11:15:37.804296Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:547:2464], now have 1 active actors on pipe 2025-04-10T11:15:37.804404Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:548:2465], now have 1 active actors on pipe 2025-04-10T11:15:37.804516Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:549:2465], now have 1 active actors on pipe 2025-04-10T11:15:37.815531Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:554:2469], now have 1 active actors on pipe 2025-04-10T11:15:37.841258Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:37.843944Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:37.844309Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:37.844361Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:37.844508Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:37.845314Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:37.845386Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:37.845520Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:37.845877Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:37.846144Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:611:2514] 2025-04-10T11:15:37.848173Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:15:37.849741Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:15:37.850082Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:15:37.850467Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:15:37.850811Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-10T11:15:37.850866Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:15:37.850907Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:15:37.850950Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed.
2025-04-10T11:15:37.851006Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:611:2514]
2025-04-10T11:15:37.851061Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-04-10T11:15:37.851111Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0
2025-04-10T11:15:37.852016Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:548:2465] destroyed
2025-04-10T11:15:37.852092Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:547:2464] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
>> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD]
>> KqpQueryPerf::IdxLookupJoin-QueryService
>> TYardTest::TestMultiYardLogLatency [GOOD]
>> TYardTest::TestMultiYardStartingPoints
>> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink
>> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD]
|88.4%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log}
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
>> TYardTest::TestMultiYardStartingPoints [GOOD]
>> TYardTest::TestMultiYardLogMultipleWriteRead
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousDefine
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
>> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas
>> KqpQueryPerf::Replace+QueryService+UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:38.305528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:38.305624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:38.305677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:38.305715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:38.305752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:38.305778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:38.305835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:38.305921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:38.306281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:38.381898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:38.381954Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:15:38.395359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:15:38.395558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:15:38.395689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:15:38.403431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:15:38.403706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:38.404379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.404799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:38.406980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.408373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:38.408439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.408664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:38.408716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:38.408757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:38.408923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.416264Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:38.524985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:38.525195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.525390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:38.525617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:38.525658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.527944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.528141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:38.528336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.528392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:38.528432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:38.528465Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:38.530661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.530723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:38.530759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:38.532457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.532491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.532521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.532570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.535286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:38.537085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:38.537234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:38.537987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.538097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:38.538132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.538457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:38.538512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.538667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:38.538741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:38.540886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:38.540940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T11:15:38.541101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.541143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:38.541403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.541447Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:38.541549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:38.541585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.541636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:38.541667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.541699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:38.541738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.541780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:38.541814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:38.541883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:38.541917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:38.541956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:38.544298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:38.544401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:38.544433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3409552 2025-04-10T11:15:39.029545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-10T11:15:39.029784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:15:39.032483Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:39.032809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:39.032870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:15:39.032947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:39.033004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:39.033039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 Forgetting tablet 72075186233409552 2025-04-10T11:15:39.036132Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:39.036347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.036608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2025-04-10T11:15:39.038260Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-10T11:15:39.039038Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:39.039309Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-10T11:15:39.039492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:39.039733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409551 2025-04-10T11:15:39.042432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-10T11:15:39.042675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 
2] was 4 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-10T11:15:39.044196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:39.044411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:39.044973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:39.045176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:39.045640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:39.046206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:39.046368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:39.046416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:39.046578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:39.050621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:39.050691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:39.050802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-10T11:15:39.050829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-10T11:15:39.050941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:39.051033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:39.051084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:39.051171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.052205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:39.052246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:39.054515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:39.054559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:39.054675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:39.054703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:39.054792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:39.054816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:39.054890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:39.054933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:39.055146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:39.057018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:15:39.057296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:15:39.057384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:15:39.057898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:15:39.058010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:39.058071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:785:2674] TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:39.058668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.058907Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 237us result status StatusPathDoesNotExist 2025-04-10T11:15:39.059109Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:39.059568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.059773Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 214us result status StatusSuccess 2025-04-10T11:15:39.060148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:15:38.895885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:15:38.895967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:38.896024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:15:38.896068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:15:38.896125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:15:38.896152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:15:38.896217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:15:38.896288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:15:38.896623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:15:38.966368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:15:38.966435Z node 1 :IMPORT WARN: Table profiles
were not loaded 2025-04-10T11:15:38.984356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:38.984641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:38.984798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:38.997463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:38.997733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:38.998434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.998786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:39.000671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.002007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:39.002064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.002313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:39.002360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.002399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:39.002560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.009124Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:39.130113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:39.130412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.130649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:39.130874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:39.130930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.133298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.133455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:39.133637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.133694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:39.133735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:39.133765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:39.136101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.136159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:39.136194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:39.138024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.138067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.138106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.138196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.141644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:39.143964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:39.144209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:39.145433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.145645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:39.145708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.146067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:39.146118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.146299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.146381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:39.148688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:39.148731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.148915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.148962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:39.149182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.149226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:39.149344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:39.149416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.149472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:39.149508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.149547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:39.149585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.149626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:39.149656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:39.149716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:39.149749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:39.149774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:39.151957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:39.152087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:39.152120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
O: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:15:39.229436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.229476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:39.229645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-10T11:15:39.229806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.229893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:39.231003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:39.232593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:39.233892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:39.233946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.234113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:39.234275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.234306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:15:39.234339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:39.234631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.234673Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-10T11:15:39.234717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:39.234747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:39.234779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:39.234807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:39.234847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:39.234922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:39.234969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:39.234997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:39.235067Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:39.235101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:39.235130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:39.235154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:15:39.235648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:39.235723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:39.235755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:39.235789Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:39.235822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:39.236393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:39.236471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:39.236496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:39.236529Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:39.236559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:39.236637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:39.236848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:39.236886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:39.236975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:39.237235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:39.237273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:39.237353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.240508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:39.240788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:39.241900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:39.241997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:15:39.242245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:39.242291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:39.242705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:39.242795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:39.242850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:39.243267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.243452Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist 2025-04-10T11:15:39.243654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:39.244268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.244459Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 226us result status StatusSuccess 2025-04-10T11:15:39.244891Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TSchemeShardSubDomainTest::Delete >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-10T11:15:38.897961Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:38.901708Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:38.902058Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:15:38.902104Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:38.902138Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:15:38.902208Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:38.902251Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:38.902308Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:15:38.902874Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-10T11:15:38.902951Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:38.920303Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:38.922834Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:38.922998Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:38.923774Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:38.923976Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:38.924329Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:38.924653Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-10T11:15:38.926900Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-10T11:15:38.926964Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-10T11:15:38.927018Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:38.927066Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:38.927803Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-10T11:15:38.979825Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:38.984927Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:38.985313Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-10T11:15:38.985399Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:38.985443Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:15:38.985503Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:38.985576Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:38.985654Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:15:38.986390Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-10T11:15:38.986537Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:38.986749Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:38.989293Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:38.989438Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:38.990129Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:38.990296Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:38.990716Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:38.991004Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:413:2366] 2025-04-10T11:15:38.992461Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:15:38.992513Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:413:2366] 2025-04-10T11:15:38.992562Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:38.992603Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:38.993218Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-10T11:15:39.011841Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:39.016978Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:39.017421Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:39.017478Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:39.017521Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:15:39.017569Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:39.017624Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:39.017685Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:39.018452Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-04-10T11:15:39.018593Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:39.018809Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:39.021695Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:39.021835Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:39.022702Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:39.022847Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:39.023282Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:39.023536Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:473:2411] 2025-04-10T11:15:39.025738Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:15:39.025816Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:473:2411] 2025-04-10T11:15:39.025881Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:39.025938Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:39.026895Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:476:2413], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-04-10T11:15:39.045236Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:485:2416], now have 1 active actors on pipe 2025-04-10T11:15:39.045916Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:488:2417], now have 1 active actors on pipe 2025-04-10T11:15:39.046340Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:489:2417], now have 1 active actors on pipe 2025-04-10T11:15:39.047021Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:485:2416] destroyed 2025-04-10T11:15:39.047690Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:488:2417] destroyed 2025-04-10T11:15:39.047791Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:489:2417] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:39.154600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:39.154702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:39.154770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:39.154818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:39.154862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:39.154892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:39.154963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:39.155060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:39.155458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:39.230622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:39.230674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:39.247175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:39.247399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:39.247523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:39.254057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:39.254330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:39.255139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.255529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:39.257420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.258914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:39.258983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.259217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:39.259275Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.259321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:39.259519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.267402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:39.379873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:39.380165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.380410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:39.380700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:39.380761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.383658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.383850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:39.384092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.384170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:39.384337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:39.384383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:39.386901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.386970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:39.387021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:39.389407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.389467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.389511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.389578Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.393703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:39.397514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:39.397716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:39.398919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.399115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:39.399183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.399526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:39.399588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:39.399767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.399878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:39.402659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:39.402710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.402866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.402895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:39.403143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.403193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:39.403297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:39.403329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.403376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:39.403402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.403437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:39.403468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:39.403501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:39.403532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:39.403623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:39.403666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:39.403698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:39.406021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:39.406164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:39.406201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:15:39.453314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:15:39.453701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:39.453831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:39.453877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:39.454140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:15:39.454222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-10T11:15:39.454384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:39.454446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:39.454497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:39.456565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-10T11:15:39.456608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:39.456764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:39.456891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:39.456932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:15:39.456973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:15:39.457352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:15:39.457395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:15:39.457487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:39.457524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:39.457557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:15:39.457580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:39.457606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:15:39.457647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:15:39.457677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:15:39.457707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:15:39.457763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:39.457793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T11:15:39.457820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:39.457841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:39.458512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:39.458576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:39.458599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:39.458634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 
2025-04-10T11:15:39.458671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:39.459266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:39.459335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:15:39.459365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:15:39.459389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:39.459416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:39.459488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T11:15:39.462748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:15:39.463227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-10T11:15:39.463481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:39.463515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:39.463607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:39.463622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:39.464014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:39.464104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:39.464148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:311:2302] 2025-04-10T11:15:39.464370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:39.464420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:39.464442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:39.464884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.465086Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 246us result status StatusSuccess 2025-04-10T11:15:39.465494Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:39.465974Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:39.466194Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 150us result status StatusPathDoesNotExist 2025-04-10T11:15:39.466304Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4943, MsgBus: 3397 2025-04-10T11:15:28.418977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641567513222072:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:28.419074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/00167c/r3tmp/tmpXvf4cU/pdisk_1.dat 2025-04-10T11:15:28.713987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4943, node 1 2025-04-10T11:15:28.786375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:28.788415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:28.795250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:28.819485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:28.819524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:28.819535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:28.819701Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3397 TClient is connected to server localhost:3397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:29.394874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:29.422621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:29.558243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:29.717592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:29.791724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:31.511784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641580398125755:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:31.512610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:31.903321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:31.941861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:32.011619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:32.061349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:32.097857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:32.131593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:32.173655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641584693093565:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:32.173712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:32.173773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641584693093570:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:32.178048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:32.187167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641584693093572:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:32.292217Z node 1 :TX_PROXY ERROR: Actor# [1:7491641584693093626:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:33.418833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641567513222072:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:33.418924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27059, MsgBus: 29123 2025-04-10T11:15:34.201056Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641596639439394:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:34.201147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00167c/r3tmp/tmpLIOJ42/pdisk_1.dat 2025-04-10T11:15:34.328845Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27059, node 2 2025-04-10T11:15:34.344182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:34.344248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:34.345987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:34.377569Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:34.377611Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:34.377620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:34.377741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29123 TClient is connected to server localhost:29123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:34.757780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.784938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.854865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.990926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:35.066043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:36.993281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641605229375773:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.993411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:37.012316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.044653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.072948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.099215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.130409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.165109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:37.244269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641609524343586:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:37.244350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641609524343591:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:37.244352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:37.247932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:37.256946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641609524343593:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:37.336032Z node 2 :TX_PROXY ERROR: Actor# [2:7491641609524343648:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TYardTest::TestSysLogOverwrite >> TSchemeShardSubDomainTest::Create >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22287, MsgBus: 16618 2025-04-10T11:15:34.932120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641595570208086:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:34.932221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00166c/r3tmp/tmpEIhGGr/pdisk_1.dat 2025-04-10T11:15:35.279302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22287, node 1 2025-04-10T11:15:35.328868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:35.328981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:35.331199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:35.350050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:35.350074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:35.350084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:35.350254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16618 TClient is connected to server localhost:16618 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:35.849038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:35.871712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:35.989456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:36.146259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:36.224374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:38.154406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641612750079050:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:38.154543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:38.438613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.468466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.493755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.523754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.558502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.592815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:38.639614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641612750079558:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:38.639693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:38.639705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641612750079563:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:38.643720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:38.653573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641612750079565:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:38.721528Z node 1 :TX_PROXY ERROR: Actor# [1:7491641612750079619:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:40.155554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:40.155659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.155706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:40.155761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:40.155802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:40.155827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:40.155878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.155983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:40.156446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:40.233580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:40.233662Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:40.250658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:40.251077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:40.251281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:40.260999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:40.261304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:40.262129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.262641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:40.265294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.266735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.266790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.266971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:40.267020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.267069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:40.267218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.274788Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:40.424876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:40.425191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.425501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:40.425755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:40.425819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.428702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.428859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:40.429073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.429146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:40.429205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:40.429253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:40.431677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.431746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:40.431795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:40.433721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.433770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.433815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.433879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.438064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:40.440470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:40.440673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:40.441800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.441958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.442018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.442378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:40.442470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.442673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:40.442778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:40.445198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.445250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.445465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.445512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:40.445773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.445849Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:40.445968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.446006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.446074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.446109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.446167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:40.446234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.446295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:40.446329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:40.446405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:40.446445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:40.446483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:40.449076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.449210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.449254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxId: 101, partId: 0, tablet: 72075186233409548 2025-04-10T11:15:40.576701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:15:40.576916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-04-10T11:15:40.576976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-10T11:15:40.577023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-04-10T11:15:40.582542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T11:15:40.582742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-04-10T11:15:40.582796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-10T11:15:40.582844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-04-10T11:15:40.585933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.588645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-04-10T11:15:40.588803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-04-10T11:15:40.588854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-10T11:15:40.588901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-10T11:15:40.588941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-10T11:15:40.589845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.591981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.592154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.592210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.592253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:40.592306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-10T11:15:40.592463Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:40.594525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:15:40.594650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-10T11:15:40.595001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.595153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.595208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:40.595523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:15:40.595578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:40.595758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:40.595841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:40.598106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.598175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:40.598400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.598459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:40.598807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.598865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:15:40.598972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:40.599005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:40.599049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:40.599087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-04-10T11:15:40.599130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:40.599174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:40.599220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:40.599261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:40.599466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:40.599517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-04-10T11:15:40.599571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-10T11:15:40.600270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:40.600381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:40.600424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:40.600467Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:15:40.600511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:40.600608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-10T11:15:40.600658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:306:2297] 2025-04-10T11:15:40.604144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:40.604247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:40.604286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:40.604920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:40.605194Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 263us result status StatusSuccess 2025-04-10T11:15:40.605607Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> TYardTest::TestSysLogOverwrite [GOOD] >> TYardTest::TestUpsAndDownsAtTheBoundary >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:40.209964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:40.210073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.210138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:40.210218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:40.210273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:40.210321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:40.210394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.210510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:40.210870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:40.292295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:40.292365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:40.312904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:40.313260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:40.313601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:40.328317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:40.328579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:40.329161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.329500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:40.331573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.332861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.332921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.333129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:40.333177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.333215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:40.333381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.340266Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:40.494907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:40.495200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.495473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:40.495785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:40.495849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.498332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.498491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:40.498665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.498721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:40.498756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:40.498802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:40.500740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.500826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:40.500892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:40.502717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.502787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.502832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.502880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.512283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:40.517847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:40.518038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:40.519057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.519221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.519279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.519640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:40.519834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:15:40.520031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:40.520133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:40.522094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.522136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.522301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.522344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:40.522580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.522631Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:40.522739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.522769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.522824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.522850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.522879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:40.522912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.522941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:40.522967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:40.523022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:40.523053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:40.523083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:40.525621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.525733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.525779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, at schemeshard: 72057594046678944, message: Source { RawX1: 600 RawX2: 4294969837 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.208249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-04-10T11:15:41.208387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 600 RawX2: 4294969837 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.208439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:15:41.208526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 600 RawX2: 4294969837 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.208595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.208643Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.208705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:15:41.208757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-10T11:15:41.209201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.209239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-04-10T11:15:41.209349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.209399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:15:41.209468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-10T11:15:41.209512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.209550Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.209579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at 
schemeshard: 72057594046678944 2025-04-10T11:15:41.209609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:2 129 -> 240 2025-04-10T11:15:41.213204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:41.213389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:41.216406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:41.216529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:15:41.216614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.216725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.216841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.216976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.217027Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-04-10T11:15:41.217148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-04-10T11:15:41.217211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-10T11:15:41.217259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-04-10T11:15:41.217301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-10T11:15:41.217367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-04-10T11:15:41.217769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.217902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.217948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-04-10T11:15:41.218015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-04-10T11:15:41.218045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-10T11:15:41.218080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-04-10T11:15:41.218105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-10T11:15:41.218132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-04-10T11:15:41.218270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:480:2429] message: TxId: 107 2025-04-10T11:15:41.218341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-10T11:15:41.218391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-10T11:15:41.218430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-10T11:15:41.218557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:15:41.218603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-04-10T11:15:41.218625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-04-10T11:15:41.218693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:41.218726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-04-10T11:15:41.218747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-04-10T11:15:41.218795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T11:15:41.221285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.221346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:534:2483] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-10T11:15:41.225393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:41.225953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-04-10T11:15:41.226068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:41.226123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:41.228355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:41.228563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-10T11:15:41.229015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-10T11:15:41.229061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-10T11:15:41.229606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-10T11:15:41.229706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.229751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:730:2648] TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:40.634218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:40.634312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.634370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:40.634426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:40.634478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:40.634504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:40.634619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.634702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:40.635099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:40.717655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:40.717710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:40.734267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:40.734575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:40.734759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:40.747560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:40.747781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:40.748384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.748721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:40.750772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.752219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.752278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.752487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:40.752539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.752581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:40.752754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.759894Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:40.912128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:40.912388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.912635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:40.912880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:40.912951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.916614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.916779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:40.916960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.917019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:40.917058Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:40.917090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:40.920104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.920169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:40.920203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:40.921827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.921865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.921897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.921944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.924612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:40.926293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:40.926436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:40.927219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.927325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.927363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.927594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:40.927630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.927788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:40.927886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:40.929600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.929634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.929816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.929847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:40.930045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.930087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:40.930194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.930230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.930282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.930314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.930347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:40.930377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.930403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:40.930442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:40.930490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:40.930521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:40.930547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:40.932735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.932829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.932856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:41.080679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:41.080707Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:41.080748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:41.080814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:41.082731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:41.082788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:41.082825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:41.084793Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T11:15:41.085367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.085695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:41.086833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:41.086977Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:41.087140Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-10T11:15:41.088120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:41.088325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-10T11:15:41.088730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.088880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:41.089626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:41.089957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:41.090005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:41.090133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:41.090578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:41.090636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:41.090704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:41.092444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:41.092504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:41.093536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:41.093570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:41.093631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:41.093665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:41.095572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:15:41.095902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:41.095940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:41.096361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:41.096452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.096488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:499:2453] TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:41.097025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.097208Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 223us result status StatusPathDoesNotExist 2025-04-10T11:15:41.097392Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:41.097917Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.098082Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 169us result status StatusSuccess 2025-04-10T11:15:41.102530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-10T11:15:41.103079Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T11:15:41.103165Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-10T11:15:41.103244Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-10T11:15:41.103726Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.103907Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 188us result status StatusSuccess 2025-04-10T11:15:41.104217Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TSchemeShardSubDomainTest::CreateDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] 2025-04-10T11:15:41.229889Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-04-10T11:15:41.233640Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-04-10T11:15:41.233717Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:29.202280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:29.202380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:29.202437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:29.202485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:29.202564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:29.202605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:29.202678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:29.202765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:29.203128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:29.291672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:29.291734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:29.308096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:29.308356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:29.308541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:29.317026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:29.317350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:29.318188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:29.318584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
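The BLOCKED/ALREADY errors and the TSYS31 marker above are generation fencing at work: a rebooted tablet boots with a suggested generation, and BlobStorage refuses the block when it already knows a generation at least that new ("SuggestedGeneration: 0, KnownGeneration: 3"). A hedged sketch of that decision, with hypothetical types standing in for the real boot path:

    // Sketch of the generation-fencing check behind the TSYS31 line above.
    // Hypothetical names; the real logic lives in the tablet boot path.
    #include <cstdint>
    #include <iostream>

    enum class EBootStatus { Ok, ReasonBootBSError };

    // BlobStorage answers a block request with ALREADY when it has recorded
    // a generation >= the one the booting tablet suggests.
    EBootStatus HandleBlockResult(uint32_t suggestedGen, uint32_t knownGen) {
        if (suggestedGen <= knownGen) {
            // Corresponds to "SuggestedGeneration: 0, KnownGeneration: 3".
            return EBootStatus::ReasonBootBSError;
        }
        return EBootStatus::Ok;
    }

    int main() {
        auto st = HandleBlockResult(/*suggested*/ 0, /*known*/ 3);
        std::cout << (st == EBootStatus::ReasonBootBSError
                          ? "boot rejected: stale generation\n"
                          : "boot ok\n");
    }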
2025-04-10T11:15:29.320796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:29.322341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:29.322405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:29.322621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:29.322671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:29.322727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:29.322899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.329786Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:29.458371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:29.458663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.458931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:29.459138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:29.459200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.461739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:29.461917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:29.462169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.462249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:29.462305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:29.462342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:29.464739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.464807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:29.464871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-04-10T11:15:29.467064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.467115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:29.467162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.467226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.477233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:29.479310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:29.479489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:29.480590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:29.480742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:29.480793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.481127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:29.481185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:29.481385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:29.481485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:29.483915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:29.483971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:29.484127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:29.484180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:29.484422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
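The numeric transitions in this trace ("Change state for txid 1:0 2 -> 3", "3 -> 128", then "128 -> 240" once the coordinator plan step arrives) track a sub-operation through its phases. The codes below are taken from the log, but the enum names are inferred from the adjacent TCreateParts / TConfigureParts / TPropose / TDone messages and are assumptions, not the real schemeshard definitions:

    // Sketch of the sub-operation state progression visible in the trace:
    // 2 -> 3 -> 128 -> 240.
    #include <iostream>

    enum ETxState {
        CreateParts    = 2,    // "TCreateParts opId# 1:0 ProgressState"
        ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ProgressState"
        Propose        = 128,  // waits for the coordinator plan step
        Done           = 240,  // "TDone opId# 1:0 ProgressState"
    };

    ETxState Next(ETxState s) {
        switch (s) {
            case CreateParts:    return ConfigureParts;
            case ConfigureParts: return Propose;
            case Propose:        return Done;  // on TEvOperationPlan
            default:             return Done;
        }
    }

    int main() {
        for (ETxState s = CreateParts; s != Done; s = Next(s))
            std::cout << "Change state " << s << " -> " << Next(s) << "\n";
    }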
2025-04-10T11:15:29.484464Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:29.484558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:29.484584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.484634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:29.484670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.484703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:29.484738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:29.484766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:29.484790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:29.484850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:29.484884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:29.484915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:29.487256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:29.487417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:29.487466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
teStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:41.018268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:485:2443] sender: [1:769:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:485:2443] sender: [1:772:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:485:2443] sender: [1:773:2058] recipient: [1:771:2691] Leader for TabletID 72057594046678944 is [1:774:2692] sender: [1:775:2058] recipient: [1:771:2691] 2025-04-10T11:15:41.068082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:41.068199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:41.068256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:41.068322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:41.068362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:41.068400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:41.068461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:41.068562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:41.068920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:41.088641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:41.090107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:41.090340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:41.090542Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-10T11:15:41.090580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:41.090732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:41.091372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:41.091484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:41.091541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:41.091603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.091658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.092069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.092178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:15:41.092226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T11:15:41.092319Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:15:41.092573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:41.092766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.092885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:41.092928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:41.092963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:41.092985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:41.093117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:41.093348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.093626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-10T11:15:41.093899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094418Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.094973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.095746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.104819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.104902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.105352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:41.105417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:41.105487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:41.105624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:774:2692] sender: [1:828:2058] recipient: [1:15:2062] 2025-04-10T11:15:41.138193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.138403Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 221us result status StatusSuccess 2025-04-10T11:15:41.138744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:40.870918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:40.871000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.871047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:40.871085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:40.871128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:40.871149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:40.871200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.871302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:40.871681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:40.961827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:40.961891Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-10T11:15:40.979151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:40.979473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:40.979657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:40.988079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:40.988386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:40.989179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.989649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:40.993552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.995101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.995176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.995422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:40.995484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.995532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:40.995713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.003157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:41.153484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:41.153763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.154037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:41.154293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:41.154354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.156848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.157011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:41.157230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.157300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:41.157375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:41.157423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:41.159561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.159629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:41.159670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:41.161592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.161648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.161689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.161749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.166053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:41.168398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:41.168679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:41.169795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.169961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:41.170014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.170364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:41.170422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.170607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:41.170730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:41.173060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.173112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:41.173338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.173399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:41.173662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.173729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:41.173844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:41.173881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.173946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:41.173982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.174023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:41.174072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.174107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:41.174140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:41.174237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:41.174277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:41.174317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:41.176834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:41.176961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:41.177000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:41.348160Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-10T11:15:41.349568Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:41.349752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-10T11:15:41.350099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:15:41.350604Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:41.351375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.351582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2025-04-10T11:15:41.352450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:41.352624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:41.352943Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2025-04-10T11:15:41.354218Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:41.354329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-10T11:15:41.354449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:41.354851Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-10T11:15:41.355041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 Forgetting tablet 72075186233409551 2025-04-10T11:15:41.355441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:41.355557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-10T11:15:41.356166Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:41.356315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-10T11:15:41.357215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:41.357267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:41.357401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:41.357786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:41.357825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:41.357873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:41.360033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:41.360076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:41.360277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-10T11:15:41.360853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:41.360885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:41.362876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:41.362922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:41.363037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:41.363064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:41.363196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:41.363220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:41.363265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-10T11:15:41.363656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:41.363725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:41.364193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:15:41.365271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
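The "DecrementPathDbRefCount ... was 7" through "was 1" lines above, followed by PersistRemoveSubDomain/PersistRemovePath, show refcounted path cleanup: each deleted shard drops one reference, and the path record is erased only when none remain. A simplified sketch of that bookkeeping (the real cleanup runs as separate local transactions):

    // Sketch of the refcount bookkeeping behind the
    // "DecrementPathDbRefCount ... was N" lines.
    #include <cstdint>
    #include <iostream>
    #include <map>

    std::map<uint64_t, int> pathRefs = {{2, 7}};  // LocalPathId -> refcount

    void OnShardDeleted(uint64_t pathId) {
        int& refs = pathRefs[pathId];
        std::cout << "DecrementPathDbRefCount pathId " << pathId
                  << " was " << refs << "\n";
        if (--refs == 0) {
            pathRefs.erase(pathId);
            std::cout << "PersistRemovePath for pathId " << pathId << "\n";
        }
    }

    int main() {
        for (int shard = 0; shard < 7; ++shard)
            OnShardDeleted(2);  // seven references released, then removal
    }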
2025-04-10T11:15:41.365422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-10T11:15:41.365680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:41.365736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:41.365855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:41.365878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:15:41.366355Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:41.366450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.366490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:615:2516] 2025-04-10T11:15:41.366640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:41.366724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.366749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:615:2516] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-10T11:15:41.367185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.367370Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 239us result status StatusPathDoesNotExist 2025-04-10T11:15:41.367555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:41.368049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.368240Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2025-04-10T11:15:41.368626Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-10T11:15:40.480361Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:40.486189Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:40.486629Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:15:40.486698Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:40.486740Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:15:40.486799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:40.486855Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:40.486934Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:15:40.487791Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-10T11:15:40.487890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:40.515804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:40.519301Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:40.519507Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:40.520531Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:40.520715Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:40.521207Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:40.521694Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-10T11:15:40.524588Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
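The "Config update version 1(current 0) received ... Config applied version 1" exchange above is a version-gated config update: the tablet applies an incoming config only if its version is newer than the one it currently holds. A hedged sketch of that check (hypothetical shape; the real handler also persists the config before acknowledging):

    // Sketch of the version check implied by
    // "Config update version N(current M) received".
    #include <cstdint>
    #include <iostream>

    struct TPersQueueTablet {
        uint32_t CurrentVersion = 0;

        bool HandleUpdateConfig(uint32_t version) {
            std::cout << "Config update version " << version
                      << "(current " << CurrentVersion << ") received\n";
            if (version <= CurrentVersion)
                return false;  // stale update, ignore
            CurrentVersion = version;
            std::cout << "Config applied version " << version << "\n";
            return true;
        }
    };

    int main() {
        TPersQueueTablet pq;
        pq.HandleUpdateConfig(1);
        pq.HandleUpdateConfig(1);  // duplicate: rejected as stale
        pq.HandleUpdateConfig(2);
    }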
2025-04-10T11:15:40.524667Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-10T11:15:40.524744Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:40.524806Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:40.525813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-10T11:15:40.591878Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:40.596060Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:40.596423Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:40.596482Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:40.596527Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:15:40.596571Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:40.596642Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:40.596720Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:40.597493Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-10T11:15:40.597624Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:40.597837Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:40.600400Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:40.600569Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:40.601456Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:40.601585Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:40.601999Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:40.602284Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:413:2366] 2025-04-10T11:15:40.604652Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:15:40.604727Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:413:2366] 2025-04-10T11:15:40.604792Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:40.604852Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:40.605762Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-10T11:15:40.607239Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:422:2371], now have 1 active actors on pipe 2025-04-10T11:15:40.607527Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:424:2372], now have 1 active actors on pipe 2025-04-10T11:15:40.608118Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:422:2371] destroyed 2025-04-10T11:15:40.608500Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:424:2372] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:37.895648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:37.895738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.895780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
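The test name TStoragePoolsQuotasTest::DifferentQuotasInteraction suggests it exercises how an overall subdomain disk quota interacts with per-storage-pool quotas, with usage reported through the DiskSpaceUsage blocks seen earlier. A hedged sketch of such a combined check; the field names are illustrative only and do not mirror the actual schemeshard quota structures:

    // Hedged sketch: a write is rejected if it would exceed either the
    // total quota or the quota of the storage pool it lands in.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    struct TQuotas {
        uint64_t TotalHardQuota = 0;                   // 0 means "unlimited"
        std::map<std::string, uint64_t> PerPoolQuota;  // pool kind -> bytes
    };

    bool WouldExceed(const TQuotas& q,
                     uint64_t totalUsed,
                     const std::string& pool, uint64_t poolUsed,
                     uint64_t delta) {
        if (q.TotalHardQuota && totalUsed + delta > q.TotalHardQuota)
            return true;
        auto it = q.PerPoolQuota.find(pool);
        if (it != q.PerPoolQuota.end() && poolUsed + delta > it->second)
            return true;
        return false;
    }

    int main() {
        TQuotas q{1 << 20, {{"pool-kind-1", 512 << 10}}};
        std::cout << std::boolalpha
                  << WouldExceed(q, 900 << 10, "pool-kind-1",
                                 400 << 10, 200 << 10)
                  << "\n";  // true: the per-pool limit trips first
    }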
2025-04-10T11:15:37.895819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:37.895860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:37.895879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:37.895927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:37.896002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:37.896324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:37.973695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:37.973752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:38.004032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:38.004362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:38.004543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:38.013361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:38.013599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:38.014272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.014633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:38.016826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.018190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:38.018262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.018504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:38.018555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:38.018600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:38.018821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.025401Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:38.162290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:38.162514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.162707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:38.162877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:38.162926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.165072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:38.165225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:38.165430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.165487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:38.165528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:38.165563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:38.168010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.168073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:38.168113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:38.170092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.170171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.170217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.170272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.174126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:38.176354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:38.176607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:38.177614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:15:38.177760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:38.177804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.178195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:38.178252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:38.178420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:38.178537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:38.180810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:38.180854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:38.181038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:38.181080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:38.181365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:38.181412Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:38.181542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:38.181584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.181639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:38.181670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.181708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:38.181763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:38.181796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-04-10T11:15:38.181823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:38.181891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:38.181928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:38.181960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:38.184447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:38.184559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:38.184616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 69700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:41.411939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-10T11:15:41.412079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:41.412137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-10T11:15:41.413876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.413942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-10T11:15:41.413996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:41.414040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-10T11:15:41.414127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T11:15:41.414314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-10T11:15:41.414461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:41.414531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:41.416417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.418359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.418676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.418735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:41.418932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:41.419140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.419193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path 
id: 2 2025-04-10T11:15:41.419257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:15:41.419657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.419716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:41.419820Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes have been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.419867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:15:41.419914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-10T11:15:41.421205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:41.421339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:41.421397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:41.421448Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-10T11:15:41.421491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:41.422359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:41.422456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:41.422492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:41.422522Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:41.422572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:41.422649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:15:41.426285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.426349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:41.426795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-04-10T11:15:41.427017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:41.427073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:41.427119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:41.427156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:41.427202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:15:41.427280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-10T11:15:41.427325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:41.427380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 103:0 2025-04-10T11:15:41.427420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:41.427526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:41.427986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.428038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:41.428328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:41.430125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:41.431369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.431424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-10T11:15:41.431505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:41.431548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:739:2674] 2025-04-10T11:15:41.432424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:41.434119Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.434403Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 296us result status StatusSuccess 2025-04-10T11:15:41.434894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:25.832484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:25.832616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:25.832684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:25.832742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:25.832800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:25.832840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:25.832912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:25.833014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T11:15:25.833692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:25.931690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:25.931770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:25.952530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:25.952858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:25.953054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:25.964416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:25.964710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:25.965474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:25.965918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:25.968100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.969566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:25.969628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:25.969828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:25.969877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:25.969917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:25.970080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:25.981668Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:26.131871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:26.132096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.132284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:26.132482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:26.132531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.134734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:15:26.134889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:26.135096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.135162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:26.135199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:26.135233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:26.137874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.137938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:26.137973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:26.140004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.140068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.140113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.140184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.143936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:26.151204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:26.151419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:26.152499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:26.152636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:26.152697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.152999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:26.153058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:26.153230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:26.153362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:26.157023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:26.157085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:26.157253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:26.157299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:26.157542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:26.157588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:26.157691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:26.157759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.157821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:26.157853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.157889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:26.157926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:26.157960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-04-10T11:15:26.157992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:26.158058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:26.158097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:26.158129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:26.160641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:26.160753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:26.160789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
Id 72075186233409548 2025-04-10T11:15:40.869881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:40.869932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:40.870796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.870924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 0 RawX2: 0 } } Step: 3200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.870978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3200, at schemeshard: 72057594046678944 2025-04-10T11:15:40.871145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:40.871217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-10T11:15:40.871399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:40.871462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:40.872994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:40.873032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:40.873260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.873291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:40.873413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:40.873466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:40.873576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.873599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:15:40.873631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:15:40.873672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:15:40.874139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.874200Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:15:40.874270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:40.874297Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:40.874333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:40.874361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:40.874387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T11:15:40.874417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:40.874454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 103:0 2025-04-10T11:15:40.874478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:40.874571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:40.874605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-10T11:15:40.874644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-04-10T11:15:40.874668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-10T11:15:40.875104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:40.875176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:40.875207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:40.875239Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:40.875270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:40.875501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:40.875530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:40.875574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:40.875874Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-10T11:15:40.876397Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-10T11:15:40.876494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:40.876557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:40.876581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:40.876606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-04-10T11:15:40.876646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:40.876723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:15:40.877007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:40.877479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:40.880879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:40.880988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:40.882865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:40.882963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:40.883038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:15:40.883494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:15:40.883536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:15:40.884005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:15:40.884137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:40.884181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:779:2694] TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:41.352094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:41.352326Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 255us result status StatusSuccess 2025-04-10T11:15:41.352737Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29945, MsgBus: 20837 2025-04-10T11:15:36.591968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641604254340539:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:36.592145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001663/r3tmp/tmp7hs5jf/pdisk_1.dat 2025-04-10T11:15:36.890290Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29945, node 1 2025-04-10T11:15:36.945739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:36.946290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:36.955193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:36.991974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:36.991994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:36.992007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:36.992134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20837 TClient is connected to server localhost:20837 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:37.510338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.530978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.665719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.815465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.875002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.410573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617139244214:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.410695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.700352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.727871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.753786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.781259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.809026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.838897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.881520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617139244724:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.881622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.881724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617139244729:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.885825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:39.895776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641617139244731:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:39.950188Z node 1 :TX_PROXY ERROR: Actor# [1:7491641617139244785:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> KqpQueryPerf::RangeRead-QueryService >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25464, MsgBus: 11468 2025-04-10T11:15:37.218536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641609037558713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:37.218653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001658/r3tmp/tmpiF3eG3/pdisk_1.dat 2025-04-10T11:15:37.534493Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25464, node 1 2025-04-10T11:15:37.603257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:37.603348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:37.604962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:37.606735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:37.606766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:37.606780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:37.606910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11468 TClient is connected to server localhost:11468 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:38.140269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:38.163199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:38.307001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:38.453083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:38.521447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:40.004674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641621922462380:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:40.004785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:40.276958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.307466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.339555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.368261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.394251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.426248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:40.465910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641621922462892:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:40.465978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:40.466210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641621922462897:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:40.469978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:40.483418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641621922462899:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:40.552280Z node 1 :TX_PROXY ERROR: Actor# [1:7491641621922462953:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:41.163859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:41.163958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:41.164020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:41.164063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:41.164124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:41.164158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:41.164240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:41.164357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:41.164721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:41.238264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:41.238330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:41.251051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:41.251285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:41.251457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:41.257717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:41.257948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:41.258493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.258804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:41.260381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.261570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.261632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.261772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:41.261812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:41.261853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:41.261980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.267187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:41.374280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:41.374504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.374708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:41.375082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:41.375127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.377236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.377425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:41.377596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.377650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:41.377680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:41.377718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:41.379528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.379602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:41.379639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:41.381558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.381611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.381656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.381727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.385266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:41.387509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:41.387699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:41.388548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:41.388679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:41.388734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.388982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:41.389025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:41.389202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:41.389278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:41.391309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:41.391352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:41.391507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:41.391538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:41.391742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:41.391783Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:41.391880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:41.391933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.391967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:41.391994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.392022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:41.392052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:41.392092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:41.392125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:41.392187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:41.392213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:41.392238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:41.394371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:41.394480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:41.394516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
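Every record in this stream shares one header shape: an ISO-8601 timestamp, a node id, a component tag, and a severity (only DEBUG, INFO, NOTICE, WARN and ERROR appear in this excerpt). A minimal Python sketch for tallying severities per component once the stream is saved to a plain-text file (the filename ya_log.txt and the extra severity names TRACE/CRIT are assumptions, not taken from this log):

import re
from collections import Counter

# Header shape observed above:
# "2025-04-10T11:15:41.163859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ..."
REC = re.compile(
    r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z node \d+ '
    r':([A-Z_]+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT)\b'
)

def severity_by_component(text):
    # Count (component, severity) pairs across the whole stream.
    return Counter(m.groups() for m in REC.finditer(text))

if __name__ == "__main__":
    with open("ya_log.txt", encoding="utf-8") as f:
        for (component, severity), n in severity_by_component(f.read()).most_common(10):
            print(f"{component:24} {severity:6} {n}")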
2025-04-10T11:15:42.614664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:15:42.614700Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:15:42.614735Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:42.614774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:42.615982Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:15:42.616067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:15:42.616097Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:15:42.616136Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:42.616170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:42.616246Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T11:15:42.618267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:42.618331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:42.618367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:42.618399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:42.618923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:15:42.619818Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T11:15:42.620089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.620409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:42.621868Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-10T11:15:42.622462Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:42.626360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:42.627992Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:42.628278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:42.628605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-04-10T11:15:42.630455Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2025-04-10T11:15:42.631272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:42.631513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:15:42.632213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:42.632278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:42.632366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:42.633179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:42.633245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:42.633418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:42.633799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:15:42.634318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:15:42.635921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:42.635984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:42.636143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:42.636181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:42.636250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:2 2025-04-10T11:15:42.636277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:42.638894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:42.638959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:42.639211Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:42.639335Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:42.639419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:42.639476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:42.639590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:42.641491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T11:15:42.641813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T11:15:42.641868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-10T11:15:42.642437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T11:15:42.642552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:15:42.642600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:655:2607] TestWaitNotification: OK eventTxId 105 2025-04-10T11:15:42.643563Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:42.643779Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 266us result status StatusPathDoesNotExist 2025-04-10T11:15:42.643982Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:42.644620Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:42.644792Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist 2025-04-10T11:15:42.644950Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::CreateForceDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } >> KqpQueryPerf::Insert-QueryService-UseSink >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> KqpWorkload::STOCK >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:42.550799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:42.550907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:42.550966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:42.551008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:42.551047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:42.551073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:42.551131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:42.551221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:42.551560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:42.619304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:42.619347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:42.632334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:42.632598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:42.632784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:42.644532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:42.644739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:42.645299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
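The schemeshard blocks above and below log every operation step as "Change state for txid <op> A -> B" (2 -> 3 -> 128 -> 240 for these subdomain operations). A small log-mining sketch that reconstructs the per-operation transition chain from such text; it relies only on the message format visible here, not on schemeshard internals:

import re
from collections import defaultdict

STATE = re.compile(r'Change state for txid (\d+:\d+) (\d+) -> (\d+)')

def transition_chains(text):
    # Ordered list of (from, to) state pairs per operation id.
    chains = defaultdict(list)
    for op, src, dst in STATE.findall(text):
        chains[op].append((int(src), int(dst)))
    return chains

with open("ya_log.txt", encoding="utf-8") as f:  # assumed filename
    for op, chain in transition_chains(f.read()).items():
        # e.g. "1:0: 2->3 3->128 128->240" for the AlterSubDomain operations here
        print(op + ":", " ".join(f"{a}->{b}" for a, b in chain))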
2025-04-10T11:15:42.645633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:42.648001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.649625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:42.649690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.649935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:42.649987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:42.650028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:42.650209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.656243Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:42.769975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:42.770269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.770438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:42.770607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:42.770660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.773175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.773332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:42.773549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.773620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:42.773657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:42.773692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:42.776320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.776381Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:42.776417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:42.778533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.778583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.778626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.778692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.782653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:42.785333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:42.785535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:42.786693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.786856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:42.786949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.787284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:42.787352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.787563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:42.787672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:42.790410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:42.790477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:42.790669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.790711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:42.790968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.791014Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:42.791113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:42.791151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.791215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:42.791256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.791303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:42.791345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.791383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:42.791415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:42.791487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:42.791539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:42.791582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:42.794229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:42.794345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:42.794384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
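Each test block carries microsecond timestamps, so a block's wall-clock span can be read straight off the text. A sketch, assuming the block has already been isolated into a string (timestamp layout copied from the records above):

import re
from datetime import datetime

TS = re.compile(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z')

def span_seconds(block):
    # Elapsed time between the first and last timestamp in a log block.
    stamps = [datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%fZ") for t in TS.findall(block)]
    if len(stamps) < 2:
        return 0.0
    return (max(stamps) - min(stamps)).total_seconds()

# For the CreateAlterNbsChannels block above this gives roughly
# 11:15:42.644 minus 11:15:41.163, i.e. about 1.5 seconds.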
: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-10T11:15:43.201812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:43.202314Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-10T11:15:43.203450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:15:43.203663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:43.204343Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-10T11:15:43.204808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-10T11:15:43.204978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-04-10T11:15:43.205641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409547 2025-04-10T11:15:43.206516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:43.206733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-04-10T11:15:43.207241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:43.207402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:43.208596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:43.208647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:43.208808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:43.211548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:43.211605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:43.211753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-10T11:15:43.211781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-10T11:15:43.211964Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-04-10T11:15:43.212035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:43.212059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:43.212173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:43.212268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:43.212313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:43.212381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:43.215554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:43.215602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:43.215693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:43.215717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:43.215782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:43.215803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:43.215889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:43.215927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:43.216050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:43.219075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-10T11:15:43.219366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:43.219414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:15:43.219513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:43.219533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:43.220006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:43.220143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:43.220202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:722:2612] 2025-04-10T11:15:43.220363Z node 1 
:FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:43.220448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:43.220472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:722:2612] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:43.221040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:43.221235Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 204us result status StatusPathDoesNotExist 2025-04-10T11:15:43.221460Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:43.221920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:43.222091Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 174us result status StatusPathDoesNotExist 2025-04-10T11:15:43.222265Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:43.222717Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:43.222875Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2025-04-10T11:15:43.223217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:42.550511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:42.550598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:42.550647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:42.550686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:42.550723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:42.550745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:42.550795Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:42.550863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:42.551179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:42.639987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:42.640051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:42.657039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:42.657382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:42.657651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:42.668550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:42.668846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:42.669606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.670011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:42.672202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.673774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:42.673847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.674083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:42.674143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:42.674217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:42.674407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.682440Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:42.810250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:42.810563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.810761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:42.810969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-10T11:15:42.811034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.813647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.813774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:42.813953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.814008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:42.814049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:42.814091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:42.816654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.816730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:42.816775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:42.818874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.818925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.818969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.819036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.822945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:42.825813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:42.826027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:42.827230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:42.827395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:42.827455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.827928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:42.828023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:42.828242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:42.828365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:42.830894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:42.830949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:42.831132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:42.831179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:42.831487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:42.831542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:42.831667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:42.831708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.831774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:42.831816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.831853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:42.831897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:42.831952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:42.831988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:42.832064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:42.832124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:42.832165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:42.834718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:42.834829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:42.834859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
919163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:43.919286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:43.919442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:43.919479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T11:15:43.919519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-10T11:15:43.919733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:15:43.919794Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-04-10T11:15:43.919901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:43.919940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:43.919986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:15:43.920025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:43.920074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:15:43.920123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:15:43.920166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:15:43.920199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:15:43.920382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:43.920426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T11:15:43.920462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:15:43.920508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:15:43.921344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:43.921424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:43.921453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:43.921490Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:15:43.921532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:43.922231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:43.922332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:15:43.922365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:15:43.922388Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:15:43.922419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:43.922503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:15:43.924617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:43.924677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:43.925968Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:15:43.926188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:43.926485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-04-10T11:15:43.927787Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:43.928486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:43.928730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-10T11:15:43.930026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:43.930083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:43.930218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:43.930734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:15:43.930856Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:43.930898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:43.930969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:43.931182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:15:43.934006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:43.934066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:43.934212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:43.934277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:43.934878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:43.934990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:15:43.935280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:15:43.935335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:15:43.935955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:15:43.936048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:15:43.936088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2096:3701] TestWaitNotification: OK eventTxId 104 2025-04-10T11:15:43.945343Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:43.945566Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 271us result status StatusPathDoesNotExist 2025-04-10T11:15:43.945774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:43.946767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:43.946940Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 173us result status StatusPathDoesNotExist 2025-04-10T11:15:43.947103Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::DecommitWithInflight >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:90:2057] recipient: [12:88:2117] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:92:2057] recipient: [12:88:2117] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:91:2118] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:92:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:93:2057] recipient: [13:91:2120] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:95:2057] recipient: [13:91:2120] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:94:2121] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:148:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:149:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:44.079842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:44.079916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.079956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:44.079989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:44.080022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:44.080040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:44.080083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.080150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:44.080485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:44.143745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:44.143786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:44.154319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:44.154512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:44.154641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:44.160724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:44.160907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:44.161429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.161686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:44.163118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.164144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.164185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.164316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:44.164350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.164377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:44.164475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.169640Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:44.277117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:44.277397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.277637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:44.277863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:44.277923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.279985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.280115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:44.280276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.280339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:44.280381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:44.280425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:44.282421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.282477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:44.282510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:44.284157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.284201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.284243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.284302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.287524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:44.289179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:44.289345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:44.290369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.290493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:44.290539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.290805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:44.290865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:15:44.291044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:44.291131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:44.292981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.293026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.293183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.293223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:44.293452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.293484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:44.293565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.293602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.293645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.293676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.293710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:44.293777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.293813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:44.293848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:44.293921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:44.293964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:44.293994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:44.296103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.296209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.296238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-10T11:15:44.511274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:44.511802Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:15:44.512012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:44.512561Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-04-10T11:15:44.513772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:44.513944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-04-10T11:15:44.514449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:15:44.514583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:44.514980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:44.515030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:44.515159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:15:44.518783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-10T11:15:44.518838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-10T11:15:44.519107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-10T11:15:44.519212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-10T11:15:44.519341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:44.519417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:44.519474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:44.519546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:44.522295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:44.522341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2025-04-10T11:15:44.522440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:44.522463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:44.522902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:44.522942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:44.523143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:44.523170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:44.523433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:15:44.523470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:15:44.524224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-10T11:15:44.524413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:15:44.524853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:44.524972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-04-10T11:15:44.525307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-10T11:15:44.525380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-10T11:15:44.525489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:44.525509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:15:44.525605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:44.525629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:44.526101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-10T11:15:44.526384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-10T11:15:44.526436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:627:2529] 2025-04-10T11:15:44.526591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:44.526729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:44.526780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:44.526828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests 
-- TTxNotificationSubscriber for txId 101: satisfy waiter [1:627:2529] 2025-04-10T11:15:44.526960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:44.526984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:627:2529] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:44.527432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:44.527648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 186us result status StatusPathDoesNotExist 2025-04-10T11:15:44.527847Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:44.528447Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:44.528620Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 176us result status StatusPathDoesNotExist 2025-04-10T11:15:44.528752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:44.529168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:44.529286Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess 2025-04-10T11:15:44.529575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:43.817925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:43.818008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:43.818060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:43.818105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:43.818164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:43.818196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:43.818249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:43.818362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:43.818704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:43.884891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:43.884945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:43.902635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:43.902999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:43.903191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:43.913094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:43.913576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:43.914382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:43.914756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:43.917039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:43.918583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:43.918650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:43.918880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:43.918929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:43.918971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:43.919115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:43.925781Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:44.095792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:44.096166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.096456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:44.096717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:44.096778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.101851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.102011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:44.102247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.102400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:44.102445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:44.102484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:44.105105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.105172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:44.105219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:44.107475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.107542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.107598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.107673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.112020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:44.114441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:44.114696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:44.116034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.116206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:44.116264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.116630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:44.116706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.116901Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:44.117019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:44.119683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.119737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.119940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.119988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:44.120283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.120340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:44.120455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.120496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.120570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.120614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.120654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:44.120700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.120741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:44.120795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:44.120869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:44.120918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:44.120959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:44.123771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.123910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.123952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-10T11:15:44.475160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:15:44.476424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:44.476524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:15:44.476557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:15:44.476606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:15:44.476645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:44.476736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:15:44.477751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1315 } } 2025-04-10T11:15:44.477803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:44.477946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1315 } } 2025-04-10T11:15:44.478046Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1315 } } 2025-04-10T11:15:44.479513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 498 RawX2: 4294969745 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:15:44.479565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-10T11:15:44.479704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 498 RawX2: 4294969745 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:15:44.479764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
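The TEvSchemaChanged handling above, together with the CollectSchemaChanged / "left await: 0" accounting in the entries that follow, boils down to counting per-shard acknowledgements before the operation may advance from state 129 to 240. A minimal Python sketch of that bookkeeping, with illustrative names rather than the real schemeshard types:

# Hypothetical sketch only: models the per-shard acknowledgement counting in the
# log (CollectSchemaChanged ... "left await: 0", then state 129 -> 240).
# Class and method names are illustrative, not the real schemeshard types.

class ProposedWaitParts:
    def __init__(self, shard_ids):
        self.pending = set(shard_ids)  # datashards that still owe TEvSchemaChanged

    def on_schema_changed(self, shard_id):
        self.pending.discard(shard_id)
        # "left await: 0" in the log corresponds to this set becoming empty,
        # at which point the operation advances to its Done state.
        return "Done" if not self.pending else "ProposedWaitParts"

state = ProposedWaitParts([72075186233409549])
assert state.on_schema_changed(72075186233409549) == "Done"
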
2025-04-10T11:15:44.479850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 498 RawX2: 4294969745 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:15:44.479921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.479972Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.480040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:15:44.480097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:15:44.483847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:44.484452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:15:44.484638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.484965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.485249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.485299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:15:44.485538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:44.485580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:44.485624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:15:44.485664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:44.485704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:15:44.485776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:306:2297] message: TxId: 102 2025-04-10T11:15:44.485838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:15:44.485885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:15:44.485932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:15:44.486053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:44.487932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:44.487974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:526:2469] TestWaitNotification: OK eventTxId 102 2025-04-10T11:15:44.488454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:44.488662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 224us result status StatusSuccess 2025-04-10T11:15:44.489115Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:44.489774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:44.490014Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 201us result status StatusSuccess 2025-04-10T11:15:44.490431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23966, MsgBus: 10744 2025-04-10T11:15:33.410813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641588970531989:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:33.411078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00166e/r3tmp/tmpUlv25D/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23966, node 1 2025-04-10T11:15:33.748879Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:15:33.749110Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:15:33.757900Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:33.788614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:33.788655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:33.788664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:33.788844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:33.800234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:33.800379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:33.801999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10744 TClient is connected to server localhost:10744 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:34.272889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.303848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.408144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.565303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.632920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:36.308965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641601855435641:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.309075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.574575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.603587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.632770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.663304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.691535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.719574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.754581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641601855436148:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.754653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.754703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641601855436154:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.758444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:36.768267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641601855436156:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:36.872785Z node 1 :TX_PROXY ERROR: Actor# [1:7491641601855436211:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:38.410687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641588970531989:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:38.410784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20622, MsgBus: 15509 2025-04-10T11:15:39.054873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641616542183367:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:39.054932Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00166e/r3tmp/tmpHGgJx4/pdisk_1.dat 2025-04-10T11:15:39.135174Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20622, node 2 2025-04-10T11:15:39.183444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:39.183545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:39.184772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:39.195053Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:39.195080Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:39.195089Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:39.195228Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15509 TClient is connected to server localhost:15509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
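The sequence above (the "Resource pool default not found" warnings, the TPoolCreatorActor scheduled retry, then the TX_PROXY "path exist, request accepts it" message) shows the workload manager creating the default pool lazily on first use and treating an already-existing pool as success. A minimal Python sketch of that idempotent-create pattern, using stand-in names rather than the actual YDB API:

# Hypothetical sketch only: FakeSchemeClient and AlreadyExistsError are
# stand-ins for illustration, not the real YDB workload-manager interfaces.

class AlreadyExistsError(Exception):
    pass

class FakeSchemeClient:
    def __init__(self):
        self._paths = set()

    def create_resource_pool(self, path):
        if path in self._paths:
            raise AlreadyExistsError(path)
        self._paths.add(path)

def ensure_default_pool(client, path):
    try:
        client.create_resource_pool(path)
    except AlreadyExistsError:
        pass  # an existing pool is treated as success, as in the log above

client = FakeSchemeClient()
ensure_default_pool(client, "/Root/.metadata/workload_manager/pools/default")
ensure_default_pool(client, "/Root/.metadata/workload_manager/pools/default")  # no error
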
2025-04-10T11:15:39.609257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.626373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.723248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.861856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.935191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.931025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641625132119745:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.931144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.966777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.999380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.032109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.076832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.111470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.142751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.187374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641629427087551:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.187460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.187663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641629427087556:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.191356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:42.201175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641629427087558:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:42.257030Z node 2 :TX_PROXY ERROR: Actor# [2:7491641629427087611:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 64520, MsgBus: 3302 2025-04-10T11:15:32.701057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641585431640352:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:32.701221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001670/r3tmp/tmpR88yJu/pdisk_1.dat 2025-04-10T11:15:33.042551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64520, node 1 2025-04-10T11:15:33.098376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:33.098638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:33.103109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:33.122792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:33.122826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:33.122839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:33.123032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3302 TClient is connected to server localhost:3302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:33.630644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
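The WaitRootIsUp 'Root'... / WaitRootIsUp 'Root' success. pairs above are the test client polling the scheme root with TClient::Ls until the freshly started node responds. A rough Python sketch of such a readiness loop, with a caller-supplied describe function standing in for the Ls request:

# Hypothetical sketch only: 'describe' is a stand-in for the TClient::Ls call;
# the loop retries until the reply reports StatusCode SUCCESS or time runs out.
import time

def wait_root_is_up(describe, name="Root", timeout_s=30.0, interval_s=0.5):
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if describe(name).get("StatusCode") == "SUCCESS":
            return True
        time.sleep(interval_s)
    return False

# Example with a stub that succeeds immediately:
print(wait_root_is_up(lambda name: {"StatusCode": "SUCCESS"}))  # True
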
2025-04-10T11:15:33.652255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:33.797061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:33.934847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:34.001035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:35.527074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641598316544029:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:35.527188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:35.886332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:35.917372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:35.947274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:35.975826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.006106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.073872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:36.114615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641602611511839:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.114709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.114804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641602611511844:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:36.118783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:36.127819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641602611511846:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:36.205592Z node 1 :TX_PROXY ERROR: Actor# [1:7491641602611511899:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:37.701102Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641585431640352:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:37.701191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10718, MsgBus: 13934 2025-04-10T11:15:38.816298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641612931043176:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:38.816347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001670/r3tmp/tmpxxlE9a/pdisk_1.dat 2025-04-10T11:15:38.918989Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10718, node 2 2025-04-10T11:15:38.961773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:38.961878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:38.965084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:38.990074Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:38.990103Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:38.990112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:38.990272Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13934 TClient is connected to server localhost:13934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:39.442672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.458487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.510709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.649061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.714615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.849100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641625815946835:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.849210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.888771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.917983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.951783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.995071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.026882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.060455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.112894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641630110914640:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.113012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.113024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641630110914646:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.116615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:42.125420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641630110914648:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:42.179678Z node 2 :TX_PROXY ERROR: Actor# [2:7491641630110914700:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:43.816396Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641612931043176:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:43.816479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD]
>> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink
>> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:44.423185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:44.423296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.423359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:44.423407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:44.423453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:44.423478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:44.423535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.423627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:44.423997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:44.498722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:44.498770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:44.519802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:44.520060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:44.520213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:44.529128Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:44.529427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:44.530341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.530752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:44.533014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.534464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.534527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.534753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:44.534932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.534994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:44.535185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.543125Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:44.682721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:44.683013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.683288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:44.683539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:44.683604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.686140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.686291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:44.686472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.686541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:44.686576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2025-04-10T11:15:44.686605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:44.688552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.688600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:44.688631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:44.690107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.690175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.690223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.690281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.699052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:44.701467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:44.701657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:44.702689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.702844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:44.702916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.703221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:44.703290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.703480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:44.703579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:44.705910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.705952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.706097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.706143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:44.706392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.706430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:44.706527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.706555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.706603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.706631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.706664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:44.706700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.706737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:44.706762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:44.706821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:44.706853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:44.706888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:44.709298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.709437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.709490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cookie: 103 2025-04-10T11:15:45.189543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:45.192167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.192310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.192680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.192736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:15:45.192835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:45.192869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:45.192909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:45.192942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:45.192991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:15:45.193060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2443] message: TxId: 103 2025-04-10T11:15:45.193109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:45.193147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:15:45.193197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:45.193356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:45.195579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:45.195628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:488:2444] TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:45.196267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.196529Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 257us result status StatusSuccess 2025-04-10T11:15:45.197049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:45.197730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.197998Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 272us result status StatusSuccess 2025-04-10T11:15:45.198464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:45.199016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.199193Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 176us result status StatusSuccess 2025-04-10T11:15:45.199529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:45.200122Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.200374Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 234us result status StatusSuccess 2025-04-10T11:15:45.200752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 12943, MsgBus: 7846 2025-04-10T11:15:40.051000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641621947761783:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:40.051066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164a/r3tmp/tmpvQUEEX/pdisk_1.dat 2025-04-10T11:15:40.368156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12943, node 1 2025-04-10T11:15:40.437037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:40.437159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:40.439573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:40.473555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:40.473585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:40.473594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:40.473741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7846 TClient is connected to server localhost:7846 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:40.984165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.009475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.125728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.260552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.316710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:42.879110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641630537698173:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.879223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.194340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.216813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.244344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.274224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.302064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.332206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.373551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641634832665981:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.373633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.373658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641634832665986:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.377253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:43.386139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641634832665988:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:43.467968Z node 1 :TX_PROXY ERROR: Actor# [1:7491641634832666042:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:44.189075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:44.189173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.189241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:44.189290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:44.189353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:44.189380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:44.189451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:44.189553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:44.189941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:44.278654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:44.278717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:44.296813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:44.297108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:44.297301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:44.304844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:44.305107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:44.305810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.306221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:44.308285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.309769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.309832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.310047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:44.310097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.310135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:44.310317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.317017Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:44.446942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:44.447209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.447414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:44.447598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:44.447640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.449880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.450105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:44.450356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.450415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:44.450442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:44.450469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:44.452091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.452138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:44.452167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:44.453656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.453687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.453717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.453760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.456666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:44.458260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:44.458426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:44.459190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:44.459294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:44.459325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.459562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:44.459605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:44.459737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:44.459821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:44.461596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:44.461636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:44.461787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:44.461815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:44.462020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:44.462054Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:44.462135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.462185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.462228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:44.462255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.462288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:44.462320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:44.462356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:44.462384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:44.462428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:44.462466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:44.462498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:44.464591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.464699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:44.464732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
dIdx 72057594046678944:39 2025-04-10T11:15:45.433942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 Forgetting tablet 72075186233409581 2025-04-10T11:15:45.434061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-10T11:15:45.434119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-10T11:15:45.434239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-10T11:15:45.434261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-10T11:15:45.434330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-10T11:15:45.434352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-10T11:15:45.434403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-10T11:15:45.434423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-10T11:15:45.438711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-10T11:15:45.438767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-10T11:15:45.438849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-10T11:15:45.438870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-10T11:15:45.438946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-10T11:15:45.438988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-10T11:15:45.439097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:15:45.439135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:15:45.440098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-10T11:15:45.440136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-10T11:15:45.440335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-04-10T11:15:45.440359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-04-10T11:15:45.440410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-04-10T11:15:45.440431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-04-10T11:15:45.441007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-10T11:15:45.441041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-10T11:15:45.445164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-10T11:15:45.445210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-04-10T11:15:45.445306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-04-10T11:15:45.445382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:25 tabletId 72075186233409570 2025-04-10T11:15:45.445573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-04-10T11:15:45.445601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-04-10T11:15:45.445679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-04-10T11:15:45.445699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-04-10T11:15:45.445770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-04-10T11:15:45.445793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-04-10T11:15:45.445838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:45.445877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:15:45.446048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-04-10T11:15:45.446072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-04-10T11:15:45.446168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-10T11:15:45.446192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-10T11:15:45.446254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:15:45.446292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:15:45.446395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-10T11:15:45.446416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-10T11:15:45.446468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-10T11:15:45.446489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-10T11:15:45.446591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-10T11:15:45.446619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-10T11:15:45.451287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-10T11:15:45.451342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-10T11:15:45.451440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-10T11:15:45.451466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-10T11:15:45.451532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-10T11:15:45.451564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-10T11:15:45.451647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-10T11:15:45.451687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-10T11:15:45.451785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
2025-04-10T11:15:45.451814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:15:45.451894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-10T11:15:45.451941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-10T11:15:45.452155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:45.452272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:15:45.452335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:45.452378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:15:45.452471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:45.454871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:15:45.455186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:15:45.455233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:15:45.455751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:15:45.455860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:45.455899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2061:3662] TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:45.456519Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.456724Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 225us result status StatusPathDoesNotExist 2025-04-10T11:15:45.456931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:15:45.457557Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:45.457731Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusPathDoesNotExist 2025-04-10T11:15:45.457863Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD]
>> TKeyValueTest::TestWriteReadWhileWriteWorks
>> KqpQueryPerf::KvRead-QueryService [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:45.638247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:45.638380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:45.638438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:45.638488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:45.638537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:45.638570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:45.638652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:45.638740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:45.639272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-04-10T11:15:45.721287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:45.721362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:45.733615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:45.733896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:45.734109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:45.740964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:45.741243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:45.741804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:45.742124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:45.743777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:45.744975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:45.745021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:45.745183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:45.745220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:45.745268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:45.745428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.751530Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:45.869555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:45.869799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.870007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:45.870214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:45.870276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.872443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:45.872574Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:45.872750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.872808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:45.872840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:45.872866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:45.874747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.874793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:45.874824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:45.876340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.876385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.876426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:45.876475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:45.886162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:45.888264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:45.888417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:45.889254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:45.889425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:45.889473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:45.889744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:45.889800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:45.889969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-10T11:15:45.890059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:45.892086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:45.892129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:45.892304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:45.892341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:45.892543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.892581Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:45.892665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:45.892710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:45.892771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:45.892801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:45.892831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:45.892880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:45.892929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:45.892959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:45.893025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:45.893056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:45.893082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:45.895180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:45.895318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:45.895362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:45.932609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:45.933820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.933868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.933905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:45.933974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-10T11:15:45.934111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:45.935567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:15:45.935693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-10T11:15:45.935968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:45.936080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:45.936130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:45.936401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:15:45.936450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-10T11:15:45.936629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:45.936683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:45.936725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:15:45.938228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:45.938257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:45.938397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:45.938479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:45.938505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:15:45.938556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:15:45.938889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.938923Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:15:45.939008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:45.939037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:45.939079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:15:45.939109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:45.939162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:15:45.939196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:15:45.939246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:15:45.939274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:15:45.939322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:15:45.939351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:15:45.939396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:15:45.939431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:15:45.939953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:45.940016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:45.940042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:45.940076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:15:45.940138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:45.940616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 
101 2025-04-10T11:15:45.940687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:15:45.940730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:15:45.940763Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:15:45.940795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:15:45.940873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:15:45.943731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:15:45.943810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:15:45.946353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:45.946540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:15:45.946713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-04-10T11:15:45.948750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-10T11:15:45.948943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-10T11:15:45.949252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:15:45.949303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:15:45.949427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:15:45.949452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:15:45.949932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:15:45.950050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:15:45.950091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] 2025-04-10T11:15:45.950246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:15:45.950341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:15:45.950367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-10T11:15:44.296745Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:44.303582Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:44.303942Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:15:44.303998Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:44.304047Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:15:44.304100Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:44.304148Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.304210Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:15:44.304894Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2025-04-10T11:15:44.305044Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:44.322899Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.329194Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.329354Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.330349Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.330526Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:44.331038Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:44.331426Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:267:2257] 2025-04-10T11:15:44.333816Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-10T11:15:44.333884Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2257] 2025-04-10T11:15:44.333945Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:44.333992Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:44.335180Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:270:2259], now have 1 active actors on pipe 2025-04-10T11:15:44.392455Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:44.396260Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:44.396570Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-10T11:15:44.396620Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:44.396661Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-10T11:15:44.396699Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:44.396750Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.396844Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-10T11:15:44.397531Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-10T11:15:44.397641Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:44.397827Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.400116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.400239Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.401038Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:15:44.401152Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:44.401504Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:44.401735Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:412:2365] 2025-04-10T11:15:44.403905Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-10T11:15:44.403976Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:412:2365] 2025-04-10T11:15:44.404032Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:44.404087Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:44.404883Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-10T11:15:44.422798Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:44.428657Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:44.429038Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-10T11:15:44.429108Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:44.429152Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:15:44.429195Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:44.429249Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.429305Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:15:44.430094Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:464:2404], now have 1 active actors on pipe 2025-04-10T11:15:44.430184Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:44.430368Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:44.433021Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:44.433152Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:44.433988Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:44.434129Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:44.434567Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:44.434806Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:472:2410] 2025-04-10T11:15:44.436908Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:15:44.436981Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:472:2410] 2025-04-10T11:15:44.437038Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:44.437083Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... EBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:45.877494Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:15:45.877549Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:45.877606Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:45.877676Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:15:45.878562Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:466:2406], now have 1 active actors on pipe 2025-04-10T11:15:45.878642Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:45.878859Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:45.881386Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:45.881536Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:45.882297Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:15:45.882460Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:45.882838Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:45.883088Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:474:2412] 2025-04-10T11:15:45.885496Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:15:45.885578Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:474:2412] 2025-04-10T11:15:45.885650Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:45.885711Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:45.886638Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:477:2414], now have 1 active actors on pipe 2025-04-10T11:15:45.905895Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:45.909842Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:45.910193Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:45.910249Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:45.910296Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:15:45.910342Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:45.910402Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:45.910473Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:45.911218Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:526:2451], now have 1 active actors on pipe 2025-04-10T11:15:45.911339Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:15:45.911562Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:45.914037Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { 
PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:45.914218Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:45.914866Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:15:45.915020Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:45.915397Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:45.915619Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:534:2457] 2025-04-10T11:15:45.917848Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:15:45.917928Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:534:2457] 2025-04-10T11:15:45.917995Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:45.918052Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:15:45.918941Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:537:2459], now have 1 active actors on pipe 2025-04-10T11:15:45.920092Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:543:2462], now have 1 active actors on pipe 2025-04-10T11:15:45.920260Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:544:2463], now have 1 active actors on pipe 2025-04-10T11:15:45.920521Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:545:2463], now have 1 active actors on pipe 2025-04-10T11:15:45.931919Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:553:2470], now have 1 active actors on pipe 2025-04-10T11:15:45.960368Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:15:45.963535Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:15:45.963901Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:15:45.963963Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:15:45.964135Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:15:45.964726Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:15:45.964786Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:15:45.964914Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:15:45.965277Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:15:45.965523Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:610:2515] 2025-04-10T11:15:45.967972Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:15:45.969407Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:15:45.969732Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:15:45.970108Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:15:45.970413Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-10T11:15:45.970472Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:15:45.970526Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:15:45.970573Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:15:45.970638Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:610:2515] 2025-04-10T11:15:45.970710Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:15:45.970777Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:15:45.971862Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:543:2462] destroyed 2025-04-10T11:15:45.972111Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:544:2463] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } } }
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseDatashard::TraverseOneTableServerless
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave
>> TraverseDatashard::TraverseTwoTables
>> AnalyzeDatashard::AnalyzeOneTable
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 17471, MsgBus: 14754 2025-04-10T11:15:40.570722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641618418241624:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:40.570953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001640/r3tmp/tmpAUQCWV/pdisk_1.dat 2025-04-10T11:15:40.864579Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17471, node 1 2025-04-10T11:15:40.929459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:40.929482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:40.929489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:40.929603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:40.942038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:40.942172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:40.944563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14754 TClient is connected to server localhost:14754 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:41.412740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.443897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.558693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.711307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.778976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:43.410334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641631303145291:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.410403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.673681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.705055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.771798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.799571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.830538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.905401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.982809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641631303145815:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.982899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.982988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641631303145820:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:43.986657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:43.997510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641631303145822:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:44.070172Z node 1 :TX_PROXY ERROR: Actor# [1:7491641635598113172:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:45.570285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641618418241624:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:45.570363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> AnalyzeColumnshard::AnalyzeSameOperationId >> AnalyzeColumnshard::AnalyzeStatus >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> TYardTest::TestRedZoneSurvivability [GOOD] >> TYardTest::TestSlay |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4423, MsgBus: 6149 2025-04-10T11:15:36.134788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641604297589898:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:36.134935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001664/r3tmp/tmpUQwObm/pdisk_1.dat 2025-04-10T11:15:36.450610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:36.465810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:36.465895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:36.468421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4423, node 1 2025-04-10T11:15:36.537594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:36.537621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:36.537631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:36.537778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6149 TClient is connected to server localhost:6149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:37.047449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.080596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.208360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.352875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:37.423479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.313348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617182493572:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.313481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.669361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.735421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.763357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.793855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.823049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.891599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:39.934316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617182494088:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.934424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.934651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641617182494093:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:39.937798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:39.947364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641617182494095:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:40.023559Z node 1 :TX_PROXY ERROR: Actor# [1:7491641621477461444:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:41.134436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641604297589898:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:41.134499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9970, MsgBus: 20503 2025-04-10T11:15:41.893265Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641624173305690:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:41.893378Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001664/r3tmp/tmpuuoTiO/pdisk_1.dat 2025-04-10T11:15:41.991589Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9970, node 2 2025-04-10T11:15:42.029221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:42.029334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:42.030700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:42.063598Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:42.063618Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:42.063623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:42.063710Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20503 TClient is connected to server localhost:20503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:42.504420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:42.512843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:42.589845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:42.725095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:42.800100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:45.013001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641637058209358:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.013122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.052613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.083415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.113243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.146904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.178052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.248847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.298323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641641353177168:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.298423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.298643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641641353177173:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.302088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:45.312377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641641353177175:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:45.392725Z node 2 :TX_PROXY ERROR: Actor# [2:7491641641353177228:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> AnalyzeColumnshard::AnalyzeTable >> TYardTest::TestSlay [GOOD] >> TYardTest::TestSlayRace >> KqpQueryPerf::RangeRead-QueryService [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> TYardTest::TestSlayRace [GOOD] >> TYardTest::TestSlayRecreate >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TYardTest::TestSlayRecreate [GOOD] >> TYardTest::TestSlayLogWriteRaceActor >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6239, MsgBus: 23850 2025-04-10T11:15:43.435205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641633755917982:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:43.435438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001635/r3tmp/tmpvcly5h/pdisk_1.dat 2025-04-10T11:15:43.741137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6239, node 1 2025-04-10T11:15:43.808012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:43.808052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:43.808063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:43.808248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:43.824487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:43.824648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:43.826493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23850 TClient is connected to server localhost:23850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:44.337759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.371454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.522526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.652197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.710194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.172780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641646640821639:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.172898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.459312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.496255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.529634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.565388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.593787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.623527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.670130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641646640822148:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.670219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.670462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641646640822153:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.674139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:46.684475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641646640822155:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:46.771049Z node 1 :TX_PROXY ERROR: Actor# [1:7491641646640822209:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> AnalyzeDatashard::DropTableNavigateError >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6410, MsgBus: 5543 2025-04-10T11:15:38.510457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641613273273624:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:38.510621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001651/r3tmp/tmpMwHzBh/pdisk_1.dat 2025-04-10T11:15:38.805998Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6410, node 1 2025-04-10T11:15:38.882263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:38.882432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:38.884530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:38.896630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:38.896663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:38.896671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:38.896791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5543 TClient is connected to server localhost:5543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:15:39.408697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.438427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.582206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.716844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:39.779932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.531858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641626158177291:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.532010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.801800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.830888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.859558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.886840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.917035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:41.951876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.001230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641630453145097:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.001364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.001646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641630453145102:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.005674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:42.016461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641630453145104:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:42.099056Z node 1 :TX_PROXY ERROR: Actor# [1:7491641630453145158:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13556, MsgBus: 2400 2025-04-10T11:15:44.136246Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641638218724386:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:44.136321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001651/r3tmp/tmp02cnVz/pdisk_1.dat 2025-04-10T11:15:44.223104Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13556, node 2 2025-04-10T11:15:44.271507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:44.271623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:44.273132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:44.285497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:44.285534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:44.285539Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:44.285629Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2400 TClient is connected to server localhost:2400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:44.736808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.753862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:15:44.829578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.977603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:45.067536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:47.062992Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641651103628061:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.063083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.108956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.140353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.170526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.200233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.234009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.273667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.326514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641651103628575:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.326613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.326871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641651103628580:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.331125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:47.341271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641651103628582:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:47.395182Z node 2 :TX_PROXY ERROR: Actor# [2:7491641651103628635:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] >> AnalyzeColumnshard::AnalyzeServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:15:20.360595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:20.360785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.360829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:20.360869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:20.361687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:20.361732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:20.361891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:20.361968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:20.363420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:20.466849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 
QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:15:20.466943Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.502306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:20.502651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:20.502837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:20.517157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:20.517614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:20.530277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.534039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:20.561249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.583563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.583677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.584712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:15:20.584811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.584936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:20.585191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:15:20.595611Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:15:20.793377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:20.798412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.806291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:20.810020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:20.810181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.814570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.814738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:20.814991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.815176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:20.815242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:20.815284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:20.818077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.818170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:20.818215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:20.822124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.822227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.822282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.822345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.830594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:20.833428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:20.834406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:20.835712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:20.835880Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:20.835932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.837133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:20.837226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:20.837503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:20.837593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:20.840507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:20.840560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:20.840752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:20.840796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:20.841060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:20.841125Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:20.841251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.841286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:20.841344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:20.841377Z no ... 
perationId: 1003:2, at schemeshard: 72057594046678944 2025-04-10T11:15:50.622365Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:50.622694Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:15:50.622848Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-10T11:15:50.622887Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-10T11:15:50.622930Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-10T11:15:50.622967Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-10T11:15:50.623010Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-10T11:15:50.628764Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.628853Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.628879Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:15:50.629044Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.629087Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.629106Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:15:50.629131Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-10T11:15:50.629163Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:15:50.629237Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-10T11:15:50.630235Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:15:50.630288Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:50.630560Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:50.630710Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-10T11:15:50.630752Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:15:50.630800Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-10T11:15:50.630838Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:15:50.630878Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-10T11:15:50.630922Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-10T11:15:50.630970Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-10T11:15:50.631006Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-10T11:15:50.631124Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:50.631171Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-10T11:15:50.631200Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-10T11:15:50.631236Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:15:50.631265Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-10T11:15:50.631291Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-10T11:15:50.631345Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:15:50.632869Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.634629Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.634761Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.634814Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.640701Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.641397Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:15:50.645101Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 111669152020 } TabletId: 72075186233409546 State: 4 2025-04-10T11:15:50.645176Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-10T11:15:50.646676Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:50.647053Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-10T11:15:50.647236Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-04-10T11:15:50.647465Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-04-10T11:15:50.647927Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:50.647962Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T11:15:50.648017Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:15:50.648062Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:15:50.648092Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:50.651962Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:15:50.652031Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-10T11:15:50.652219Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-10T11:15:50.652424Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-10T11:15:50.652459Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-10T11:15:50.653077Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-10T11:15:50.653233Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-10T11:15:50.653266Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:622:2548] 2025-04-10T11:15:50.657856Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 111669152025 } TabletId: 72075186233409547 State: 4 2025-04-10T11:15:50.657926Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-10T11:15:50.660226Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:15:50.660641Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-10T11:15:50.660844Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:50.661096Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2025-04-10T11:15:50.661364Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:15:50.661409Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:15:50.661485Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:50.666328Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:15:50.666404Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-10T11:15:50.666669Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-10T11:15:50.667078Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T11:15:50.667157Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> TraverseDatashard::TraverseOneTable >> AnalyzeColumnshard::AnalyzeMultiOperationId >> TraverseDatashard::TraverseTwoTablesServerless >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] |88.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:15:40.234262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:15:40.234354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.234410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:15:40.234453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:15:40.234492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:15:40.234521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:15:40.234584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:15:40.234674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:15:40.235037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:40.318748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:40.318812Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:40.334858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:40.335136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:15:40.335293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:15:40.343046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:15:40.343274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:15:40.343966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.344337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:15:40.346188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.347463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.347522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.347758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T11:15:40.347802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.347840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:15:40.348011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.354787Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:15:40.510618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:15:40.510874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.511142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:15:40.511381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:15:40.511450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.514220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.514402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:15:40.514602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.514665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:15:40.514704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:15:40.514743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:15:40.519062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.519140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:40.519815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:15:40.522446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.522501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.522545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:15:40.522606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.526392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:15:40.528578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:15:40.528749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:15:40.529796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:15:40.529919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:15:40.529963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.530262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:15:40.530319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:15:40.530475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:15:40.530563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:15:40.532770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:40.532821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:15:40.532994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:40.533034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:15:40.533288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:15:40.533350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:15:40.533464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:15:40.533497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.533549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T11:15:40.533590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.533632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:15:40.533671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:15:40.533702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:15:40.533728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:15:40.533789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:15:40.533826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:15:40.533857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:15:40.536219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.536334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:15:40.536388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:52.863166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-10T11:15:52.863276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-10T11:15:52.863321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-10T11:15:52.865358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.865449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-10T11:15:52.865528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:15:52.865581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-10T11:15:52.865675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T11:15:52.865818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-10T11:15:52.865958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:15:52.866031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:52.867955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.869523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.871561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:52.871635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:52.871858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:15:52.872082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:52.872136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:15:52.872189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:15:52.872581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.872652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:15:52.872765Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.872807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:15:52.872851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-10T11:15:52.873772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:52.873863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:52.873900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:52.873937Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-10T11:15:52.873973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:15:52.874708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:52.874775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 
PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:15:52.874797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:15:52.874820Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:15:52.874843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:15:52.874898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:15:52.877714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:15:52.877761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:15:52.878093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:15:52.878294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:52.878328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:52.878361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:15:52.878398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:52.878442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:15:52.878505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-10T11:15:52.878538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:15:52.878569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:15:52.878598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:15:52.878693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:15:52.879097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:15:52.879137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:15:52.879692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:52.881030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:15:52.882026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:15:52.882081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-10T11:15:52.882388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:15:52.882428Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1347:3273] 2025-04-10T11:15:52.883024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-10T11:15:52.887467Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:15:52.887691Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 270us result status StatusSuccess 2025-04-10T11:15:52.888103Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6514, MsgBus: 14369 2025-04-10T11:15:39.152380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641615249512360:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:39.152516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164c/r3tmp/tmpFgcXsM/pdisk_1.dat 
2025-04-10T11:15:39.447053Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6514, node 1 2025-04-10T11:15:39.515283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:39.515423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:39.517075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:39.520315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:39.520342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:39.520351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:39.520500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14369 TClient is connected to server localhost:14369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:40.046614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:40.071525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:40.085839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:40.229724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:40.396962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:40.468901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:41.936591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641623839448740:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:41.936719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.262783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.290905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.318267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.346735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.378182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.413571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:42.459637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641628134416547:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.459714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.459848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641628134416552:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:42.463470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:42.474427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641628134416554:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:42.573732Z node 1 :TX_PROXY ERROR: Actor# [1:7491641628134416608:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:43.565929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.637007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:43.672075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:15:44.152552Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641615249512360:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:44.152642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20267, MsgBus: 29320 2025-04-10T11:15:46.047357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641645352117195:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:46.047462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164c/r3tmp/tmpLdvq6t/pdisk_1.dat 2025-04-10T11:15:46.128474Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20267, node 2 2025-04-10T11:15:46.179788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:46.179868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:46.181584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:46.190504Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:46.190528Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:46.190548Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:46.190671Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29320 TClient is connected to server localhost:29320 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:46.621332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.638489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.694237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.882738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.953729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:48.959807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641653942053547:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.959882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:49.007159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.038394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.077593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.114237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.144576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.179787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:49.269812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641658237021361:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:49.269960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:49.270218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641658237021366:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:49.274197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:49.285688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641658237021368:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:49.368193Z node 2 :TX_PROXY ERROR: Actor# [2:7491641658237021423:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:50.380263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:50.417687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:50.491984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.058372Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641645352117195:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:51.058449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] |88.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ...
results_accumulator.log} >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28616, MsgBus: 62383 2025-04-10T11:15:44.059591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641638254032348:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:44.059684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001634/r3tmp/tmpfpwWOq/pdisk_1.dat 2025-04-10T11:15:44.367351Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28616, node 1 2025-04-10T11:15:44.447891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:44.447985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:44.449972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:44.450867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:44.450895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:44.450916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:44.451113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62383 TClient is connected to server localhost:62383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:44.909053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:44.941501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:45.105061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:15:45.265302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:45.335254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:47.384593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641651138936012:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.384750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.741459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.769365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.798005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.832252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.900367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.967829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:48.021611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641655433903826:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.021713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.021738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641655433903831:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.025293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:48.035729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641655433903833:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:48.093909Z node 1 :TX_PROXY ERROR: Actor# [1:7491641655433903887:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:49.060056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641638254032348:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:49.060129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15143, MsgBus: 16946 2025-04-10T11:15:50.104076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641663070443539:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:50.104139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001634/r3tmp/tmp3HxIrT/pdisk_1.dat 2025-04-10T11:15:50.187978Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15143, node 2 2025-04-10T11:15:50.239356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:50.239483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:50.240926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:50.259832Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:50.259855Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:50.259863Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:50.260155Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16946 TClient is connected to server localhost:16946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting...
2025-04-10T11:15:50.713162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:50.734857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:50.791712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:50.986704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:51.064970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:52.842287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641671660379907:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:52.842416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:52.886318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.915583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.944933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.980495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.010436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.043212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.134422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641675955347716:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.134567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.134636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641675955347721:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.137909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:53.146705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641675955347723:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:53.221838Z node 2 :TX_PROXY ERROR: Actor# [2:7491641675955347778:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestStartingPointReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15229, MsgBus: 28524 2025-04-10T11:15:42.058412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641627011600018:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:42.058471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00163c/r3tmp/tmpEyEJdo/pdisk_1.dat 2025-04-10T11:15:42.409207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15229, node 1 2025-04-10T11:15:42.446713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:42.446814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:42.448519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:42.499769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:42.499790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:42.499800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:42.499917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28524 TClient is connected to server localhost:28524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:42.981069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:43.002924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:15:43.047589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:43.164814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:43.326784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:43.406669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:45.039337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641639896503693:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.039516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.336370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.361555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.390713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.423648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.453392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.524730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:45.604400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641639896504214:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.604452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.604565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641639896504219:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:45.607544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:45.616311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641639896504221:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:15:45.694589Z node 1 :TX_PROXY ERROR: Actor# [1:7491641639896504276:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:46.701280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.737768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:46.771245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.058962Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641627011600018:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:47.059066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6318, MsgBus: 30310 2025-04-10T11:15:50.119232Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641665435147988:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:50.119305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00163c/r3tmp/tmpcxWOp2/pdisk_1.dat 2025-04-10T11:15:50.214748Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6318, node 2 2025-04-10T11:15:50.252259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:50.252350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:50.254365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:50.301547Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:50.301574Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:50.301582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:50.301738Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30310 TClient is connected to server localhost:30310 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:15:50.727949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:50.738587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:50.787237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:50.962799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:51.029453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:53.279282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641678320051649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.279351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.327133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.358826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.391112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.420629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.458275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.494647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.538052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641678320052159:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.538123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.538368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641678320052164:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:53.541253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:15:53.550862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641678320052166:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:15:53.627075Z node 2 :TX_PROXY ERROR: Actor# [2:7491641678320052220:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:54.431265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.502090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.577430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.119377Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641665435147988:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:55.119453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> TraverseColumnShard::TraverseServerlessColumnTable |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-04-10T11:15:50.754913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.755377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.755492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010c9/r3tmp/tmp6AUo5h/pdisk_1.dat 2025-04-10T11:15:51.183228Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31100, node 1 2025-04-10T11:15:51.433672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.433731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.433764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.434248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.437144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.527811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.527972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.541081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28281 2025-04-10T11:15:52.052809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.200727Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.235252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.235433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.267860Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.270324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.514583Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.514974Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.515412Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.515567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.515629Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.515906Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.516021Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.516088Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.516146Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.680654Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.680750Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.694254Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.852921Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.901243Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.901360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.943600Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.944787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.945014Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.945080Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.945142Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.945207Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.945288Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.945355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.945883Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.969195Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.969338Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.977094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.981188Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.981541Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.991445Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:56.041783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:56.041862Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:56.041946Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:56.058425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.064289Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.064392Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.229485Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.385642Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.461831Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.463730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.463902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.484302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.923089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.923229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.924484Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:57.924663Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:57.924720Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133] 2025-04-10T11:15:57.924774Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133] 2025-04-10T11:15:57.925191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994] 2025-04-10T11:15:57.925377Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK 2025-04-10T11:15:57.925506Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:15:57.925594Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:15:57.925858Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:15:57.925914Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1 2025-04-10T11:15:57.941059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.941205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.941618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.947873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:15:58.108761Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:15:58.108836Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:15:58.195255Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1 2025-04-10T11:15:58.540760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2569:3144], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:15:58.689550Z node 1 :TX_PROXY ERROR: Actor# [1:2688:3218] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:58.701051Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2711:3234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:58.701213Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:15:58.701258Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2711:3234], StatRequests.size() = 1 2025-04-10T11:15:58.840677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpkv9x5n5z8gggvxfps1we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkyNWZkOWItOTFjNTA3OGUtOWMzMGY0MTUtYjY3MzU3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:15:58.952145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.336995Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3058:3299]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.337166Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:15:59.337200Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3058:3299], StatRequests.size() = 1 2025-04-10T11:15:59.356369Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3067:3308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.356497Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-10T11:15:59.356519Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3067:3308], StatRequests.size() = 1 2025-04-10T11:15:59.386045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpkwpy2kccvr9abfm7rzc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM0Njk3NjItZjIzYjRiMGEtNzUyMmEzZi0yM2U4ZTVjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:15:59.446713Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3117:3262]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.448983Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:59.449025Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:15:59.449459Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:59.449495Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:15:59.449532Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:15:59.474634Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:15:59.475690Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-10T11:15:59.476089Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3142:3275]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.479276Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:15:59.479342Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:15:59.479732Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:15:59.479785Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:15:59.479844Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:15:59.482904Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-10T11:15:59.483239Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-04-10T11:15:50.435396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.435712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.435769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010d2/r3tmp/tmpJ8TXmS/pdisk_1.dat 2025-04-10T11:15:50.856601Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23167, node 1 2025-04-10T11:15:51.179070Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.179121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.179144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.179487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.186086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.276878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.277410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.292758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11059 2025-04-10T11:15:51.820467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.245366Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.283889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.284073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.312364Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.314769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.548811Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.549363Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.549944Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550079Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550442Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550529Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550597Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.550686Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.716763Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.716879Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.730344Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.873624Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.914643Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.914735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.948803Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.950057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.950314Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.950386Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.950443Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.950498Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.950554Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.950609Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.951131Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.975134Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.975268Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.983152Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.987850Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.988208Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.996223Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:15:56.013092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:56.013173Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:56.013262Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:15:56.031143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.044359Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.044510Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.244432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.388447Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.465533Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.285275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:15:58.039832Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:58.187588Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:15:58.187659Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:15:58.187738Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:15:58.188925Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2948] 2025-04-10T11:15:58.189046Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2948], schemeshard id = 72075186224037899 2025-04-10T11:15:59.464247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3248], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.464366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.479315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:15:59.851248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3030:3296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.851403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.897285Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3035:3300]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.897540Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:59.897745Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T11:15:59.897819Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3038:3303] 2025-04-10T11:15:59.897902Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3038:3303] 2025-04-10T11:15:59.898501Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3039:3186] 2025-04-10T11:15:59.898857Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3038:3303], server id = [2:3039:3186], tablet id = 72075186224037894, status = OK 2025-04-10T11:15:59.899029Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3039:3186], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:15:59.899111Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:15:59.899358Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:15:59.899440Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3035:3300], StatRequests.size() = 1 2025-04-10T11:15:59.918972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3043:3307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.919118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.919553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.926102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-10T11:16:00.071626Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:16:00.071722Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:00.116208Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3038:3303], schemeshard count = 1 2025-04-10T11:16:00.384202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3050:3314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T11:16:00.587398Z node 1 :TX_PROXY ERROR: Actor# [1:3175:3386] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:00.596716Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3198:3402]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:00.596881Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:00.596911Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3198:3402], StatRequests.size() = 1 2025-04-10T11:16:00.644220Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpkx79726ddxbk38f0rnx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE5OWVmYzktNzFhNjg5MjQtZTBiY2M1MS1hNmY0NjZjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:16:00.735009Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3240:3239]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:00.737186Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:00.737235Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:00.737714Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:00.737783Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:16:00.737842Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:00.755202Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:16:00.755466Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> AnalyzeColumnshard::AnalyzeTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] >> TraverseDatashard::TraverseOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-04-10T11:15:51.143855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:51.144356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:51.144442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001094/r3tmp/tmpYRMaVa/pdisk_1.dat 2025-04-10T11:15:51.499106Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11495, node 1 2025-04-10T11:15:51.739767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.739836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.739870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.740385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.742833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.827236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.827394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.840852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3363 2025-04-10T11:15:52.341046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.498635Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.534318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.534499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.562470Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.564793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.814688Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.815320Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.815914Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816046Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816104Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816388Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816478Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.816605Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.983719Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.983842Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.996537Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.126093Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:56.172808Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:56.172918Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:56.216861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:56.218077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:56.218295Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:56.218344Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:56.218384Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:56.218432Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:56.218484Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:56.218539Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:56.218980Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:56.243417Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:56.243540Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1875:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:56.250278Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:56.255294Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-04-10T11:15:56.255788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-04-10T11:15:56.261904Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:56.280237Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:56.280291Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:56.280372Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:56.297945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.305992Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.306252Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.507657Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.668532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.745662Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.769298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2242:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.769462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.785886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.901936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.902192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.902515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.902684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.902825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.902968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.903101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.903234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.903352Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.903520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.903706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.903851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.927721Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:57.927805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:15:57.927911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:15:57.927956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:15:57.928136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:15:57.928164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:15:57.928243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:15:57.928274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:15:57.928332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:15:57.928361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:15:57.928396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:15:57.928426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:15:57.929243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:15:57.929349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:15:57.929591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:15:57.929673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.929861Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:15:57.929911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:15:57.930138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:15:57.930212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:15:57.930370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:15:57.930433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:15:58.055693Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:15:59.276780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2587:3126], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.276970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.280979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.410485Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:16:00.286063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2685:3171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:00.286210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:00.289007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-10T11:16:00.319523Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000020s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-04-10T11:15:55.232046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:55.232383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:55.232450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008bd/r3tmp/tmpe3MMa7/pdisk_1.dat 2025-04-10T11:15:55.584167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9589, node 1 2025-04-10T11:15:55.815470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:55.815528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:55.815560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:55.816029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:55.818587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.903114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.903307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.916922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9588 2025-04-10T11:15:56.436849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:59.185397Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:59.214463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.214695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.240930Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:59.242837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.465219Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.465829Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.466373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.466514Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.466595Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.466892Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.466986Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.467067Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.467140Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.628773Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.628888Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.641380Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.779622Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:59.828705Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:59.828798Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:59.864580Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:59.865448Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:59.865611Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:59.865656Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:59.865704Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:59.865743Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:59.865778Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:59.865814Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:59.866184Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:59.886953Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.887086Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.894011Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:59.898022Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:59.898346Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:59.907380Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:59.922580Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:59.922630Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:59.922682Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:59.934770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:59.939851Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:59.939955Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:00.081826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:00.254297Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:00.330392Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:01.369331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.369511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.391638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:01.879875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.880055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.881428Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:01.881625Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:01.881728Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133] 2025-04-10T11:16:01.881799Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133] 2025-04-10T11:16:01.882435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994] 2025-04-10T11:16:01.882706Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK 2025-04-10T11:16:01.882875Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:16:01.882949Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:16:01.883158Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:16:01.883236Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1 2025-04-10T11:16:01.903966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.904089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.904557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.911455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:16:02.093955Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:16:02.094028Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:02.169046Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1 2025-04-10T11:16:02.515789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2569:3144], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:16:02.673998Z node 1 :TX_PROXY ERROR: Actor# [1:2688:3218] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:02.683449Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2711:3234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:02.683616Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:02.683663Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2711:3234], StatRequests.size() = 1 2025-04-10T11:16:02.726091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfpkz540hp1mp9rveqwn5gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NiYTMzYzQtMmI0ZTEwMjgtYjhlNzJlZDktOGViOGRjYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:16:02.786540Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2760:3044]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:02.788788Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:02.788839Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:02.789368Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:02.789410Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:16:02.789467Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:02.802835Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:16:02.803129Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> TYardTest::TestStartingPointReboots [GOOD] >> TYardTest::TestRestartAtNonceJump >> TPDiskRaces::DecommitWithInflight [GOOD] >> TPDiskRaces::DecommitWithInflightMock >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TraverseColumnShard::TraverseColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-04-10T11:15:54.918539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:54.918820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:54.918879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00089d/r3tmp/tmp1Njfcb/pdisk_1.dat 2025-04-10T11:15:55.257863Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26066, node 1 2025-04-10T11:15:55.498886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:55.498942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:55.498975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:55.499447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:55.502076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.597413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.597572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.610828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23719 2025-04-10T11:15:56.132998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:58.819460Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:58.848367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:58.848503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:58.876226Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:58.878434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.096571Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.096956Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.097454Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.097587Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.097659Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.097902Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.097983Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.098079Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.098170Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.256301Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.256406Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.268849Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.375377Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:59.413462Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:59.413537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:59.441857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:59.442664Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:59.442822Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:59.442873Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:59.442924Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:59.442957Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:59.443015Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:59.443049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:59.443366Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:59.463329Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.463422Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.469011Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:59.472044Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:59.472286Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:59.481163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:15:59.497572Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:59.497631Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:59.497698Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:15:59.512362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:59.517156Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:59.517244Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:59.678091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:59.796959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:59.883536Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:00.673772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:16:01.423729Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:01.561353Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:16:01.561423Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:01.561522Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2947], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:01.562657Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2600:2949] 2025-04-10T11:16:01.562843Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2600:2949], schemeshard id = 72075186224037899 2025-04-10T11:16:02.569668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2730:3253], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.569861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.584861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:16:02.792946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3025:3299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.793068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.828956Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3030:3303]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:02.829147Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:02.829284Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T11:16:02.829347Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3033:3306] 2025-04-10T11:16:02.829411Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3033:3306] 2025-04-10T11:16:02.829965Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3034:3175] 2025-04-10T11:16:02.830179Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3033:3306], server id = [2:3034:3175], tablet id = 72075186224037894, status = OK 2025-04-10T11:16:02.830346Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3034:3175], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:16:02.830419Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:16:02.830691Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:16:02.830769Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3030:3303], StatRequests.size() = 1 2025-04-10T11:16:02.845703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3038:3310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.845852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.846329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3043:3315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.851759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-10T11:16:02.981152Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:16:02.981236Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:03.035628Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3033:3306], schemeshard count = 1 2025-04-10T11:16:03.349439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3045:3317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T11:16:03.594386Z node 1 :TX_PROXY ERROR: Actor# [1:3172:3388] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:03.601954Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3195:3404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:03.602077Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:03.602100Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3195:3404], StatRequests.size() = 1 2025-04-10T11:16:03.640649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfpm0376ntdd9981nmtdy0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI3ZWIyYTctOTg1M2E3NTEtNmU0ZGEyMC0yYTY0M2EzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:16:03.719120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-04-10T11:16:04.126696Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3529:3467]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:04.126931Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:16:04.126976Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3529:3467], StatRequests.size() = 1 2025-04-10T11:16:04.150738Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3538:3476]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:04.150934Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-10T11:16:04.150965Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3538:3476], StatRequests.size() = 1 2025-04-10T11:16:04.236017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpm1ch283tmrbhdwz8v57x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5NzVmZTItZDQyN2Y4NGYtMzJhZjg0YS1mMGE3YzU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:16:04.275657Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3580:3435]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:04.278404Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:04.278472Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:04.278924Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:04.278982Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:16:04.279029Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:04.293478Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:16:04.293720Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-10T11:16:04.294068Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3605:3448]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:04.297212Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:04.297280Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:04.297685Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:04.297735Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:16:04.297780Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:04.300225Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-10T11:16:04.300461Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2
>> TYardTest::TestRestartAtNonceJump [GOOD]
>> TYardTest::TestRestartAtChunkEnd
>> AnalyzeColumnshard::Analyze
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD]
>> TYardTest::TestRestartAtChunkEnd [GOOD]
>> TYardTestRestore::TestRestore15
>> TYardTestRestore::TestRestore15 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID
72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:140:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:88:2057] recipient: [22:86:2116] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:90:2057] recipient: [22:86:2116] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:89:2117] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:143:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:88:2057] recipient: [23:86:2116] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:90:2057] recipient: [23:86:2116] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:89:2117] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:143:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:87:2116] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:91:2057] recipient: [24:87:2116] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:90:2117] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061]
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD]
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD]
>> AnalyzeDatashard::AnalyzeTwoTables
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD]
Test command err: 2025-04-10T11:15:58.281588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:58.281827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:58.281894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000858/r3tmp/tmp4H2Hhq/pdisk_1.dat 2025-04-10T11:15:58.552276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1245, node 1 2025-04-10T11:15:58.772883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:58.772933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:58.772966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:58.773401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:58.775993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:58.863207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:58.863334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:58.876281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26528 2025-04-10T11:15:59.336820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:02.139940Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:02.165183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:02.165312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:02.192327Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:02.194388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:02.418837Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.419355Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.419826Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.419970Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.420050Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.420335Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.420426Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.420503Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.420578Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.583092Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:02.583194Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:02.595907Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:02.710808Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:02.758340Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:02.758475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:02.796375Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:02.797647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:02.797901Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:02.797961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:02.798007Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:02.798051Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:02.798094Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:02.798167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:02.798580Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:02.822517Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:02.822680Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:02.831615Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:02.836459Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:02.836859Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:02.846983Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:16:02.863124Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:02.863204Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:02.863306Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:16:02.879173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:02.887125Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:02.887267Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:03.042384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:03.218179Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:03.295875Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:04.125126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:16:04.897568Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:05.083867Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:16:05.083930Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:05.084027Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2599:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:05.086904Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2609:2953] 2025-04-10T11:16:05.087197Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2609:2953], schemeshard id = 72075186224037899 2025-04-10T11:16:06.097307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:16:06.645370Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:06.913307Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-10T11:16:06.913401Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-10T11:16:06.913500Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3086:3149], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-10T11:16:06.915273Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3088:3151] 2025-04-10T11:16:06.915567Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3088:3151], schemeshard id = 72075186224037905 2025-04-10T11:16:07.922670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3214:3420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:07.922873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:07.938105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-10T11:16:08.259261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3523:3470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:08.259391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:08.300966Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3528:3474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:08.301192Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:08.301385Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T11:16:08.301454Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3531:3477] 2025-04-10T11:16:08.301509Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3531:3477] 2025-04-10T11:16:08.302026Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3532:3389] 2025-04-10T11:16:08.302363Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3531:3477], server id = [2:3532:3389], tablet id = 72075186224037894, status = OK 2025-04-10T11:16:08.302573Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3532:3389], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:16:08.302643Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:16:08.302947Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:16:08.303010Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3528:3474], StatRequests.size() = 1 2025-04-10T11:16:08.315574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3536:3481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:08.315671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:08.316015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3541:3486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:08.321093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-10T11:16:08.537360Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:16:08.537440Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:08.637998Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3531:3477], schemeshard count = 1 2025-04-10T11:16:08.910285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3543:3488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-10T11:16:09.065231Z node 1 :TX_PROXY ERROR: Actor# [1:3671:3565] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:09.083201Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3694:3581]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:09.083333Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:09.083357Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3694:3581], StatRequests.size() = 1 2025-04-10T11:16:09.125282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfpm5e1fqh8xq4hf3wtxse8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTljNWE4N2ItZGM3ZmU0YTUtZDZhN2JmNzEtYTcxNTYwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:16:09.220129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-04-10T11:16:09.602233Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4051:3645]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:09.602443Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:16:09.602856Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-04-10T11:16:09.602925Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:16:09.603209Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:16:09.603286Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:4051:3645], StatRequests.size() = 1 2025-04-10T11:16:09.628872Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4060:3654]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:09.629031Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-10T11:16:09.629067Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:4060:3654], StatRequests.size() = 1 2025-04-10T11:16:09.671187Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfpm6qm35nqv51dyyjye2d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIyNzg2NGItYzg0NGQyMTQtN2ZjZGYxNDEtZjE4OGJmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:16:09.708399Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4104:3658]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:09.710747Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:09.710803Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:09.711132Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:09.711167Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:16:09.711200Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:09.724337Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:16:09.724599Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-10T11:16:09.725033Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4129:3671]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:09.727757Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:09.727807Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:16:09.728280Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:16:09.728327Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:16:09.728367Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:16:09.730585Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-10T11:16:09.730889Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2
|88.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD]
>> TYardTest::TestLogWriteCutEqual [GOOD]
>> TYardTest::TestLogWriteCutEqualRandomWait
>> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
>> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD]
>> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD]
>> GenericFederatedQuery::ClickHouseManagedSelectAll
>> GenericFederatedQuery::IcebergHadoopTokenSelectAll
>> GenericFederatedQuery::IcebergHiveBasicSelectAll
>> GenericFederatedQuery::YdbFilterPushdown
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:79:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:81:2112] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:85:2057] recipient: [8:81:2112] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:84:2113] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:138:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:80:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:82:2112] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:86:2057] recipient: [9:82:2112] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:85:2113] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:85:2115] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:89:2057] recipient: [11:85:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:88:2116] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:143:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
Test command err:
2025-04-10T11:14:56.541840Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:56.543318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:56.543366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:56.553903Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:56.554453Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:56.554723Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:56.611504Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:56.628593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:56.629218Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:56.631051Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:56.631129Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:56.631183Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:56.631655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:56.631865Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:56.631943Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:14:56.723199Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:56.758977Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:56.759154Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:14:56.759265Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:14:56.759309Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:56.759349Z node 1 :TX_DATASHARD INFO: Cannot activate change
sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:56.759388Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:56.759623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.759696Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.759981Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:56.760074Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:56.760144Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:56.760182Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:56.760233Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:56.760267Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:56.760329Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:56.760370Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:56.760422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:56.760526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.760567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.760620Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:14:56.764042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:56.764099Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:56.764183Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:56.764349Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:56.764399Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:56.764466Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:56.764520Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:56.764558Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:56.764608Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:56.764643Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:56.764954Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
DelayCompleteNoMoreRestarts 2025-04-10T11:14:56.764994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:56.765030Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:56.765064Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:56.765119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:56.765145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:56.765191Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:56.765234Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:56.765270Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:56.783195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:56.783285Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:56.783328Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:56.783370Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:56.783436Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:56.784035Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.784093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:56.784134Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:14:56.784309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:56.784353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:56.784477Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:56.784516Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:56.784575Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:56.784614Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:56.790740Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:56.790824Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:56.791014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:56.791049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-04-10T11:14:56.791101Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:56.791140Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:56.791173Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:56.791211Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:56.791248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:56.791289Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:56.791327Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:56.791380Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:56.791417Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:56.791580Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:56.791617Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:56.792598Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:56.792631Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:56.792672Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:56.792747Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:56.792776Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:56.792813Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:56.792845Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:56.792901Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:56.792947Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:56.792984Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:56.793041Z node 1 :TX_DATA ... 
5-04-10T11:16:13.504847Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.504962Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.504981Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505008Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.505030Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.505122Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.505141Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505171Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.505191Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.505292Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.505311Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505354Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.505376Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.505486Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.505505Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505534Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.505557Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.505699Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.505717Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505748Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.505767Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.505920Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.505949Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.505988Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.506020Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.506139Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.506174Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 
2025-04-10T11:16:13.506205Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.506228Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.506327Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.506352Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.506391Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.506420Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.506572Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.506593Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.506621Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.506642Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.506751Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.506770Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.506798Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.506817Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.506934Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.506964Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.507004Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.507033Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.507128Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.507148Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.507174Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.507195Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.507311Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.507331Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.507360Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.507381Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.507526Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.507555Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.507590Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.507611Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.507799Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-10T11:16:13.507838Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.507873Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-10T11:16:13.507961Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:16:13.507982Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508003Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-10T11:16:13.508078Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:16:13.508107Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508133Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-10T11:16:13.508205Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:16:13.508226Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508244Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-10T11:16:13.508294Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:16:13.508314Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508333Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-10T11:16:13.508384Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:16:13.508403Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508421Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 
dest 9437184 consumer 9437184 txId 9 2025-04-10T11:16:13.508467Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:16:13.508485Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508504Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-10T11:16:13.508556Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-10T11:16:13.508575Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.508596Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13
expect 28 30 20 31 24 31 27 31 24 24 30 31 30 31 10 22 18 31 23 31 11 31 23 11 10 11 1 31 23 - - -
actual 28 30 20 31 24 31 27 31 24 24 30 31 30 31 10 22 18 31 23 31 11 31 23 11 10 11 1 31 23 - - -
interm 7 3 2 6 2 - 1 3 - 0 - - 6 0 6 1 3 3 - 6 6 3 6 - - - 1 1 - - - -
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD]
Test command err:
2025-04-10T11:14:51.359882Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:51.361457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:51.361516Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:51.378055Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:51.378682Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:51.378982Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:51.430968Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:51.444107Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:51.444947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:51.446953Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:51.447041Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:51.447100Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:51.447544Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:51.447715Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:51.447778Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:14:51.537281Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:51.590033Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:51.590252Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T11:14:51.590357Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:14:51.590397Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:51.590431Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:51.590468Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.590701Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.590768Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.591017Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:51.591108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:51.591172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.591209Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:51.591262Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:51.591301Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:51.591341Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:51.591375Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:51.591413Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:51.591501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.591535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.591589Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:14:51.594129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:51.594487Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:51.594566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:51.594751Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:51.594794Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:51.594866Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:51.594920Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.594953Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:51.595005Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:51.595038Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.595363Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:51.595396Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:51.595427Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:51.595458Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.595508Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:51.595533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:51.595572Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:51.595616Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.595638Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:51.608287Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:51.608388Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:51.608436Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:51.608504Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:51.608618Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:51.618814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.618903Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:51.618960Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:14:51.619148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:51.619200Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:51.619386Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:51.619441Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.619553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:51.619608Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:51.627240Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:51.627360Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:51.627648Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.627695Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:51.627763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:51.627821Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:51.627864Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:51.627912Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:51.627954Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:51.628003Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.628048Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:51.628105Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:51.628148Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:51.628353Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:51.628417Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:51.628447Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:51.628471Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:51.628498Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:51.628574Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:51.628602Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:51.628655Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:51.628701Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:51.628764Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:51.628822Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:51.628862Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:51.628938Z node 1 :TX_DATA ... 
CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.874072Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.874106Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:21] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.874169Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 21] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.874204Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.874413Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.874444Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.874486Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.874519Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.874690Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.874724Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.874768Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.874804Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.874971Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.875007Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.875049Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.875083Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.875249Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.875282Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.875327Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.875360Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.875517Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.875549Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.875593Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.875628Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.875848Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.875883Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.875930Z node 32 :TX_DATASHARD DEBUG: 
Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.875964Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.876155Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.876188Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.876232Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.876265Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.876475Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.876511Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.876557Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.876590Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.876733Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.876767Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.876811Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.876845Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.876987Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.877019Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.877064Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.877101Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.877253Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.877285Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.877344Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.877377Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.877525Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.877557Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.877599Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.877641Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.877784Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
9437184 2025-04-10T11:16:13.877817Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.877863Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.877895Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.878030Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.878060Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.878103Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.878138Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.878338Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.878374Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.878419Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.878453Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.878593Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:13.878625Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-10T11:16:13.878670Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:13.878702Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:13.879011Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:16:13.879065Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.879106Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-10T11:16:13.879240Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:16:13.879283Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.879317Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-10T11:16:13.879406Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:16:13.879439Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.879471Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 
9437185 dest 9437184 consumer 9437184 txId 10 2025-04-10T11:16:13.879558Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:16:13.879591Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.879621Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-10T11:16:13.879710Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-10T11:16:13.879743Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:13.879774Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 31 26 27 22 18 23 16 21 22 30 23 30 15 23 21 23 15 30 21 3 21 23 22 3 16 10 15 - - - - actual 31 31 26 27 22 18 23 16 21 22 30 23 30 15 23 21 23 15 30 21 3 21 23 22 3 16 10 15 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> GenericFederatedQuery::IcebergHiveSaSelectAll >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-04-10T11:15:01.695037Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:01.696012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:01.696056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:01.707006Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:01.707522Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:01.707823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:01.755231Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:01.764674Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:01.765251Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:01.767127Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:01.767212Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:01.767278Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:01.767675Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:01.767868Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:01.767930Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:15:01.849861Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 
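The run above closes a batch of planned operations: for every [1000004:txid] the shard completes the CompleteOperation unit, reports exec/propose latency to the client actor, and emits a CDC heartbeat before acking readsets. A minimal sketch (Python 3; the helper name is hypothetical) of pulling those completion records out of such a trace, with a regex that mirrors the exact DEBUG line format:

    import re

    # Mirrors: "Complete [1000004 : 27] from 9437184 at tablet 9437184 send result
    # to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms"
    COMPLETE_RE = re.compile(
        r"Complete \[(?P<step>\d+) : (?P<txid>\d+)\] from (?P<shard>\d+) "
        r"at tablet (?P<tablet>\d+) send result to client \[(?P<client>[\d:]+)\], "
        r"exec latency: (?P<exec_ms>\d+) ms, propose latency: (?P<propose_ms>\d+) ms"
    )

    def completions(log_text):
        """Yield (step, txid, exec_ms, propose_ms) per completed operation."""
        for m in COMPLETE_RE.finditer(log_text):
            yield (int(m["step"]), int(m["txid"]),
                   int(m["exec_ms"]), int(m["propose_ms"]))

    # On the excerpt above this yields step 1000004, txids 27..37, each with
    # exec latency 0 ms and propose latency 1 ms.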
2025-04-10T11:15:01.892146Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:01.892290Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:01.892380Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:15:01.892410Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:01.892436Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:01.892468Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:01.892664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.892727Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.892924Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:01.892998Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:01.893077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:01.893108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:01.893134Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:15:01.893159Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:01.893195Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:01.893225Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:01.893257Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:01.893354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.893384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.893434Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:15:01.895680Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:01.895726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:01.895798Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:01.895976Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:01.896027Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:01.896104Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:01.896171Z 
node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:01.896214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:01.896249Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:01.896299Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:01.896641Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:01.896701Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:01.896748Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:15:01.896785Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:01.896851Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:01.896887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:01.896920Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:01.896966Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:01.897012Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:01.909230Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:01.909307Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:01.909360Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:01.909407Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:01.909467Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:01.910010Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.910095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:01.910167Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:15:01.910297Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:01.910328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:01.910439Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:01.910482Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.910515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:01.910576Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:01.926033Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { 
TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:01.926133Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:01.927370Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.927429Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:01.927504Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:01.927551Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:01.927587Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:01.927633Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:01.927668Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:01.927723Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.927763Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:01.927805Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:01.927872Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:01.928042Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:01.928082Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:01.928106Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:01.928133Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:01.928156Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:01.928271Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:01.928305Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:01.928340Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:01.928381Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:01.928446Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:01.928485Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:01.928521Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:01.928585Z node 1 :TX_DATA ... 
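The trace above walks a scheme transaction through its execution units in order: CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan while unplanned, then PlanQueue, LoadTxDetails, ProtectSchemeEchoes, BuildAndWaitDependencies once the plan step arrives, with each unit returning a status (Executed, ExecutedNoMoreRestarts, DelayComplete, or "not ready to execute"). A simplified model of that scheduling (Python 3; the unit names and statuses are taken from the log itself, but the loop is a reader's assumption, not the datashard code):

    from enum import Enum, auto

    class Status(Enum):
        EXECUTED = auto()        # "is Executed": advance to the next unit
        DELAY_COMPLETE = auto()  # advance now; finish work in the Complete() phase
        NOT_READY = auto()       # park until an event arrives (e.g. TEvPlanStep)

    # Order observed for the scheme transaction [0:1] in the trace above.
    SCHEME_TX_UNITS = ["CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"]

    def run_pipeline(units, execute):
        """Advance through units; return the unit the operation parked on, if any."""
        for unit in units:
            if execute(unit) is Status.NOT_READY:
                return unit  # e.g. [0:1] parks on WaitForPlan until step 1000001
        return None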
ropose latency: 1 ms 2025-04-10T11:16:16.633189Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.633281Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.633299Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.633347Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.633368Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.633489Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.633508Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.633538Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.633560Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.633651Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.633669Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.633700Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.633720Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.633850Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.633880Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.633921Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.633953Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.634135Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.634189Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.634231Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.634264Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.634452Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.634484Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.634525Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.634556Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.634744Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.634778Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on 
unit CompleteOperation 2025-04-10T11:16:16.634818Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.634852Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.635002Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.635031Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.635073Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.635109Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.635314Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.635346Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.635386Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.635419Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.635558Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.635588Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.635630Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.635663Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.635819Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.635851Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.635893Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.635923Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.636060Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.636092Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.636131Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.636164Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:16.636367Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:16.636399Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-10T11:16:16.636440Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:16.636477Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-04-10T11:16:16.636720Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-10T11:16:16.636766Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.636806Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-10T11:16:16.636981Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:16:16.637016Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637048Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-10T11:16:16.637134Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-10T11:16:16.637165Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637196Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-10T11:16:16.637300Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:16:16.637346Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637375Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-10T11:16:16.637438Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:16:16.637469Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637498Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-10T11:16:16.637588Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:16:16.637619Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637647Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-10T11:16:16.637731Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:16:16.637763Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637794Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-10T11:16:16.637877Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:16:16.637913Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.637945Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-10T11:16:16.638023Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-10T11:16:16.638055Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:16.638082Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 28 31 31 31 29 28 29 29 31 31 25 31 27 27 31 31 31 31 31 - 27 - 23 27 24 24 7 - 23 - - - actual 28 31 31 31 29 28 29 29 31 31 25 31 27 27 31 31 31 31 31 - 27 - 23 27 24 24 7 - 23 - - - interm 5 4 2 4 5 3 6 5 4 2 - - - 2 3 1 1 3 4 - 4 - 4 - - 4 4 - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-04-10T11:14:55.548145Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:14:55.549140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:14:55.549194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:14:55.560028Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:14:55.560504Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:14:55.560778Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:14:55.608845Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:14:55.618260Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:14:55.618951Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:14:55.620744Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:14:55.620819Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:14:55.620870Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:14:55.621372Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:14:55.621549Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:14:55.621597Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:14:55.690368Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:14:55.723485Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:14:55.723670Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing 
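The TEvReadSetAck burst above arrives out of txid order (txid 36 with Seqno 32 lands before txids 5..13). A small sketch (Python 3; a reader's audit, not the test's own checker) that groups the acks per source/dest channel so such delayed readsets are easy to spot:

    import re
    from collections import defaultdict

    # Mirrors: "{TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185
    # TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32}"
    ACK_RE = re.compile(
        r"\{TEvReadSet step# (\d+) txid# (\d+) TabletSource# (\d+) "
        r"TabletDest# (\d+) SetTabletConsumer# \d+ Flags# \d+ Seqno# (\d+)\}"
    )

    def ack_map(log_text):
        """Return {(source, dest): [(txid, seqno), ...]} in arrival order."""
        acks = defaultdict(list)
        for _step, txid, src, dst, seqno in ACK_RE.findall(log_text):
            acks[(src, dst)].append((int(txid), int(seqno)))
        return acks

    # On the excerpt above: ('9437185', '9437184') -> [(36, 32), (5, 2), (6, 3),
    # (7, 4), ...] -- the ack for txid 36 overtakes the earlier transactions,
    # consistent with the reordering the DelayRS tests appear to exercise.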
params 2025-04-10T11:14:55.723777Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:14:55.723815Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:14:55.723854Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:14:55.723893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:55.724139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.724221Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.724452Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:14:55.724569Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:14:55.724642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:55.724678Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:14:55.724711Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:14:55.724741Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:14:55.724787Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:14:55.724848Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:14:55.724890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:14:55.724986Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.725020Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.725069Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:14:55.727729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:14:55.727808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:14:55.727908Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:14:55.728073Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:14:55.728113Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:14:55.728187Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:14:55.728244Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:55.728294Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:14:55.728329Z 
node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:14:55.728383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:55.728682Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:14:55.728733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:14:55.728774Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:14:55.728807Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:55.728853Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:14:55.728878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:14:55.728914Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:14:55.728947Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:55.728982Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:14:55.740832Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:14:55.740898Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:14:55.740940Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:14:55.740977Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:14:55.741036Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:14:55.741584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.741636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:14:55.741669Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:14:55.741761Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:14:55.741788Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:14:55.741913Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:14:55.741959Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:55.741991Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:14:55.742040Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:14:55.745542Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:14:55.745625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:14:55.745878Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.745925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:14:55.745986Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:14:55.746025Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:14:55.746049Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:14:55.746077Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:14:55.746102Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:14:55.746135Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:55.746481Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:14:55.746529Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:14:55.746573Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:14:55.746720Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:14:55.746749Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:14:55.746772Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:14:55.746794Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:14:55.746828Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:14:55.746916Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:14:55.746940Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:14:55.746980Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:14:55.747019Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:14:55.747093Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:14:55.747129Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:14:55.747163Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:14:55.747219Z node 1 :TX_DATA ... 
:17.260415Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.260567Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.260600Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.260646Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.260681Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.260908Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.260946Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.260988Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.261023Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.261207Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.261230Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.261259Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.261284Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.261409Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.261429Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.261459Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.261481Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.261575Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.261614Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.261654Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.261687Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.261805Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.261828Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.261856Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.261879Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.262001Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.262020Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 
2025-04-10T11:16:17.262049Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.262072Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.262196Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.262217Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.262245Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.262264Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.262397Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.262442Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.262485Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.262517Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.262688Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.262723Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.262767Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.262798Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.262917Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.262950Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.262990Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.263021Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.263173Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.263194Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.263227Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.263250Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.263368Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.263389Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.263418Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.263439Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.263547Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.263571Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.263600Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.263621Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.263734Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:16:17.263764Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-10T11:16:17.263791Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:16:17.263812Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:16:17.264050Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-10T11:16:17.264087Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264117Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-10T11:16:17.264169Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-10T11:16:17.264192Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264213Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-10T11:16:17.264269Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-10T11:16:17.264291Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264312Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-10T11:16:17.264382Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:16:17.264414Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264446Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-10T11:16:17.264537Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:16:17.264566Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264587Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-10T11:16:17.264642Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:16:17.264662Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264682Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-10T11:16:17.264735Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:799:2725], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-10T11:16:17.264757Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:17.264778Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 17 31 31 26 31 26 26 29 23 26 16 24 18 10 16 24 25 19 19 4 25 28 8 19 28 - 28 18 8 - - - actual 17 31 31 26 31 26 26 29 23 26 16 24 18 10 16 24 25 19 19 4 25 28 8 19 28 - 28 18 8 - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId >> TKeyValueTest::TestRenameToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
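Both DelayRS tests end by printing three 32-slot rows -- expect, actual, interm -- and pass because expect and actual agree. A hedged sketch of that comparison (Python 3; treating '-' as an empty slot is an assumption drawn from the layout, since the unittest compares internally):

    def parse_row(row):
        return [None if tok == "-" else int(tok) for tok in row.split()]

    def diff_rows(expect, actual):
        e, a = parse_row(expect), parse_row(actual)
        return [i for i, (x, y) in enumerate(zip(e, a)) if x != y]

    # Rows copied from the RandomPoints_DelayRS_Reboot output above:
    expect = ("17 31 31 26 31 26 26 29 23 26 16 24 18 10 16 24 25 19 19 4 "
              "25 28 8 19 28 - 28 18 8 - - -")
    actual = ("17 31 31 26 31 26 26 29 23 26 16 24 18 10 16 24 25 19 19 4 "
              "25 28 8 19 28 - 28 18 8 - - -")
    assert diff_rows(expect, actual) == []  # identical rows, hence [GOOD]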
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:86:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:141:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:84:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:86:2117] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:89:2119] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:92:2057] recipient: [11:89:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2120] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Re ... is [18:56:2097] sender: [18:95:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:96:2057] recipient: [18:94:2123] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:98:2057] recipient: [18:94:2123] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:97:2124] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:151:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:76:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:79:2057] recipient: [21:78:2110] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:80:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:82:2057] recipient: [21:78:2110] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:81:2111] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:135:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:76:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:79:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:80:2057] recipient: [22:78:2110] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:82:2057] recipient: [22:78:2110] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:81:2111] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:135:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:77:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:79:2110] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:81:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:83:2057] recipient: [23:79:2110] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:82:2111] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:136:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:82:2113] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:86:2057] recipient: [24:82:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:85:2114] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:80:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:82:2113] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:86:2057] recipient: [25:82:2113] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:85:2114] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:139:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:81:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:85:2057] recipient: [26:83:2113] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:87:2057] recipient: [26:83:2113] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:86:2114] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:140:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:86:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:90:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2117] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:84:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:86:2116] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:90:2057] recipient: [28:86:2116] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:89:2117] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:143:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> VDiskBalancing::TestRandom_Block42 [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |88.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 4048189338474476657 2025-04-10T11:13:01.575950Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.104032s 2025-04-10T11:13:01.576106Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.104233s Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-10T11:13:03.937948Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-04-10T11:13:04.183217Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} 
Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-04-10T11:13:05.691959Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:188:17] ServerId# [1:296:58] TabletId# 72057594037932033 PipeClientId# [3:188:17] 2025-04-10T11:13:05.692302Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:209:17] ServerId# [1:299:61] TabletId# 72057594037932033 PipeClientId# [6:209:17] 2025-04-10T11:13:05.692451Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7660:16] ServerId# [1:7669:1092] TabletId# 72057594037932033 PipeClientId# [5:7660:16] 2025-04-10T11:13:05.692567Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:195:17] ServerId# [1:297:59] TabletId# 72057594037932033 PipeClientId# [4:195:17] 2025-04-10T11:13:05.692692Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:181:17] ServerId# [1:295:57] TabletId# 72057594037932033 PipeClientId# [2:181:17] 2025-04-10T11:13:05.692795Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:216:17] ServerId# [1:300:62] TabletId# 72057594037932033 PipeClientId# [7:216:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: 
TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407 ... ND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# 
[1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Stop node 3 2025-04-10T11:15:46.214851Z 1 00h28m30.751872s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 4 2025-04-10T11:15:47.440470Z 1 00h28m40.755029s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} 
Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Stop node 7 2025-04-10T11:15:49.863079Z 1 00h29m10.757077s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Stop node 1 2025-04-10T11:15:50.444265Z 1 00h29m20.786144s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 1 2025-04-10T11:15:50.966007Z 1 00h29m40.793515s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} 
Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Starting nodes Start compaction 1 Start checking |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 32119, MsgBus: 3419 2025-04-10T11:16:14.764560Z node 
1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641764305988490:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:14.764697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d98/r3tmp/tmp05Z4sv/pdisk_1.dat 2025-04-10T11:16:15.062990Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32119, node 1 2025-04-10T11:16:15.117583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:15.117707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:15.119450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:15.193022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:15.193064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:15.193072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:15.193193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3419 TClient is connected to server localhost:3419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:15.788231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:17.094187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641777190891042:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.094311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.333530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.426922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641777190891164:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.427010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.427123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641777190891169:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.429952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.438200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641777190891171:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:17.521604Z node 1 :TX_PROXY ERROR: Actor# [1:7491641777190891211:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:18.089883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:18.409237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:18.727669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.074998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.387289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.764527Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641764305988490:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:19.764611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:19.799761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.852115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.292795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result.
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 20168, MsgBus: 20025 2025-04-10T11:16:22.124822Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641800192570465:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:22.124927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d98/r3tmp/tmphXLeDq/pdisk_1.dat 2025-04-10T11:16:22.211820Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20168, node 2 2025-04-10T11:16:22.252835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:22.252924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:22.254555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:22.267354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:22.267378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:22.267387Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:22.267502Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20025 TClient is connected to server localhost:20025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:22.636875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
Trying to start YDB, gRPC: 25860, MsgBus: 62824 2025-04-10T11:16:25.288771Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491641814996803343:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:25.288834Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d98/r3tmp/tmp1hpLDE/pdisk_1.dat 2025-04-10T11:16:25.393345Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25860, node 3 2025-04-10T11:16:25.420426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:25.420521Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:25.422256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:25.432215Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:25.432248Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:25.432258Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:25.432390Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62824 TClient is connected to server localhost:62824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:25.791691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> BSCStopPDisk::PDiskStop |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 4810528198109515082 >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> FormatTimes::ParseDuration [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] |88.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> NaiveFragmentWriterTest::Long |88.8%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> NaiveFragmentWriterTest::Long [GOOD] >> ReorderCodecTest::Basic [GOOD] >> RunLengthCodec::BasicTest32 [GOOD] >> RunLengthCodec::BasicTest64 [GOOD] |88.8%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpWorkload::STOCK [GOOD] >> TBlobStorageSyncLogKeeper::CutLog_EntryPointNewFormat [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> TBlobStorageSyncLogDsk::SeveralChunks [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLog [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> TBlobStorageSyncLogMem::FilledIn1 [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> TBlobStorageSyncLogDsk::OverlappingPages_OnePageIndexed [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_SeveralPagesIndexed [GOOD] >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> RunLengthCodec::BasicTest64 [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> TBlobStorageSyncLogMem::FilledIn1PutAfterSnapshot [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty1_Proto [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> VarLengthIntCodec::BasicTest64 [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty2_Proto [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> SemiSortedDeltaCodec::Random32 >> VarLengthIntCodec::Random32 >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsBuildSwapSnapshot [GOOD] >> VarLengthIntCodec::Random32 [GOOD] >> SemiSortedDeltaCodec::Random32 [GOOD] >> VarLengthIntCodec::BasicTest32 [GOOD] >> VarLengthIntCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::Random64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 20298, MsgBus: 7181 2025-04-10T11:15:44.273227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641638769112640:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:44.273737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001629/r3tmp/tmpk2Vv8L/pdisk_1.dat 2025-04-10T11:15:44.565272Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20298, node 1 2025-04-10T11:15:44.651230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:44.651349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:44.653094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:44.654660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:44.654683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:44.654693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:44.654804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7181 TClient is connected to server localhost:7181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:15:45.131456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:15:46.970042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641647359047887:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:46.970169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:47.292891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:15:47.404749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:15:48.012358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:15:48.362532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641655948986628:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.362632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.362653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641655948986633:2636], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:48.366112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:15:48.375280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641655948986635:2637], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:15:48.454454Z node 1 :TX_PROXY ERROR: Actor# [1:7491641655948986746:5119] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:15:49.270770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641638769112640:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:15:49.270842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:15:59.564049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:15:59.564088Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.527284s took: 0.558617s took: 0.558951s took: 0.559451s took: 0.562137s took: 0.569230s took: 0.569985s took: 0.570597s took: 0.571906s took: 0.573883s 2025-04-10T11:16:27.375756Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711078; 2025-04-10T11:16:27.377060Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641823452724795:4967], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7491641801977886389:4967]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7491641823452724795:4967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:16:27.377607Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641823452723848:4967], SessionActorId: [1:7491641801977886389:4967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491641801977886389:4967]. isRollback=0 2025-04-10T11:16:27.377841Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFjYTRiNTMtNDRmZGViZi05NTNlMTVkMS0xODgzN2Mw, ActorId: [1:7491641801977886389:4967], ActorState: ExecuteState, TraceId: 01jrfpmm6jd98dx6t5whftmv53, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491641823452724627:4967] from: [1:7491641823452723848:4967] 2025-04-10T11:16:27.378620Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641823452724627:4967] TxId: 281474976711078. Ctx: { TraceId: 01jrfpmm6jd98dx6t5whftmv53, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFjYTRiNTMtNDRmZGViZi05NTNlMTVkMS0xODgzN2Mw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:16:27.378858Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFjYTRiNTMtNDRmZGViZi05NTNlMTVkMS0xODgzN2Mw, ActorId: [1:7491641801977886389:4967], ActorState: ExecuteState, TraceId: 01jrfpmm6jd98dx6t5whftmv53, Create QueryResponse for error on request, msg: took: 4.587114s 2025-04-10T11:16:27.383760Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711078; 2025-04-10T11:16:27.383955Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744283787422 : 281474976711078] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-10T11:16:27.388266Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711079; took: 4.593781s took: 4.593920s 2025-04-10T11:16:27.392951Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711078; 2025-04-10T11:16:27.393104Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744283787422 : 281474976711078] from 72075186224037905 at tablet 72075186224037905, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } took: 4.601328s took: 4.599944s 2025-04-10T11:16:27.399227Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641823452724817:4974], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7491641801977886396:4974]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7491641823452724817:4974].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:16:27.399317Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641823452723842:4974], SessionActorId: [1:7491641801977886396:4974], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491641801977886396:4974]. isRollback=0 2025-04-10T11:16:27.399576Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWY5OTI2My1iZDA1NDY1NS1kYTM2NGUyOC04YjJjYjNmMA==, ActorId: [1:7491641801977886396:4974], ActorState: ExecuteState, TraceId: 01jrfpmm60210r911gsvkfr2hd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491641823452724631:4974] from: [1:7491641823452723842:4974] 2025-04-10T11:16:27.399657Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641823452724631:4974] TxId: 281474976711079. Ctx: { TraceId: 01jrfpmm60210r911gsvkfr2hd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY5OTI2My1iZDA1NDY1NS1kYTM2NGUyOC04YjJjYjNmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } took: 4.603802s 2025-04-10T11:16:27.399837Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWY5OTI2My1iZDA1NDY1NS1kYTM2NGUyOC04YjJjYjNmMA==, ActorId: [1:7491641801977886396:4974], A ... 1:16:32.984692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-10T11:16:32.984704Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-10T11:16:32.984734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-10T11:16:32.986830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-10T11:16:32.986868Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-10T11:16:32.986883Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-10T11:16:32.986892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-10T11:16:32.987896Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-10T11:16:32.987933Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-10T11:16:32.987952Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-10T11:16:32.987967Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-10T11:16:32.987981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-10T11:16:32.987995Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-10T11:16:32.988012Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-10T11:16:32.988023Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T11:16:32.988034Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-10T11:16:32.988870Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-10T11:16:32.993869Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-10T11:16:32.993899Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-10T11:16:32.993915Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-10T11:16:32.993932Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-10T11:16:32.993946Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-10T11:16:32.995439Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-10T11:16:32.995460Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-10T11:16:32.995471Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-10T11:16:32.996094Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-10T11:16:32.996117Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-10T11:16:33.002269Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-10T11:16:33.004480Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T11:16:33.014061Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-10T11:16:33.015526Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-10T11:16:33.019216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-10T11:16:33.019878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-10T11:16:33.021464Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-10T11:16:33.023053Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-10T11:16:33.025302Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-10T11:16:33.027600Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T11:16:33.035095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-10T11:16:33.124376Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-04-10T11:16:33.124414Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-04-10T11:16:33.124428Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-04-10T11:16:33.124441Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-04-10T11:16:33.124454Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-04-10T11:16:33.124501Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-04-10T11:16:33.124748Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-10T11:16:33.137774Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-04-10T11:16:33.137800Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-04-10T11:16:33.137835Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-04-10T11:16:33.137844Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-04-10T11:16:33.137852Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-04-10T11:16:33.137860Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-10T11:16:33.137867Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-04-10T11:16:33.137874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2025-04-10T11:16:33.137883Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-04-10T11:16:33.137890Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-04-10T11:16:33.137917Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-04-10T11:16:33.137929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-04-10T11:16:33.137937Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-04-10T11:16:33.137966Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-04-10T11:16:33.137978Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-10T11:16:33.137985Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2025-04-10T11:16:33.137993Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-04-10T11:16:33.137999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2025-04-10T11:16:33.138006Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-10T11:16:33.150618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-04-10T11:16:33.159334Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-10T11:16:33.159375Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2025-04-10T11:16:33.159391Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-10T11:16:33.159408Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2025-04-10T11:16:33.159432Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-04-10T11:16:33.159458Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-04-10T11:16:33.159490Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-04-10T11:16:33.159601Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-04-10T11:16:33.159789Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2025-04-10T11:16:33.159818Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2025-04-10T11:16:33.159836Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-04-10T11:16:33.159855Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2025-04-10T11:16:33.159882Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::BasicTest32 [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::Random64 [GOOD] |88.9%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] |88.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight >> TOlap::CreateStore >> TOlap::CreateTableWithNullableKeysNotAllowed >> TOlapNaming::AlterColumnStoreOk >> TOlap::CustomDefaultPresets >> GenericFederatedQuery::YdbSelectCount [GOOD] >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TOlap::CustomDefaultPresets [GOOD] >> TOlap::Decimal >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::AlterColumnStoreFailed ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 6664, MsgBus: 62811 2025-04-10T11:16:21.182699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641797917396756:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:21.184074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d77/r3tmp/tmpeCylPv/pdisk_1.dat 2025-04-10T11:16:21.478724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6664, node 1 2025-04-10T11:16:21.517941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:21.517969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:21.517980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:21.518143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:21.561829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:21.561955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:21.563681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62811 TClient is connected to server localhost:62811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:21.993862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:23.392379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641806507331938:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:23.392531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:23.591909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:23.703742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641806507332059:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:23.703871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:23.703965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641806507332064:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:23.706925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:23.713098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641806507332066:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:23.811043Z node 1 :TX_PROXY ERROR: Actor# [1:7491641806507332106:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:24.293780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:16:24.677841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:25.126534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:25.483027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:25.809915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:26.176089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:26.182529Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641797917396756:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:26.182600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:26.203375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:27.591331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-10T11:16:27.609019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-10T11:16:27.610359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-10T11:16:27.611270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" po ... YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 3358, MsgBus: 7921 2025-04-10T11:16:37.974207Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491641867214908572:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:37.974310Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d77/r3tmp/tmpHpaRMg/pdisk_1.dat 2025-04-10T11:16:38.075571Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3358, node 3 2025-04-10T11:16:38.113830Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:38.113929Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:38.115931Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:38.137454Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:38.137481Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:38.137503Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:38.137648Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7921 TClient is connected to server localhost:7921 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:38.593970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:40.733343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491641880099811118:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:40.733416Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:40.750528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:40.786503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491641880099811237:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:40.786588Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:40.786672Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491641880099811242:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:40.790853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:40.800728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491641880099811244:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:40.862953Z node 3 :TX_PROXY ERROR: Actor# [3:7491641880099811284:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:41.259242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:41.693762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:42.127142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:42.563616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:42.974618Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491641867214908572:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:42.974689Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:43.022099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:43.458537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:43.488048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.055007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715700:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-04-10T11:15:02.746214Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:02.747187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:02.747232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:02.757500Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:02.757956Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:02.758229Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:02.805466Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:02.817682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:02.818254Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:02.820038Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:02.820106Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:02.820156Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:02.820581Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:02.820798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:02.820855Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:198:2154] in generation 2 2025-04-10T11:15:02.881545Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:02.928385Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:02.928613Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:02.928729Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-10T11:15:02.928780Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:02.928821Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:02.928864Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:02.929170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.929241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.929545Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:02.929673Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:02.929757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:02.929802Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:02.929842Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 
2025-04-10T11:15:02.929882Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:02.929942Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:02.929985Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:02.930038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:02.930222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.930267Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.930329Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-10T11:15:02.933183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:02.933247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:02.933356Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:02.933534Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:02.933588Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:02.933681Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:02.933751Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:02.933804Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:02.933851Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:02.933907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:02.934313Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:02.934357Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:02.934397Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:15:02.934433Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:02.934490Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:02.934518Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:02.934552Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:02.934589Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:02.934619Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:02.946979Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:02.947051Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:02.947085Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:02.947127Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:02.947186Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:02.947772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.947834Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:02.947875Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-10T11:15:02.947975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:02.947998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:02.948118Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:02.948157Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.948190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:02.948233Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:02.952210Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:02.952305Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:02.952583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.952634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:02.952702Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:02.952869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:02.952910Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:02.952961Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:02.953001Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:02.953055Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.953100Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:02.953149Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:15:02.953203Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit 
LoadTxDetails 2025-04-10T11:15:02.953414Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:02.953456Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:02.953492Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:02.953524Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:02.953566Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:02.953649Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:02.953685Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:02.953748Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:02.953794Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:02.953862Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:02.953899Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:02.953943Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:02.953997Z node 1 :TX_D ... us for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2025-04-10T11:16:43.537408Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2025-04-10T11:16:43.537439Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2025-04-10T11:16:43.537465Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2025-04-10T11:16:43.537674Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is DelayComplete 2025-04-10T11:16:43.537698Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2025-04-10T11:16:43.537720Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2025-04-10T11:16:43.537742Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2025-04-10T11:16:43.537766Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2025-04-10T11:16:43.537784Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2025-04-10T11:16:43.537806Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437185 has finished 2025-04-10T11:16:43.537832Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:16:43.537852Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-10T11:16:43.537876Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-10T11:16:43.537897Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-10T11:16:43.554817Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:16:43.554883Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit 
CompleteOperation 2025-04-10T11:16:43.554936Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms 2025-04-10T11:16:43.554993Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-10T11:16:43.555025Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:16:43.555189Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:16:43.555209Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2025-04-10T11:16:43.555233Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2025-04-10T11:16:43.555287Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:16:43.555321Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-10T11:16:43.555350Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-10T11:16:43.555376Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-04-10T11:16:43.555419Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:16:43.555467Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-10T11:16:43.555493Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-10T11:16:43.556034Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:454:2396], Recipient [4:343:2311]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-10T11:16:43.556077Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:43.556116Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2025-04-10T11:16:43.556542Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:454:2396], Recipient [4:232:2225]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-10T11:16:43.556579Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:16:43.556608Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2025-04-10T11:16:43.556666Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-10T11:16:43.556703Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184origin=9437186 2025-04-10T11:16:43.556755Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-10T11:16:43.556813Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:454:2396], Recipient [4:343:2311]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 
9437186 Flags# 0 Seqno# 400} 2025-04-10T11:16:43.556833Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:16:43.556860Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2025-04-10T11:16:43.557185Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:232:2225], Recipient [4:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:16:43.557214Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:16:43.557253Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:16:43.557284Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:16:43.557330Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-04-10T11:16:43.557357Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-04-10T11:16:43.557385Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-10T11:16:43.557409Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-04-10T11:16:43.557431Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-04-10T11:16:43.557451Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2025-04-10T11:16:43.558925Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-04-10T11:16:43.558997Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 4, NSelectRange: 1, NUpdateRow: 7, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 47, SelectRangeBytes: 376, UpdateRowBytes: 53, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-10T11:16:43.559073Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:16:43.559105Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-04-10T11:16:43.559135Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-04-10T11:16:43.559167Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-04-10T11:16:43.559443Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2025-04-10T11:16:43.559478Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-04-10T11:16:43.559511Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-04-10T11:16:43.559544Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-04-10T11:16:43.559582Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-10T11:16:43.559605Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-04-10T11:16:43.559634Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-04-10T11:16:43.559668Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry 
run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:16:43.559696Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-10T11:16:43.559728Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-10T11:16:43.559759Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-10T11:16:43.578950Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185
2025-04-10T11:16:43.579024Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation
2025-04-10T11:16:43.579083Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms
2025-04-10T11:16:43.579139Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400}
2025-04-10T11:16:43.579169Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185
2025-04-10T11:16:43.579871Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:343:2311], Recipient [4:232:2225]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400}
2025-04-10T11:16:43.579928Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-10T11:16:43.579987Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403
2025-04-10T11:16:43.598746Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-10T11:16:43.598814Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation
2025-04-10T11:16:43.598878Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:99:2134], exec latency: 3 ms, propose latency: 5 ms
2025-04-10T11:16:43.598936Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400}
2025-04-10T11:16:43.598971Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-10T11:16:43.599334Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:232:2225], Recipient [4:454:2396]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400}
2025-04-10T11:16:43.599380Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-10T11:16:43.599420Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403
>> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD]
>> TOlap::CreateTableWithNullableKeys [GOOD]
>> TOlap::Decimal [GOOD]
>> test_retry.py::TestRetry::test_low_rate[kikimr0]
>> TOlapNaming::AlterColumnStoreFailed [GOOD]
>> TOlap::CreateDropTable [GOOD]
>> TOlap::CreateDropStandaloneTableDefaultSharding
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient:
[1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:46.348923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:46.349004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.349046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:46.349077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:46.349789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:46.349817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:46.349880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.349962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:46.350901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:46.416008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:46.416059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:46.427889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:46.428160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:46.428305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:46.436291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:46.436517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:46.438677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.439738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:46.444274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.450494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:46.450548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.450634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:46.450781Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.456290Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:46.540767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:46.542245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.543835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:46.544920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:46.544983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.547420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.547528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:46.547664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.547749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:46.547794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:46.547824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:46.549379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.549429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:46.549455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:46.550760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.550805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.550842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.550873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.554836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:46.556218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:46.557119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:46.557877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.559261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:46.559330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.559452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:46.559514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:46.560992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.561023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.561142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.561173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:46.561457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.561500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:46.561579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.561603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.561629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.561651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.561678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:46.561729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.561755Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:46.561774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:46.561814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:46.561837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:46.561862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:46.563380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.563453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.563486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:16:47.560065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:16:47.560156Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:47.560186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T11:16:47.560216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T11:16:47.560234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-04-10T11:16:47.560491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.560527Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-04-10T11:16:47.560579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-04-10T11:16:47.561023Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.561110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.561137Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:16:47.561166Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:16:47.561203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:16:47.561772Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.561827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.561843Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:16:47.561859Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T11:16:47.561876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:16:47.562808Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.562858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:16:47.562873Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:16:47.562890Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-10T11:16:47.562922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:16:47.562971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:16:47.563795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-04-10T11:16:47.565162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:16:47.565618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:16:47.565851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:16:47.577578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-10T11:16:47.577631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-04-10T11:16:47.577723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-10T11:16:47.577757Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:16:47.579141Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.579271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.579306Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:47.579367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:16:47.579446Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:16:47.579474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:16:47.579511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:16:47.579534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:16:47.579561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T11:16:47.579621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:371:2350] message: TxId: 104 2025-04-10T11:16:47.579663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:16:47.579710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:16:47.579751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:16:47.579846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:16:47.581987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:16:47.582072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:16:47.582106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:608:2568] 2025-04-10T11:16:47.582434Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:16:47.583650Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[2:474:2443];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; Forgetting tablet 72075186233409547 2025-04-10T11:16:47.586913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:16:47.587877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:16:47.588346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:16:47.588399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:16:47.588458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 1
2025-04-10T11:16:47.590677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-04-10T11:16:47.590732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-04-10T11:16:47.591081Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 104
2025-04-10T11:16:47.591451Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:16:47.591612Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 179us result status StatusPathDoesNotExist
2025-04-10T11:16:47.591743Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3])" Path: "/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-10T11:16:47.592096Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944
2025-04-10T11:16:47.592152Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 62us result status StatusPathDoesNotExist
2025-04-10T11:16:47.592198Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:16:46.349041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:16:46.349163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:16:46.349223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:16:46.349264Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:46.349825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:46.349867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:46.349929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.350018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:46.350975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:46.416021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:46.416076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:46.428180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:46.428390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:46.428540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:46.436113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:46.436362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:46.438690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.439741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:46.444492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.450522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:46.450588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.450645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:46.450818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.456907Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:46.579621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:46.579859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:16:46.580050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:46.580222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:46.580270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.582399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.582517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:46.582654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.582707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:46.582751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:46.582782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:46.584608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.584656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:46.584687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:46.586062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.586112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.586176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.586215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.588728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:46.590312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:46.590512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:46.591254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-10T11:16:46.591360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:46.591416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.591758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:46.591849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.592052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:46.592180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:46.593944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.593982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.594121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.594170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:46.594378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.594429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:46.594514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.594539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.594584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.594613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.594639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:46.594669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.594707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:46.594734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:46.594781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:46.594814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:46.594842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:46.596441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.596524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.596552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1:16:47.739489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:47.740718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:16:47.740854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-10T11:16:47.741159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:47.741240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:47.741281Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-10T11:16:47.741454Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-10T11:16:47.741579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:47.741649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:16:47.742534Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:212;event=finished_tx;tx_id=101; FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-10T11:16:47.743490Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:47.743527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:47.743651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:16:47.743747Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:47.743778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:16:47.743826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:16:47.744095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.744134Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:16:47.744185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-10T11:16:47.744683Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:16:47.744761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:16:47.744796Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:16:47.744828Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:16:47.744861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:47.745509Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:16:47.745566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:16:47.745585Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:16:47.745609Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:16:47.745630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:16:47.745679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:16:47.746946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-10T11:16:47.747735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:16:47.748499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:16:47.770705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-10T11:16:47.770767Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:16:47.770867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:16:47.772321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.772473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.772514Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:16:47.772606Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:16:47.772637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:16:47.772675Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:16:47.772704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:16:47.772737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:16:47.772793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 101 2025-04-10T11:16:47.772831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:16:47.772867Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:16:47.772892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:16:47.773016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:16:47.774346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:16:47.774387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:342:2321] TestWaitNotification: OK eventTxId 101 2025-04-10T11:16:47.774790Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:16:47.774963Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 195us result status StatusSuccess 2025-04-10T11:16:47.775400Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:46.348933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:46.349026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.349088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:46.349125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:46.349807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:46.349837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:46.349901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.349960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-10T11:16:46.350903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:46.416607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:46.416648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:46.427815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:46.428068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:46.428210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:46.435934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:46.436129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:46.438707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.439797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:46.444320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.450523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:46.450589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.450643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:46.450809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.456320Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:46.556116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:46.556326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.556485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:46.556641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:46.556694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:16:46.558598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:46.558724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:46.558816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:46.558853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:46.560363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.560402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:46.560427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:46.561755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.561818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.561876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.561921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.574260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:46.576196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:46.576362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:46.577169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.577293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:46.577349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.577562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:46.577629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.577759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:46.577810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:46.579812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.579851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.580014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.580044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:46.580232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.580271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:46.580349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.580381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.580413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.580446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.580474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:46.580499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.580528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:46.580560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:46.580609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:46.580633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:46.580656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:46.582303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.582398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.582436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:16:47.763353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T11:16:47.763828Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:47.763922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:47.763983Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId# 102:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2025-04-10T11:16:47.764179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:16:47.764318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:16:47.764386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:16:47.769050Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:212;event=finished_tx;tx_id=102; FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:16:47.771094Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:47.771150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:16:47.771366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:16:47.771517Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:47.771563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:16:47.771621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T11:16:47.771693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.771753Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:16:47.771818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-10T11:16:47.772840Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:16:47.772967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:16:47.773011Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:16:47.773058Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:16:47.773108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:16:47.773669Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:16:47.773748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:16:47.773776Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:16:47.773806Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-10T11:16:47.773838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:16:47.773911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:16:47.776288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-10T11:16:47.776370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2025-04-10T11:16:47.776459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-04-10T11:16:47.777113Z node 2 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-04-10T11:16:47.777703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.777856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-04-10T11:16:47.778195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:16:47.778597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:16:47.779811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2025-04-10T11:16:47.791991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-04-10T11:16:47.792060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:16:47.792220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:16:47.794029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.794197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.794245Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:16:47.794395Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:16:47.794459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:16:47.794513Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:16:47.794550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:16:47.794612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:16:47.794688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 102 2025-04-10T11:16:47.794744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:16:47.794790Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:16:47.794832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:16:47.794974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:16:47.796638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:16:47.796711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:402:2374] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-10T11:16:47.800403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:47.800675Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:16:47.800966Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-10T11:16:47.803395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:47.803562Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:16:47.803855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:16:47.803901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:16:47.804363Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:16:47.804489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:16:47.804534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:440:2412] TestWaitNotification: OK eventTxId 103 >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] >> TOlap::StoreStats ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22494, MsgBus: 8569 2025-04-10T11:16:14.684258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641764301526385:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:14.684388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d99/r3tmp/tmpIrazFT/pdisk_1.dat 2025-04-10T11:16:15.037104Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22494, node 1 2025-04-10T11:16:15.078163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:15.078293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:15.080103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:15.192957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:15.192988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:15.192998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:15.193119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8569 TClient is connected to server localhost:8569 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:15.791920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:16.940396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641772891461639:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:16.940529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.257237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.362496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641777186429058:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.362570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.362583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641777186429063:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.365266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.372170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641777186429065:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:17.455202Z node 1 :TX_PROXY ERROR: Actor# [1:7491641777186429105:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:18.088975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:18.414254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:18.739283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.098603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.437432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.684297Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641764301526385:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:19.684398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:19.753711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.784856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.165841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.189902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.191307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.193226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_ ... 81474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:42.284718Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641888766174183:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.284788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.284802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641888766174188:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.288323Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:42.295442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641888766174190:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:42.368012Z node 4 :TX_PROXY ERROR: Actor# [4:7491641888766174230:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:42.838412Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:43.291905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:43.769925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:44.150760Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641875881271516:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:44.150869Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:44.273106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:16:44.724857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.183392Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:45.221803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.024225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TOlap::CreateDropStandaloneTable >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 10489, MsgBus: 4832 2025-04-10T11:16:14.684679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641767245257749:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:14.684826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001db1/r3tmp/tmpLtKBkc/pdisk_1.dat 2025-04-10T11:16:14.999523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:15.070632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:15.071608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:15.074297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10489, node 1 2025-04-10T11:16:15.192901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:15.192928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:15.192936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:15.193051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4832 TClient is connected to server localhost:4832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:16:15.812732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:16.925606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641775835193004:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:16.925737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.257115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.357835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641780130160423:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.357900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.357939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641780130160428:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.362322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.368482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641780130160430:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:17.443895Z node 1 :TX_PROXY ERROR: Actor# [1:7491641780130160470:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:18.089151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:18.415184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:18.770183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.147855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.501184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.684996Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641767245257749:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:19.685092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:19.855045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.884122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.298469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.314906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.316028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.317603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" ... or: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.668247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641886171559669:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.668253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:42.672190Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:42.681468Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641886171559671:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:42.740475Z node 4 :TX_PROXY ERROR: Actor# [4:7491641886171559711:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:43.190801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:43.667952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:44.165950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:16:44.702472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:44.786272Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641873286656999:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:44.786364Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:45.149283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.666773Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:45.698031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.355192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TOlapNaming::CreateColumnTableOk >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> SysViewQueryHistory::TopReadBytesAdd [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 21215, MsgBus: 29318 2025-04-10T11:16:16.579062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641776161053962:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:16.579151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d8a/r3tmp/tmp0Lc3hJ/pdisk_1.dat 2025-04-10T11:16:16.872459Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21215, node 1 2025-04-10T11:16:16.949824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:16.949975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:16.950763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:16.950798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:16.950811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:16.950949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:16.952000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29318 TClient is connected to server localhost:29318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:17.413223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:18.884261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641784750989215:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.884404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.097173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.220679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641789045956634:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.220745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641789045956639:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.220792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.223630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.231510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641789045956641:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:19.330590Z node 1 :TX_PROXY ERROR: Actor# [1:7491641789045956681:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:19.785858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.104094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:20.508799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.863224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.190186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.579269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641776161053962:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:21.579359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:21.581250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:21.619436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.027379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.045873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.047204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.048179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: " ... 81474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:43.874482Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641890229205740:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:43.874552Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641890229205745:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:43.874572Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:43.877652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:43.885796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641890229205747:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:43.954177Z node 4 :TX_PROXY ERROR: Actor# [4:7491641890229205813:2403] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:44.394457Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:44.817129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:45.305260Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.828609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.887525Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641877344303076:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:45.887623Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:46.348685Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-10T11:16:46.821900Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:46.852298Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:48.656732Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> SysViewQueryHistory::TopDurationAdd [GOOD] >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl |88.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> TYardTest::TestEnormousDisk [GOOD] >> SysViewQueryHistory::AddDedup [GOOD] >> SysViewQueryHistory::AddDedup2 [GOOD] >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedup2 [GOOD] >> SysViewQueryHistory::AddDedupRandom [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 18885, MsgBus: 16700 2025-04-10T11:16:16.784636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641776435060957:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:16.784739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d86/r3tmp/tmp6uypy3/pdisk_1.dat 2025-04-10T11:16:17.080489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18885, node 1 2025-04-10T11:16:17.146907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:17.146929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:17.146935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:17.147038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:17.163404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:17.163529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:17.165087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16700 TClient is connected to server localhost:16700 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:17.576943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:19.126066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641789319963510:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.126288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.340037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.435749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641789319963632:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.435816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.435997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641789319963637:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:19.438332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:19.445366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641789319963639:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:19.526882Z node 1 :TX_PROXY ERROR: Actor# [1:7491641789319963679:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:20.048425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.402359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:20.769793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.122321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.498298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.784597Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641776435060957:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:21.784667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:21.889530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:21.920790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.323615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.342069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.343717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.345125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } ... .723555Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641899873294111:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:45.723625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:45.723639Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641899873294116:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:45.726121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:45.732757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641899873294118:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:45.823111Z node 4 :TX_PROXY ERROR: Actor# [4:7491641899873294158:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:46.243863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:46.626074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-04-10T11:16:47.129165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.607464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.671817Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641886988391441:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:47.678750Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:48.052425Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:48.515529Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:48.550398Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:50.280443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 19398, MsgBus: 30529 2025-04-10T11:16:16.321142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641777046277221:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:16.321202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d8e/r3tmp/tmpKdhjJs/pdisk_1.dat 2025-04-10T11:16:16.569590Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19398, node 1 2025-04-10T11:16:16.638917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:16.638947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:16.638964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:16.639159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:16.662829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:16.662984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:16.665088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30529 TClient is connected to server localhost:30529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:17.064887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:18.528077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641785636212477:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.528186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.725822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:18.821219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641785636212598:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.821287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.821348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641785636212603:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:18.823613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:18.829795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641785636212605:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:18.913510Z node 1 :TX_PROXY ERROR: Actor# [1:7491641785636212645:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:19.388162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.696918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:20.045065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.365886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.647178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:20.948390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:20.980687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:21.320795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641777046277221:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:21.320885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:22.337555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.357236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.358868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.360916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 Call DescribeTable. 
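Annotation: in the request dump that follows, the `CRAB Expected` / `CRAB Actual` pairs come from the test's mock connector, which prints the request it expected to receive next to the one it actually got; the test passes only when the two protobufs match. Below is a minimal sketch of that kind of check. The helper is illustrative and not the real test code; only `google::protobuf::util::MessageDifferencer` is an actual library API.

```cpp
// Sketch of an expected-vs-actual protobuf request check, assuming only
// the public protobuf library API (the real test helpers may differ).
#include <google/protobuf/message.h>
#include <google/protobuf/util/message_differencer.h>

#include <iostream>
#include <string>

bool RequestsMatch(const google::protobuf::Message& expected,
                   const google::protobuf::Message& actual) {
    google::protobuf::util::MessageDifferencer differ;
    std::string report;
    differ.ReportDifferencesToString(&report);  // collect a diff on mismatch
    const bool equal = differ.Compare(expected, actual);
    if (!equal) {
        // Mirror what the log does: dump both sides for inspection.
        std::cerr << "Expected: " << expected.ShortDebugString() << "\n"
                  << "Actual:   " << actual.ShortDebugString() << "\n"
                  << "Diff:     " << report << "\n";
    }
    return equal;
}
```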
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIV ... Id: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:44.843805Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:44.843899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641895549565249:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:44.846748Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:44.855438Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641895549565251:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:44.928037Z node 4 :TX_PROXY ERROR: Actor# [4:7491641895549565291:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:45.403918Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:45.846572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:46.262392Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:46.766831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.153201Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641886959629885:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:47.153267Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:47.312785Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.845687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:47.881639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:49.584349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
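Annotation: the ReadSplits request below carries the pushed-down predicate. The `where { filter_typed { comparison { operation: EQ ... int32_value: 42 } } }` subtree is the wire form of `WHERE filtered_column = 42`, and `filtering: FILTERING_OPTIONAL` (as I read the connector contract; treat that reading as an assumption) allows the source to skip applying the filter, with the engine free to re-check rows. A self-contained toy re-check of that exact predicate, using simplified stand-in types rather than the real Arrow/connector types:

```cpp
// Toy re-check of the predicate pushed down in this test:
//   WHERE filtered_column = 42
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

struct Row {
    std::optional<int32_t> filtered_column;  // optional_type { item { type_id: INT32 } }
    std::string data_column;                 // optional_type { item { type_id: STRING } }
};

// EQ comparison against int32_value: 42; a NULL left-hand side never matches.
bool PassesPushedDownFilter(const Row& row) {
    return row.filtered_column.has_value() && *row.filtered_column == 42;
}

int main() {
    const std::vector<Row> rows = {{42, "keep"}, {7, "drop"}, {std::nullopt, "drop"}};
    for (const Row& row : rows) {
        if (PassesPushedDownFilter(row)) {
            std::cout << row.data_column << "\n";  // prints: keep
        }
    }
}
```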
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> SysViewQueryHistory::StableMerge [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedupRandom [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] >> SysViewQueryHistory::AggrMerge [GOOD] >> TOlap::AlterTtl [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] >> SysViewQueryHistory::StableMerge2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:49.568147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:49.568234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:49.568281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:49.568308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:49.568346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:49.568369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:49.568414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:49.568471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:49.568743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:49.637918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:49.637977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:49.651679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:49.651968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:49.652138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:49.659331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:49.659515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:49.660048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.660319Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:49.661874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.663205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:49.663261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.663415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:49.663459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:49.663494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:49.663631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.669424Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:49.775266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:49.775525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.775717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:49.775912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:49.775964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.777999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.778175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:49.778381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.778449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:49.778518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:49.778558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:49.780313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.780359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:49.780387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:49.781643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.781685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.781731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.781770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.784277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:49.785727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:49.785877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:49.786616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.786725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:49.786761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.786960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:49.787013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.787186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:49.787293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:49.789164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:49.789227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:49.789421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.789466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-04-10T11:16:49.789752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.789820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:49.789932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:49.789973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.790010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:49.790044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.790079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:49.790117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.790197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:49.790247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:49.790316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:49.790347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:49.790377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:49.791998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:49.792087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:49.792122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-10T11:16:52.424556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 106, at schemeshard: 72057594046678944 2025-04-10T11:16:52.424586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-10T11:16:52.424616Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 106, at schemeshard: 72057594046678944 2025-04-10T11:16:52.447133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-10T11:16:52.447209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-10T11:16:52.447336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-10T11:16:52.447388Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-10T11:16:52.447437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.447476Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-04-10T11:16:52.449372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.449548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.449609Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-04-10T11:16:52.449683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-04-10T11:16:52.449806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:52.451678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-04-10T11:16:52.451831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-04-10T11:16:52.462749Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:212;event=finished_tx;tx_id=106; 2025-04-10T11:16:52.463144Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:52.463291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 12884904046 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:52.463363Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-04-10T11:16:52.464223Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-04-10T11:16:52.464520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:16:52.464630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-04-10T11:16:52.470367Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:52.470428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:16:52.470667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:16:52.470866Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:52.470920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-10T11:16:52.470971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 3 2025-04-10T11:16:52.471477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.471543Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:16:52.471611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-10T11:16:52.474968Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:16:52.475098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:16:52.475147Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:16:52.475195Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-10T11:16:52.475241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:16:52.477436Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:16:52.477529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:16:52.477560Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:16:52.477589Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-04-10T11:16:52.477622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:16:52.477706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-10T11:16:52.480094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-10T11:16:52.480892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:16:52.482259Z node 3 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-04-10T11:16:52.482881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-10T11:16:52.495164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-10T11:16:52.495232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-10T11:16:52.495371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-04-10T11:16:52.497126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.497298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-10T11:16:52.497363Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-10T11:16:52.497490Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:16:52.497531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:16:52.497578Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:16:52.497617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:16:52.497657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: true 2025-04-10T11:16:52.497724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:340:2319] message: TxId: 106 2025-04-10T11:16:52.497781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:16:52.497824Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T11:16:52.497862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-10T11:16:52.498004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:16:52.499698Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:16:52.499752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:549:2520] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge2 [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] |89.0%| [TA] $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 9394, MsgBus: 23036 2025-04-10T11:16:14.684371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641765165970834:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:14.684513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001da9/r3tmp/tmpxJ1q9Y/pdisk_1.dat 2025-04-10T11:16:15.043201Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9394, node 1 2025-04-10T11:16:15.079805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:15.079928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:15.081650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:15.081720Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:15.192973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:15.193003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:15.193011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:15.193149Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration TClient is connected to server localhost:23036 TClient is connected to server localhost:23036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:15.799254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:16.906139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641773755906088:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:16.906878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.257123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.357797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641778050873507:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.357876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.357926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641778050873512:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:17.362247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:17.368996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641778050873514:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:17.427806Z node 1 :TX_PROXY ERROR: Actor# [1:7491641778050873554:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:18.088890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:16:18.521801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:18.882340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.248258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.570868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:19.684403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641765165970834:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:19.684490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:19.969346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:20.028460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.364099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.383281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.384917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-10T11:16:22.386359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 Call DescribeTable. 
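Annotation: the ClickHouse dumps that follow, like the Iceberg ones above, print credentials in cleartext (`password: "qwerty12345"`, token `value: "qwerty12345"`). That is harmless for a mock fixture but worth keeping out of real logs. A hedged sketch of a text-format redaction pass; the field names targeted are just the ones visible in this log, and none of this is YDB code:

```cpp
// Masks quoted secret values in a protobuf text-format dump before logging.
// The field list is tailored to this log's dumps and is an assumption.
#include <regex>
#include <string>

std::string RedactSecrets(std::string dump) {
    // Matches e.g. password: "qwerty12345" and token value: "qwerty12345";
    // int32_value: 42 is untouched because it has no quoted payload.
    static const std::regex kSecret(R"((password|value):\s*"[^"]*")");
    return std::regex_replace(dump, kSecret, "$1: \"***\"");
}
```

Run over the dumps above, this would leave the message structure intact while masking only the quoted secret values.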
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { ... CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:43.921963Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:46.456561Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641903278864283:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:46.456688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:46.479297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:46.514352Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641903278864403:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:46.514444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:46.514472Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641903278864408:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:46.517444Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:46.527679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641903278864410:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:46.606220Z node 4 :TX_PROXY ERROR: Actor# [4:7491641903278864450:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:47.141055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:47.657353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:48.193451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:48.346129Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641890393961743:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:48.346216Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:48.709081Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:49.186911Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:49.660290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:49.695328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:52.754700Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
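The three CRAB dumps above (request sent, Expected, Actual) are byte-identical, which is what this pushdown assertion checks. A minimal sketch of how two such single-line dumps could be diffed offline, assuming each was saved to a plain-text file (the file names are hypothetical):

    # Minimal sketch: diff two protobuf text dumps captured from this log.
    # "expected.txt" / "actual.txt" are hypothetical names for saved copies.
    import difflib

    with open("expected.txt") as f:
        expected = f.read().split()
    with open("actual.txt") as f:
        actual = f.read().split()

    # Token-level diff, since the log prints each dump on a single line.
    for d in difflib.unified_diff(expected, actual,
                                  fromfile="expected", tofile="actual",
                                  lineterm=""):
        print(d)

A non-empty diff would pinpoint the first diverging field token; here it is empty, matching the [GOOD] verdict below.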
GRpcStatusCode: 0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 16542, MsgBus: 13463 2025-04-10T11:16:19.663298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641785959590792:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:19.663427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d7b/r3tmp/tmpKyPrDX/pdisk_1.dat 2025-04-10T11:16:19.929481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16542, node 1 2025-04-10T11:16:19.967247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:19.967274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:19.967280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:19.967444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:20.000346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:20.000482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:20.002445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13463 TClient is connected to server localhost:13463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:20.381869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:16:21.770986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641794549526046:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:21.771114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:21.972251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T11:16:22.075253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641798844493465:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:22.075337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641798844493470:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:22.075345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:22.077726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:22.083579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641798844493472:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:16:22.180269Z node 1 :TX_PROXY ERROR: Actor# [1:7491641798844493512:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:22.669076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.046188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T11:16:23.508916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:23.956399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:16:24.331112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:24.663573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641785959590792:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:24.663654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:24.733417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:24.761447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:26.047744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-10T11:16:26.064034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-10T11:16:26.066091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-10T11:16:26.067310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 Call DescribeTable. 
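Each node bootstrap in this log repeats the same sequence: NOT_FOUND warnings while the default resource pool is fetched, a retry from TPoolCreatorActor, then a TX_PROXY "path exist, request accepts it" message once a concurrent creation wins; by the messages' own wording, the existing path is accepted. A minimal sketch for tallying those workload-service warnings per node from a saved copy of the output (file name hypothetical):

    # Minimal sketch: count KQP_WORKLOAD_SERVICE warnings per node.
    # "ya_test.log" is a hypothetical name for a saved copy of this output.
    import re
    from collections import Counter

    pattern = re.compile(r"node (\d+) :KQP_WORKLOAD_SERVICE WARN")
    counts = Counter()
    with open("ya_test.log") as f:
        for line in f:
            for node in pattern.findall(line):
                counts[node] += 1

    for node, n in sorted(counts.items()):
        print(f"node {node}: {n} workload-service warnings")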
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIV ... Id: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:48.631548Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:48.631615Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491641914554496011:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:48.635851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-10T11:16:48.643420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491641914554496013:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:16:48.701065Z node 4 :TX_PROXY ERROR: Actor# [4:7491641914554496053:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:16:49.134992Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:16:49.536800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-04-10T11:16:49.965681Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:16:50.357242Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491641901669593339:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:50.357307Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:16:50.452576Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:16:50.884452Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:16:51.316165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:16:51.348136Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:16:53.200287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
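The Iceberg exchange below mirrors the ClickHouse one above, with kind: ICEBERG, protocol: NATIVE, a hive catalog uri and an s3 warehouse under iceberg_options. A minimal sketch for picking the identifying fields out of a dump line, keyed to the literal field names printed here (the summarize helper is hypothetical):

    # Minimal sketch: extract identifying fields from a one-line
    # data_source_instance text dump as printed in this log.
    import re

    def summarize(dump: str) -> dict:
        fields = {}
        for key in ("kind", "protocol", "database", "table"):
            m = re.search(rf'{key}: "?([\w.]+)"?', dump)
            if m:
                fields[key] = m.group(1)
        return fields

    line = 'data_source_instance { kind: ICEBERG protocol: NATIVE } table: "example_1"'
    print(summarize(line))  # {'kind': 'ICEBERG', 'protocol': 'NATIVE', 'table': 'example_1'}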
data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive { uri: "hive_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 |89.0%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:46.348953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:46.349060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.349102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:46.349134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:46.349823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:46.349868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:46.349940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:46.350028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:46.350914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:46.420087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:46.420129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:46.429723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:46.429889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:46.429990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:46.435489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:46.435757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:46.438657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.439734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:46.444465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.449710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.450542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
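The schemeshard trace below walks each operation through its state machine; the hops are printed as "Change state for txid ... N -> M" records (2 -> 3 -> 128 -> 240 for operation 1:0 here). A minimal sketch for recovering that transition sequence from a saved copy of such a log (file name hypothetical):

    # Minimal sketch: extract schemeshard txid state transitions.
    # "schemeshard.log" is a hypothetical name for a saved copy of this output.
    import re

    transition = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")
    with open("schemeshard.log") as f:
        for line in f:
            for txid, src, dst in transition.findall(line):
                print(f"{txid}: {src} -> {dst}")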
2025-04-10T11:16:46.450590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.450630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:46.450752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.456190Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:46.552924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:46.553137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.553324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:46.553566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:46.553625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.555384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.555482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:46.555585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.555628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:46.555681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:46.555716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:46.557134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.557174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:46.557198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:46.558606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558646Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.558685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:16:46.558729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.561266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:46.562698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:46.562814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:46.563587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:46.563677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:46.563707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.563910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:46.563954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:46.564078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:46.564132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:46.565558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:46.565593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:46.565716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:46.565745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:46.565951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:46.565995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:46.566062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.566086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.566109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:46.566135Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.566188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:46.566223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:46.566247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:46.566268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:46.566307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:46.566332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:46.566359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:46.567914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.567990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:46.568015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-04-10T11:16:54.255445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:16:54.255487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:16:54.255552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:16:54.255952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-04-10T11:16:54.255988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-04-10T11:16:54.256061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-04-10T11:16:54.256088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-04-10T11:16:54.256170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-04-10T11:16:54.256195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-04-10T11:16:54.256259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-10T11:16:54.256282Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-10T11:16:54.256343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-04-10T11:16:54.256367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-04-10T11:16:54.256440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-04-10T11:16:54.256458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-04-10T11:16:54.256500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-04-10T11:16:54.256514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2025-04-10T11:16:54.256556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-04-10T11:16:54.256571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-04-10T11:16:54.261570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-04-10T11:16:54.261624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-04-10T11:16:54.262586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-10T11:16:54.262622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-10T11:16:54.262737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:16:54.262762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:16:54.262860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:16:54.262885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:16:54.262991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-10T11:16:54.263016Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-10T11:16:54.263101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-10T11:16:54.263124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-10T11:16:54.263198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-10T11:16:54.263221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-10T11:16:54.263303Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-10T11:16:54.263328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-10T11:16:54.265130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-10T11:16:54.265163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-10T11:16:54.265636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-10T11:16:54.265659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-10T11:16:54.266468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-10T11:16:54.266496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-10T11:16:54.266565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-10T11:16:54.266581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-10T11:16:54.266637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:36 2025-04-10T11:16:54.266655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-10T11:16:54.266713Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-10T11:16:54.266730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-10T11:16:54.266780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-04-10T11:16:54.266797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-04-10T11:16:54.266837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-10T11:16:54.266858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-10T11:16:54.267681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-10T11:16:54.267713Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-10T11:16:54.268024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-10T11:16:54.268047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-10T11:16:54.268089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-10T11:16:54.268105Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-10T11:16:54.268292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-10T11:16:54.268310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-10T11:16:54.268724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-10T11:16:54.268745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-10T11:16:54.268809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-04-10T11:16:54.268825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-04-10T11:16:54.268900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-04-10T11:16:54.268916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-04-10T11:16:54.268960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-04-10T11:16:54.268976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-04-10T11:16:54.269297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-04-10T11:16:54.269362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-04-10T11:16:54.269460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-04-10T11:16:54.269486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-04-10T11:16:54.272559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-04-10T11:16:54.272600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 
72075186233409596 2025-04-10T11:16:54.272684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-04-10T11:16:54.272702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-04-10T11:16:54.272740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2025-04-10T11:16:54.272756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-04-10T11:16:54.272797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-04-10T11:16:54.272833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-04-10T11:16:54.272946Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-04-10T11:16:54.273465Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:16:54.273678Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 233us result status StatusPathDoesNotExist 2025-04-10T11:16:54.273810Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:16:54.274416Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-10T11:16:54.274512Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 104us result status StatusPathDoesNotExist 2025-04-10T11:16:54.274587Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |89.1%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlapNaming::CreateColumnTableFailed [GOOD] >> YdbSdkSessionsPool::WaitQueue10 >> YdbSdkSessionsPool::WaitQueue1 >> YdbSdkSessionsPool::PeriodicTask10 >> YdbSdkSessionsPool::RunSmallPlan >> YdbSdkSessionsPool::CustomPlan >> YdbSdkSessionsPool::StressTestSync10 >> YdbSdkSessionsPool::StressTestAsync10 >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:50.924672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:50.924765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:50.924817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:50.924866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:50.924920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:50.924951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:50.925003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:50.925087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:50.925407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:51.001558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:51.001629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:51.018560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:51.018922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:51.019123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:51.026692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:51.026918Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:51.027468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:51.027776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:51.029489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:51.030775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:51.030841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:51.030985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:51.031024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:51.031056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:51.031159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.036944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:51.135869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:51.136126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.136335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:51.136575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:51.136649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.139100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:51.139273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:51.139445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.139497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:51.139540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:51.139571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2025-04-10T11:16:51.141630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.141694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:51.141734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:51.143623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.143682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.143734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:51.143773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:51.159441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:51.162025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:51.162286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:51.163431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:51.163606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:51.163664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:51.163951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:51.164042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:51.164251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:51.164344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:51.166782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:51.166837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:51.167045Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:51.167090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:51.167359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:51.167428Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:51.167556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:51.167591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:51.167630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:51.167672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:51.167709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:51.167750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:51.167799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:51.167841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:51.167911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:51.167954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:51.167991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:51.170440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:51.170568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:51.170606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:56.652766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:56.652798Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:56.655156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.655203Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:56.655246Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:56.656543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.656578Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.656632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:56.656683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:56.656804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:56.657897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:56.658048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:56.658656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:56.658748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:56.658785Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:56.658997Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:56.659038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:56.659167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:56.659219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:56.660535Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T11:16:56.660574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:56.660706Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:56.660738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:56.660999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.661039Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:56.661120Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:56.661150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:56.661180Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:56.661207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:56.661235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:56.661268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:56.661296Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:56.661335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:56.661400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:56.661431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:56.661458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:56.661820Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:56.661889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:56.661916Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:16:56.661946Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:16:56.661973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:56.662038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:16:56.664091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:16:56.664526Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 
1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:16:56.665211Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-04-10T11:16:56.680090Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-04-10T11:16:56.682129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:56.682401Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.682594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-04-10T11:16:56.683423Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:16:56.685243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:56.685385Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-10T11:16:56.686100Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:16:56.686322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:16:56.686355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:16:56.686661Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:16:56.686743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:16:56.686787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:284:2275] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:16:56.688694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:56.688865Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:16:56.688997Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 
2025-04-10T11:16:56.690499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:56.690591Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:16:56.690738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:16:56.690763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:16:56.690990Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:16:56.691044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:16:56.691073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:291:2282] TestWaitNotification: OK eventTxId 102 >> YdbSdkSessionsPool::StressTestAsync1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 16124111571429948031 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2025-04-10T11:15:17.783105Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-10T11:15:18.034812Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND 
TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2025-04-10T11:15:19.039895Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND 
TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2025-04-10T11:15:20.940815Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2025-04-10T11:15:21.125942Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-10T11:15:21.128752Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14427238890659173966] Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] 2025-04-10T11:15:22.656797Z 1 00h03m50.161024s :BS_PROXY ERROR: Group# 2181038080 StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: TEvPutResult {Id# [1:1:62:0:0:354994:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpa ... 
:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 3 2025-04-10T11:16:37.740707Z 1 00h28m00.900512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 1 2025-04-10T11:16:38.020125Z 1 00h28m10.901024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 964 SEND TEvPut with 
key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 0 2025-04-10T11:16:39.261059Z 9 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:127516:347] ServerId# [1:128559:173] TabletId# 72057594037932033 PipeClientId# [9:127516:347] 2025-04-10T11:16:39.261338Z 8 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:158193:17] ServerId# [1:158203:4103] TabletId# 72057594037932033 PipeClientId# [8:158193:17] 2025-04-10T11:16:39.261450Z 7 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:157143:17] ServerId# [1:157150:3976] TabletId# 72057594037932033 PipeClientId# [7:157143:17] 2025-04-10T11:16:39.261594Z 6 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:134190:17] ServerId# [1:134197:1011] TabletId# 72057594037932033 PipeClientId# [6:134190:17] 2025-04-10T11:16:39.261733Z 5 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:154221:17] ServerId# [1:154229:3597] TabletId# 72057594037932033 PipeClientId# [5:154221:17] 2025-04-10T11:16:39.261826Z 4 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:163152:17] ServerId# [1:163162:4703] TabletId# 72057594037932033 PipeClientId# [4:163152:17] 2025-04-10T11:16:39.261926Z 3 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:153132:17] ServerId# [1:153142:3473] TabletId# 72057594037932033 PipeClientId# [3:153132:17] 2025-04-10T11:16:39.262032Z 2 00h28m40.904096s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:162184:17] ServerId# [1:162191:4593] TabletId# 72057594037932033 PipeClientId# [2:162184:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 7 2025-04-10T11:16:40.831088Z 1 00h29m20.913584s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999829} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Starting nodes Start compaction 1 Start checking |89.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkTx::OlapDeferredEffects >> KqpSinkLocks::InvalidateOnCommit >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> KqpSinkLocks::TInvalidate >> KqpSnapshotIsolation::TSimpleOltpNoSink >> KqpTx::RollbackByIdle >> YdbSdkSessionsPool::RunSmallPlan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2025-04-10T11:16:57.759316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641950985220364:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.759438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00180e/r3tmp/tmpNOrA3d/pdisk_1.dat 2025-04-10T11:16:58.107561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:58.140068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.140187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:58.145498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11865, node 1 2025-04-10T11:16:58.255006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.255046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.255054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.255213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:27704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.632733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> YdbSdkSessionsPool::WaitQueue10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-04-10T11:16:57.754455Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641951390738143:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.754604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017ed/r3tmp/tmplQWckI/pdisk_1.dat 2025-04-10T11:16:58.101015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23553, node 1 2025-04-10T11:16:58.139150Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.139308Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.140184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.140282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:58.151908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.254942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.254975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.254980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.255119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.632813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-04-10T11:16:57.754488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641949772707337:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.754600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001805/r3tmp/tmpREVN9F/pdisk_1.dat 2025-04-10T11:16:58.093365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:58.118447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.119034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19405, node 1 2025-04-10T11:16:58.131929Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.131957Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.141328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.254630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.254651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.254656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.254771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26659 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.698401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> KqpSnapshotRead::TestReadOnly+withSink >> KqpTx::CommitRoTx >> KqpTx::CommitRequired >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::RollbackInvalidated >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110]
!Reboot 72057594037927937 (actor [3:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061]
!Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110]
!Reboot 72057594037927937 (actor [4:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed!
new actor is[4:82:2111]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061]
!Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113]
!Reboot 72057594037927937 (actor [5:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed!
new actor is[5:85:2114]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061]
!Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113]
!Reboot 72057594037927937 (actor [6:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed!
new actor is[6:85:2114]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061]
!Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113]
!Reboot 72057594037927937 (actor [7:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed!
new actor is[7:86:2114]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061]
!Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:87:2116]
Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:87:2116]
!Reboot 72057594037927937 (actor [8:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed!
new actor is[8:89:2117]
Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061]
!Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:87:2116]
Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:87:2116]
!Reboot 72057594037927937 (actor [9:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed!
new actor is[9:89:2117]
Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061]
!Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116]
Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116]
!Reboot 72057594037927937 (actor [10:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed!
new actor is[10:90:2117]
Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061]
!Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119]
Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119]
!Reboot 72057594037927937 (actor [11:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed!
new actor is[11:93:2120]
Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061]
!Reboot 72057594037927937 (acto ... 6:2097]) rebooted!
!Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed!
new actor is[44:101:2126]
Leader for TabletID 72057594037927937 is [44:101:2126] sender: [44:155:2057] recipient: [44:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061]
!Reboot 72057594037927937 (actor [45:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:97:2057] recipient: [45:36:2083]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:100:2057] recipient: [45:14:2061]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:101:2057] recipient: [45:99:2125]
Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:103:2057] recipient: [45:99:2125]
!Reboot 72057594037927937 (actor [45:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [45:56:2097]) tablet resolver refreshed!
new actor is[45:102:2126]
Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:120:2057] recipient: [45:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095]
Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:57:2057] recipient: [46:51:2095]
Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:74:2057] recipient: [46:14:2061]
!Reboot 72057594037927937 (actor [46:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:99:2057] recipient: [46:36:2083]
Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:102:2057] recipient: [46:14:2061]
Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:103:2057] recipient: [46:101:2127]
Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:105:2057] recipient: [46:101:2127]
!Reboot 72057594037927937 (actor [46:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [46:56:2097]) tablet resolver refreshed!
new actor is[46:104:2128]
Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:158:2057] recipient: [46:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095]
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:57:2057] recipient: [47:51:2095]
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:74:2057] recipient: [47:14:2061]
!Reboot 72057594037927937 (actor [47:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:99:2057] recipient: [47:36:2083]
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:102:2057] recipient: [47:14:2061]
Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:103:2057] recipient: [47:101:2127]
Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:105:2057] recipient: [47:101:2127]
!Reboot 72057594037927937 (actor [47:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [47:56:2097]) tablet resolver refreshed!
new actor is[47:104:2128]
Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:158:2057] recipient: [47:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095]
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:57:2057] recipient: [48:51:2095]
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:74:2057] recipient: [48:14:2061]
!Reboot 72057594037927937 (actor [48:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:100:2057] recipient: [48:36:2083]
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:102:2057] recipient: [48:14:2061]
Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:104:2057] recipient: [48:103:2127]
Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:106:2057] recipient: [48:103:2127]
!Reboot 72057594037927937 (actor [48:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [48:56:2097]) tablet resolver refreshed!
new actor is[48:105:2128]
Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:123:2057] recipient: [48:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095]
Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:57:2057] recipient: [49:50:2095]
Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:74:2057] recipient: [49:14:2061]
!Reboot 72057594037927937 (actor [49:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:102:2057] recipient: [49:36:2083]
Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:105:2057] recipient: [49:14:2061]
Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:106:2057] recipient: [49:104:2129]
Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:108:2057] recipient: [49:104:2129]
!Reboot 72057594037927937 (actor [49:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [49:56:2097]) tablet resolver refreshed!
new actor is[49:107:2130]
Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:161:2057] recipient: [49:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:51:2095]
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:57:2057] recipient: [50:51:2095]
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:74:2057] recipient: [50:14:2061]
!Reboot 72057594037927937 (actor [50:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:102:2057] recipient: [50:36:2083]
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:105:2057] recipient: [50:14:2061]
Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:106:2057] recipient: [50:104:2129]
Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:108:2057] recipient: [50:104:2129]
!Reboot 72057594037927937 (actor [50:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [50:56:2097]) tablet resolver refreshed!
new actor is[50:107:2130]
Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:161:2057] recipient: [50:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095]
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:57:2057] recipient: [51:51:2095]
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:74:2057] recipient: [51:14:2061]
!Reboot 72057594037927937 (actor [51:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:103:2057] recipient: [51:36:2083]
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:106:2057] recipient: [51:14:2061]
Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:107:2057] recipient: [51:105:2129]
Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:109:2057] recipient: [51:105:2129]
!Reboot 72057594037927937 (actor [51:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [51:56:2097]) tablet resolver refreshed!
new actor is[51:108:2130]
Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:126:2057] recipient: [51:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:50:2095]
Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:57:2057] recipient: [52:50:2095]
Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:74:2057] recipient: [52:14:2061]
!Reboot 72057594037927937 (actor [52:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:105:2057] recipient: [52:36:2083]
Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:108:2057] recipient: [52:14:2061]
Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:109:2057] recipient: [52:107:2131]
Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:111:2057] recipient: [52:107:2131]
!Reboot 72057594037927937 (actor [52:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [52:56:2097]) tablet resolver refreshed!
new actor is[52:110:2132]
Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:164:2057] recipient: [52:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:50:2095]
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:57:2057] recipient: [53:50:2095]
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:74:2057] recipient: [53:14:2061]
!Reboot 72057594037927937 (actor [53:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:105:2057] recipient: [53:36:2083]
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:108:2057] recipient: [53:14:2061]
Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:109:2057] recipient: [53:107:2131]
Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:111:2057] recipient: [53:107:2131]
!Reboot 72057594037927937 (actor [53:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [53:56:2097]) tablet resolver refreshed!
new actor is[53:110:2132]
Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:164:2057] recipient: [53:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095]
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:57:2057] recipient: [54:51:2095]
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:74:2057] recipient: [54:14:2061]
!Reboot 72057594037927937 (actor [54:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:106:2057] recipient: [54:36:2083]
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:109:2057] recipient: [54:14:2061]
Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:110:2057] recipient: [54:108:2131]
Leader for TabletID 72057594037927937 is [54:111:2132] sender: [54:112:2057] recipient: [54:108:2131]
!Reboot 72057594037927937 (actor [54:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [54:56:2097]) tablet resolver refreshed!
new actor is[54:111:2132]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095]
Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:57:2057] recipient: [55:51:2095]
Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:74:2057] recipient: [55:14:2061]
>> KqpSnapshotRead::TestReadOnly+withSink [GOOD]
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink
>> KqpSinkLocks::DifferentKeyUpdate
>> KqpTx::CommitRoTx [GOOD]
>> KqpTx::CommitRoTx_TLI
>> KqpTx::CommitRequired [GOOD]
>> KqpTx::CommitPrepared
>> KqpTx::RollbackInvalidated [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD]
Test command err:
Trying to start YDB, gRPC: 29685, MsgBus: 13553
2025-04-10T11:17:00.628420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641962188290791:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:00.628518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fb2/r3tmp/tmpsO3pWn/pdisk_1.dat
2025-04-10T11:17:00.908255Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29685, node 1
2025-04-10T11:17:00.975907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:00.975939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:00.975951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:00.976051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:17:00.984050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:00.984156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:00.985954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:13553
TClient is connected to server localhost:13553
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:01.387534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.408294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.489413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.616652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.683886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:02.788099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641970778227169:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.788173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:03.006219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.028261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.051066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.071544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.093472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.123364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:17:03.197109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641975073194978:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:03.197155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:03.197209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641975073194983:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:03.200505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:17:03.208805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641975073194985:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:17:03.271130Z node 1 :TX_PROXY ERROR: Actor# [1:7491641975073195039:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:17:03.946015Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThkZDE0OGUtYmYyNzkyYjktZDQ2NWFiYjEtNTE4ODg0MDc=, ActorId: [1:7491641975073195322:2495], ActorState: ReadyState, TraceId: 01jrfpnvt269jzrhwkjcf3b5q1, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 1868, MsgBus: 19094
2025-04-10T11:17:04.783543Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641981927805652:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:04.783713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fb2/r3tmp/tmplDRQu0/pdisk_1.dat
2025-04-10T11:17:04.870701Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1868, node 2
2025-04-10T11:17:04.913100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:04.913185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:04.914694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:17:04.917099Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:04.917121Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:04.917129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:04.917255Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19094
TClient is connected to server localhost:19094
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:05.203210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:05.221077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:05.266231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:05.408388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:05.481431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:06.978853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641990517742019:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:06.978923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:07.011378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.033169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.054608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.078296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.101241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.166483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:17:07.213972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641994812709828:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:07.214045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641994812709833:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:07.214056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:07.216752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:17:07.224278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641994812709835:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:17:07.310419Z node 2 :TX_PROXY ERROR: Actor# [2:7491641994812709889:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:17:07.901299Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491641994812710190:2504], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T11:17:07.901514Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjgxNzhiMDYtZjExMWYwMzgtMzE3OTdlNDEtODc5MDg2YTg=, ActorId: [2:7491641994812710166:2494], ActorState: ExecuteState, TraceId: 01jrfpnzn510den1nvc1etcqzr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jrfpnzmz0pmhp63tbknekskh
2025-04-10T11:17:07.907023Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjgxNzhiMDYtZjExMWYwMzgtMzE3OTdlNDEtODc5MDg2YTg=, ActorId: [2:7491641994812710166:2494], ActorState: ReadyState, TraceId: 01jrfpnzp2a7nvczarjqpj7eva, Create QueryResponse for error on request, msg:
>> YdbSdkSessionsPool::CustomPlan [GOOD]
>> YdbSdkSessionsPool::FailTest
>> YdbSdkSessionsPool::StressTestSync10 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD]
Test command err:
2025-04-10T11:16:57.761146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641950357590357:2073];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:16:57.761286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001803/r3tmp/tmpWARXqx/pdisk_1.dat
2025-04-10T11:16:58.073833Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17286, node 1
2025-04-10T11:16:58.151891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:16:58.151983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:16:58.169534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:16:58.254782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:16:58.254811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:16:58.254818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:16:58.254931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12311
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:16:58.632454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:02.761087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641950357590357:2073];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:02.761166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSnapshotIsolation::TConflictWriteOltpNoSink
>> KqpSinkLocks::InvalidateOnCommit [GOOD]
>> KqpSinkLocks::InvalidateOlapOnCommit
>> KqpSinkLocks::TInvalidate [GOOD]
>> KqpSinkLocks::OlapUncommittedRead
>> KqpLocksTricky::TestNoLocksIssue-withSink
>> TOlap::StoreStats [GOOD]
>> TOlap::StoreStatsQuota
>> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD]
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD]
>> KqpTx::CommitRoTx_TLI [GOOD]
>> KqpTx::CommitPrepared [GOOD]
>> YdbSdkSessionsPool::FailTest [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 2018, MsgBus: 2272
2025-04-10T11:17:03.237629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641978728511023:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:03.237738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fa5/r3tmp/tmpDhBe3z/pdisk_1.dat
2025-04-10T11:17:03.492827Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 2018, node 1
2025-04-10T11:17:03.532352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:03.532373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:03.532382Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:03.532493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:17:03.578642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:03.578738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:03.580641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:2272
TClient is connected to server localhost:2272
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:03.960931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:03.984383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:04.092244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:04.208232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:04.262284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:05.468287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641987318447385:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:05.468390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:05.722365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.746237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.769257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.791879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.816477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.841427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:17:05.886864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641987318447892:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:05.886915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:05.886939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641987318447897:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:05.889930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:17:05.896567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641987318447899:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:17:05.993912Z node 1 :TX_PROXY ERROR: Actor# [1:7491641987318447954:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 31485, MsgBus: 19232
2025-04-10T11:17:07.628224Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641993702461339:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:07.628344Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fa5/r3tmp/tmpum3u0I/pdisk_1.dat
2025-04-10T11:17:07.702613Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 31485, node 2
2025-04-10T11:17:07.747039Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:07.747068Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:07.747076Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:07.747189Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:17:07.760697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:07.760782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:07.762347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:19232
TClient is connected to server localhost:19232
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:08.111332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:08.121854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:08.167788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:08.302541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:08.378763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:09.767518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642002292397705:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:09.767626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:09.781563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:17:09.806473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:17:09.830931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:17:09.855634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:17:09.881802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:17:09.948562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:17:10.023664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642006587365518:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:10.023759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:10.023830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642006587365523:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:10.027121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:17:10.035599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642006587365525:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:17:10.132053Z node 2 :TX_PROXY ERROR: Actor# [2:7491642006587365579:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:17:11.488743Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTQ4MzEzODgtY2QwOTMyYjgtOGRjM2M2ZWUtMzgxYjRjYTE=, ActorId: [2:7491642006587365830:2487], ActorState: ExecuteState, TraceId: 01jrfpp33ydmh3sqk90vg411f1, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 61062, MsgBus: 17397
2025-04-10T11:17:00.120216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641966077392555:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:00.120315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001010/r3tmp/tmpTycsNy/pdisk_1.dat
2025-04-10T11:17:00.421580Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61062, node 1
2025-04-10T11:17:00.482523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:00.482593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:00.484446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:17:00.550395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:00.550420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:00.550425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:00.550526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17397
TClient is connected to server localhost:17397
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:01.124556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.152722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.276155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.386276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:01.437258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:17:02.287363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641974667328924:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.287463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.694275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.717424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.740366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.760827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.788301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.816715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:17:02.855540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641974667329442:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.855611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.855629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641974667329447:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:17:02.858284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:17:02.866466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641974667329449:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:17:02.926176Z node 1 :TX_PROXY ERROR: Actor# [1:7491641974667329502:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:17:05.120365Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641966077392555:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:05.120446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 20216, MsgBus: 20093
2025-04-10T11:17:05.735442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641983721329911:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:05.735535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001010/r3tmp/tmpNl00xx/pdisk_1.dat
2025-04-10T11:17:05.815044Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20216, node 2
2025-04-10T11:17:05.859570Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:05.859593Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:05.859600Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:05.859700Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:17:05.870420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:05.870504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:05.871964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:20093
TClient is connected to server localhost:20093
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:06.258459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.273388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.341384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.447764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.519626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:07.810973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641992311266288:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.811092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.824051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.847238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.868694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.890048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.912995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.936798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.972024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641992311266787:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.972101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.972120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491641992311266792:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.975174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:07.983030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491641992311266794:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:08.061040Z node 2 :TX_PROXY ERROR: Actor# [2:7491641996606234144:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:08.623521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:17:08.659117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:17:08.689593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.752726Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491641983721329911:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:10.752981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 13041, MsgBus: 26573 2025-04-10T11:17:04.332743Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641979455103428:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:04.332996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f9a/r3tmp/tmpyt62f5/pdisk_1.dat 2025-04-10T11:17:04.589142Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13041, node 1 2025-04-10T11:17:04.643021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:04.643044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:04.643054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:04.643158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:04.693505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:04.693682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:04.695770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26573 TClient is connected to server localhost:26573 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:05.087232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.107875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.242757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.358670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.411560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.630739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641988045039799:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:06.630850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:06.856244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.879645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.900901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.922492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.946004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.970670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.024190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641992340007602:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.024257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.024284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641992340007607:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.026951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:07.034000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641992340007609:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:07.098859Z node 1 :TX_PROXY ERROR: Actor# [1:7491641992340007663:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14330, MsgBus: 13457 2025-04-10T11:17:08.309822Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641998144912624:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:08.309905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f9a/r3tmp/tmpElqEOm/pdisk_1.dat 2025-04-10T11:17:08.398301Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14330, node 2 2025-04-10T11:17:08.445271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:08.445404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:08.446782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:08.455695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:08.455718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:08.455725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:08.455844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13457 TClient is connected to server localhost:13457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:08.863461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:08.878991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:08.920550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:09.027743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:09.095797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:10.641903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642006734848988:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.641972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.665790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.689298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.714889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.743308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.772138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.839988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.878037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642006734849503:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.878123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.878129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642006734849508:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.881034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:10.887272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642006734849510:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:10.973966Z node 2 :TX_PROXY ERROR: Actor# [2:7491642006734849563:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 16931, MsgBus: 1919 2025-04-10T11:17:04.324176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641981522831055:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:04.324223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fa2/r3tmp/tmpWYsu4z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16931, node 1 2025-04-10T11:17:04.638466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:04.644146Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:17:04.644955Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:17:04.667433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:04.667459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:04.667478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:04.667607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:04.704561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:04.704710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:04.706452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1919 TClient is connected to server localhost:1919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:05.099321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:05.121944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.247328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.367026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:05.441632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:06.663886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641990112767430:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:06.663989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:06.888522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.911847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.934097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.957081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:06.981674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.046445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:07.076363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641994407735239:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.076451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.076459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641994407735244:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:07.078825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:07.085503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641994407735246:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:07.181840Z node 1 :TX_PROXY ERROR: Actor# [1:7491641994407735300:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11599, MsgBus: 16113 2025-04-10T11:17:08.353701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491641998930157082:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:08.353788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fa2/r3tmp/tmpC9tFGm/pdisk_1.dat 2025-04-10T11:17:08.432373Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11599, node 2 2025-04-10T11:17:08.477905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:08.477980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:08.479546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:08.486167Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:08.486191Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:08.486198Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:08.486307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16113 TClient is connected to server localhost:16113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:08.813023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:08.820141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:08.890759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:08.994273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:09.061505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:10.504303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642007520093451:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.504368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.553187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.577098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.599596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.624157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.650798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.715640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.749792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642007520093964:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.749851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642007520093969:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.749884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.752880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:10.760052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642007520093971:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:10.844827Z node 2 :TX_PROXY ERROR: Actor# [2:7491642007520094025:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-04-10T11:16:57.758467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641949839347025:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.758594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001815/r3tmp/tmpQ8pfJb/pdisk_1.dat 2025-04-10T11:16:58.100218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:58.121763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.121883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24645, node 1 2025-04-10T11:16:58.139730Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.139752Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.155329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.254686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.254730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.254740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.254854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.638449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:02.758752Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641949839347025:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:02.758855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:09.833500Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491642002642275220:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:09.833561Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001815/r3tmp/tmpuirxfW/pdisk_1.dat 2025-04-10T11:17:09.946848Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:09.978652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:09.978727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:09.980692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17681, node 4 2025-04-10T11:17:10.027315Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:10.027349Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:10.027356Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:10.027511Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:10.220350Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> KqpSinkTx::OlapLocksAbortOnCommit >> KqpTx::TooManyTx >> KqpLocks::DifferentKeyUpdate >> KqpSinkTx::OlapSnapshotROInteractive1 >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpTx::DeferredEffects >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpTx::DeferredEffects [GOOD] >> KqpTx::CommitStats >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-04-10T11:16:57.761825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641953140759602:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.761897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017e6/r3tmp/tmpqLctby/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61356, node 1 2025-04-10T11:16:58.157306Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.157346Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.163580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:58.197854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.197971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:58.202941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.254933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.254957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.254967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.255099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11415 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.651141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.761826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641953140759602:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:02.761911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:13.160033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:13.160078Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpTx::CommitStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 16149, MsgBus: 10785 2025-04-10T11:17:00.120313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641965292726330:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:00.120689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fd3/r3tmp/tmpmMIu4Q/pdisk_1.dat 2025-04-10T11:17:00.408813Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16149, node 1 2025-04-10T11:17:00.475137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:00.475696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:00.478412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:00.550433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:00.550461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:00.550477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:00.550603Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10785 TClient is connected to server localhost:10785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:01.091430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.352025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641973882661581:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.352280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641973882661557:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.352622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.356009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:02.363709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641973882661595:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:02.449618Z node 1 :TX_PROXY ERROR: Actor# [1:7491641973882661646:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:02.755611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:02.850811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:03.572863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:04.753562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ2MmYwYzEtZTg3ODUwYTgtZTMyZjE4YTgtOWE5N2Q4ZWM=, ActorId: [1:7491641982472604756:2967], ActorState: ExecuteState, TraceId: 01jrfpnwd83x28966ve2g90dpp, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:17:05.120345Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641965292726330:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:05.120425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:15.393631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:15.393676Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC
17. ??:0: ?? @ 0x7F5EA7184D8F
18. ??:0: ?? @ 0x7F5EA7184E3F
19. ??:0: ?? @ 0x1656B028
Trying to start YDB, gRPC: 22202, MsgBus: 13642
2025-04-10T11:17:16.331086Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642031916133077:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:16.331191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fd3/r3tmp/tmpkpr7IB/pdisk_1.dat
2025-04-10T11:17:16.440097Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:17:16.468127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:16.468208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:16.469961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22202, node 2
2025-04-10T11:17:16.502957Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:16.503009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:16.503027Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:16.503149Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13642
TClient is connected to server localhost:13642
WaitRootIsUp 'Root'...
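Note on the assertion failure above: the check at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25 expected the transaction to commit (SUCCESS), but KQP rejected it with PRECONDITION_FAILED because SnapshotRW isolation is only supported for column-oriented (OLAP) tables and the test ran against a row-oriented one. A minimal sketch of what such a check typically looks like with the library/cpp/testing/unittest macros; ExecuteWithSnapshotRW and the surrounding names are illustrative assumptions, not the test's verbatim code:

    // Sketch, not the actual test body: run a SnapshotRW transaction and
    // assert it succeeds. ExecuteWithSnapshotRW is a hypothetical helper.
    void DoExecute() {
        auto result = ExecuteWithSnapshotRW(Session, Query);
        // On a row (OLTP) table this fails: PRECONDITION_FAILED != SUCCESS,
        // issue "SnapshotRW can only be used with olap tables."
        UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), EStatus::SUCCESS,
                                   result.GetIssues().ToString());
    }

The "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" fragment in the log is the unittest framework's character-level diff of the two status names.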
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:16.854335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.871903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.933800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:17.096289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:17.152142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:18.696452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642040506069440:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.696546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.746697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.771870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.799741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.825661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.854701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.884543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.917239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642040506069947:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.917319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642040506069952:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.917331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.919961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:18.926680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642040506069954:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:18.998887Z node 2 :TX_PROXY ERROR: Actor# [2:7491642040506070008:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 29465, MsgBus: 3156 2025-04-10T11:17:14.444202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642025425673344:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:14.444278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f86/r3tmp/tmpOxrUz0/pdisk_1.dat 2025-04-10T11:17:14.676088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29465, node 1 2025-04-10T11:17:14.728530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:14.728552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:14.728560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:14.728669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:14.791877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:14.791970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:14.793642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3156 TClient is connected to server localhost:3156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:15.162744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:15.185288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.311631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.464239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.531229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.861336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642034015609720:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:16.861431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.128813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.154219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.177495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.200184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.226208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.272205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.350102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642038310577529:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.350186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.350288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642038310577534:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.353849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:17.362474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642038310577536:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:17.420702Z node 1 :TX_PROXY ERROR: Actor# [1:7491642038310577589:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:18.141437Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzk4MTZhOTYtNjU0NjQ4NjktNjA0OGVjYzMtOTM5OTc3Njk=, ActorId: [1:7491642042605545173:2495], ActorState: ReadyState, TraceId: 01jrfpp9nf7v39b2p3qsb3q2p8, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:855: Too many transactions, current active: 2 MaxTxPerSession: 2 Trying to start YDB, gRPC: 11763, MsgBus: 9183 2025-04-10T11:17:18.732936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642041096196205:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:18.733014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f86/r3tmp/tmpUSH1tL/pdisk_1.dat 2025-04-10T11:17:18.821242Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11763, node 2 2025-04-10T11:17:18.864555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:18.864633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:18.865848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:18.875792Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:18.875815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:18.875823Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:18.875922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9183 TClient is connected to server localhost:9183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
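Note on the "Too many transactions, current active: 2 MaxTxPerSession: 2" error earlier in this report: a KQP session caps the number of concurrently open interactive transactions (MaxTxPerSession), and beginning another one without committing or rolling back an active transaction is rejected at kqp_session_actor.cpp:855. A minimal sketch of the pattern that triggers it; the BeginTransaction calls are assumed SDK-style names, not the test's exact code:

    // Sketch: exceeding MaxTxPerSession on a single session (names assumed).
    auto tx1 = session.BeginTransaction(txSettings).GetValueSync(); // active: 1
    auto tx2 = session.BeginTransaction(txSettings).GetValueSync(); // active: 2 (the cap)
    auto tx3 = session.BeginTransaction(txSettings).GetValueSync(); // rejected:
    // "Too many transactions, current active: 2 MaxTxPerSession: 2"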
2025-04-10T11:17:19.291799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.309249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.380513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.521770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.585566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:21.212121Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642053981099866:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.212213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.246069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.274168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.298885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.321409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.344021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.371517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:21.410718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642053981100375:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.410796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642053981100380:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.410814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.413881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:21.422918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642053981100382:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:21.510880Z node 2 :TX_PROXY ERROR: Actor# [2:7491642053981100437:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 1485, MsgBus: 64049 2025-04-10T11:17:15.211704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642029419432468:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:15.211815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f71/r3tmp/tmps9I1KJ/pdisk_1.dat 2025-04-10T11:17:15.462615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1485, node 1 2025-04-10T11:17:15.535300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:15.535320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:15.535331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:15.535497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:15.546351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:15.546504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:15.548200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64049 TClient is connected to server localhost:64049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:15.947308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:15.965626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.088411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.206793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.264143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:17.650825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642038009368842:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.650970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.952008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.979824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.006008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.032816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.078788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.106215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.143496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642042304336650:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.143575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.143577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642042304336656:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:18.146658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:18.154395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642042304336658:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:18.224004Z node 1 :TX_PROXY ERROR: Actor# [1:7491642042304336713:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29111, MsgBus: 16538 2025-04-10T11:17:19.902459Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642046929168908:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:19.902522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f71/r3tmp/tmpHTqWVr/pdisk_1.dat 2025-04-10T11:17:19.997935Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29111, node 2 2025-04-10T11:17:20.029423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:20.029508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:20.030998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:20.041923Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:20.041943Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:20.041952Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:20.042097Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16538 TClient is connected to server localhost:16538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:20.424243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:20.433289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:20.477844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:20.619431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:20.694194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:22.405278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642059814072558:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:22.405393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:22.432204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.462100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.488884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.514333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.540712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.573247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:22.612656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642059814073066:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:22.612766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:22.612857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642059814073071:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:22.615842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:22.624497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642059814073073:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:17:22.680587Z node 2 :TX_PROXY ERROR: Actor# [2:7491642059814073127:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink
>> KqpSinkLocks::OlapUncommittedRead [GOOD]
>> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD]
>> TPDiskRaces::OwnerRecreationRaces
>> KqpLocks::TwoPhaseTx
>> KqpSinkLocks::EmptyRangeOlap
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD]
Test command err:
Trying to start YDB, gRPC: 64166, MsgBus: 29996
2025-04-10T11:17:00.120405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641964078784479:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:00.120553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fc1/r3tmp/tmpZ3MkHA/pdisk_1.dat
2025-04-10T11:17:00.401000Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 64166, node 1
2025-04-10T11:17:00.506977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:00.507071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:00.508727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:17:00.550409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:00.550438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:00.550448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:00.550596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29996
TClient is connected to server localhost:29996
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:17:01.081251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.196402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641972668719729:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.196411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641972668719737:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.196539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.202557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:02.213181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641972668719743:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:02.291030Z node 1 :TX_PROXY ERROR: Actor# [1:7491641972668719794:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:02.700463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:02.805837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:03.485167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:04.884449Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-10T11:17:04.884606Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:04.884732Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:04.884982Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641981258663292:2966], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7491641981258662736:2966]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7491641981258663292:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-04-10T11:17:04.885618Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641981258663278:2966], SessionActorId: [1:7491641981258662736:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491641981258662736:2966]. isRollback=0
2025-04-10T11:17:04.885817Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdlMDQzZTQtN2IwNmFjNDYtZGQxYTZjYzQtMjg1ZDkxOWU=, ActorId: [1:7491641981258662736:2966], ActorState: ExecuteState, TraceId: 01jrfpnwn6074ptz86t5739ezb, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491641981258663279:2966] from: [1:7491641981258663278:2966]
2025-04-10T11:17:04.885897Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641981258663279:2966] TxId: 281474976715665. Ctx: { TraceId: 01jrfpnwn6074ptz86t5739ezb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdlMDQzZTQtN2IwNmFjNDYtZGQxYTZjYzQtMjg1ZDkxOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } }
2025-04-10T11:17:04.886084Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdlMDQzZTQtN2IwNmFjNDYtZGQxYTZjYzQtMjg1ZDkxOWU=, ActorId: [1:7491641981258662736:2966], ActorState: ExecuteState, TraceId: 01jrfpnwn6074ptz86t5739ezb, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 2025-04-10T11:17:05.120681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641964078784479:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:05.120754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 10929, MsgBus: 7416 2025-04-10T11:17:10.822811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642008172395989:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:10.822925Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fc1/r3tmp/tmptpkftq/pdisk_1.dat 2025-04-10T11:17:10.905214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10929, node 2 2025-04-10T11:17:10.955434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:10.955524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:10.957011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:10.958286Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:10.958302Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:10.958306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:10.958380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7416 TClient is connected to server localhost:7416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:11.274336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
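Note on the lock-invalidation sequence above (status ABORTED, issue code 2001): this is the expected optimistic-locking outcome when a concurrent commit touches rows the transaction has read, and the usual client response is to rerun the whole transaction. A minimal self-contained sketch of that retry loop; TxResult and the callback shape are assumptions for illustration, not YDB SDK API:

    #include <chrono>
    #include <functional>
    #include <stdexcept>
    #include <thread>

    // Outcome of one attempt at the transaction body (hypothetical type).
    struct TxResult { bool Success; bool LocksInvalidated; };

    // Reruns txBody (begin, read, write, commit) while the server aborts it
    // because optimistic locks were invalidated by a concurrent commit.
    void RunWithRetries(const std::function<TxResult()>& txBody, int maxRetries = 5) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            TxResult r = txBody();
            if (r.Success) {
                return;                 // committed cleanly
            }
            if (!r.LocksInvalidated) {
                throw std::runtime_error("non-retryable transaction failure");
            }
            // Back off briefly, then replay the transaction from the start.
            std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
        }
        throw std::runtime_error("gave up after repeated lock invalidations");
    }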
2025-04-10T11:17:12.850739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642016762331228:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 202 ... odicWakeup;tablet_id=72075186224037973;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.275882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[2:7491642021057300183:2499];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037973;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7491642021057300171:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7491642021057300171:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037972;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7491642021057300480:2568];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7491642021057300472:2565];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276585Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[2:7491642021057300480:2568];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7491642021057300472:2565];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7491642021057300111:2487];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642021057300379:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.276859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7491642021057300111:2487];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7491642021057300235:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642021057300379:2540];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277107Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037957;self_id=[2:7491642021057300235:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7491642021057300496:2572];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7491642021057299879:2472];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7491642021057300496:2572];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7491642021057299879:2472];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7491642021057300229:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7491642021057300229:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642021057300100:2484];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642021057300476:2566];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277864Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642021057300100:2484];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642021057300476:2566];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.277951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642021057300313:2530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642021057300313:2530];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278174Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[2:7491642021057300442:2554];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7491642021057300442:2554];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[2:7491642021057300187:2501];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037977;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[2:7491642021057300187:2501];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037977;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7491642021057300218:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037940;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:19.278895Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7491642021057300218:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-10T11:17:19.419441Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7491642021057298784:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419575Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7491642021057298807:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419605Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7491642021057298805:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419647Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7491642021057298798:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419683Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7491642021057298800:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419718Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037894;self_id=[2:7491642021057298901:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419743Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7491642021057298803:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419780Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7491642021057298786:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419806Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7491642021057298816:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:19.419876Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7491642021057298819:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 15027, MsgBus: 14216 2025-04-10T11:17:00.120216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641963184387224:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:00.120312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00101e/r3tmp/tmpHvZtEl/pdisk_1.dat 2025-04-10T11:17:00.404126Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15027, node 1 2025-04-10T11:17:00.481562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:00.481684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:00.483321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:00.550561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:00.550611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:00.550623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:00.550776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14216 TClient is connected to server localhost:14216 WaitRootIsUp 'Root'... 
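The `ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported` warnings above are column shards rejecting `TEvWrite` requests that bundle more than one operation. Whatever produces such batches has to split them into one request per operation. The sketch below is a minimal illustration of that splitting, assuming toy `Operation`/`WriteRequest`/`SendToShard` stand-ins; none of these are actual YDB types or APIs.

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-ins for a shard write request; not actual YDB types.
struct Operation {
    std::string Table;
    std::string Payload;
};

struct WriteRequest {
    std::vector<Operation> Operations;
};

// Pretend transport: a real client would send this to the tablet.
bool SendToShard(uint64_t tabletId, const WriteRequest& req) {
    // Mirrors the logged restriction: a column shard accepts exactly one
    // operation per write request and answers STATUS_BAD_REQUEST otherwise.
    if (req.Operations.size() != 1) {
        std::cerr << "tablet " << tabletId
                  << ": STATUS_BAD_REQUEST, only single operation is supported\n";
        return false;
    }
    return true;
}

// Split a multi-operation batch into one request per operation.
bool WriteBatch(uint64_t tabletId, const std::vector<Operation>& batch) {
    for (const Operation& op : batch) {
        WriteRequest req;
        req.Operations.push_back(op);
        if (!SendToShard(tabletId, req)) {
            return false; // surface the first failure to the caller
        }
    }
    return true;
}

int main() {
    std::vector<Operation> batch = {
        {"/Root/Test", "row-1"},
        {"/Root/Test", "row-2"},
    };
    std::cout << (WriteBatch(72075186224037888ull, batch) ? "ok" : "failed") << "\n";
}
```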
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:01.106242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.211528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641971774322485:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.211671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641971774322477:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.211873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.214906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:02.223044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641971774322491:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:02.293151Z node 1 :TX_PROXY ERROR: Actor# [1:7491641971774322542:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:02.700863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:02.805273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:03.531453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:04.830366Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-10T11:17:04.830555Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:04.830669Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:04.830839Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641980364265936:2968], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7491641980364265639:2968]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7491641980364265936:2968].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:17:04.831593Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491641980364265925:2968], SessionActorId: [1:7491641980364265639:2968], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491641980364265639:2968]. isRollback=0 2025-04-10T11:17:04.831838Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzAwNDk1ZjYtNTZmZWM3MDItNWI5NjY0ODYtYWY0ODIwNjM=, ActorId: [1:7491641980364265639:2968], ActorState: ExecuteState, TraceId: 01jrfpnwnf74e1fd8aa1g8azhr, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491641980364265926:2968] from: [1:7491641980364265925:2968] 2025-04-10T11:17:04.831920Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491641980364265926:2968] TxId: 281474976710665. Ctx: { TraceId: 01jrfpnwnf74e1fd8aa1g8azhr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAwNDk1ZjYtNTZmZWM3MDItNWI5NjY0ODYtYWY0ODIwNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:17:04.832144Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzAwNDk1ZjYtNTZmZWM3MDItNWI5NjY0ODYtYWY0ODIwNjM=, ActorId: [1:7491641980364265639:2968], ActorState: ExecuteState, TraceId: 01jrfpnwnf74e1fd8aa1g8azhr, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 2025-04-10T11:17:05.120394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641963184387224:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:05.120520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15434, MsgBus: 8590 2025-04-10T11:17:10.801141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642008162511445:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:10.801265Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00101e/r3tmp/tmpalFEPm/pdisk_1.dat 2025-04-10T11:17:10.891399Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15434, node 2 2025-04-10T11:17:10.931729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:10.931843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:10.933458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:10.951241Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:10.951270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:10.951280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:10.951398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8590 TClient is connected to server localhost:8590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:11.282645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
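The ABORTED chain just above (`Transaction locks invalidated`, issue code 2001) is optimistic locking surfacing at commit: reads take locks, a concurrent writer breaks them, and the commit is rejected rather than silently losing isolation. The usual client-side answer is to re-run the whole transaction body. A minimal sketch of that retry shape, using a toy `TxStatus`/`RunTransactionOnce` model rather than any real YDB SDK call:

```cpp
#include <cstdlib>
#include <iostream>

// Illustrative status; "LocksInvalidated" mirrors issue code 2001 in the
// log, but this is a toy model, not the YDB SDK.
enum class TxStatus { Success, LocksInvalidated };

// Pretend transaction body: reads under a lock, then a commit that can find
// the lock broken by a concurrent writer. Conflicts only on the first try.
TxStatus RunTransactionOnce(int attempt) {
    return attempt == 0 ? TxStatus::LocksInvalidated : TxStatus::Success;
}

bool RunWithRetries(int maxAttempts) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (RunTransactionOnce(attempt)) {
            case TxStatus::Success:
                return true;
            case TxStatus::LocksInvalidated:
                // The whole body must be re-executed: previous reads are
                // stale once their locks are broken.
                std::cerr << "attempt " << attempt
                          << ": locks invalidated (code 2001), retrying\n";
                break;
        }
    }
    return false;
}

int main() {
    if (!RunWithRetries(/*maxAttempts=*/3)) {
        std::cerr << "transaction failed after retries\n";
        return EXIT_FAILURE;
    }
    std::cout << "committed\n";
}
```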
2025-04-10T11:17:13.211663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642021047413994:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 202 ... d; 2025-04-10T11:17:20.675098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7491642025342383311:2585];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037904;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7491642025342383227:2560];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037925;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7491642025342383311:2585];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037904;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7491642025342383227:2560];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037925;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642025342383105:2536];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642025342383105:2536];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7491642025342382873:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037970;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7491642025342382861:2499];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7491642025342382873:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037970;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7491642025342382861:2499];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7491642025342383232:2561];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.675798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7491642025342383232:2561];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.692867Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038006;self_id=[2:7491642033932321375:3111];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.693100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7491642033932321375:3111];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.694341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7491642033932321354:3108];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.694555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7491642033932321354:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.695908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=5e9d70c0-15fd11f0-ba72e35a-4799ae05;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.696327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.696653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7491642033932321388:3118];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.696882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7491642033932321388:3118];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.697045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7491642033932321333:3101];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.697156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7491642033932321333:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.698353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7491642033932321329:3099];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.698550Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7491642033932321329:3099];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.700200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7491642033932321352:3107];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.700377Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038030;self_id=[2:7491642033932321352:3107];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.708846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7491642033932321313:3095];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.709037Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7491642033932321313:3095];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.709295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7491642033932321487:3127];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.709528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7491642033932321487:3127];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.709744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7491642033932321325:3097];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.709868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7491642033932321325:3097];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7491642033932321309:3093];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7491642033932321493:3128];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7491642033932321493:3128];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7491642033932321309:3093];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642033932321582:3182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.710986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642033932321582:3182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.711120Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038085;self_id=[2:7491642033932321503:3133];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.711274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7491642033932321503:3133];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.712215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7491642033932321398:3120];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.712428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7491642033932321398:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.713849Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7491642033932321345:3105];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:20.714025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7491642033932321345:3105];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::LocksAbortOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 28954, MsgBus: 8818 2025-04-10T11:17:14.743254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642022597758948:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:14.743313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f7d/r3tmp/tmpbu0Gv5/pdisk_1.dat 2025-04-10T11:17:15.040350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28954, node 1 2025-04-10T11:17:15.082040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:15.082065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:15.082072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:15.082276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:15.087969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:15.088106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:15.089810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8818 TClient is connected to server localhost:8818 WaitRootIsUp 'Root'... 
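The `WAIT_INDEXATION: 0` markers interleaved with `skip_indexation;reason=disabled` warnings are the test polling an outstanding-indexation counter until it drains, while each shard's periodic wakeup reports that indexation is disabled and skipped. Below is a generic deadline-bounded polling loop of the kind such a wait implies; `PendingIndexations` is a hypothetical probe, not a YDB API:

```cpp
#include <chrono>
#include <iostream>
#include <thread>

// Hypothetical probe for the number of outstanding background tasks.
int PendingIndexations() {
    static int remaining = 3; // simulated backlog that drains over time
    return remaining > 0 ? remaining-- : 0;
}

// Poll until the counter drains or the deadline passes.
bool WaitIndexation(std::chrono::milliseconds timeout,
                    std::chrono::milliseconds pollInterval) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        const int pending = PendingIndexations();
        std::cout << "WAIT_INDEXATION: " << pending << "\n";
        if (pending == 0) {
            return true;
        }
        std::this_thread::sleep_for(pollInterval);
    }
    return false; // deadline exceeded
}

int main() {
    using namespace std::chrono_literals;
    return WaitIndexation(5s, 100ms) ? 0 : 1;
}
```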
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:15.547046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.563502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.668378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.789619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.862188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:17.149131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642035482662604:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.149229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.365521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.389507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.413509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.440919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.467501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.495538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.529433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642035482663112:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.529514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.529556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642035482663117:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.532895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:17.543168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642035482663119:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:17.608940Z node 1 :TX_PROXY ERROR: Actor# [1:7491642035482663173:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62293, MsgBus: 22976 2025-04-10T11:17:19.202309Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642043658756316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:19.202377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f7d/r3tmp/tmpACb0cL/pdisk_1.dat 2025-04-10T11:17:19.320045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:19.334077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:19.334164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:19.335360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62293, node 2 2025-04-10T11:17:19.366316Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:19.366355Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:19.366362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:19.366465Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22976 TClient is connected to server localhost:22976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:19.687274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.703934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
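The sequence above — pool `default` not found, then `Scheduled retry for error: Transaction ... completed, doublechecking`, then `path exist, request accepts it` — is a create-if-missing race: several actors see the default resource pool absent, all try to create it, one wins, and the losers resolve the "path exists" outcome by re-checking instead of failing. A generic sketch of that idempotent ensure-exists pattern; the `Catalog` here is an invented stand-in for the scheme shard:

```cpp
#include <iostream>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

// Toy catalog shared by concurrent creators; stands in for the scheme shard.
class Catalog {
public:
    enum class CreateResult { Created, AlreadyExists };

    CreateResult Create(const std::string& path) {
        std::lock_guard<std::mutex> g(Mu);
        return Paths.insert(path).second ? CreateResult::Created
                                         : CreateResult::AlreadyExists;
    }

    bool Exists(const std::string& path) {
        std::lock_guard<std::mutex> g(Mu);
        return Paths.count(path) != 0;
    }

private:
    std::mutex Mu;
    std::set<std::string> Paths;
};

// Ensure the path exists; losing the creation race is not an error as long
// as a doublecheck confirms someone else created it.
bool EnsureExists(Catalog& catalog, const std::string& path) {
    if (catalog.Create(path) == Catalog::CreateResult::Created) {
        return true;
    }
    return catalog.Exists(path); // doublecheck after "path exist"
}

int main() {
    Catalog catalog;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    std::vector<std::thread> actors;
    for (int i = 0; i < 4; ++i) {
        actors.emplace_back([&] {
            std::cout << (EnsureExists(catalog, pool) ? "ok\n" : "failed\n");
        });
    }
    for (auto& t : actors) t.join();
}
```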
2025-04-10T11:17:19.746051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.873045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:19.948072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:21.581920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642052248692680:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.582010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.823093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:21.830907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642052248693195:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:21.893983Z node 2 :TX_PROXY ERROR: Actor# [2:7491642052248693248:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:22.797887Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-10T11:17:22.819953Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:22.820123Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:17:22.820339Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642056543660939:2494], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [2:7491642056543660827:2494]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7491642056543660939:2494].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:17:22.820981Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642056543660932:2494], SessionActorId: [2:7491642056543660827:2494], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491642056543660827:2494]. isRollback=0 2025-04-10T11:17:22.842701Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU2NmE4MzEtNWI1ZDZjMzYtYjhjODYyZGItNWY3ZjJiYzU=, ActorId: [2:7491642056543660827:2494], ActorState: ExecuteState, TraceId: 01jrfppe5z87610gbpkppwb9wm, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491642056543660933:2494] from: [2:7491642056543660932:2494] 2025-04-10T11:17:22.842833Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642056543660933:2494] TxId: 281474976715674. Ctx: { TraceId: 01jrfppe5z87610gbpkppwb9wm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGU2NmE4MzEtNWI1ZDZjMzYtYjhjODYyZGItNWY3ZjJiYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:17:22.843077Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU2NmE4MzEtNWI1ZDZjMzYtYjhjODYyZGItNWY3ZjJiYzU=, ActorId: [2:7491642056543660827:2494], ActorState: ExecuteState, TraceId: 01jrfppe5z87610gbpkppwb9wm, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 21269, MsgBus: 16201 2025-04-10T11:17:23.463451Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642063476488058:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:23.463517Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f7d/r3tmp/tmpt8nUJa/pdisk_1.dat 2025-04-10T11:17:23.534226Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21269, node 3 2025-04-10T11:17:23.583752Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:23.583785Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:23.583793Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:23.583939Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:23.587560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:23.587641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:23.589217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16201 TClient is connected to server localhost:16201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:23.928639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:23.938309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:23.979107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
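Two failure families recur in these runs: ABORTED with issue code 2001 (broken locks — transient, worth retrying) and STATUS_BAD_REQUEST (a malformed request that will fail identically every time). Any retry helper has to tell them apart. A small illustrative classification, with status names mirroring the log but all machinery invented:

```cpp
#include <iostream>

// Status families observed in the log above, modeled as a plain enum.
enum class Status { Success, Aborted, BadRequest };

// Only transient failures are worth retrying; a bad request will fail the
// same way every time.
bool IsRetryable(Status s) {
    switch (s) {
        case Status::Aborted:    return true;  // e.g. locks invalidated, code 2001
        case Status::BadRequest: return false; // e.g. "only single operation is supported"
        case Status::Success:    return false;
    }
    return false;
}

template <typename Fn>
Status RunWithRetry(Fn&& fn, int maxAttempts) {
    Status s = Status::BadRequest;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        s = fn(attempt);
        if (s == Status::Success || !IsRetryable(s)) {
            return s;
        }
        std::cerr << "attempt " << attempt << " aborted, retrying\n";
    }
    return s;
}

int main() {
    auto flakyOnce = [](int attempt) {
        return attempt == 0 ? Status::Aborted : Status::Success;
    };
    std::cout << (RunWithRetry(flakyOnce, 3) == Status::Success ? "ok" : "failed")
              << "\n";
}
```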
2025-04-10T11:17:24.120626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:24.190961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:25.791162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642072066424414:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:25.791252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:25.832091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.856610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.881383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.912649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.941979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.973508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:26.011115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642076361392217:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.011219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.011282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642076361392222:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.013814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:26.020843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642076361392224:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:26.084004Z node 3 :TX_PROXY ERROR: Actor# [3:7491642076361392278:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:27.075701Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDExYzM5ZGUtNGM4YWU4NjMtZWM1Yzc5YmYtNGE5NGUzNGM=, ActorId: [3:7491642076361392558:2494], ActorState: ExecuteState, TraceId: 01jrfppj90cbqfyfsqm64fj8fw, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> KqpTx::ExplicitTcl >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpSinkLocks::TInvalidateOlap >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail-useSink >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotRO >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::MixEnginesOldNew ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 2723, MsgBus: 6075 2025-04-10T11:17:00.122844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641964346367772:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:00.123229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001005/r3tmp/tmpl0D4yz/pdisk_1.dat 2025-04-10T11:17:00.445508Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2723, node 1 2025-04-10T11:17:00.501672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:00.501760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:00.503508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:00.550394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:00.550418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:00.550423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:00.550549Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6075 TClient is connected to server localhost:6075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
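The last failure in this run is subtly different: `tx has deferred effects, but locks are broken`. With deferred effects, writes are buffered and shipped only at commit; once the read locks backing them are invalidated, the buffered effects can no longer be applied consistently, so the transaction aborts as a whole. A toy model of that commit-time check (every type here is illustrative, not a YDB interface):

```cpp
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Toy model: effects are buffered and applied only at Commit(), guarded by
// a validity check on the locks taken by earlier reads.
class DeferredTx {
public:
    void Write(const std::string& effect) { Effects.push_back(effect); }
    void BreakLocks() { LocksValid = false; } // simulates a concurrent writer

    void Commit() {
        if (!Effects.empty() && !LocksValid) {
            // Mirrors the log: deferred effects exist, but the locks that
            // justified them are broken, so nothing may be applied.
            throw std::runtime_error(
                "tx has deferred effects, but locks are broken");
        }
        for (const auto& e : Effects) {
            std::cout << "apply: " << e << "\n";
        }
        Effects.clear();
    }

private:
    std::vector<std::string> Effects;
    bool LocksValid = true;
};

int main() {
    DeferredTx tx;
    tx.Write("UPSERT INTO `/Root/Test` ...");
    tx.BreakLocks(); // another session wrote to the range we read
    try {
        tx.Commit();
    } catch (const std::exception& e) {
        std::cerr << "ABORTED: " << e.what() << "\n";
    }
}
```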
2025-04-10T11:17:01.098661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.196445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641972936303012:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.196547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491641972936302986:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.196917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:02.202340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:02.211109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491641972936303015:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:02.302993Z node 1 :TX_PROXY ERROR: Actor# [1:7491641972936303066:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:02.706893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:02.822114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:02.822114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:02.822384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:02.822659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:02.822790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:02.822813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:02.822953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:02.822975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:02.823078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:02.823139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:02.823168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-10T11:17:02.823241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:02.823251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:02.823327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:02.823369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:02.823441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:02.823489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:02.823534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:02.823639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:02.823649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:02.823749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:02.823785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:02.823850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491641972936303264:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:02.823896Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491641972936303266:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:02.852633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491641972936303250:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:02.852692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491641972936303250:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:02.852876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id= ... COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[2:7491642042167739289:2457];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037949;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7491642042167738980:2424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7491642042167739337:2461];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7491642042167739506:2488];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037936;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7491642042167739506:2488];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037936;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7491642042167739337:2461];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[2:7491642042167738926:2411];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.381927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[2:7491642042167738926:2411];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.391165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7491642042167739327:2458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.391444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7491642042167739327:2458];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.391672Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037961;self_id=[2:7491642042167738948:2418];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037961;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.391850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;self_id=[2:7491642042167738948:2418];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037961;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7491642042167739557:2512];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037940;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7491642042167739557:2512];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7491642042167738946:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7491642042167738946:2417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7491642042167739273:2454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.392907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7491642042167739273:2454];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642042167739425:2464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642042167739425:2464];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7491642042167739516:2494];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7491642042167739516:2494];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393585Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7491642042167739612:2521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.393686Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7491642042167739612:2521];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7491642042167739106:2440];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7491642042167739106:2440];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394580Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:7491642042167739442:2467];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037945;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:7491642042167739442:2467];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037945;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7491642042167738918:2408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7491642042167738918:2408];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.394998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:7491642042167739112:2442];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037974;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:7491642042167739112:2442];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037974;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7491642042167739534:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7491642042167739534:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395350Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmVhNjc5YmUtZTY2MTE2ODctNjlkMjU5NS1lMzZkNTgwNw==, ActorId: [2:7491642067937550849:3865], ActorState: ReadyState, TraceId: 01jrfppgrc48cbef1hv6fzg4gs, Create QueryResponse for error on request, msg: 2025-04-10T11:17:25.395436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7491642042167738976:2422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-04-10T11:17:25.395544Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7491642042167738976:2422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7491642042167739561:2514];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.395778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7491642042167739561:2514];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.396226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7491642042167739439:2466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.396389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7491642042167739439:2466];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.396691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642042167739631:2526];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:25.396803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642042167739631:2526];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T11:17:30.036919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:30.036948Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::EmptyTxOnCommit >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpTx::RollbackManyTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23140, MsgBus: 27908 2025-04-10T11:17:07.954301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641993692529240:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:07.954433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f94/r3tmp/tmpkJf33h/pdisk_1.dat 2025-04-10T11:17:08.191506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23140, node 1 2025-04-10T11:17:08.252792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:08.252820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:08.252826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-10T11:17:08.252911Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:08.281479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:08.281616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:08.283179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27908 TClient is connected to server localhost:27908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:08.667206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:10.026684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642006577431770:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.026707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642006577431800:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.026926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:10.030714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:10.040162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642006577431807:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:10.136424Z node 1 :TX_PROXY ERROR: Actor# [1:7491642006577431858:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:10.342495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:10.440908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:11.135867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:12.954495Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641993692529240:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:12.954585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 23416, MsgBus: 16379 2025-04-10T11:17:18.088973Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642039153095726:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:18.089095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f94/r3tmp/tmpvd0KsS/pdisk_1.dat 2025-04-10T11:17:18.191303Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23416, node 2 2025-04-10T11:17:18.229261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:18.229461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:18.231044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:18.252627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:18.252644Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:18.252649Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:18.252762Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16379 TClient is connected to server localhost:16379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:18.634288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:20.655520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642047743030950:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:20.655581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642047743030979:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:20.655649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:20.658596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:20.666256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642047743030988:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:20.753576Z node 2 :TX_PROXY ERROR: Actor# [2:7491642047743031039:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:20.806726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:20.927235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642047743031231:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:20.927402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642047743031231:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:20.927637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642047743031231:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:20.927754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642047743031231:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:20.927858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642047743 ... 
5-04-10T11:17:27.267204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7491642064922905650:3131];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.267364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7491642064922905877:3152];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.268644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7491642064922905858:3149];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.269059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7491642064922905858:3149];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.269711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7491642064922906430:3281];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.270083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7491642064922906430:3281];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.272975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7491642064922906037:3206];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.273304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7491642064922906037:3206];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.273892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642064922906374:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.274214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642064922906374:3272];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.274782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7491642064922906120:3245];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.275076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7491642064922906120:3245];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.275283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642064922905908:3168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-04-10T11:17:27.275535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642064922905908:3168];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.277671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7491642064922906104:3234];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.278017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7491642064922906104:3234];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.278405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642064922905924:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.278717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642064922905924:3177];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.279839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642064922906071:3212];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.280104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642064922906071:3212];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.280391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642064922906009:3194];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.280804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642064922906009:3194];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.282807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7491642064922906111:3239];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.283138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7491642064922906111:3239];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.283890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7491642064922906363:3268];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.284149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7491642064922906363:3268];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-04-10T11:17:27.284942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642064922906218:3264];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.285323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642064922906218:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.285920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7491642052038000113:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.286164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7491642052038000113:2540];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.287883Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715667; 2025-04-10T11:17:27.289016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642064922906091:3224];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642064922906029:3201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642064922906091:3224];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642064922906029:3201];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289615Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7491642064922905465:3102];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7491642064922905465:3102];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.289986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.292121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7491642064922906209:3259];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.292499Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038003;self_id=[2:7491642064922906209:3259];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.292584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7491642064922906084:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.292871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7491642064922906084:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.294620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642064922906097:3229];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.294891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642064922906097:3229];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpSinkTx::SnapshotROInteractive2 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpTx::MixEnginesOldNew [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] Test command err: Trying to start YDB, gRPC: 29483, MsgBus: 4755 2025-04-10T11:17:10.811205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642006124950843:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:10.811327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f91/r3tmp/tmpTFv24y/pdisk_1.dat 2025-04-10T11:17:11.089531Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29483, node 1 2025-04-10T11:17:11.155401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:11.155427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:11.155435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:11.155538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:11.166110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:11.166249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:11.168004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4755 TClient is connected to server localhost:4755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:11.584038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:12.903106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642014714886084:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:12.903218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642014714886109:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:12.903268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:12.906477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:12.913346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642014714886112:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:12.975627Z node 1 :TX_PROXY ERROR: Actor# [1:7491642014714886163:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:13.214428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:13.330008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:13.995347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:14.971240Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTk4MzM3YjUtMmI1N2IzM2MtY2Y3MGQ4MjQtNzEwNjZlNDU=, ActorId: [1:7491642023304829172:2966], ActorState: ExecuteState, TraceId: 01jrfpp6gj98pnr1p85cqzs8ag, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:17:15.811411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642006124950843:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:15.811511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F092E5A4D8F 18. ??:0: ?? @ 0x7F092E5A4E3F 19. ??:0: ?? @ 0x1656B028 Trying to start YDB, gRPC: 12927, MsgBus: 22295 2025-04-10T11:17:18.713262Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642042153225871:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:18.713340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f91/r3tmp/tmp90kpvn/pdisk_1.dat 2025-04-10T11:17:18.797685Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12927, node 2 2025-04-10T11:17:18.838210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:18.838303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:18.840014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:18.850211Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:18.850229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:18.850237Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:18.850318Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22295 TClient is connected to server localhost:22295 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:19.180734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:21.295193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642055038128384:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.295247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642055038128404:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:21.295320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool de ... =skip_indexation;reason=disabled; 2025-04-10T11:17:27.595705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7491642059333097422:2517];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.595805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642059333097394:2504];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.595911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642059333097394:2504];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.596019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7491642059333097098:2483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037994;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.596112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7491642059333097098:2483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037994;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.596226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[2:7491642059333097090:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037987;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.596319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[2:7491642059333097090:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037987;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.597495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7491642059333097075:2474];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037995;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.597652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7491642059333097075:2474];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037995;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.597763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642059333097480:2545];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.597865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642059333097480:2545];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.598001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642059333097450:2530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.598109Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642059333097450:2530];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.598594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[2:7491642059333097387:2501];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037946;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.598760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[2:7491642059333097387:2501];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037946;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.598973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7491642055038128656:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.599079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7491642055038128656:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.599956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7491642059333097413:2513];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.600094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7491642059333097413:2513];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.600221Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7491642059333097368:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037963;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.600336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7491642059333097368:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037963;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.600476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491642055038128682:2349];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.600733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491642055038128682:2349];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.601015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;self_id=[2:7491642059333097079:2476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037989;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.601165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;self_id=[2:7491642059333097079:2476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037989;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.770800Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7491642059333097077:2475];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.771077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7491642059333097077:2475];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.771135Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715667; 2025-04-10T11:17:27.772257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642059333097359:2493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.772474Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:94;broken_lock_id=281474976715665; 2025-04-10T11:17:27.772700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.772810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7491642059333097409:2511];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.773015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7491642059333097409:2511];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.773155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7491642059333097417:2515];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.773286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7491642059333097417:2515];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.774278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642059333097470:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.774679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642059333097470:2540];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.775035Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7491642059333097462:2536];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.775180Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037932;self_id=[2:7491642059333097462:2536];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.775334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491642059333097702:2567];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037901;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.775460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491642059333097702:2567];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.825640Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642059333097359:2493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.845337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[2:7491642059333097391:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037979;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:27.845658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[2:7491642059333097391:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 17756, MsgBus: 21564 2025-04-10T11:17:25.507865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642073283977767:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:25.507923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f5c/r3tmp/tmpkdDNK8/pdisk_1.dat 2025-04-10T11:17:25.765662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17756, node 1 2025-04-10T11:17:25.800835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:25.800857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:25.800862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:25.801015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:25.841283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:25.841498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:25.843013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21564 TClient is connected to server localhost:21564 WaitRootIsUp 'Root'... 
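The long run of TX_COLUMNSHARD warnings above is one skip_indexation pair per column shard: every tablet logs the message from its periodic wakeup (TEvPrivate::TEvPeriodicWakeup) and again from the mediator timecast handler (TEvMediatorTimecast::TEvNotifyPlanStep) because background indexation is disabled for this test, and the trailing "WAIT_INDEXATION: 0" markers are the harness polling until no indexation work is pending. A minimal sketch, assuming the log was captured to a file named ya_test.log (a hypothetical name), for collapsing that flood into per-tablet counts:

import re
from collections import Counter

# The semicolon-joined field block of each record contains no spaces,
# so \S*? keeps a match inside one record; tablet_id is the capture group.
pattern = re.compile(r"tablet_id=(\d+);\S*?event=skip_indexation;reason=disabled")

counts = Counter()
with open("ya_test.log", encoding="utf-8") as log:
    for line in log:
        # A single physical line in this log can hold many records,
        # so count every match on the line, not just the first.
        counts.update(pattern.findall(line))

for tablet_id, n in counts.most_common():
    print(f"tablet {tablet_id}: {n} skip_indexation warnings")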
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:26.203987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:26.220257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:26.344646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:26.480800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:26.547991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:27.704367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642081873914150:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:27.704458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:27.932640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:27.964067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:27.987246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.009832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.034253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.082008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.114715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642086168881956:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.114796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.114832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642086168881961:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.118021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:28.125995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642086168881963:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:28.197991Z node 1 :TX_PROXY ERROR: Actor# [1:7491642086168882017:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:30.507704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642073283977767:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:30.507778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:31.321397Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM3NWI5OGQtYzZmM2UxNTgtN2YxNTdiNmMtNTFlYjhhOGQ=, ActorId: [1:7491642086168882266:2487], ActorState: ExecuteState, TraceId: 01jrfpppche0yqq8168qz1sqpd, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 15045, MsgBus: 8616 2025-04-10T11:17:31.989188Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642097749907302:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:31.989305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f5c/r3tmp/tmp5HT23M/pdisk_1.dat 2025-04-10T11:17:32.073953Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15045, node 2 2025-04-10T11:17:32.114265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:32.114356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:32.116265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:32.127706Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:32.127744Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:32.127749Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:32.127873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8616 TClient is connected to server localhost:8616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:32.450879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:32.458041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:32.499681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:32.618679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:32.671980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.220885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642110634810968:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.220962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.258275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.282087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.307512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.334172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.361163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.390291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:34.426038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642110634811479:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.426108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.426138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642110634811484:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.429101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:34.437055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642110634811486:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:34.522362Z node 2 :TX_PROXY ERROR: Actor# [2:7491642110634811540:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:35.679635Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjE0NDE1NWQtY2RhNGRjMjAtMjliODA0NmQtN2RkZDFiMjc=, ActorId: [2:7491642114929779119:2494], ActorState: ExecuteState, TraceId: 01jrfpptrc36z8th83wydzr6yg, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpTx::EmptyTxOnCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 19390, MsgBus: 12762 2025-04-10T11:17:28.202327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642082804202943:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:28.202446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f23/r3tmp/tmpBDbZyF/pdisk_1.dat 2025-04-10T11:17:28.439763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19390, node 1 2025-04-10T11:17:28.491539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:28.491565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:28.491580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:28.491769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:28.548184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:28.548290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:28.550299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12762 TClient is connected to server localhost:12762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
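The KQP_SESSION warnings above that end with "tx has deferred effects, but locks are broken" are the expected outcome in these tests: an interactive transaction buffered its writes (the deferred effects), a concurrent commit invalidated the optimistic locks it had taken on the rows it read, and the commit is therefore rejected instead of applied. A minimal sketch of the client-side retry loop such aborts call for; run_tx and TxLockInvalidated are hypothetical stand-ins for an application's own transaction wrapper and the error it surfaces, not YDB SDK names:

import time

class TxLockInvalidated(Exception):
    """Hypothetical error type for 'locks are broken' aborts."""

def with_retries(run_tx, attempts=5, backoff=0.1):
    # Rerun the whole transaction body so every attempt re-reads its
    # inputs under fresh locks; never reuse values read by a failed try.
    for attempt in range(attempts):
        try:
            return run_tx()
        except TxLockInvalidated:
            if attempt == attempts - 1:
                raise
            time.sleep(backoff * (2 ** attempt))  # exponential backoff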
2025-04-10T11:17:28.847376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:28.866643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:28.943663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:29.052398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:29.125334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:30.260566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642091394139311:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:30.260673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:30.489728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.514105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.537300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.560255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.583269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.649356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:30.712306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642091394139826:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:30.712375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:30.712429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642091394139831:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:30.715132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:30.721968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642091394139833:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:30.802379Z node 1 :TX_PROXY ERROR: Actor# [1:7491642091394139886:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:32.007781Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg2NDc5NmItNTdmOGQ4MjAtNmE0MTE0ODAtZmFjMTM2MmY=, ActorId: [1:7491642095689107436:2487], ActorState: ExecuteState, TraceId: 01jrfppq4669pk6fdewnp5wknr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-10T11:17:32.015798Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg2NDc5NmItNTdmOGQ4MjAtNmE0MTE0ODAtZmFjMTM2MmY=, ActorId: [1:7491642095689107436:2487], ActorState: ReadyState, TraceId: 01jrfppq7f5d066zwnw1gpwp26, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23454, MsgBus: 19550 2025-04-10T11:17:32.631515Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642101033066007:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:32.631566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f23/r3tmp/tmprVYUdf/pdisk_1.dat 2025-04-10T11:17:32.704842Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23454, node 2 2025-04-10T11:17:32.753735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:32.753775Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:32.753782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:32.753869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:32.760538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:32.760623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:32.762057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19550 TClient is connected to server localhost:19550 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:33.124559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.137646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.173146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.286463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.342401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.053327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642113917969684:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.053384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.071893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.095700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.118107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.142597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.167785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.194528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.229039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642113917970190:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.229105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.229145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642113917970195:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.231765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:35.238732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642113917970197:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:35.304853Z node 2 :TX_PROXY ERROR: Actor# [2:7491642113917970250:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> KqpSnapshotRead::TestSnapshotExpiration-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 61635, MsgBus: 16336 2025-04-10T11:17:29.525567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642089523379479:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:29.525842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f09/r3tmp/tmpLSuYH8/pdisk_1.dat 2025-04-10T11:17:29.759432Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61635, node 1 2025-04-10T11:17:29.816706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:29.816728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:29.816739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:29.816953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:29.850627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:29.850727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:29.852490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16336 TClient is connected to server localhost:16336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:30.195374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
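The recurring triple in this output (TPoolFetcherActor reporting NOT_FOUND for the default resource pool, TPoolCreatorActor scheduling a retry with "Transaction ... completed, doublechecking", then a TX_PROXY ERROR whose message reads "path exist, request accepts it") is the workload manager lazily creating /Root/.metadata/workload_manager/pools/default on first use; one of the racing creators simply finds the pool already in place. None of it marks a test failure. A minimal sketch for filtering this bootstrap noise out of an error scan, with the marker strings copied from the messages above and the file name assumed:

BENIGN_MARKERS = (
    "Resource pool default not found",
    "completed, doublechecking",
    "path exist, request accepts it",
)

def is_benign(line: str) -> bool:
    # True for lines that only reflect lazy default-pool creation.
    return any(marker in line for marker in BENIGN_MARKERS)

with open("ya_test.log", encoding="utf-8") as log:
    suspicious = [l for l in log if "ERROR" in l and not is_benign(l)]

print(f"{len(suspicious)} error lines left after filtering bootstrap noise")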
2025-04-10T11:17:30.216120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:30.294183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:30.405909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:30.480440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:31.610220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642098113315859:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:31.610329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:31.800869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.823139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.848409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.870080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.892617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.917984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:31.968746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642098113316368:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:31.968863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:31.968889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642098113316373:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:31.971938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:31.979751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642098113316375:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:32.071534Z node 1 :TX_PROXY ERROR: Actor# [1:7491642102408283726:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:33.032017Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTljMTUxYTMtOWM3ZTQ5OTEtNjliOWM4ZWYtYmQ0MzdkNDM=, ActorId: [1:7491642102408284011:2495], ActorState: ReadyState, TraceId: 01jrfppr6x5ts6jv9cpv8rcvg6, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21773, MsgBus: 13655 2025-04-10T11:17:33.559931Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642106863986873:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:33.559989Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f09/r3tmp/tmp7BYzop/pdisk_1.dat 2025-04-10T11:17:33.643396Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21773, node 2 2025-04-10T11:17:33.691121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:33.691210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:33.692810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:33.703640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:33.703663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:33.703670Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:33.703773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13655 TClient is connected to server localhost:13655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:34.023159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
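Interleaved with the per-test command output, the log carries the suite's verdict markers in the form ">> Suite::Test [GOOD]"; bare ">> Suite::Test" entries without a bracketed verdict only announce queued tests. A minimal sketch for pulling the verdicts out of a captured log; the file name and the non-GOOD verdict strings (FAIL, TIMEOUT, CRASHED) are assumptions about what a failing run would print:

import re

# ">> KqpTx::EmptyTxOnCommit [GOOD]" -> ("KqpTx::EmptyTxOnCommit", "GOOD")
verdict_re = re.compile(r">> (\S+::\S+?) \[(GOOD|FAIL|TIMEOUT|CRASHED)\]")

with open("ya_test.log", encoding="utf-8") as log:
    for name, verdict in verdict_re.findall(log.read()):
        print(f"{name:70s} {verdict}")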
2025-04-10T11:17:34.030074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.068476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.214006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.283420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.901762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642115453923254:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.901852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:35.933196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.956330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.981036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.008163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.036197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.102901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.157017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642119748891068:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.157090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.157145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642119748891073:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.160026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:36.168910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642119748891075:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:36.266178Z node 2 :TX_PROXY ERROR: Actor# [2:7491642119748891128:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkMvcc::SnapshotExpiration >> DSProxyStrategyTest::Restore_block42 [GOOD] >> KqpSinkTx::OlapInvalidateOnError |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-04-10T11:15:50.454880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.455304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.455379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010db/r3tmp/tmpelwuad/pdisk_1.dat 2025-04-10T11:15:50.858857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12860, node 1 2025-04-10T11:15:51.179871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.179929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.179965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.180468Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.186294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.281984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.282117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.295221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6810 2025-04-10T11:15:51.818433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.732324Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:54.778359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:54.778564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:54.813726Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:54.816546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.074552Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.077278Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.077962Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078426Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078545Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078633Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078715Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.078795Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.254251Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.254381Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.267912Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.408021Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.462766Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.462865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.496833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.497357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.497609Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.497710Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.497782Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.497845Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.497907Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.497973Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.498472Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.524388Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:15:55.526638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.541673Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.541740Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.541832Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:55.546636Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.546743Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.564691Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:15:55.565454Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:15:55.590167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:55.598295Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:55.598468Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:55.817594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:55.963907Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.085592Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.126347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2245:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.126471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.204795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.322110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.322393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.322734Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.322898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.323091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.323234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.323360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.323499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.323622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.323801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.323931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.324043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.356079Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:57.356198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 11:17:33.402762Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:17:33.419050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:17:33.419597Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:17:33.419655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:17:33.433257Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:17:34.120654Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:34.120713Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:17:34.120744Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:17:34.120780Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:17:34.120820Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:34.121417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:17:34.134230Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:17:34.134317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:17:34.134670Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:17:34.134731Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-10T11:17:34.135760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:17:34.135842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:17:34.136319Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:17:34.136360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2804:3226] 2025-04-10T11:17:34.136528Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:17:34.150379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-04-10T11:17:34.150476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:17:34.150522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:17:34.150824Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:17:34.151544Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7253:5309], server id = [2:7254:5310], tablet id = 72075186224037899, status = OK 2025-04-10T11:17:34.151664Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7253:5309], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:17:34.157597Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:17:34.157714Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:17:34.158004Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:17:34.158240Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:17:34.158527Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7253:5309], server id = [2:7254:5310], tablet id = 72075186224037899 2025-04-10T11:17:34.158565Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:17:34.158769Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:17:34.162311Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:17:34.199602Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7274:5329]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:17:34.199858Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:17:34.199911Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7274:5329], StatRequests.size() = 1 2025-04-10T11:17:34.324804Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmY5N2U4NjAtM2Y0ODRlM2YtZjZiMzdmZDItOTY1YmQzZjA=, TxId: 2025-04-10T11:17:34.324870Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmY5N2U4NjAtM2Y0ODRlM2YtZjZiMzdmZDItOTY1YmQzZjA=, TxId: 2025-04-10T11:17:34.325341Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:17:34.339306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:34.339381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:17:34.612891Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:17:34.612995Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:17:35.020204Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:17:35.020297Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:17:35.020356Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:17:35.651511Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:35.651715Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:17:35.673494Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:35.673624Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:17:35.673678Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:35.674225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:17:35.687727Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:17:35.688055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:17:35.688118Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:17:35.688555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:17:35.712984Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:17:35.713167Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:17:35.713791Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7368:5379], server id = [2:7369:5380], tablet id = 72075186224037899, status = OK 2025-04-10T11:17:35.713886Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7368:5379], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:17:35.715196Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:17:35.715288Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:17:35.715539Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7368:5379], server id = [2:7369:5380], tablet id = 72075186224037899 2025-04-10T11:17:35.715570Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:17:35.715664Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:17:35.715822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:17:35.716175Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:17:35.718759Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:17:35.740887Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTZkZTVmMzctOGQ3MTI5N2ItZTU3MDExZWUtNzU2OGI1NmQ=, TxId: 2025-04-10T11:17:35.740951Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTZkZTVmMzctOGQ3MTI5N2ItZTU3MDExZWUtNzU2OGI1NmQ=, TxId: 2025-04-10T11:17:35.741481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:17:35.755734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:35.755803Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3226] 2025-04-10T11:17:36.044902Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:17:36.044994Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:17:36.989220Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:37.570832Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:37.581798Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:17:37.581883Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-10T11:17:38.392458Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:38.392524Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:17:39.070545Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:39.070744Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:17:39.092399Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:17:39.092463Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout >> KqpSinkTx::SnapshotRO >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 30808, MsgBus: 19699 2025-04-10T11:17:13.685827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:17:13.685967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:17:13.686117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f87/r3tmp/tmpnl8MnP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30808, node 1 2025-04-10T11:17:14.042942Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:14.046001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:14.046056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:14.046086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:14.046276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:14.080812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:14.080949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:14.092260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19699 TClient is connected to server localhost:19699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:14.300227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:14.412638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:14.740040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:15.079448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.388435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:15.974597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1810:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:15.974781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:15.995874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:16.212627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:16.446508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:16.679489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:16.910346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.218848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.473061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2396:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.473130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.473374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2401:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.477092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:17.630792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2403:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:17.676050Z node 1 :TX_PROXY ERROR: Actor# [1:2468:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:18.440259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:17:18.700317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:17:19.060704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7682, MsgBus: 9198 2025-04-10T11:17:23.055027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:17:23.055173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:17:23.055342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f87/r3tmp/tmp6mGn2a/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7682, node 2 2025-04-10T11:17:23.388839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:23.388907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:23.388948Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:23.389658Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:23.389899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:23.425705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:23.425850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:23.437274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9198 TClient is connected to server localhost:9198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:23.664042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:23.776650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... 
57594046644480 2025-04-10T11:17:25.426507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.668487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.944778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:26.172120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:26.486997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:26.765981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2396:3854], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.766084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.766442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2401:3859], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:26.772553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:26.947670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2403:3861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:26.993650Z node 2 :TX_PROXY ERROR: Actor# [2:2468:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:27.706673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:17:27.951578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.296905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15079, MsgBus: 2906 2025-04-10T11:17:32.702252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:17:32.702518Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:17:32.702596Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f87/r3tmp/tmpkVMS9j/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15079, node 3 2025-04-10T11:17:33.048442Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:33.049188Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:33.049237Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:33.049263Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:33.049504Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:33.083711Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:33.083848Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:33.094874Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2906 TClient is connected to server localhost:2906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:33.313783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.428755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:33.717673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:34.059364Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.359269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:34.862765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1807:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.862954Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:34.880208Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.120564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.352230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.622852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:35.864444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.184903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.458406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2395:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.458527Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.458947Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2400:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.465845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:36.643378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2402:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:36.689240Z node 3 :TX_PROXY ERROR: Actor# [3:2465:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:37.709082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:17:37.949240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:17:38.380299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkMvcc::OlapNamedStatement >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpTx::RollbackRoTx [GOOD] >> TPDiskRaces::OwnerRecreationRaces [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLog >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 10181, MsgBus: 64425 2025-04-10T11:17:26.163927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642075915687148:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:26.164085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f44/r3tmp/tmpAWvqC0/pdisk_1.dat 2025-04-10T11:17:26.417677Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:26.436152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:26.436250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:26.437906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10181, node 1 2025-04-10T11:17:26.496674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:26.496696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:26.496708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:26.496823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64425 TClient is connected to server localhost:64425 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:26.883742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:26.905958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:27.008864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:27.153810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:27.229674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:28.261470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642084505623531:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.261548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.457500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.480493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.504664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.525783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.548124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.573962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.607818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642084505624036:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.607924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.607975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642084505624041:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.610905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:28.618450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642084505624043:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:28.697265Z node 1 :TX_PROXY ERROR: Actor# [1:7491642084505624098:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:29.623096Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715675; 2025-04-10T11:17:29.629977Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642088800591725:2495], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7491642088800591677:2495]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7491642088800591725:2495].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:17:29.630430Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642088800591717:2495], SessionActorId: [1:7491642088800591677:2495], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491642088800591677:2495]. isRollback=0 2025-04-10T11:17:29.630690Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWRhZjI3OTctYWMyMzA2MDYtYWU3YzUzNTQtMjBmOWE5ZDc=, ActorId: [1:7491642088800591677:2495], ActorState: ExecuteState, TraceId: 01jrfppmwm8fksgvd0d5vvrt4f, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491642088800591795:2495] from: [1:7491642088800591717:2495] 2025-04-10T11:17:29.630770Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642088800591795:2495] TxId: 281474976715675. Ctx: { TraceId: 01jrfppmwm8fksgvd0d5vvrt4f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhZjI3OTctYWMyMzA2MDYtYWU3YzUzNTQtMjBmOWE5ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:17:29.630904Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWRhZjI3OTctYWMyMzA2MDYtYWU3YzUzNTQtMjBmOWE5ZDc=, ActorId: [1:7491642088800591677:2495], ActorState: ExecuteState, TraceId: 01jrfppmwm8fksgvd0d5vvrt4f, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 63400, MsgBus: 3231 2025-04-10T11:17:30.174117Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642093889729099:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:30.174252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f44/r3tmp/tmpefPhOI/pdisk_1.dat 2025-04-10T11:17:30.243776Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63400, node 2 2025-04-10T11:17:30.290869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:30.290884Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:30.290888Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:30.290970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:30.299606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:30.299676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:30.301296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3231 TClient is connected to server localhost:3231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty ... 
=finished_tx;tx_id=281474976715661; 2025-04-10T11:17:34.543790Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:17:34.543789Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:17:34.544247Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7491642102479665111:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037907; 2025-04-10T11:17:34.544252Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:17:34.544331Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7491642102479665111:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037903; 2025-04-10T11:17:34.544377Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7491642102479665111:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037907; 2025-04-10T11:17:34.544412Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7491642102479665111:2372];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899;receive=72075186224037903; 2025-04-10T11:17:34.544670Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:17:34.893920Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGM4NmVlZDMtZGFmNjBmNDEtYzQ0MjBkOTUtNmI3MjQ4NmI=, ActorId: [2:7491642111069601948:2808], ActorState: ExecuteState, TraceId: 01jrfppszcah6xzcb6cnh7pnx4, Create QueryResponse for error on request, msg: 2025-04-10T11:17:34.895431Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715670;tx_id=281474976715670;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715670; 2025-04-10T11:17:34.895635Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7491642102479665052:2364];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715665;problem=finished; 2025-04-10T11:17:34.896793Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-04-10T11:17:34.896896Z node 2 :TX_COLUMNSHARD_TX WARN: fline=manager.cpp:134;event=abort;tx_id=281474976715665;problem=finished; Trying to start YDB, gRPC: 15715, MsgBus: 
6085 2025-04-10T11:17:37.922528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:17:37.922876Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:17:37.922967Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f44/r3tmp/tmpbgBPtT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15715, node 3 2025-04-10T11:17:38.276879Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:38.277785Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:38.277825Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:38.277854Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:38.278113Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:38.312404Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:38.312510Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:38.323865Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6085 TClient is connected to server localhost:6085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:38.554482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:38.585792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:38.838103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:39.259674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:39.534573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:40.046523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1806:3402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.046725Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.065501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.276641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.535320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.777921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.042231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.312688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.640282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2392:3853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.640370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.640727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2397:3858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.646312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:41.797045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2399:3860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:41.864749Z node 3 :TX_PROXY ERROR: Actor# [3:2464:3906] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:42.747390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:17:42.991882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:17:43.329637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 22070, MsgBus: 21021 2025-04-10T11:17:34.309412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642111634040101:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:34.309550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eeb/r3tmp/tmpfRIr51/pdisk_1.dat 2025-04-10T11:17:34.551529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:34.561803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:34.561913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:34.563844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22070, node 1 2025-04-10T11:17:34.611008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:34.611033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:34.611044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:34.611204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21021 TClient is connected to server localhost:21021 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:35.007596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.032685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.143992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.286484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:35.345034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:36.522391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642120223976480:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.522495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.734006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.756557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.779565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.806575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.831173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.897631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:36.944946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642120223976996:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.944999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.945008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642120223977001:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:36.947854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:36.955548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642120223977003:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:37.037210Z node 1 :TX_PROXY ERROR: Actor# [1:7491642124518944352:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:39.309329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642111634040101:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:39.309401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18647, MsgBus: 12166 2025-04-10T11:17:41.742259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642137994006165:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:41.742393Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eeb/r3tmp/tmp8KicGB/pdisk_1.dat 2025-04-10T11:17:41.832075Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18647, node 2 2025-04-10T11:17:41.874187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:41.874253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:41.875497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:41.881486Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:41.881514Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:41.881525Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:41.881655Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12166 TClient is connected to server localhost:12166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:17:42.234591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:42.241944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:42.309560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:42.412999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:42.482088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:44.128345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642150878909816:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.128430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.145799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.172299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.198829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.226699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.255188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.320909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.354698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642150878910333:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.354759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.354907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642150878910338:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.357795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:44.366220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642150878910340:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:44.459051Z node 2 :TX_PROXY ERROR: Actor# [2:7491642150878910394:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:45.204103Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGMzMjQwOTQtOGRjMmRjM2UtOTBkM2ViNjUtY2I2YTBhNTg=, ActorId: [2:7491642155173877972:2494], ActorState: ReadyState, TraceId: 01jrfpq43b6kg6byj1nwat3bh1, Create QueryResponse for error on request, msg: >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TSimpleOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 24987, MsgBus: 12875 2025-04-10T11:17:14.782303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642024779803266:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:14.782452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f73/r3tmp/tmpYm272n/pdisk_1.dat 2025-04-10T11:17:15.096866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24987, node 1 2025-04-10T11:17:15.170544Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:15.170579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:15.170584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:15.170678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:15.192157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:15.192308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:15.194047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12875 TClient is connected to server localhost:12875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:17:15.599465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:17.091853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642037664705823:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.091969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642037664705804:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.092072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:17.095273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:17.104100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642037664705833:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:17.172037Z node 1 :TX_PROXY ERROR: Actor# [1:7491642037664705884:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:17.435783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.552420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.552548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:17.552798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:17.552973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:17.553136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:17.553284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:17.553463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:17.553623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:17.553724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:17.553836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:17.553933Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:17.554023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491642037664706070:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:17.560974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.561051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:17.561400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:17.561632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:17.561778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:17.561907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:17.562097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:17.562248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:17.562409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:17.562540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:17.562699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:17.562909Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491642037664706089:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:17.580703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642037664706077:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.580768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642037664706077:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:17.580930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;sel ... anStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.439372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7491642106373114237:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.439477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7491642106373114237:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.439574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642106373113913:2493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.439666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[2:7491642106373113913:2493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037913;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.439760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7491642106373113273:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.440075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7491642106373113273:2414];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.440227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7491642106373113299:2427];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.440532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7491642106373113299:2427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[2:7491642106373112878:2407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441158Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[2:7491642106373112878:2407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7491642106373114270:2521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7491642106373114270:2521];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7491642106373113530:2448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.441884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7491642106373113530:2448];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.442775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7491642106373113767:2476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037936;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.443118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7491642106373113767:2476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037936;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.443235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[2:7491642106373114287:2532];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037985;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.443516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[2:7491642106373114287:2532];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037985;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.443668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7491642106373114268:2520];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037996;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.443767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7491642106373114268:2520];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037996;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.444288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7491642106373113759:2474];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.444544Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642106373114209:2495];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.444618Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037932;self_id=[2:7491642106373113759:2474];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.444884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:7491642106373114209:2495];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037986;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.447931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[2:7491642106373114239:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037988;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.448067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[2:7491642106373114239:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037988;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.465188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=6a6692c4-15fd11f0-a22c969e-b1c205fa;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.465583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.478881Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7491642119258019305:3050];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.479127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7491642119258019305:3050];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.479360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642119258019654:3089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.479524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642119258019654:3089];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.480395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7491642119258019299:3048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.480543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7491642119258019299:3048];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.481911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7491642119258019294:3046];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.482056Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038097;self_id=[2:7491642119258019294:3046];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.483504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642119258019643:3086];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.483675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642119258019643:3086];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.483981Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642119258019786:3178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.484111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642119258019786:3178];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.487056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7491642119258019422:3051];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.487215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7491642119258019422:3051];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:40.487551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7491642119258019296:3047];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T11:17:45.220465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:45.220508Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpSinkMvcc::OltpNamedStatementNoSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> KqpSinkTx::ExplicitTcl >> KqpSinkTx::OlapSnapshotRO [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 2900, MsgBus: 31104 2025-04-10T11:17:14.503266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642024344268934:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:14.503331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f81/r3tmp/tmpe0pt8u/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2900, node 1 2025-04-10T11:17:14.758352Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:17:14.758396Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-10T11:17:14.764208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:14.783391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:14.783412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:14.783417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:14.783532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:14.844561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:14.844690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:14.846367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31104 TClient is connected to server localhost:31104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:15.180271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:16.645708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642032934204186:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:16.645775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642032934204162:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:16.646164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:16.649566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:16.657724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642032934204199:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:16.728420Z node 1 :TX_PROXY ERROR: Actor# [1:7491642032934204250:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:17.000922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:17.137117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.137292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:17.137519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:17.137594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:17.137678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:17.137798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:17.137866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:17.137923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:17.138005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:17.138109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:17.138219Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:17.138317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491642037229171758:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:17.152157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.152326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:17.152645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:17.152851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:17.153014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:17.153202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:17.153377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:17.153546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:17.153781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:17.153957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:17.154165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:17.154442Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491642037229171736:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:17.162941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037229171766:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:17.162988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642037229171766:2351];tablet_id=72075186224037894;process=TTxInitSchema::Execu ... olumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.585858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7491642133303518186:3230];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.585970Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642133303518162:3223];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.586056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642133303518162:3223];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.586877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7491642133303518359:3257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.586990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7491642133303518359:3257];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.588730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642133303518382:3263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.588857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642133303518382:3263];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.588986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642133303518658:3352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642133303518658:3352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642133303518418:3273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589418Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7491642133303518418:3273];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642133303518415:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589629Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642133303518415:3272];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642133303518212:3236];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642133303518212:3236];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.589935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7491642133303518603:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7491642133303518603:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590221Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642133303518361:3258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642133303518361:3258];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7491642133303518396:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7491642133303518396:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642133303518424:3275];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642133303518424:3275];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.590764Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7491642133303517647:3074];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.591024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7491642133303518431:3276];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.591165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7491642133303518431:3276];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7491642133303518817:3365];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7491642133303518817:3365];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7491642133303518352:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7491642133303518352:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642133303518437:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.592932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642133303518437:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.593025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7491642133303518420:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.593108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7491642133303518420:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.593190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7491642133303517811:3111];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.593270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7491642133303517811:3111];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.594507Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642133303518404:3270];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.594630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642133303518404:3270];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.594794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7491642133303518555:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.594917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7491642133303518555:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.594986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7491642133303517855:3150];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:43.716925Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODFiMTUwYzAtNzVlNWFiMTctOTAxNTc1NTItYjM4OWQ0MjM=, ActorId: [2:7491642146188423433:3867], ActorState: ExecuteState, TraceId: 01jrfpq2khehnqk1z1shygmtrn, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
2025-04-10T11:17:47.673025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:17:47.673048Z node 2 :IMPORT WARN: Table profiles were not loaded
WAIT_INDEXATION: 0
>> KqpSinkTx::SnapshotRO [GOOD]
>> KqpSinkTx::SnapshotROInteractive1
>> KqpSinkTx::OlapInvalidateOnError [FAIL]
>> KqpSinkTx::OlapInteractive
>> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD]
>> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD]
>> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3
>> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD]
>> KqpTx::BeginTransactionBadMode
>> KqpSinkLocks::UncommittedRead [GOOD]
>> KqpSnapshotIsolation::TSimpleOltp [FAIL]
>> KqpSnapshotIsolation::TSimpleOlap
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD]
Test command err:
Trying to start YDB, gRPC: 26992, MsgBus: 4523
2025-04-10T11:17:23.207037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642062344624630:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:17:23.207253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f66/r3tmp/tmpnoj1ZF/pdisk_1.dat
2025-04-10T11:17:23.440747Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26992, node 1
2025-04-10T11:17:23.497756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:17:23.497785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:17:23.497791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:17:23.497949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:17:23.522844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:17:23.522991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:17:23.524997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4523
TClient is connected to server localhost:4523
WaitRootIsUp 'Root'...
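[Note: the `code: 2008` failure above is what the server returns when a blind write is issued inside a read-only transaction. A minimal sketch of the pattern that triggers it, using the YDB C++ SDK; the endpoint, table name, and schema below are illustrative assumptions, not values taken from this log.]

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig()
        .SetEndpoint("localhost:2136")   // hypothetical endpoint
        .SetDatabase("/Root"));
    TTableClient client(driver);
    auto session = client.GetSession().GetValueSync().GetSession();

    // Begin a snapshot read-only transaction, then attempt a blind write.
    auto tx = TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx();
    auto result = session.ExecuteDataQuery(
        R"(UPSERT INTO `/Root/Test` (Id, Value) VALUES (1u, "x");)",  // hypothetical table
        tx).GetValueSync();

    // The write is rejected: Upsert is not allowed in a read-only
    // transaction; the log above reports this as issue code 2008.
    if (!result.IsSuccess()) {
        Cerr << result.GetIssues().ToString();
    }
    driver.Stop(true);
    return 0;
}

[Any read-only settings (`OnlineRO()`, `StaleRO()`, `SnapshotRO()`) would be expected to fail the same way; the fix is to run writes under `SerializableRW()`.]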
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:23.870818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:25.289695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642070934559891:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:25.289779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642070934559880:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:25.290029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:25.293566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:25.301981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642070934559898:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:25.375149Z node 1 :TX_PROXY ERROR: Actor# [1:7491642070934559949:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:25.566391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:25.675996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:25.675996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:25.676146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:25.676346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:25.676436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:25.676545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:25.676631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:25.676643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:25.676743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:25.676813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:25.676825Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:25.676892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:25.676921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:25.676990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:25.677045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:25.677085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:25.677171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:25.677202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:25.677299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491642070934560139:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:25.677302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:25.677463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:25.677579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:25.677693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:25.677806Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037889;self_id=[1:7491642070934560141:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:25.704350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491642070934560160:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:25.704404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491642070934560160:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:25.704627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_i ... node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037955;self_id=[2:7491642141839775016:2489];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037955;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.106974Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037956;self_id=[2:7491642141839774882:2449];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037956;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107025Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7491642141839774963:2471];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107048Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7491642141839774930:2458];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107095Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7491642141839774767:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107103Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7491642141839774990:2480];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107145Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7491642141839774732:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107157Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7491642141839774746:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107219Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037974;self_id=[2:7491642141839774712:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107237Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7491642141839774726:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107284Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7491642141839774779:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107293Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7491642141839774693:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107340Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7491642141839774572:2416];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107343Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7491642141839774721:2429];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107380Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037963;self_id=[2:7491642141839774928:2457];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107395Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7491642141839774714:2426];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107430Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7491642141839774888:2450];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107440Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7491642141839774739:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107480Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7491642141839774984:2477];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-04-10T11:17:48.107489Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7491642141839774974:2476];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107523Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7491642141839774936:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107543Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7491642141839774958:2469];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107573Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7491642141839774880:2448];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107580Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7491642141839774940:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107612Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7491642141839774838:2443];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107614Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7491642141839774895:2453];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107651Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7491642141839774743:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107654Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7491642141839774705:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107708Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7491642141839774942:2462];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107762Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037981;self_id=[2:7491642141839774843:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107819Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7491642141839774716:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107893Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7491642141839774826:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107938Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7491642141839774730:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.107987Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7491642141839774741:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.108036Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7491642141839774570:2415];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:48.108091Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7491642141839774724:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> AnalyzeDatashard::AnalyzeOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 18023, MsgBus: 13745 2025-04-10T11:17:30.164892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642093837493789:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:30.164966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000efc/r3tmp/tmpckxdQ0/pdisk_1.dat 2025-04-10T11:17:30.402709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18023, node 1 2025-04-10T11:17:30.457232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:30.457255Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-10T11:17:30.457266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:30.457427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:30.505048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:30.505175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:30.506902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13745 TClient is connected to server localhost:13745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:30.870829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:32.262607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642102427429045:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:32.262709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642102427429056:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:32.262808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:32.266882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:32.276146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642102427429060:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:32.359109Z node 1 :TX_PROXY ERROR: Actor# [1:7491642102427429111:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:32.648429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:32.792047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:32.792285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:32.792522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:32.792629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:32.792725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:32.792830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:32.792906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:32.792965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:32.793080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:32.793160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:32.793225Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:32.793302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:32.807065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:32.807162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:32.807430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:32.807604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:32.807787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:32.807948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:32.808085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:32.808259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:32.808407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:32.808534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:32.808699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:32.808817Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:32.813023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:17:32.813095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:17:32.813237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_N ... UTER ERROR: ActorId: [1:7491642128197242910:3913] TxId: 281474976710669. Ctx: { TraceId: 01jrfppxpqd0fz6py595erp0gq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUzODA3ZGEtOTNhYTc3ZjEtOGY1YmFkOTctMThlZTc1Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } 2025-04-10T11:17:38.721160Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721200Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[1:7491642102427429310:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721242Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037890 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmUzODA3ZGEtOTNhYTc3ZjEtOGY1YmFkOTctMThlZTc1Zjc=, ActorId: [1:7491642123902274869:3913], ActorState: ExecuteState, TraceId: 01jrfppxpqd0fz6py595erp0gq, Create QueryResponse for error on request, msg: 2025-04-10T11:17:38.721276Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[1:7491642102427429318:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721325Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037888 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721357Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[1:7491642102427429302:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721400Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037891 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721430Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[1:7491642102427429308:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721469Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037893 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721533Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[1:7491642102427429316:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721616Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 
281474976710669 2025-04-10T11:17:38.721664Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[1:7491642102427429431:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721708Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037895 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721739Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[1:7491642102427429312:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721796Z node 1 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710669 2025-04-10T11:17:38.721830Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[1:7491642102427429304:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:17:38.721865Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[1:7491642102427429314:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 65310, MsgBus: 15779 2025-04-10T11:17:45.202521Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642157028670981:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:45.202645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000efc/r3tmp/tmpi1KmcG/pdisk_1.dat 2025-04-10T11:17:45.336451Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:45.350991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:45.351078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:45.352530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65310, node 2 2025-04-10T11:17:45.390270Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:45.390292Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:45.390297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:45.390423Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15779 TClient is connected to server localhost:15779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:45.804562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:47.731310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642165618606225:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
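[Note: the paired `code: 2001` issues above ("Transaction locks invalidated" plus the lock-generation-counter subissue) are YDB's optimistic-lock failure: a concurrent commit touched `/Root/Test` between this transaction's read and its commit, so the transaction aborts. The usual remedy is to re-run the whole transaction body. A minimal sketch using the C++ SDK's built-in retry helper; the query and column names are assumed for illustration, not taken from this log.]

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

// RetryOperationSync re-runs the closure on retryable statuses such as
// ABORTED (lock invalidation), with backoff handled by the SDK.
TStatus UpdateWithRetry(TTableClient& client) {
    return client.RetryOperationSync([](TSession session) -> TStatus {
        auto tx = TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx();
        return session.ExecuteDataQuery(
            R"(UPDATE `/Root/Test` SET Comment = "retried" WHERE Group = 1u;)",  // assumed schema
            tx).GetValueSync();
    });
}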
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:47.731366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642165618606236:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:47.731409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:47.735112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:47.743405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642165618606243:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:47.831335Z node 2 :TX_PROXY ERROR: Actor# [2:7491642165618606294:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:47.872441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:47.949227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:48.760531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:50.202781Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642157028670981:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:50.202858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkTx::LocksAbortOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-04-10T11:15:50.645454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.645880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.645958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001101/r3tmp/tmp7o8go7/pdisk_1.dat 2025-04-10T11:15:51.025041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24948, node 1 2025-04-10T11:15:51.282175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.282230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.282262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.282722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.285692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.375028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.375192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.389085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26135 2025-04-10T11:15:51.931916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.221707Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.251957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.252143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.279908Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.282712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.527249Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.527801Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.528524Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.528694Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.528780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.529098Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.529203Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.529301Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.529412Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.693001Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.693117Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.706468Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.836105Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.884821Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.884945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.925129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.926317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.926526Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.926599Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.926690Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.926741Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.926782Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.926826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.927276Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.949112Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.949239Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.956680Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.960711Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.961055Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.970765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.989201Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.989264Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.989355Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:56.006604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.013587Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.013726Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.197422Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.339601Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.416358Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.410567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.410742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.438053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.886915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.887096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.888635Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:57.888863Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:57.888953Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133] 2025-04-10T11:15:57.889878Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133] 2025-04-10T11:15:57.890575Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994] 2025-04-10T11:15:57.890858Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK 2025-04-10T11:15:57.891063Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:15:57.891142Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:15:57.892310Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:15:57.892424Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1 2025-04-10T11:15:57.939956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.940079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.940516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.948369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:15:58.130274Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:15:58.130376Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:15:58.205655Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1 2025-04-10T11:15:58.553039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... EvPropagateStatistics, node id = 1 2025-04-10T11:17:18.298716Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:20.800319Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:20.800760Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:24.273132Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:26.902049Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:26.902417Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:30.458718Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:33.070521Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:33.070947Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:36.787521Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:39.319834Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:39.320355Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:43.070264Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:45.468333Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:45.468787Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:49.146796Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:50.452873Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:17:50.452957Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:17:50.452994Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:17:50.453027Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:17:51.920580Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:51.920938Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:51.963471Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:17:51.963535Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 196.000000s, at schemeshard: 72075186224037897 2025-04-10T11:17:51.963774Z node 2 
:STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-10T11:17:51.976778Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:17:53.267312Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:53.267402Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:17:53.267436Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:17:53.267478Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:17:53.267519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:17:53.267901Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:17:53.278043Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:17:53.280467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6634:4667], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:53.280555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6644:4672], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:53.280651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:53.287873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:17:53.326116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6648:4675], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:17:53.503439Z node 2 :TX_PROXY ERROR: Actor# [2:6748:4724] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:53.532726Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6777:4739]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:17:53.532873Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:17:53.532927Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6779:4741] 2025-04-10T11:17:53.532970Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6779:4741] 2025-04-10T11:17:53.533174Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6780:4742] 2025-04-10T11:17:53.533271Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6780:4742], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:17:53.533338Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:17:53.533413Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6779:4741], server id = [2:6780:4742], tablet id = 72075186224037894, status = OK 2025-04-10T11:17:53.533462Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:17:53.533505Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6777:4739], StatRequests.size() = 1 2025-04-10T11:17:53.628188Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWI2ZWQ4OGYtYzdmNjg1MjctN2VlYjgwMjYtZjdiZTlkOGI=, TxId: 2025-04-10T11:17:53.628270Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWI2ZWQ4OGYtYzdmNjg1MjctN2VlYjgwMjYtZjdiZTlkOGI=, TxId: 2025-04-10T11:17:53.628990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:17:53.641470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:17:53.641529Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:17:53.704794Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:17:53.704894Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:17:53.768025Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6779:4741], schemeshard count = 1 2025-04-10T11:17:54.745697Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:17:54.745780Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:17:54.745818Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:56.007752Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:56.018629Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:17:56.018775Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:17:56.018810Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:56.019086Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:17:56.021423Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:17:56.031161Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-10T11:17:54.000000Z 2025-04-10T11:17:56.031670Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjEyZGQyYWQtOTZkMTk1MjAtYmYxYTUwNDktN2NkNjlmZjQ=, TxId: 2025-04-10T11:17:56.031708Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjEyZGQyYWQtOTZkMTk1MjAtYmYxYTUwNDktN2NkNjlmZjQ=, TxId: 2025-04-10T11:17:56.032103Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:17:56.045344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:17:56.045397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2760:3247] 2025-04-10T11:17:56.045805Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6899:4813]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:17:56.048304Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:17:56.048353Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:17:56.051524Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:17:56.051577Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:17:56.051621Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:17:56.053578Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:17:56.053790Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> KqpLocks::Invalidate >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> KqpSnapshotIsolation::TConflictReadWriteOltp >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> KqpTx::BeginTransactionBadMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 31465, MsgBus: 30318 2025-04-10T11:17:26.453927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642077172482445:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:26.454051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f2d/r3tmp/tmpeKljQn/pdisk_1.dat 2025-04-10T11:17:26.742908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31465, node 1 2025-04-10T11:17:26.798015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:26.798041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:26.798063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:26.798173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:26.829709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:26.829844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:26.831549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30318 TClient is connected to server localhost:30318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:27.219048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:28.473459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642085762417694:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.473537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642085762417708:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.473618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:28.477662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:28.486727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642085762417715:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:28.585801Z node 1 :TX_PROXY ERROR: Actor# [1:7491642085762417766:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:28.802859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:28.899428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:28.899594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:28.899808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:28.899907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:28.899990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:28.900095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:28.900184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:28.900282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:28.900291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:28.900330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:28.900366Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:28.900428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:28.900493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:28.900523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:28.900590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642085762417958:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:28.900592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:28.900679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:28.900769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:28.900839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:28.900947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:28.901082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:28.901154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:28.901239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:28.901302Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491642085762417952:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:28.923478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491642085762417977:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:28.923547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491642085762417977:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:28.923578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;sel ... 4780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7491642167688945309:3060];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.054934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7491642167688945373:3092];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.055047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7491642167688945373:3092];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.055162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642167688945301:3057];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.055266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642167688945301:3057];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.055376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7491642167688945235:3056];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.055477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7491642167688945235:3056];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.061378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7491642167688945360:3083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.061555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7491642167688945360:3083];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.061703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7491642167688945434:3123];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.061813Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7491642167688945434:3123];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.062055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7491642167688945231:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.062121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7491642167688945729:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.062204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7491642167688945231:3054];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.062331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7491642167688945729:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.062956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7491642167688945347:3075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.063090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7491642167688945347:3075];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.063210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7491642167688945487:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.063307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7491642167688945487:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.065291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7491642171983913244:3244];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.065505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7491642171983913244:3244];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.065636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7491642167688945512:3160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.065739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7491642167688945512:3160];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.067103Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7491642167688945339:3069];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.067252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7491642167688945339:3069];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.068146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642167688945454:3137];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.068294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642167688945454:3137];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.068669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7491642167688945443:3129];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.068788Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7491642167688945443:3129];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.072372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7491642167688945388:3100];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.072548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7491642167688945388:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.078361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7491642171983913280:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.078534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7491642171983913280:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.078664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7491642167688945311:3061];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.078839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7491642167688945311:3061];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.078983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7491642167688945337:3068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-04-10T11:17:52.079090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7491642167688945337:3068];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.079194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7491642167688945405:3107];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.079320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7491642167688945405:3107];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.079449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7491642167688945307:3059];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.079556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7491642167688945307:3059];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.080978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7491642167688945511:3159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:52.081159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7491642167688945511:3159];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T11:17:56.216873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:56.216904Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::Interactive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::BeginTransactionBadMode [GOOD] Test command err: Trying to start YDB, gRPC: 27247, MsgBus: 4256 2025-04-10T11:17:38.374094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642126929572769:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:38.374219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ec3/r3tmp/tmpZ51znc/pdisk_1.dat 2025-04-10T11:17:38.607947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:38.608012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:38.609783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:38.609968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27247, node 1 2025-04-10T11:17:38.655848Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:38.655877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:38.655884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:38.656009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4256 TClient is connected to server localhost:4256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:39.024292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:39.041145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:39.155090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:39.275996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:39.337267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:40.670188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642135519509142:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.670299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.879196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.905581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.929759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.953947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:40.977518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.019505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.050085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642139814476945:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.050158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.050247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642139814476950:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.053813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:41.063108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642139814476952:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:17:41.147490Z node 1 :TX_PROXY ERROR: Actor# [1:7491642139814477007:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:43.374013Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642126929572769:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:43.374098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:53.609920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:53.609965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:54.999834Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642195649052813:2649], TxId: 281474976710683, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjhjNzgxZDctNjU1ZDEwNzgtMzY4OTI0NjItZjk5NDZhZGE=. TraceId : 01jrfpqdfvcb61g5m2q7h7175q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744283862000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744283862104/18446744073709551615 (node# 1 state# Ready) } } 2025-04-10T11:17:55.000241Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642195649052813:2649], TxId: 281474976710683, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjhjNzgxZDctNjU1ZDEwNzgtMzY4OTI0NjItZjk5NDZhZGE=. TraceId : 01jrfpqdfvcb61g5m2q7h7175q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744283862000/18446744073709551615 shard 72075186224037888 with lowWatermark v1744283862104/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-10T11:17:55.000534Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642195649052814:2650], TxId: 281474976710683, task: 2. Ctx: { TraceId : 01jrfpqdfvcb61g5m2q7h7175q. SessionId : ydb://session/3?node_id=1&id=MjhjNzgxZDctNjU1ZDEwNzgtMzY4OTI0NjItZjk5NDZhZGE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491642195649052809:2487], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:17:55.000834Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhjNzgxZDctNjU1ZDEwNzgtMzY4OTI0NjItZjk5NDZhZGE=, ActorId: [1:7491642139814477256:2487], ActorState: ExecuteState, TraceId: 01jrfpqdfvcb61g5m2q7h7175q, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28037, MsgBus: 24543 2025-04-10T11:17:55.597545Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642198340290980:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:55.597606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ec3/r3tmp/tmpSc1EAy/pdisk_1.dat 2025-04-10T11:17:55.671891Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28037, node 2 2025-04-10T11:17:55.722874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:55.722988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:55.724409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:55.729244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:55.729278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:55.729282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:55.729390Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24543 TClient is connected to server localhost:24543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:56.066049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:56.083983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:56.136192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:56.261199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:56.334112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:57.898804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642206930227359:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:57.898873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:57.935652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:57.960862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:17:57.986524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:17:58.010505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:17:58.033928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:17:58.098139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:17:58.144702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642211225195169:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:58.144756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642211225195174:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:58.144757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:58.147699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:17:58.156074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642211225195176:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:17:58.226698Z node 2 :TX_PROXY ERROR: Actor# [2:7491642211225195230:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkLocks::EmptyRange >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 17921, MsgBus: 4699 2025-04-10T11:17:36.253715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642120746827134:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:36.254260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ed2/r3tmp/tmpU2LS4R/pdisk_1.dat 2025-04-10T11:17:36.517761Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17921, node 1 2025-04-10T11:17:36.574871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:36.574897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:36.574914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:36.575042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:36.602381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:36.602534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:36.604628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4699 TClient is connected to server localhost:4699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:36.993001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:38.379717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642129336762383:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:38.379801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642129336762394:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:38.379857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:38.383744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:38.393201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642129336762397:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:38.478245Z node 1 :TX_PROXY ERROR: Actor# [1:7491642129336762448:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:38.736737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:38.834367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:39.600869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:41.253665Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642120746827134:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:41.253733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 29199, MsgBus: 26447 2025-04-10T11:17:46.718174Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642160631206666:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:46.718236Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ed2/r3tmp/tmpqv1GYb/pdisk_1.dat 2025-04-10T11:17:46.805878Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29199, node 2 2025-04-10T11:17:46.846120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:46.846224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:46.847844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:46.864234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:46.864255Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:46.864263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:46.864371Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26447 TClient is connected to server localhost:26447 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:47.230539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:49.018761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642173516109191:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:49.018885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642173516109214:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:49.018954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:49.022476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:49.030234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642173516109228:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:49.123838Z node 2 :TX_PROXY ERROR: Actor# [2:7491642173516109279:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:49.163423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:49.306136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:49.306138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7491642173516109463:2343];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:49.306317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:49.306578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:49.306662Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];tablet_id=7 ... 
075186224037919;self_id=[2:7491642177811078458:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.086829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7491642177811078519:2583];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642177811078472:2559];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7491642177811078288:2515];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037956;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642177811078472:2559];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7491642177811078664:2585];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7491642177811078288:2515];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037956;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7491642177811078664:2585];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:7491642173516110538:2468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037962;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.087884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:7491642173516110538:2468];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037962;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.089355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7491642173516110611:2476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037900;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.089543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7491642173516110611:2476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.091309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7491642173516110534:2466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.091452Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037993;self_id=[2:7491642173516110534:2466];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.110838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7491642186401015784:3086];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037998;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.111116Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7491642186401015784:3086];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037998;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.124132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642173516109472:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.124346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7491642173516109472:2345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.124590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7491642173516109504:2350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.124705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7491642173516109504:2350];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.127687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7491642173516109522:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.127858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7491642173516109522:2351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.128679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7491642173516110540:2469];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.128693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7491642173516109596:2352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.128835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7491642173516110540:2469];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.128861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7491642173516109596:2352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.129682Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.129814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7491642173516109464:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.130197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7491642173516110536:2467];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037994;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.130259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491642173516109476:2347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.130293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7491642173516110536:2467];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037994;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.130390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491642173516109476:2347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.131211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7491642173516109499:2348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.131323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7491642173516109499:2348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.134338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7491642173516110772:2492];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:17:58.134520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7491642173516110772:2492];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F58AE19CD8F 18. ??:0: ?? @ 0x7F58AE19CE3F 19. ??:0: ?? 
@ 0x1656B028 >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 31299, MsgBus: 26013 2025-04-10T11:17:41.598698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642139743119850:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:41.598776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eaa/r3tmp/tmpLDYpPV/pdisk_1.dat 2025-04-10T11:17:41.892253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31299, node 1 2025-04-10T11:17:41.947521Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:41.947544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:41.947549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:41.947653Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:41.959153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:41.959313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:41.961088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26013 TClient is connected to server localhost:26013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:42.385505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:43.711642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642148333055108:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:43.711707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642148333055101:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:43.711867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:43.714982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:43.722191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642148333055118:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:43.806779Z node 1 :TX_PROXY ERROR: Actor# [1:7491642148333055169:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:44.005822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.094756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.800407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:46.272183Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2IwMzgyMTMtOGM5ZTFhNzktY2NlNjgxMjMtOWNhNWIxZGM=, ActorId: [1:7491642156922998141:2967], ActorState: ExecuteState, TraceId: 01jrfpq51q4bnfhqk3bxymnbch, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 2025-04-10T11:17:46.598867Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642139743119850:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:46.598935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 22482, MsgBus: 6832 2025-04-10T11:17:52.038196Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642187369105298:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:52.038247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eaa/r3tmp/tmpIFpimb/pdisk_1.dat 2025-04-10T11:17:52.119248Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22482, node 2 2025-04-10T11:17:52.164104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:52.164172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:52.165229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:52.172884Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:52.172907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:52.172914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:52.173018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6832 TClient is connected to server localhost:6832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:52.499504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:17:54.143535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642195959040543:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.143593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642195959040530:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.143627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.146856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:54.154091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642195959040559:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:54.211619Z node 2 :TX_PROXY ERROR: Actor# [2:7491642195959040610:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:54.248072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:54.276845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:54.982122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:57.038585Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642187369105298:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:57.038678Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkTx::DeferredEffects >> KqpTx::RollbackTx >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> KqpSnapshotIsolation::TConflictWriteOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-04-10T11:16:58.479770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641956742827936:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:58.479843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017df/r3tmp/tmpLCdb1d/pdisk_1.dat 2025-04-10T11:16:58.803970Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10980, node 1 2025-04-10T11:16:58.843643Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.843827Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.850316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.850419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:58.861979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.865129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.865151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.865157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.865286Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:59.081786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:03.480022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641956742827936:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:03.480108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:13.803481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:13.803536Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> TPDiskRaces::OwnerKilledWhileReadingLog [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 10663, MsgBus: 28924 2025-04-10T11:17:42.748382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642143931937805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:42.748444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea5/r3tmp/tmpOYLTMl/pdisk_1.dat 2025-04-10T11:17:43.038771Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10663, node 1 2025-04-10T11:17:43.095477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:43.095494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:43.095502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:43.095620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:43.140102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-10T11:17:43.140204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:43.141848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28924 TClient is connected to server localhost:28924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:43.511895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:44.765729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642152521873058:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.765729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642152521873066:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.765808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:44.768826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:44.775880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642152521873072:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:44.854568Z node 1 :TX_PROXY ERROR: Actor# [1:7491642152521873123:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:45.075611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:45.162446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:45.822401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:47.748653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642143931937805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:47.748708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:49.135477Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-04-10T11:17:49.136490Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642173996718619:2966], SessionActorId: [1:7491642161111816157:2966], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[1:7491642173996718619:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:17:49.137283Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2025-04-10T11:17:49.137419Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1744283869181 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-10T11:17:49.147695Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642173996718619:2966], SessionActorId: [1:7491642161111816157:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491642161111816157:2966]. isRollback=0 2025-04-10T11:17:49.147978Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ExOTk5YjUtMjMzMTI0YTctODgyYWU0OTktYTNjOWE1NmY=, ActorId: [1:7491642161111816157:2966], ActorState: ExecuteState, TraceId: 01jrfpq7svff2bswd0hqedzna8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491642173996718620:2966] from: [1:7491642173996718619:2966] 2025-04-10T11:17:49.148039Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642173996718620:2966] TxId: 281474976710666. Ctx: { TraceId: 01jrfpq7svff2bswd0hqedzna8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ExOTk5YjUtMjMzMTI0YTctODgyYWU0OTktYTNjOWE1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:17:49.148217Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ExOTk5YjUtMjMzMTI0YTctODgyYWU0OTktYTNjOWE1NmY=, ActorId: [1:7491642161111816157:2966], ActorState: ExecuteState, TraceId: 01jrfpq7svff2bswd0hqedzna8, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 16647, MsgBus: 10777 2025-04-10T11:17:54.886254Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642194080643492:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:54.886332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea5/r3tmp/tmpafWiDx/pdisk_1.dat 2025-04-10T11:17:54.954169Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16647, node 2 2025-04-10T11:17:55.001761Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:55.001780Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:55.001788Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:55.001904Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:55.009705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:55.009769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:55.011428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10777 TClient is connected to server localhost:10777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:55.354562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:57.164861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642206965546047:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:57.164906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642206965546021:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:57.165217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:57.168524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:57.176202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642206965546050:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:57.254315Z node 2 :TX_PROXY ERROR: Actor# [2:7491642206965546101:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:57.288031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:57.345880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:58.222021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:59.677820Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmZhNTJlYjYtNDUzNTc1NmEtZjY4ODY2MjItM2Q2Yzk5NTY=, ActorId: [2:7491642215555489211:2968], ActorState: ExecuteState, TraceId: 01jrfpqj68bep75svchgkbv796, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 2025-04-10T11:17:59.886667Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642194080643492:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:59.886754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkTx::InvalidateOnError >> KqpTx::SnapshotRO >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink |89.3%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |89.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpSinkTx::Interactive [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink >> KqpSinkMvcc::OltpNamedStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 2364, MsgBus: 1893 2025-04-10T11:17:49.687881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642173145146757:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:49.687954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e98/r3tmp/tmpDPNFVy/pdisk_1.dat 2025-04-10T11:17:49.910982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:49.925439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:49.925548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:49.926973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2364, node 1 2025-04-10T11:17:49.978505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:49.978523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:49.978529Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:49.978627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1893 TClient is connected to server localhost:1893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:50.370464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:51.676327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642181735082015:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:51.676444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642181735081998:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:51.676587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:51.680011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:51.688267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642181735082020:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:51.790964Z node 1 :TX_PROXY ERROR: Actor# [1:7491642181735082073:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:52.004526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:52.112430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:52.782682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:53.773527Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY0MzNiZmMtNzA0OGE5YWQtN2JjMDNlMDYtYzViMDAxM2Y=, ActorId: [1:7491642190325025151:2966], ActorState: ReadyState, TraceId: 01jrfpqcf738d5jz2ea1asb31w, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-04-10T11:17:54.687546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642173145146757:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:54.687634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 28427, MsgBus: 15435 2025-04-10T11:17:59.607570Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642216862231878:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:59.607670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e98/r3tmp/tmpAVBqsR/pdisk_1.dat 2025-04-10T11:17:59.710187Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28427, node 2 2025-04-10T11:17:59.746455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:59.746571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:59.748770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:59.775831Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:59.775882Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:59.775894Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:59.776049Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15435 TClient is connected to server localhost:15435 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:00.135576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:01.983613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642225452167127:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.983687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642225452167131:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.983736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.987377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:01.995881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642225452167141:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:02.087579Z node 2 :TX_PROXY ERROR: Actor# [2:7491642229747134488:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:02.121085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:02.154829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:03.036797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:18:04.608022Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642216862231878:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:04.608083Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkTx::OlapInteractive [GOOD] >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 3952, MsgBus: 26542 2025-04-10T11:17:48.371603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642170183917984:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:48.371654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9a/r3tmp/tmpaALiSO/pdisk_1.dat 2025-04-10T11:17:48.624755Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3952, node 1 2025-04-10T11:17:48.673800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:48.673821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:48.673826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:48.673910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:48.721191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:48.721299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:48.722806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26542 TClient is connected to server localhost:26542 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:49.055800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:50.503498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642178773853214:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.503590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.503914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642178773853250:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.506734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:50.513619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642178773853252:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:50.612227Z node 1 :TX_PROXY ERROR: Actor# [1:7491642178773853304:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:50.823181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:50.916655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:51.574757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:53.371714Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642170183917984:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:53.371801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 28947, MsgBus: 28150 2025-04-10T11:17:59.022271Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642219107994243:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:59.022336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9a/r3tmp/tmpsYkfV4/pdisk_1.dat 2025-04-10T11:17:59.116036Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28947, node 2 2025-04-10T11:17:59.150625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:59.150719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:59.152236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:17:59.168358Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:59.168385Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:59.168392Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:59.168503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28150 TClient is connected to server localhost:28150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:59.523552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:01.494665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642227697929498:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.494754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642227697929472:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.495050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.497572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:01.505265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642227697929501:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:01.595683Z node 2 :TX_PROXY ERROR: Actor# [2:7491642227697929552:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:01.645087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.678488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:02.548998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:04.022252Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642219107994243:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:04.022295Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::RollbackTx2 [GOOD] >> KqpTx::SnapshotRO [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpTx::InteractiveTx >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 19441, MsgBus: 26110 2025-04-10T11:18:03.798715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642235619280272:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:03.798819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e79/r3tmp/tmphtD0Bs/pdisk_1.dat 2025-04-10T11:18:04.077957Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19441, node 1 2025-04-10T11:18:04.158345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:04.158373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:04.158378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:04.158480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:04.163890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:04.163987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:04.165614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:26110 TClient is connected to server localhost:26110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:04.626395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:04.649952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:04.768982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:04.895980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:04.953703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:05.959932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642244209216659:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:05.960052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.162283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.185221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.206701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.229733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.254775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.321744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.359688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642248504184467:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.359750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.359935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642248504184472:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.363168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:06.370785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642248504184474:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:06.445144Z node 1 :TX_PROXY ERROR: Actor# [1:7491642248504184528:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:07.231343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzE2YzhkMDktY2QyNGYwMmItOWFjOTEyZGItOGZkZTU2ZmY=, ActorId: [1:7491642252799152107:2495], ActorState: ReadyState, TraceId: 01jrfpqsks7sqrv01q29vka58b, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 62517, MsgBus: 26811 2025-04-10T11:18:07.816889Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642253133360817:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:07.816969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e79/r3tmp/tmpkGAfh3/pdisk_1.dat 2025-04-10T11:18:07.903608Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62517, node 2 2025-04-10T11:18:07.956210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:07.956330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:07.957418Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:07.957438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:07.957448Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:07.957591Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:07.957905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26811 TClient is connected to server localhost:26811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:08.272929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:08.279445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:08.320628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:08.432696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:08.503038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:10.212116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642266018264490:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.212187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.233088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.257496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.283448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.308510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.333280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.366391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.401007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642266018264996:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.401100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.401151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642266018265001:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.404755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:10.412955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642266018265003:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:10.507093Z node 2 :TX_PROXY ERROR: Actor# [2:7491642266018265058:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:11.368541Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODZhOGM1ZjktODRhNDRmOTQtMTA0NjNiZmMtYmI4MGNkMmM=, ActorId: [2:7491642270313232633:2494], ActorState: ReadyState, TraceId: 01jrfpqxn88ykp6w3w0sjj4h6j, Create QueryResponse for error on request, msg: >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSnapshotRead::TestReadOnly-withSink >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpLocks::MixedTxFail+useSink [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4828, MsgBus: 24142 2025-04-10T11:17:48.220420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642168194867358:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:48.220549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9d/r3tmp/tmpq0erG1/pdisk_1.dat 2025-04-10T11:17:48.476789Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4828, node 1 2025-04-10T11:17:48.537288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:48.537334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:48.537343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:48.537476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:48.554047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:48.554142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:48.555814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24142 TClient is connected to server localhost:24142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:48.945555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:50.310799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642176784802588:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.310872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642176784802622:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.310925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:50.314287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:50.322084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642176784802625:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:50.412485Z node 1 :TX_PROXY ERROR: Actor# [1:7491642176784802676:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:50.617730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:50.732702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:51.402164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:52.345147Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmExNjhhYi1hYzJhOTg5Yi1jMWZiMjEzNC00NjNkMWY3ZA==, ActorId: [1:7491642185374745605:2966], ActorState: ExecuteState, TraceId: 01jrfpqaxg9znc7d4c0a5nfaa0, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:17:53.220427Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642168194867358:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:53.220483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0067821D8F 18. ??:0: ?? @ 0x7F0067821E3F 19. ??:0: ?? @ 0x1656B028 Trying to start YDB, gRPC: 23550, MsgBus: 14766 2025-04-10T11:17:55.925651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642200112235081:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:55.925709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9d/r3tmp/tmp1vcRnE/pdisk_1.dat 2025-04-10T11:17:56.004368Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23550, node 2 2025-04-10T11:17:56.054863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:56.054937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:56.055751Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:56.055764Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:56.055770Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:56.055862Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:56.056416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14766 TClient is connected to server localhost:14766 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:56.375338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:58.165509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642212997137624:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:58.165591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642212997137610:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:58.165822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, ... 10T11:18:04.892594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7491642225882045670:3184];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.892846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7491642230177013189:3261];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.892972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7491642225882045670:3184];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642230177013253:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7491642230177013189:3261];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893537Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7491642230177013078:3229];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893540Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642230177013253:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7491642225882045681:3189];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.893982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7491642230177013078:3229];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7491642225882045681:3189];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7491642230177013176:3256];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7491642230177013007:3206];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894555Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038061;self_id=[2:7491642230177013007:3206];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7491642230177013176:3256];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7491642217292106779:2544];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037922;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894752Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7491642225882045675:3186];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.894868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7491642217292106779:2544];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.895162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7491642225882045675:3186];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.895361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7491642230177013499:3282];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.895703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7491642230177013499:3282];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.895989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642230177013259:3275];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.896376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7491642230177013259:3275];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.896627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642230177013083:3231];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.896995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7491642230177013083:3231];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.898001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7491642230177013085:3232];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.898453Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038085;self_id=[2:7491642230177013085:3232];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.898682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642230177013071:3227];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.899028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7491642230177013071:3227];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.899219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7491642230177013076:3228];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.899614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7491642230177013076:3228];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.899817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7491642230177013274:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.900178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7491642230177013274:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.066170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[2:7491642217292106878:2560];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037926;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.066496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[2:7491642217292106878:2560];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037926;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.105569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7491642217292106854:2555];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037917;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.105846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7491642217292106854:2555];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037917;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.123487Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715667; 2025-04-10T11:18:05.124859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.125006Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037916;self_id=[2:7491642217292106868:2558];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.125271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7491642217292106868:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.228726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[2:7491642217292106797:2547];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037927;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.228908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[2:7491642217292106797:2547];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037927;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.229042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7491642217292106883:2562];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037903;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:05.229143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7491642217292106883:2562];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] >> KqpSinkTx::DeferredEffects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 32143, MsgBus: 16100 2025-04-10T11:17:39.994454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642129437906561:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:39.994528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eae/r3tmp/tmp4JktUt/pdisk_1.dat 2025-04-10T11:17:40.252054Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32143, node 1 2025-04-10T11:17:40.285679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:40.285706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:40.285713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:40.285817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:40.320509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:40.320633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:40.322483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16100 TClient is connected to server localhost:16100 WaitRootIsUp 'Root'... 
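The repeated WAIT_INDEXATION: 0 lines above are the test harness polling for columnshard indexation, which these runs keep disabled (every periodic wakeup logs event=skip_indexation;reason=disabled), so the counter never advances. A hypothetical sketch of such a polling helper is below; the real loop lives in ydb/core/kqp/ut/tx/kqp_sink_common.h (visible in the stack trace earlier) and may differ.

    #include <functional>

    #include <util/datetime/base.h>
    #include <util/stream/output.h>
    #include <util/system/types.h>

    // Hypothetical helper in the spirit of the harness output: poll a counter
    // of completed indexations until it moves or the deadline passes.
    bool WaitForIndexation(const std::function<ui64()>& completedIndexations,
                           TDuration timeout = TDuration::Seconds(30)) {
        const TInstant deadline = TInstant::Now() + timeout;
        while (TInstant::Now() < deadline) {
            const ui64 done = completedIndexations();
            Cerr << "WAIT_INDEXATION: " << done << Endl; // the lines seen in this log
            if (done > 0) {
                return true;
            }
            Sleep(TDuration::Seconds(1));
        }
        return false; // indexation disabled in these runs, so the count stays 0
    }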
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:40.655641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:42.176610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642142322809098:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:42.176616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642142322809123:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:42.176694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:42.180101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:42.189596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642142322809127:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:42.269981Z node 1 :TX_PROXY ERROR: Actor# [1:7491642142322809178:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:42.468016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:42.591646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:42.591906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:42.592183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:42.592353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:42.592469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:42.592617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:42.592744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:42.592905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:42.593046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:42.593176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:42.593319Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:42.593442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491642142322809368:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:42.594006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:42.594083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:42.594321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:42.594476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:42.594632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:42.594831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:42.594994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:42.595190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:42.595352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:42.595526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:42.595651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:42.595781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491642142322809382:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:42.628847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642142322809370:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:42.628907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491642142322809370:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:42.629145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;sel ... 9094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642211456404408:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.340496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7491642211456404752:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.340683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7491642211456404752:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.340809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7491642211456404395:3245];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.340957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7491642211456404395:3245];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.341451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7491642211456404816:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.341603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7491642211456404816:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.343491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642211456404383:3237];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.343663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7491642211456404383:3237];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.344514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7491642211456404742:3300];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.344696Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7491642211456404742:3300];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.346571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642211456404756:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.346744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7491642211456404756:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.346879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7491642211456404371:3231];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.347028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7491642211456404371:3231];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.352691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642211456404730:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.352948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7491642211456404730:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.354780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7491642211456404052:3169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.354926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7491642211456404052:3169];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.355802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7491642211456404381:3236];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.355948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7491642211456404381:3236];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.356795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7491642211456404616:3288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.356943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7491642211456404616:3288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.357315Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642211456404760:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.357464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642211456404760:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.359862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7491642211456404351:3227];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.360018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7491642211456404351:3227];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.367090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7491642211456404677:3293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.367310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7491642211456404677:3293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.368630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7491642211456404744:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.368807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7491642211456404744:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.373749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7491642211456404456:3262];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.374000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7491642211456404456:3262];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.599067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7491642198571497942:2459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037956;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.599321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[2:7491642198571497942:2459];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037956;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.629287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7491642198571498094:2502];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-04-10T11:18:04.629608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7491642198571498094:2502];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.687171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[2:7491642198571497933:2455];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037953;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.687487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[2:7491642198571497933:2455];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037953;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.688149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7491642198571497891:2445];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037970;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.688386Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7491642198571497891:2445];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037970;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.717436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:7491642198571497846:2432];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037984;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:04.717777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:7491642198571497846:2432];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037984;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T11:18:07.174861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:07.174890Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 15587, MsgBus: 18025 2025-04-10T11:17:52.756674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642186846566242:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:52.756746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e91/r3tmp/tmp6EEo7L/pdisk_1.dat 2025-04-10T11:17:52.980502Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15587, node 1 2025-04-10T11:17:53.030336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:53.030362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:53.030368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:53.030491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:53.078585Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:53.078715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:53.080291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18025 TClient is connected to server localhost:18025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:53.408316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:54.665821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642195436501484:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.665883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642195436501498:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.665951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:54.668977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:54.676496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642195436501505:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:54.751745Z node 1 :TX_PROXY ERROR: Actor# [1:7491642195436501556:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:54.993666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:55.082387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:55.705795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:17:57.756536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642186846566242:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:57.756586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 7860, MsgBus: 26246 2025-04-10T11:18:02.775051Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642232305309063:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:02.775105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e91/r3tmp/tmpjJ6XKB/pdisk_1.dat 2025-04-10T11:18:02.863572Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7860, node 2 2025-04-10T11:18:02.903108Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:02.903206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:02.905620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:02.925344Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:02.925391Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:02.925399Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:02.925522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26246 TClient is connected to server localhost:26246 WaitRootIsUp 'Root'... 
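Further below, the second run of KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 reaches the conflict the test is named for: a concurrent commit breaks the transaction's optimistic locks (STATUS_LOCKS_BROKEN, issue code 2001), and KQP aborts it with "Transaction locks invalidated. Table: `/Root/KV`". The idiomatic client-side response is to retry the whole transaction; a minimal sketch with the C++ SDK retry helper follows (the Key/Value column names are assumed from the log, not verified).

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Sketch: RetryOperationSync re-runs the lambda on retryable statuses,
    // ABORTED included, so a lock-invalidated transaction is attempted again
    // from the top rather than surfaced to the caller.
    TStatus UpsertKvWithRetry(TTableClient& client) {
        return client.RetryOperationSync([](TSession session) -> TStatus {
            auto result = session.ExecuteDataQuery(
                "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"value\");",
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
            return result; // ABORTED ("Transaction locks invalidated") retries here
        });
    }

Since the test verifies that one of two concurrent writers must fail, the ABORTED sequence in the log is the expected outcome, which is why the suite still reports [GOOD].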
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:03.294108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:05.376330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642245190211593:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:05.376398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:05.376452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642245190211612:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:05.379920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:05.388428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642245190211622:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:05.465859Z node 2 :TX_PROXY ERROR: Actor# [2:7491642245190211673:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:05.503898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:05.535603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.317254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:07.591148Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-04-10T11:18:07.591974Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642253780154984:2967], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7491642253780154704:2967]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7491642253780154984:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:07.592493Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642253780154975:2967], SessionActorId: [2:7491642253780154704:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491642253780154704:2967]. isRollback=0 2025-04-10T11:18:07.592757Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTgzNDRkMzEtMzJlMDg4NjUtMWNhNzA0MGItYzdjZTEyY2Q=, ActorId: [2:7491642253780154704:2967], ActorState: ExecuteState, TraceId: 01jrfpqsxwfjfm03jt6wg49qgf, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491642253780154976:2967] from: [2:7491642253780154975:2967] 2025-04-10T11:18:07.592867Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642253780154976:2967] TxId: 281474976715666. Ctx: { TraceId: 01jrfpqsxwfjfm03jt6wg49qgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTgzNDRkMzEtMzJlMDg4NjUtMWNhNzA0MGItYzdjZTEyY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:07.593110Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTgzNDRkMzEtMzJlMDg4NjUtMWNhNzA0MGItYzdjZTEyY2Q=, ActorId: [2:7491642253780154704:2967], ActorState: ExecuteState, TraceId: 01jrfpqsxwfjfm03jt6wg49qgf, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-04-10T11:18:07.775321Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642232305309063:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:07.775389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-04-10T11:15:56.454908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:56.455315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:56.455393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00087e/r3tmp/tmpi4Pwky/pdisk_1.dat 2025-04-10T11:15:56.826709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19782, node 1 2025-04-10T11:15:57.051812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:57.051857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:57.051887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:57.052339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:57.055219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:57.138209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.138314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.151021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16898 2025-04-10T11:15:57.649070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:00.536781Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:00.575624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:00.575833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:00.605065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:00.607823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:00.843740Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.844203Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.844711Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.844813Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.844864Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.845062Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.845141Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.845233Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:00.845307Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:01.008796Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:01.008933Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:01.022400Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:01.177018Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:01.225824Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:01.225922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:01.259632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:01.260706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:01.260918Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:01.260983Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:01.261048Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:01.261111Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:01.261209Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:01.261265Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:01.261785Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:01.285878Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:01.285993Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:01.292683Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:01.296288Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:01.296585Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:01.306058Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:01.325936Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:01.326019Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:01.326113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:01.345491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:01.360335Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:01.360498Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:01.603869Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:01.745380Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:01.853992Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:02.826651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.826790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.845645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:02.938543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:02.938709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:02.938936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:02.939033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:02.939117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:02.939207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:02.939281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:02.939347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:02.939464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:02.939550Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:02.939625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:02.939715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:02.959931Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:16:02.960019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 04-10T11:16:16.819475Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:07.969202Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:18:07.969287Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:07.969361Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:07.969401Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:18:09.205739Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:18:09.205799Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 201.000000s, at schemeshard: 72075186224037897 2025-04-10T11:18:09.206049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-10T11:18:09.218848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:10.117862Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:10.117936Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:10.117967Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:18:10.118001Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:18:10.118039Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:10.118350Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:10.121100Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:10.123862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6988:5163], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.123928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6999:5168], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.123993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.132341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:18:10.170941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7002:5171], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:10.343311Z node 2 :TX_PROXY ERROR: Actor# [2:7098:5217] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:10.400310Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7127:5232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:10.400475Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:10.400531Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7129:5234] 2025-04-10T11:18:10.400585Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7129:5234] 2025-04-10T11:18:10.400855Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7130:5235] 2025-04-10T11:18:10.400957Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7130:5235], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:10.401021Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:10.401122Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7129:5234], server id = [2:7130:5235], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:10.401197Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:10.401267Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7127:5232], StatRequests.size() = 1 2025-04-10T11:18:10.500817Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTUyOGJlYzUtODhiYzVjODktODg4MzQ5MjItZGE0ZWEyYjg=, TxId: 2025-04-10T11:18:10.500886Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTUyOGJlYzUtODhiYzVjODktODg4MzQ5MjItZGE0ZWEyYjg=, TxId: 2025-04-10T11:18:10.501395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:10.514831Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:10.514907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:10.547324Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:10.547412Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:10.611617Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7129:5234], schemeshard count = 1 2025-04-10T11:18:11.381026Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:11.381117Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:11.384045Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:11.399614Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:11.400119Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:11.400172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:18:11.413415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:11.435086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:11.436247Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:11.436356Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:11.449482Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:12.410724Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:12.410848Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:12.410898Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:12.411343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:12.428394Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:12.428691Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:12.428748Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:12.429444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:12.442370Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:12.442573Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:12.443010Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7256:5311], server id = [2:7257:5312], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:12.443101Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7256:5311], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:12.446249Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:12.446351Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:12.446562Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:12.446722Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:12.446887Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7256:5311], server id = [2:7257:5312], tablet id = 72075186224037899 2025-04-10T11:18:12.446920Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:12.447082Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:12.449161Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:12.473770Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7277:5331]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:12.473942Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:12.473979Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7277:5331], StatRequests.size() = 1 2025-04-10T11:18:12.557484Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzU1NjU4ODAtZDAyNTA0N2ItZmZlZjRiMjgtYTEzZjhlZTA=, TxId: 2025-04-10T11:18:12.557534Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzU1NjU4ODAtZDAyNTA0N2ItZmZlZjRiMjgtYTEzZjhlZTA=, TxId: 2025-04-10T11:18:12.557893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:12.571149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:12.571232Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2805:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28442, MsgBus: 15734 2025-04-10T11:17:58.439794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642215204594016:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:58.439907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8e/r3tmp/tmpw4PytY/pdisk_1.dat 2025-04-10T11:17:58.693045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28442, node 1 2025-04-10T11:17:58.750230Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:58.750271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:58.750283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:58.750438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:58.792449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:58.792576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:58.794192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15734 TClient is connected to server localhost:15734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:59.150394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:59.178305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:59.278504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:59.398738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:59.476382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:00.697580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642223794530391:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:00.697682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:00.945812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:00.971198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:00.997013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.024111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.052009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.117617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.193858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642228089498204:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.193923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642228089498209:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.193932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.197190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:01.205183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642228089498211:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:01.307362Z node 1 :TX_PROXY ERROR: Actor# [1:7491642228089498266:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:02.164956Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-10T11:18:02.165210Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:02.165377Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:02.165583Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642232384465929:2495], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7491642228089498544:2495]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7491642232384465929:2495].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:02.166028Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642232384465915:2495], SessionActorId: [1:7491642228089498544:2495], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491642228089498544:2495]. isRollback=0 2025-04-10T11:18:02.166282Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBhNjc5OTUtMTRmNTY4ZmYtZTVjNzE1YmUtYWY4NGYzYWI=, ActorId: [1:7491642228089498544:2495], ActorState: ExecuteState, TraceId: 01jrfpqmkzbwcab17x007qfhpd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491642232384465916:2495] from: [1:7491642232384465915:2495] 2025-04-10T11:18:02.166346Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642232384465916:2495] TxId: 281474976710672. Ctx: { TraceId: 01jrfpqmkzbwcab17x007qfhpd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhNjc5OTUtMTRmNTY4ZmYtZTVjNzE1YmUtYWY4NGYzYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:02.166619Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBhNjc5OTUtMTRmNTY4ZmYtZTVjNzE1YmUtYWY4NGYzYWI=, ActorId: [1:7491642228089498544:2495], ActorState: ExecuteState, TraceId: 01jrfpqmkzbwcab17x007qfhpd, Create QueryResponse for error on request, msg:
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 4576, MsgBus: 28229 2025-04-10T11:18:03.084274Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642235330932353:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:03.084350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8e/r3tmp/tmpvu0Ld3/pdisk_1.dat 2025-04-10T11:18:03.206731Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4576, node 2 2025-04-10T11:18:03.246575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:03.246676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:03.248318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:03.271101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:03.271133Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:03.271141Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:03.271288Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28229 TClient is connec ... =TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.220706Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.221083Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.224774Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.227891Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.229052Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.233127Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.234800Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.236800Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.241186Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.241297Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.245687Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.245694Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.249913Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.249938Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.254812Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.259142Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.263801Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.267984Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.272017Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.276997Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.281300Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.283926Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.285539Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.289080Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.289768Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.293434Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.294537Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.297841Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:18:12.473626Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:18:12.473631Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:18:12.474035Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7491642266023905632:2360];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037899; 2025-04-10T11:18:12.474112Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:18:12.474120Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7491642266023905632:2360];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037899; 2025-04-10T11:18:12.474207Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7491642266023905632:2360];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037903; 2025-04-10T11:18:12.474273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[3:7491642266023905632:2360];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037903; 2025-04-10T11:18:12.474596Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; 2025-04-10T11:18:12.602115Z node 3 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642253139002276:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.602234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:12.706225Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-04-10T11:18:12.706887Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491642274613842518:2808], SessionActorId: [3:7491642274613842461:2808], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[3:7491642274613842518:2808].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:12.707042Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491642274613842518:2808], SessionActorId: [3:7491642274613842461:2808], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7491642274613842461:2808]. isRollback=0 2025-04-10T11:18:12.707248Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[3:7491642266023905693:2370];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:12.707259Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjUzMzk5OGMtOTRiNzliNGItMTI1ODE0MTktZTY1ZDNiZTk=, ActorId: [3:7491642274613842461:2808], ActorState: ExecuteState, TraceId: 01jrfpqywt9793vjh5046a13vx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7491642274613842544:2808] from: [3:7491642274613842518:2808] 2025-04-10T11:18:12.707348Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491642274613842544:2808] TxId: 281474976715668. Ctx: { TraceId: 01jrfpqywt9793vjh5046a13vx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjUzMzk5OGMtOTRiNzliNGItMTI1ODE0MTktZTY1ZDNiZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:12.707539Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjUzMzk5OGMtOTRiNzliNGItMTI1ODE0MTktZTY1ZDNiZTk=, ActorId: [3:7491642274613842461:2808], ActorState: ExecuteState, TraceId: 01jrfpqywt9793vjh5046a13vx, Create QueryResponse for error on request, msg: 2025-04-10T11:18:12.708613Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715668; 2025-04-10T11:18:12.710115Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715668; >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OlapMultiSinks [FAIL] >> KqpSinkMvcc::OltpMultiSinksNoSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: 2025-04-10T11:15:55.217939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:55.218323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:55.218383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008e6/r3tmp/tmp9jkK5A/pdisk_1.dat 2025-04-10T11:15:55.620387Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12729, node 1 2025-04-10T11:15:55.854526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:55.854577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:55.854610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:55.855118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:55.857989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.945662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.945795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.959013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12714 2025-04-10T11:15:56.471238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:59.252319Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:59.289838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.290015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.317816Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:59.320091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.562764Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563158Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563519Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563661Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563850Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563906Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.563954Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.564003Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.720406Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.720550Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.733973Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.871037Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:59.920173Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:59.920284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:59.958681Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:59.959884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:59.960140Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:59.960208Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:59.960268Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:59.960327Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:59.960394Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:59.960446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:59.960968Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:59.984463Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.984607Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.992435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:59.996742Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:59.997081Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:00.007763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:00.026492Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:00.026581Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:00.026661Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:00.045255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:00.059541Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:00.059719Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:00.273158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:00.419714Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:00.528407Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:01.536647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.536821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.557747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:01.663398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:01.663592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:01.663865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:01.663975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:01.664062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:01.664150Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:01.664239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:01.664335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:01.664464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:01.664554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:01.664631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:01.664699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:01.685793Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:16:01.685875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... =, TxId: 2025-04-10T11:18:08.888897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:08.901997Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:08.902059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:08.945345Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:08.945440Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:08.999095Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7130:5236], schemeshard count = 1 2025-04-10T11:18:09.975149Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:09.975234Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:09.977597Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:09.991799Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:09.992255Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:09.992309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:18:10.004578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:11.171195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:11.171271Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:11.171304Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:18:11.171342Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:11.171384Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:11.172176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:11.185727Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:11.185847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:11.186455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:11.186534Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-10T11:18:11.188102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:11.188677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:18:11.192954Z node 2 :STATISTICS ERROR: [72075186224037894] TEvDeliveryProblem with ColumnShard=72075186224037899 2025-04-10T11:18:11.208379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:11.208562Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:11.270197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7265:5315];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-10T11:18:11.441262Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7260:5312], server id = [2:7376:5410], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:11.441440Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7260:5312], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:11.445397Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:11.445552Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:11.445819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:11.446014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:11.446268Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7260:5312], server id = [2:7376:5410], tablet id = 72075186224037899 2025-04-10T11:18:11.446312Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:11.446562Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:11.449991Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:11.484897Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7396:5429]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:11.485085Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:11.485119Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7396:5429], StatRequests.size() = 1 2025-04-10T11:18:11.563024Z node 2 :STATISTICS DEBUG: [72075186224037894] Reset DeliveryProblem to ColumnShard=72075186224037899 2025-04-10T11:18:11.690825Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGVkZGQ0OTEtM2Q3ODE4NzgtYzg1Y2Q5YjctZmI4MzZlN2U=, TxId: 2025-04-10T11:18:11.690892Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGVkZGQ0OTEtM2Q3ODE4NzgtYzg1Y2Q5YjctZmI4MzZlN2U=, TxId: 2025-04-10T11:18:11.691285Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:11.704574Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:11.704637Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:11.935462Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:11.935563Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:12.321499Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:12.321587Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:12.321618Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:12.321785Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:12.322441Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:12.322511Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:12.335018Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:12.929153Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:18:12.929361Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:12.951017Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:12.951172Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
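The repeated [TQueryBase] RunDataQuery entries above all issue the same statistics upsert; laid out on separate lines it reads as follows. Note that the two list parameters appear as bare List in the log: their element types seem to have been stripped when the test output was escaped, so they are left here exactly as logged.

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List;
    DECLARE $data AS List;
    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);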
2025-04-10T11:18:12.951229Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:12.951775Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:12.965067Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:12.965450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:12.965534Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:12.965988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:12.979749Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:12.979962Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:12.980621Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7496:5485], server id = [2:7497:5486], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:12.980761Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7496:5485], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:12.982141Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:12.982284Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:12.982526Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:12.982712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:12.982882Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7496:5485], server id = [2:7497:5486], tablet id = 72075186224037899 2025-04-10T11:18:12.982921Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:12.983156Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:12.986485Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:13.030312Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTM4MDNiZDYtYzhkYmZiYTktOGUzOGVmNTctNzBlMzZlZGE=, TxId: 2025-04-10T11:18:13.030393Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTM4MDNiZDYtYzhkYmZiYTktOGUzOGVmNTctNzBlMzZlZGE=, TxId: 2025-04-10T11:18:13.030856Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:13.045192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:13.045261Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2803:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 64850, MsgBus: 27119 2025-04-10T11:17:59.353700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642217544043274:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:59.353763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8b/r3tmp/tmpzxnnGR/pdisk_1.dat 2025-04-10T11:17:59.632308Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64850, node 1 2025-04-10T11:17:59.709538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:59.709562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:59.709571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:59.709725Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:59.711370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:59.711464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:59.713180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27119 TClient is connected to server localhost:27119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:00.134582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:01.701826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642226133978535:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.701932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642226133978527:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.702088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:01.705983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:01.714546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642226133978541:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:01.814760Z node 1 :TX_PROXY ERROR: Actor# [1:7491642226133978593:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:02.063588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:02.178448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:02.949644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:03.940959Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDNiMjVhOGItODIyNmE4Mi01MzJkMWViLTVkZGExMDk5, ActorId: [1:7491642234723921728:2968], ActorState: ExecuteState, TraceId: 01jrfpqpab524bveghsvt1540p, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:18:04.353720Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642217544043274:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:04.353793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6D68729D8F 18. ??:0: ?? @ 0x7F6D68729E3F 19. ??:0: ?? @ 0x1656B028 Trying to start YDB, gRPC: 4219, MsgBus: 65489 2025-04-10T11:18:07.724473Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642253219766625:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:07.724529Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8b/r3tmp/tmpKykD2m/pdisk_1.dat 2025-04-10T11:18:07.807959Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4219, node 2 2025-04-10T11:18:07.853438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:07.853508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:07.854163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:07.854184Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:07.854192Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:07.854279Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:07.854851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65489 TClient is connected to server localhost:65489 WaitRootIsUp 'Root'... 
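The 'with diff' string in the KqpSnapshotIsolation::TConflictReadWriteOltpNoSink failure above is a compact character-level diff between the actual and the expected status name: text outside parentheses is common to both strings, and each (left|right) group holds the differing fragments, actual value on the left, expected value on the right. Expanded:

    (PRE|SUC) C (ONDITION_FAIL|) E (D|SS)
    actual   : PRE + C + ONDITION_FAIL + E + D  = PRECONDITION_FAILED
    expected : SUC + C +                 E + SS = SUCCESS

The PRECONDITION_FAILED status itself is explained by the KQP_SESSION warning a few entries earlier: the test ran a SnapshotRW transaction against a row-oriented table, which the server rejects with 'SnapshotRW can only be used with olap tables.'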
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:08.185976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:10.046864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642266104669172:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.046930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642266104669152:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.047272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.050295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:10.058404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642266104669184:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:10.134832Z node 2 :TX_PROXY ERROR: Actor# [2:7491642266104669235:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:10.177539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.213822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.065578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:12.197568Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODc2MmFkNjQtODY1YTljNjItZmQ2OTAzNDAtYTEwNGU0ZDc=, ActorId: [2:7491642274694612265:2967], ActorState: ExecuteState, TraceId: 01jrfpqyde4vtnsjjpmkqczs9v, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6D68729D8F 18. ??:0: ?? @ 0x7F6D68729E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-04-10T11:15:50.561077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.561372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.561424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00109b/r3tmp/tmp6XZOdq/pdisk_1.dat 2025-04-10T11:15:50.996003Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6552, node 1 2025-04-10T11:15:51.246656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.246723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.246762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.247276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.250440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.336289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.336444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.349788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5995 2025-04-10T11:15:51.901035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.900008Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:54.937849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:54.938036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:54.967698Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:54.970223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.214842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.215379Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.215966Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216114Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216199Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216572Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216649Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.216720Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.388286Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.388434Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.402223Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.564975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.626909Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.627049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.673159Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.674535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.674818Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.674899Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.674972Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.675044Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.675100Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.675157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.675726Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.699922Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.700075Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.707700Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.712693Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.713106Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.724971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.745353Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.745442Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.745541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:55.764774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:55.818927Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:55.819092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:55.973811Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.164437Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.242389Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.317989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.318177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.336835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.443866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.444102Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.471276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.471458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.471588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.471692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.471808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.471924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.472079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.472170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.472254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.472343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.494609Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:57.494724Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descri ... DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:08.974466Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7554:5529], server id = [2:7555:5530], tablet id = 72075186224037899 2025-04-10T11:18:08.974539Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:08.974755Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:08.977520Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:09.015543Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7575:5549]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:09.015870Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:09.015918Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7575:5549], StatRequests.size() = 1 2025-04-10T11:18:09.131389Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTQyMWYzNmYtODYzZTczOGUtZjE5NjUwMzYtMmM3NzhiMTU=, TxId: 2025-04-10T11:18:09.131464Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTQyMWYzNmYtODYzZTczOGUtZjE5NjUwMzYtMmM3NzhiMTU=, TxId: 2025-04-10T11:18:09.131933Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:09.145957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:09.146022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3265:3373] 2025-04-10T11:18:09.548189Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:09.548259Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:10.135366Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:10.135449Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-10T11:18:10.138452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:10.154632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:10.155176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:10.155260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 5], AnalyzedShards 1 2025-04-10T11:18:10.179645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:10.190432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:10.191281Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:10.191354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:10.204148Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:11.220006Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:11.220100Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-10T11:18:11.220160Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:11.220759Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:11.234729Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:11.235056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:11.235103Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:11.235361Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:11.259524Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:11.259675Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:11.260244Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7696:5609], server id = [2:7697:5610], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:11.260340Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7696:5609], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-10T11:18:11.263240Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:11.263342Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:11.263525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:11.263700Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:11.263921Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:11.266130Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7696:5609], server id = [2:7697:5610], tablet id = 72075186224037900 2025-04-10T11:18:11.266189Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:11.266859Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:11.287504Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTJjMGZkM2ItYjFjNzY1NzctNzc0OGE1ZWQtYjkxYjI4YWI=, TxId: 2025-04-10T11:18:11.287559Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTJjMGZkM2ItYjFjNzY1NzctNzc0OGE1ZWQtYjkxYjI4YWI=, TxId: 2025-04-10T11:18:11.287903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:11.312283Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:11.312337Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:11.721235Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:18:11.721323Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:12.250286Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:18:12.250495Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:12.261382Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:12.261469Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:12.261506Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:13.166835Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:13.166966Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-10T11:18:13.167011Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:13.167569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:13.181467Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:13.181770Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:13.181819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:13.182051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:13.194939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:13.195093Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-10T11:18:13.195460Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7797:5667], server id = [2:7798:5668], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:13.195521Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7797:5667], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-10T11:18:13.196591Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:13.196682Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:13.196814Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:13.196933Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:13.197093Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:13.198708Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7797:5667], server id = [2:7798:5668], tablet id = 72075186224037900 2025-04-10T11:18:13.198733Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:13.199179Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:13.217613Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mjk0YzM2NzktNGZjNjc2MGMtYTAxZTFmYWYtNjBhNzZmOWY=, TxId: 2025-04-10T11:18:13.217662Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mjk0YzM2NzktNGZjNjc2MGMtYTAxZTFmYWYtNjBhNzZmOWY=, TxId: 2025-04-10T11:18:13.218076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:13.231222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:13.231273Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3265:3373] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 29595, MsgBus: 16624 2025-04-10T11:17:39.110964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642130664729372:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:39.111059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb3/r3tmp/tmpEJHRBQ/pdisk_1.dat 2025-04-10T11:17:39.363264Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29595, node 1 2025-04-10T11:17:39.417363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:39.417396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:39.417411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:39.417574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:39.449352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:39.449491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:39.451221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16624 TClient is connected to server localhost:16624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:39.833083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:41.327835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642139254664629:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.327891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642139254664610:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.328132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:41.331791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:41.339181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642139254664639:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:41.398314Z node 1 :TX_PROXY ERROR: Actor# [1:7491642139254664690:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:41.624830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.746999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:17:42.471370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:17:44.110997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642130664729372:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:44.111067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:54.357663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:54.357689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:17:57.848445Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642207974151553:3279], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzEyYjgyMTMtOTViMDI5ZjUtYTI1NjU0OGQtYzdmY2Y4MjQ=. CustomerSuppliedId : . TraceId : 01jrfpqgemfjr7732fpjqjzw6m. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744283863364/18446744073709551615 shard 72075186224037889 with lowWatermark v1744283863595/18446744073709551615 (node# 1 state# Ready) } } 2025-04-10T11:17:57.849028Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642207974151553:3279], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzEyYjgyMTMtOTViMDI5ZjUtYTI1NjU0OGQtYzdmY2Y4MjQ=. CustomerSuppliedId : . TraceId : 01jrfpqgemfjr7732fpjqjzw6m. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1744283863364/18446744073709551615 shard 72075186224037889 with lowWatermark v1744283863595/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-10T11:17:57.849417Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642207974151556:3281], TxId: 281474976710679, task: 3. Ctx: { TraceId : 01jrfpqgemfjr7732fpjqjzw6m. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MzEyYjgyMTMtOTViMDI5ZjUtYTI1NjU0OGQtYzdmY2Y4MjQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491642207974151549:2966], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:17:57.849707Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzEyYjgyMTMtOTViMDI5ZjUtYTI1NjU0OGQtYzdmY2Y4MjQ=, ActorId: [1:7491642147844607682:2966], ActorState: ExecuteState, TraceId: 01jrfpqgemfjr7732fpjqjzw6m, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 24695, MsgBus: 4763 2025-04-10T11:18:03.681727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642232882510681:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:03.681834Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb3/r3tmp/tmp7LB3eo/pdisk_1.dat 2025-04-10T11:18:03.801978Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:03.828507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:03.828572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24695, node 2 2025-04-10T11:18:03.830327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:03.859322Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:03.859348Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:03.859357Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:03.859501Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4763 TClient is connected to server localhost:4763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:04.233692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:06.065078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642245767413211:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.065142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.065203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642245767413230:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.068222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:06.076842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642245767413240:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:06.131424Z node 2 :TX_PROXY ERROR: Actor# [2:7491642245767413291:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:06.175506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.245393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:07.129049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:18:08.681931Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642232882510681:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:08.682005Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> AnalyzeColumnshard::Analyze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 12588, MsgBus: 7945 2025-04-10T11:17:38.785068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642129139814213:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:38.785163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb6/r3tmp/tmp9A9oWa/pdisk_1.dat 2025-04-10T11:17:38.993333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12588, node 1 2025-04-10T11:17:39.054934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:39.054957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:39.054965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:39.055113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:39.114137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:39.114262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:39.116093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7945 TClient is connected to server localhost:7945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:39.460494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:40.737131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642137729749465:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.737302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642137729749473:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.737362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:40.741739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:40.750520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642137729749479:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:40.829435Z node 1 :TX_PROXY ERROR: Actor# [1:7491642137729749532:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:41.015304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:41.128909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:41.129077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:41.129284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:41.129404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:41.129472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:41.129546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:41.129624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:41.129712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:41.129846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:41.129927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:41.130012Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:41.130089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491642142024717022:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:41.132801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:41.132856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:41.133023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:41.133158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:41.133324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:41.133449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:41.133618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:41.133770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:41.133886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:41.134028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:41.134134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:41.134255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491642142024717016:2343];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:41.163763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642142024717028:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:41.163828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642142024717028:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:41.164006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_i ... 4;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592196Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037958;self_id=[2:7491642222786556726:2472];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037958;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592236Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7491642222786556648:2465];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592263Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037959;self_id=[2:7491642222786556644:2463];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037959;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592281Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7491642222786556732:2475];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592319Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7491642222786556606:2448];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592332Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7491642222786556640:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592365Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7491642222786556629:2456];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592372Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037968;self_id=[2:7491642222786556646:2464];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592405Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7491642222786556657:2469];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592408Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7491642222786556575:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592446Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7491642222786556599:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592466Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7491642222786556762:2485];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592509Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7491642222786556803:2493];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592519Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7491642222786556579:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592550Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7491642222786556741:2479];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592565Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7491642218491589257:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592590Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7491642222786556577:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592603Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7491642218491589264:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-04-10T11:18:08.592625Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7491642222786556637:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592637Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7491642222786556718:2470];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592666Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7491642218491589262:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592672Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7491642222786556587:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592702Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7491642222786556604:2447];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592730Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7491642222786556585:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592765Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7491642218491589272:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592785Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7491642218491589274:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592808Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7491642222786556635:2459];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592826Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7491642222786556610:2450];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592850Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037996;self_id=[2:7491642218491589270:2426];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592864Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7491642222786556655:2468];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592886Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7491642218491589277:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592905Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7491642222786556590:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592942Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7491642222786556601:2445];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.592981Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7491642218491589260:2422];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-10T11:18:08.593023Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7491642218491589266:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T11:18:11.386896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:11.386925Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables >> GroupWriteTest::ByTableName >> KqpTx::SnapshotROInteractive1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-04-10T11:16:09.430986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:09.431227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:09.431272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006f2/r3tmp/tmpGVEGoJ/pdisk_1.dat 2025-04-10T11:16:09.706638Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2708, node 1 2025-04-10T11:16:09.931617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:09.931670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:09.931705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:09.932224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:09.935254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:10.017545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:10.017648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:10.029961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32414 2025-04-10T11:16:10.487901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:13.056194Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:13.084959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:13.085059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:13.111604Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:13.113501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:13.345581Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.346301Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347016Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347187Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347303Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347616Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347719Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347817Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.347915Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:13.508862Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:13.508988Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:13.522569Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:13.648185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:13.692067Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:13.692163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:13.728979Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:13.730308Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:13.730582Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:13.730652Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:13.730717Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:13.730780Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:13.730836Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:13.730894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:13.731474Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:13.753681Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:13.753795Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:13.760162Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:13.763459Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:13.763760Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:13.772424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:13.787369Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:13.787438Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:13.787502Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:13.802716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:13.808998Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:13.809149Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:13.981805Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:14.137364Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:14.214794Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:15.240465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:15.240610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:15.258778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:15.360587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:15.360830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:15.361204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:15.361381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:15.361510Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:15.361646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:15.361764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:15.361887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:15.362082Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:15.362270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:15.362408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:15.362530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:15.392078Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:16:15.392195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 04-10T11:16:29.750950Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:10.716195Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:18:10.716276Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:10.716327Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:10.716371Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:18:11.853627Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:18:11.853723Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 234.000000s, at schemeshard: 72075186224037897 2025-04-10T11:18:11.854086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-10T11:18:11.878766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:12.677331Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:12.677441Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:12.677494Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:18:12.677564Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:18:12.677652Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:12.678202Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:12.682374Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:12.686315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6990:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.686440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7000:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.686530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.699379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:18:12.757094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7004:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:12.928896Z node 2 :TX_PROXY ERROR: Actor# [2:7099:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:13.003353Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7128:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:13.003632Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:13.003730Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7130:5236] 2025-04-10T11:18:13.003815Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7130:5236] 2025-04-10T11:18:13.004223Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7131:5237] 2025-04-10T11:18:13.004315Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7130:5236], server id = [2:7131:5237], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:13.004393Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7131:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:13.004468Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:13.004600Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:13.004690Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7128:5234], StatRequests.size() = 1 2025-04-10T11:18:13.154398Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzVkNTk1ZGUtZmFmMDUyNGYtYTQ1MDE4NTgtZWQ4YjQ5Yzk=, TxId: 2025-04-10T11:18:13.154483Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzVkNTk1ZGUtZmFmMDUyNGYtYTQ1MDE4NTgtZWQ4YjQ5Yzk=, TxId: 2025-04-10T11:18:13.154993Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:13.191894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:13.191981Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:13.213716Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:13.213821Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:13.267983Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7130:5236], schemeshard count = 1 2025-04-10T11:18:13.963578Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:13.963683Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:13.966480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:13.981977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:13.982548Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:13.982611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:18:14.006112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:14.028188Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:14.029359Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:14.029490Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:14.042970Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:15.076930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:15.077064Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:15.077111Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:15.077692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:15.091500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:15.091837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:15.091889Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:15.092671Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:15.105738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:15.105926Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:15.106399Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7260:5313], server id = [2:7261:5314], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:15.106493Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7260:5313], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.109733Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:15.109876Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:15.110185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:15.110351Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:15.110572Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7260:5313], server id = [2:7261:5314], tablet id = 72075186224037899 2025-04-10T11:18:15.110622Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.110808Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:15.113581Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:15.146432Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7281:5333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:15.146688Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:15.146753Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7281:5333], StatRequests.size() = 1 2025-04-10T11:18:15.279756Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzEyOTM0ZjAtYTIwZTc0ODAtN2QwZmMxN2UtYWYyOTc1NzA=, TxId: 2025-04-10T11:18:15.279832Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzEyOTM0ZjAtYTIwZTc0ODAtN2QwZmMxN2UtYWYyOTc1NzA=, TxId: 2025-04-10T11:18:15.280612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:15.295232Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:15.295324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2803:3226] >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> GroupWriteTest::WithRead >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] |89.3%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-04-10T11:15:51.785626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:51.786060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:51.786171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00102d/r3tmp/tmpmJOvqP/pdisk_1.dat 2025-04-10T11:15:52.146472Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3327, node 1 2025-04-10T11:15:52.385066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:52.385127Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:52.385158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:52.385624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:52.388342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.479451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:52.479588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:52.493441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2761 2025-04-10T11:15:53.023122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:56.410175Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:56.446590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.446763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.474556Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:56.476789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.718555Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.719024Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.719469Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.719572Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.719623Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.719913Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.720010Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.720090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.720167Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.878602Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.878729Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.891587Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.043268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:57.093382Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:57.093471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.125518Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.126462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.126638Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.126688Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.126733Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.126789Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.126828Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.126868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.127256Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.149514Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.149617Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.157353Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.161485Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.161847Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.171875Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.188898Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.188962Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.189052Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.206391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.213970Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.214117Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:57.448675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:57.591121Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:57.667767Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:58.677967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.678089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.703076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.000365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:59.000566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:59.000824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:59.000907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:59.000989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:59.001096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:59.001178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:59.001266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:59.001375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:59.001476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:59.001554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:59.001630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:59.024624Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:59.024719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TT ... [72075186224037894] Subscribed for config changes 2025-04-10T11:18:15.469419Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:15.469498Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:15.469667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:15.471065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:15.471145Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:15.472302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:15.527567Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:15.527811Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:15.529042Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8613:6512], server id = [2:8618:6517], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:15.529487Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8613:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.529874Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8614:6513], server id = [2:8619:6518], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:15.529941Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8614:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.530217Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8615:6514], server id = [2:8620:6519], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:15.530279Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8615:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.532135Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8616:6515], server id = [2:8621:6520], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:15.532203Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8616:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.533386Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8617:6516], server id = [2:8622:6521], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:15.533447Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8617:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.538780Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:15.539487Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8613:6512], server id = [2:8618:6517], tablet id = 72075186224037899 2025-04-10T11:18:15.539534Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.540901Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:15.541458Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8614:6513], server id = [2:8619:6518], 
tablet id = 72075186224037900 2025-04-10T11:18:15.541492Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.541709Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8635:6532], server id = [2:8638:6533], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:15.541795Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8635:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.543697Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8639:6534], server id = [2:8640:6535], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:15.543771Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8639:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.544667Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:15.545856Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8615:6514], server id = [2:8620:6519], tablet id = 72075186224037901 2025-04-10T11:18:15.545892Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.546329Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:15.547255Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8616:6515], server id = [2:8621:6520], tablet id = 72075186224037902 2025-04-10T11:18:15.547287Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.547672Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8644:6538], server id = [2:8648:6542], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:15.547751Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8644:6538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.548853Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:15.549831Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8646:6540], server id = [2:8650:6543], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:15.549914Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8646:6540], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.550304Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8617:6516], server id = [2:8622:6521], tablet id = 72075186224037903 2025-04-10T11:18:15.550340Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.552443Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8651:6544], server id = [2:8655:6547], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:15.552515Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8651:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:15.553239Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:15.554407Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8635:6532], server id = [2:8638:6533], tablet id = 72075186224037904 2025-04-10T11:18:15.554441Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.555428Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:15.555908Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8639:6534], server id = [2:8640:6535], tablet id = 72075186224037905 
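The statistics traversal logged above follows a plain fan-out/collect protocol: the aggregator tablet (72075186224037894) opens a pipe client to every ColumnShard of the table, sends each one a TEvStatisticsRequest, waits for the matching TEvStatisticsResponse events, and only then persists the aggregate through the parameterized UPSERT into `.metadata/_statistics` visible further below. A minimal C++ sketch of the same pattern follows; it uses std::async stand-ins rather than the real NKikimr actor interfaces, and every name in it is illustrative:

    #include <cstdint>
    #include <future>
    #include <iostream>
    #include <vector>

    // Illustrative stand-in for the real NKikimr statistics response event.
    struct TStatisticsResponse {
        uint64_t TabletId;
        uint64_t RowCount;
    };

    // Hypothetical per-tablet probe; the real code sends TEvStatisticsRequest
    // over a tablet pipe and waits for TEvStatisticsResponse.
    TStatisticsResponse QueryTablet(uint64_t tabletId) {
        return TStatisticsResponse{tabletId, 0};
    }

    int main() {
        // Fan the request out to every shard of the table, as the aggregator
        // tablet does above, then collect and sum the responses.
        std::vector<uint64_t> tablets = {72075186224037899ull,
                                         72075186224037900ull,
                                         72075186224037901ull};
        std::vector<std::future<TStatisticsResponse>> inflight;
        for (uint64_t id : tablets) {
            inflight.push_back(std::async(std::launch::async, QueryTablet, id));
        }
        uint64_t total = 0;
        for (auto& f : inflight) {
            total += f.get().RowCount; // aggregate before persisting
        }
        std::cout << "aggregated rows: " << total << "\n";
        return 0;
    }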
2025-04-10T11:18:15.555940Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.557391Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:15.557855Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8644:6538], server id = [2:8648:6542], tablet id = 72075186224037906 2025-04-10T11:18:15.557886Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.559110Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:15.559603Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8646:6540], server id = [2:8650:6543], tablet id = 72075186224037907 2025-04-10T11:18:15.559636Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.560086Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:15.560143Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:15.560415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:15.560631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:15.560924Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:15.564002Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6544], server id = [2:8655:6547], tablet id = 72075186224037908 2025-04-10T11:18:15.564077Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:15.564876Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:15.606034Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8679:6568]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:15.606375Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:15.606438Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8679:6568], StatRequests.size() = 1 2025-04-10T11:18:15.730010Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8690:6571] 2025-04-10T11:18:15.730285Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8690:6571], schemeshard id = 72075186224037897 2025-04-10T11:18:15.730410Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8564:6481], server id = [2:8691:6572], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:15.730532Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8691:6572] 2025-04-10T11:18:15.730568Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8691:6572], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:15.746461Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWNhMTMxYjYtNTQzYmNkYjYtMWZmZGVmOC03YzViOGY4YQ==, TxId: 2025-04-10T11:18:15.746552Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, 
SessionId: ydb://session/3?node_id=2&id=ZWNhMTMxYjYtNTQzYmNkYjYtMWZmZGVmOC03YzViOGY4YQ==, TxId: 2025-04-10T11:18:15.747021Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:15.782459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:15.782533Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:15.880145Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8698:6578]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:15.880517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:15.880575Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:15.883615Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:15.883686Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:15.883742Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:15.889821Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.4%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [FAIL] Test command err: Trying to start YDB, gRPC: 16293, MsgBus: 29063 2025-04-10T11:17:43.645528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642147264855228:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:43.645731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea1/r3tmp/tmphs6Toa/pdisk_1.dat 2025-04-10T11:17:43.885304Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16293, node 1 2025-04-10T11:17:43.941235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:43.941253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:43.941260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:43.941376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:43.991302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:43.991384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:17:43.993038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29063 TClient is connected to server localhost:29063 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:44.348407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:45.710821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642155854790495:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:45.710936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642155854790471:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:45.711056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:45.714791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:45.725150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642155854790498:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:17:45.828938Z node 1 :TX_PROXY ERROR: Actor# [1:7491642155854790549:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:46.071547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:46.187455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:46.187471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:17:46.187597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:46.187827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:46.187926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:46.188014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:46.188053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:17:46.188184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:17:46.188233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:46.188335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:17:46.188341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
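The TX_COLUMNSHARD WARN lines above are routine startup output, not failures: on TTxInitSchema every ColumnShard registers a fixed chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on), and TTxUpdateSchema then reports each one finished in registration order. A minimal C++ sketch of such a register-then-drain chain, with illustrative types rather than the real NKikimr column-shard classes:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Illustrative model of the normalizer chain registered in TTxInitSchema.
    struct TNormalizer {
        std::string Name;
        std::function<void()> Run;
    };

    int main() {
        std::vector<TNormalizer> chain;
        for (const char* name : {"Granules", "Chunks", "TablesCleaner",
                                 "CleanGranuleId", "CleanInsertionDedup",
                                 "GCCountersNormalizer", "RestorePortionFromChunks",
                                 "SyncPortionFromChunks", "SyncMinSnapshotFromChunks",
                                 "RestoreV1Chunks_V2", "RestoreV2Chunks"}) {
            // event=normalizer_register, as in the WARN lines above
            chain.push_back({name, [name] {
                std::cout << "normalizer_finished: " << name << "\n";
            }});
        }
        // TTxUpdateSchema then drains the chain in registration order.
        for (auto& n : chain) {
            n.Run();
        }
        return 0;
    }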
2025-04-10T11:17:46.188450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:17:46.188470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:46.188569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:17:46.188598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:46.188672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:17:46.188715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:46.188781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:17:46.188851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:46.188896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:17:46.188965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491642160149758044:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:46.188992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:17:46.189107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:17:46.189222Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491642160149758037:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:17:46.192681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:17:46.192748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:17:46.192880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_N ... vNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.154867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7491642245538835202:3057];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.154978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7491642245538835202:3057];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.156988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7491642249833803550:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.157234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7491642249833803550:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.160004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7491642245538835997:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.160251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7491642245538835997:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.162032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7491642245538835692:3173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.162295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7491642245538835692:3173];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.165644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7491642245538835507:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.165829Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038072;self_id=[2:7491642245538835507:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.166117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7491642245538836103:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.166256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7491642245538836103:3248];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.166644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642245538836017:3220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.166833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7491642245538836017:3220];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.167673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7491642245538835828:3197];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.167822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7491642245538835828:3197];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.170292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7491642245538836057:3238];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.170486Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7491642245538836057:3238];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.174855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642249833803429:3256];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.175126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7491642249833803429:3256];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.175282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642245538835164:3044];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.175451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7491642245538835164:3044];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.177391Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038024;self_id=[2:7491642245538836115:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.177575Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7491642245538836115:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.180477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642245538836021:3222];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.180681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7491642245538836021:3222];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.182079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642249833803505:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.182343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7491642249833803505:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.182524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7491642245538836105:3249];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.182684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7491642245538836105:3249];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.187128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7491642249833803591:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.187305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7491642249833803591:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.263923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7491642245538835967:3213];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.264209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7491642245538835967:3213];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.843529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7491642232653929853:2502];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:13.843825Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037920;self_id=[2:7491642232653929853:2502];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:558, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]" 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:558: CompareYson @ 0x48DC52D7 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x18F41B6A 4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18F1017A 6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18F162D7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18F162D7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18F162D7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F162D7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F162D7 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 12. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18F154A3 16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 18. ??:0: ?? @ 0x7F7CE852AD8F 19. ??:0: ?? @ 0x7F7CE852AE3F 20. ??:0: ?? 
@ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 64683, MsgBus: 19613 2025-04-10T11:18:08.255360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642257263585712:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:08.255452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e14/r3tmp/tmpUD77BI/pdisk_1.dat 2025-04-10T11:18:08.530537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64683, node 1 2025-04-10T11:18:08.600693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:08.600713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:08.600721Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:08.600871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:08.633497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:08.633626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:08.635160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19613 TClient is connected to server localhost:19613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:09.031993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.053386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.165382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
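On the KqpSinkMvcc::OlapMultiSinks failure reported just above: CompareYson (ydb/core/kqp/ut/common/kqp_ut_common.cpp:558) reformats both YSON documents before comparing, so the assertion fires only on a semantic difference. Here the test expected [[["2"]]], the value written by the last sink, but read [[["1"]]], the earlier write, which is exactly the multi-sink visibility property the test guards. A rough, self-contained C++ stand-in for that helper (the real ReformatYson does a full YSON round-trip; stripping whitespace is enough for a sketch):

    #include <algorithm>
    #include <cassert>
    #include <cctype>
    #include <string>

    // Rough stand-in for the test helper: normalize both documents, then
    // compare, so formatting alone can never fail the check.
    std::string ReformatYson(std::string s) {
        s.erase(std::remove_if(s.begin(), s.end(),
                               [](unsigned char c) { return std::isspace(c) != 0; }),
                s.end());
        return s;
    }

    int main() {
        // The failing check above, in miniature: the test expected the value
        // written by the last sink ("2") but read the earlier one ("1").
        std::string expected = R"([[["2"]]])";
        std::string actual = R"([[["1"]]])";
        assert(ReformatYson(expected) == ReformatYson(actual)); // fires, as in the log
        return 0;
    }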
2025-04-10T11:18:09.287383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.335474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:10.368677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642265853522085:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.368754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.648661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.677188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.705535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.731706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.759157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.807846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.843557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642265853522597:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.843642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.843720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642265853522602:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.847024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:10.856758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642265853522604:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:10.923809Z node 1 :TX_PROXY ERROR: Actor# [1:7491642265853522657:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:12.177743Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGY3OGMyNTUtNjEwZWM2MWItNDk3NzZhN2UtZmIwM2E3ZWU=, ActorId: [1:7491642270148490241:2495], ActorState: ExecuteState, TraceId: 01jrfpqyd29jdjxw1vp80b6z0e, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 16888, MsgBus: 4236 2025-04-10T11:18:12.661878Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642274236721716:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.661979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e14/r3tmp/tmpPmirk0/pdisk_1.dat 2025-04-10T11:18:12.741919Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16888, node 2 2025-04-10T11:18:12.792481Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:12.792505Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:12.792512Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:12.792637Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:12.793897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:12.793976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:12.795587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4236 TClient is connected to server localhost:4236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.192517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.200218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.269587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
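The ":3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008" line above is the expected outcome of KqpTx::SnapshotROInteractive1 rather than a defect: the test opens an interactive snapshot read-only transaction and checks that a write inside it is rejected. A minimal sketch against the NYdb C++ SDK; the include path, the table name, and the availability of TTxSettings::SnapshotRO() are assumptions from memory and may differ between SDK versions:

    // Assumed in-tree SDK include path; adjust to your checkout or SDK package.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    int main() {
        using namespace NYdb;
        using namespace NYdb::NTable;
        TDriver driver(TDriverConfig()
                           .SetEndpoint("localhost:2136") // illustrative endpoint
                           .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();
        // Begin an interactive snapshot read-only transaction and try to write
        // inside it; the server should reject the UPSERT, as in the log above.
        auto result = session.ExecuteDataQuery(
            "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"x\");", // hypothetical table
            TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx())
            .GetValueSync();
        if (!result.IsSuccess()) {
            std::cout << result.GetIssues().ToString(); // expect the code 2008 issue
        }
        driver.Stop(true);
        return 0;
    }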
2025-04-10T11:18:13.416069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.479542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.438345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642287121625383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.438492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.469705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.499186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.525448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.549846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.574930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.607015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.646813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642287121625889:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.646926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.646971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642287121625894:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.649629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:15.656547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642287121625896:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:15.713497Z node 2 :TX_PROXY ERROR: Actor# [2:7491642287121625951:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkTx::InvalidateOnError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 63163, MsgBus: 15844 2025-04-10T11:18:06.340225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642246872222084:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:06.340301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e5d/r3tmp/tmpPBkUkY/pdisk_1.dat 2025-04-10T11:18:06.608284Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63163, node 1 2025-04-10T11:18:06.643791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:06.643819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:06.643826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:06.643987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:06.694953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:06.695094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:06.696768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15844 TClient is connected to server localhost:15844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:07.065129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
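The recurring "Resource pool default not found or you don't have access permissions" warnings throughout this section are the workload manager lazily bootstrapping `.metadata/workload_manager/pools/default` on first use: the initial fetch returns NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, the creator schedules a "doublechecking" retry, and a concurrent creation racing ahead is tolerated ("path exist, request accepts it"). On a fresh test cluster the whole sequence is expected. An illustrative C++ create-then-doublecheck loop; both helpers are hypothetical and only model the logged sequence:

    #include <chrono>
    #include <iostream>
    #include <thread>

    // Hypothetical probe: the first fetches return NOT_FOUND, as in the log.
    bool FetchDefaultPool() {
        static int calls = 0;
        return ++calls > 2;
    }

    // Hypothetical creator; a racing creator is fine, since the schemeshard
    // answers "path exist, request accepts it".
    void CreateDefaultPool() {
        std::cout << "propose ESchemeOpCreateResourcePool (idempotent)\n";
    }

    int main() {
        if (!FetchDefaultPool()) {
            CreateDefaultPool();
            // "Scheduled retry for error: ... completed, doublechecking"
            while (!FetchDefaultPool()) {
                std::this_thread::sleep_for(std::chrono::milliseconds(50));
            }
        }
        std::cout << "default pool ready\n";
        return 0;
    }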
2025-04-10T11:18:07.080189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:07.183916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:07.324736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:07.389466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:08.571310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642255462158446:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:08.571423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:08.798097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.821897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.845584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.867542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.890256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.914024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.945815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642255462158956:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:08.945896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:08.945907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642255462158961:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:08.948926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:08.959538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642255462158963:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:09.045325Z node 1 :TX_PROXY ERROR: Actor# [1:7491642259757126313:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:09.719193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:18:09.787201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:18:09.819445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.340295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642246872222084:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:11.340371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25201, MsgBus: 28349 2025-04-10T11:18:12.965922Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642273533420341:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.966018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e5d/r3tmp/tmpcAmQE3/pdisk_1.dat 2025-04-10T11:18:13.064940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:13.094011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:13.094100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:13.095676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25201, node 2 2025-04-10T11:18:13.127630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:13.127663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:13.127678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:13.127805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28349 TClient is connected to server localhost:28349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.477636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.495823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.567161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.711359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.770342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.555422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642286418324004:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.555500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.608779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.633869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.658964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.684701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.711436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.735506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.771544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642286418324510:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.771610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.771716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642286418324515:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.775191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:15.784686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642286418324517:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:15.870747Z node 2 :TX_PROXY ERROR: Actor# [2:7491642286418324571:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:16.913323Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-04-10T11:18:16.925452Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642290713292262:2494], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7491642290713292151:2494]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7491642290713292262:2494].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:16.925966Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642290713292255:2494], SessionActorId: [2:7491642290713292151:2494], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491642290713292151:2494]. isRollback=0 2025-04-10T11:18:16.926242Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzNjY2NmOWQtZTY0NzhjNTctZDgwZWUwODEtZjViZDNmNTI=, ActorId: [2:7491642290713292151:2494], ActorState: ExecuteState, TraceId: 01jrfpr30sf0agjw5mpwg919r2, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491642290713292256:2494] from: [2:7491642290713292255:2494] 2025-04-10T11:18:16.926330Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642290713292256:2494] TxId: 281474976715673. Ctx: { TraceId: 01jrfpr30sf0agjw5mpwg919r2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNjY2NmOWQtZTY0NzhjNTctZDgwZWUwODEtZjViZDNmNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:16.926555Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzNjY2NmOWQtZTY0NzhjNTctZDgwZWUwODEtZjViZDNmNTI=, ActorId: [2:7491642290713292151:2494], ActorState: ExecuteState, TraceId: 01jrfpr30sf0agjw5mpwg919r2, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-04-10T11:15:52.772585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:52.772975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:52.773047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fc3/r3tmp/tmp3ymFTp/pdisk_1.dat 2025-04-10T11:15:53.155231Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30897, node 1 2025-04-10T11:15:53.394573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:53.394637Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:53.394671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:53.395151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:53.402874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.496482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:53.496637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:53.511064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19616 2025-04-10T11:15:54.018618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:57.210214Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:57.241199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.241368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.267760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:57.269437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.498650Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.499178Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.499788Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.499946Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.500038Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.500337Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.500430Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.500523Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.500617Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.664682Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.664792Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.678013Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.813177Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:57.858193Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:57.858291Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.893237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.894319Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.894552Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.894612Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.894663Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.894716Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.894770Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.894857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.895309Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.916876Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.916995Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.923917Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.927503Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.927739Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.935631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.949286Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.949355Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.949426Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.963268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.973025Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.973153Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:58.140220Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:58.269268Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:58.346071Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:59.333407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.333525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.346908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.764281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.764440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.765805Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:15:59.765986Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:15:59.766076Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133] 2025-04-10T11:15:59.766170Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133] 2025-04-10T11:15:59.766704Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994] 2025-04-10T11:15:59.766934Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK 2025-04-10T11:15:59.767124Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:15:59.767189Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:15:59.767392Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:15:59.767466Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1 2025-04-10T11:15:59.786649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.786759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.787285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.793822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:15:59.970797Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:15:59.970867Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:00.045636Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1 2025-04-10T11:16:00.388404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 04-10T11:17:13.533097Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:15.926630Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:15.926964Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:19.149941Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:21.613206Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:21.613657Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:24.897506Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:27.107673Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:27.108304Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:30.394958Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:32.429507Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:32.429892Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:35.999807Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:38.444205Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:38.444614Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:41.585680Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:43.840048Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:43.840492Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:46.969295Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:49.448995Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:49.449437Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:52.753091Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:17:55.366075Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:17:55.366571Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:17:58.981822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:18:01.487166Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 
2025-04-10T11:18:01.487559Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:18:05.187901Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:18:07.573824Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:18:07.574216Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:18:11.131597Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:18:12.406187Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:18:12.406268Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:12.406300Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:18:12.406334Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:18:13.920667Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:18:13.921090Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:18:13.963618Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-10T11:18:13.963706Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224037897 2025-04-10T11:18:13.964005Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-10T11:18:13.976647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:15.155471Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:15.155536Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:15.155570Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:18:15.155607Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:18:15.155642Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:15.155955Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:15.158927Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:15.161630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6726:4730], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.161725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6736:4735], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.161828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.170746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:18:15.212555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6740:4738], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:15.394121Z node 2 :TX_PROXY ERROR: Actor# [2:6836:4784] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:15.428857Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6865:4799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:15.429039Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:15.429114Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6867:4801] 2025-04-10T11:18:15.429158Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6867:4801] 2025-04-10T11:18:15.429432Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6868:4802] 2025-04-10T11:18:15.429512Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6867:4801], server id = [2:6868:4802], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:15.429579Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6868:4802], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:15.429648Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:15.429834Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:15.429914Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6865:4799], StatRequests.size() = 1 2025-04-10T11:18:15.534347Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2E4NTk0ZWMtNThjZjNhYmMtMzAxYThkMmYtNzczOTY2MDc=, TxId: 2025-04-10T11:18:15.534399Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2E4NTk0ZWMtNThjZjNhYmMtMzAxYThkMmYtNzczOTY2MDc=, TxId: 2025-04-10T11:18:15.534847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:15.547636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:15.547682Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:15.600280Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:15.600364Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:15.673985Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6867:4801], schemeshard count = 1 2025-04-10T11:18:16.695259Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:16.695380Z node 2 :STATISTICS ERROR: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-04-10T11:18:16.695763Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:16.698829Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:16.709434Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTI3ZDg1MGUtMzc1NDRlMi1hZDhhMTkzMC1jNjAxYzkxOQ==, TxId: 2025-04-10T11:18:16.709516Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTI3ZDg1MGUtMzc1NDRlMi1hZDhhMTkzMC1jNjAxYzkxOQ==, TxId: 2025-04-10T11:18:16.709942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:16.723546Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.723611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2868:3279] 2025-04-10T11:18:16.724127Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6951:4855]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:16.726475Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:16.726526Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-04-10T11:18:16.726570Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-04-10T11:15:50.150848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.151376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.151469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010ef/r3tmp/tmpdgSaZM/pdisk_1.dat 2025-04-10T11:15:50.643289Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18101, node 1 2025-04-10T11:15:51.181032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.181097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.181133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.181686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.186311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.276873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.277398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.292765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6922 2025-04-10T11:15:51.818430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.187518Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.231304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.231523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.262974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.265885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.505349Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.505857Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.506436Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.506612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.506697Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.507001Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.507107Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.507184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.507268Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.671602Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.671778Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.685738Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.847867Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.899003Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.899081Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.936644Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.937863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.938106Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.938202Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.938263Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.938315Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.938370Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.938440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.938974Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.961392Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.961524Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.968088Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.971843Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.972169Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.980223Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.996728Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.996788Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.996859Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:56.013378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.059981Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.060113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.206562Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.349765Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.448081Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.431860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.432027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.452186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.546494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.546711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.547001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.547110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.547192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.547293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.547380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.547542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.547678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.547805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.547925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.548034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.572823Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:57.572902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 8Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7252:5308] 2025-04-10T11:18:13.440943Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7252:5308] 2025-04-10T11:18:13.487911Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:18:13.488009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:18:13.488682Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:18:13.489074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:18:13.489398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-10T11:18:13.489439Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-10T11:18:13.489472Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-10T11:18:13.489507Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-10T11:18:13.489540Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744283893423863 2025-04-10T11:18:13.489570Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-10T11:18:13.489639Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-10T11:18:13.489697Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:18:13.489790Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-10T11:18:13.489857Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-10T11:18:13.489933Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-10T11:18:13.490013Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:18:13.490174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:13.491070Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:18:13.491826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:13.491879Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:13.491977Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-10T11:18:13.567574Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7298:5337] 2025-04-10T11:18:13.567862Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7298:5337], schemeshard id = 72075186224037897 2025-04-10T11:18:13.568011Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7252:5308], server id = [2:7299:5338], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:13.568070Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7299:5338] 2025-04-10T11:18:13.568170Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7299:5338], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-10T11:18:13.657437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:13.657558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:13.659057Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7310:5341] 2025-04-10T11:18:13.659274Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:13.660265Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2805:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:18:13.660327Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2805:3226] 2025-04-10T11:18:13.674753Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:13.674905Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:18:13.675071Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:13.675727Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7313:5344], server id = [2:7314:5345], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:13.675883Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7313:5344], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:13.680375Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:13.680549Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:13.680764Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:13.680988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:13.681339Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:13.685050Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7313:5344], server id = [2:7314:5345], tablet id = 72075186224037899 2025-04-10T11:18:13.685117Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:13.685908Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:13.728673Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7334:5364]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:13.729010Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:13.729069Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7334:5364], StatRequests.size() = 1 2025-04-10T11:18:13.860012Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTEzYTNmOTktODI3OWZkYjUtNDcwMGUyYjAtNjg0Mzk4Yjg=, TxId: 2025-04-10T11:18:13.860105Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTEzYTNmOTktODI3OWZkYjUtNDcwMGUyYjAtNjg0Mzk4Yjg=, TxId: 2025-04-10T11:18:13.860824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:13.875188Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:13.875261Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:14.420983Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:14.421094Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:15.104469Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:15.104565Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:15.104621Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:16.351267Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:16.351454Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:16.351512Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.352315Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:16.366310Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:16.366768Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:16.366837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:16.367246Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute.
Node count = 1 2025-04-10T11:18:16.380932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:16.381135Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:16.381722Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7421:5414], server id = [2:7422:5415], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:16.381823Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7421:5414], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.383091Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:16.383183Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:16.383344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:16.383513Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:16.383769Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:16.386245Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7421:5414], server id = [2:7422:5415], tablet id = 72075186224037899 2025-04-10T11:18:16.386283Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.386750Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:16.408409Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2UzM2I1MDQtNDdlZDhkNzgtYWE5ZGE3ZjYtYmRmMjBmODc=, TxId: 2025-04-10T11:18:16.408483Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2UzM2I1MDQtNDdlZDhkNzgtYWE5ZGE3ZjYtYmRmMjBmODc=, TxId: 2025-04-10T11:18:16.409015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:16.434331Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.434403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2805:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 30077, MsgBus: 21927 2025-04-10T11:18:02.111370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642232410536755:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:02.111522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e81/r3tmp/tmpstKhRq/pdisk_1.dat 2025-04-10T11:18:02.342342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:02.355662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:02.355783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:02.357095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30077, node 1 2025-04-10T11:18:02.406559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:02.406590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:02.406634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:02.406785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21927 TClient is connected to server localhost:21927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:02.825270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:04.420770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642241000472015:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:04.420888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642241000471982:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:04.420976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:04.424462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:04.431924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642241000472019:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:18:04.527565Z node 1 :TX_PROXY ERROR: Actor# [1:7491642241000472070:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:04.779379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:04.900455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:05.612868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:06.668857Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IxNjBmZjYtZjczY2QzYTQtODNkZTAwZjQtMTg5NmNlMA==, ActorId: [1:7491642249590415253:2968], ActorState: ExecuteState, TraceId: 01jrfpqrxq4r04essgw6meq6jz, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:18:07.111550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642232410536755:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:07.111616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F978CDCAD8F 18. ??:0: ?? @ 0x7F978CDCAE3F 19. ??:0: ?? @ 0x1656B028 Trying to start YDB, gRPC: 17378, MsgBus: 4291 2025-04-10T11:18:10.407710Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642264242719540:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:10.407872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e81/r3tmp/tmpPNNuLi/pdisk_1.dat 2025-04-10T11:18:10.498497Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17378, node 2 2025-04-10T11:18:10.537869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:10.537990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:10.539671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:10.553871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:10.553903Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:10.553910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:10.554033Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4291 TClient is connected to server localhost:4291 WaitRootIsUp 'Root'... 
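[editor's note on the failure above] Both nodes in this failing test hit the same wall: the session warns "SnapshotRW can only be used with olap tables", so the assertion at kqp_snapshot_isolation_ut.cpp:185 sees PRECONDITION_FAILED where it expects SUCCESS. The tables created here via ESchemeOpCreateTable are row-oriented (OLTP), and KQP rejects SnapshotRW isolation for them. A minimal repro sketch with the C++ SDK, assuming the SDK exposes TTxSettings::SnapshotRW() as the test name suggests; the table path and query are illustrative, not taken from the test:

    // Sketch only, not the unit test body. TTxSettings::SnapshotRW() is
    // assumed from the test name; `/Root/KV` stands in for any row table.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    TDataQueryResult TrySnapshotRwOnRowTable(TSession session) {
        auto result = session.ExecuteDataQuery(
            "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"one\");",
            TTxControl::BeginTx(TTxSettings::SnapshotRW()).CommitTx()
        ).GetValueSync();
        // Expected on a row table: result.GetStatus() == EStatus::PRECONDITION_FAILED
        // with the issue "SnapshotRW can only be used with olap tables."
        return result;
    }

Against a column (OLAP) table the same transaction settings would presumably be accepted, which is what the OLAP variants of this suite appear to exercise.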
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:10.892217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:12.864105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642272832654785:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.864374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642272832654765:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.864476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:12.867116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:12.877131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642272832654802:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:12.980254Z node 2 :TX_PROXY ERROR: Actor# [2:7491642272832654853:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:13.024119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:13.059492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:14.009507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.295623Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGE4ODE5ZTYtNzI0YTQ0ZDgtNmIwMTUzYzQtMjI1NGNmMzE=, ActorId: [2:7491642285717565134:2967], ActorState: ExecuteState, TraceId: 01jrfpr1as9w7j0w977th93bpx, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:18:15.408040Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642264242719540:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:15.408113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F978CDCAD8F 18. ??:0: ?? @ 0x7F978CDCAE3F 19. ??:0: ?? @ 0x1656B028 >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-04-10T11:15:50.182742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.183125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.183193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010f8/r3tmp/tmpFF1yIV/pdisk_1.dat 2025-04-10T11:15:50.632938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16615, node 1 2025-04-10T11:15:51.179563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.179611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.179640Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.180097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.185993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.277075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.277422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.292740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12286 2025-04-10T11:15:51.841839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.959111Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:54.998441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:54.998565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.029157Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.032201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.277230Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.277932Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.278691Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.278863Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.278966Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.279327Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.279434Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.279531Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.279623Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.449872Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.450005Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.463437Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.600635Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.651920Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.652038Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.689832Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.691114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.691372Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.691443Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.691509Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.691580Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.691658Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.691719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.692244Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.716306Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.716457Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.728481Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:55.733044Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:55.733382Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:55.742912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.759292Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.759360Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.759442Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:55.777384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:55.789106Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:55.789285Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.018928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.155847Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.254921Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.229583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.229715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.247288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.539877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.540129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.540429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.540568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.540674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.540781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.540916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.541051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.541177Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.541256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.541339Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.541412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.566024Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.566105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process= ... extTraversal 2025-04-10T11:18:16.496572Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:18:16.496628Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:16.496671Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.500258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:16.516917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:16.517442Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:16.517526Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:16.518402Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:16.532755Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:16.532983Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:16.533811Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8561:6481], server id = [2:8566:6486], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:16.534258Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8561:6481], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.534528Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8562:6482], server id = [2:8567:6487], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:16.534582Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8562:6482], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.536555Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8563:6483], server id = [2:8568:6488], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:16.536623Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8563:6483], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.537651Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8564:6484], server id = [2:8569:6489], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:16.537730Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8564:6484], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.538761Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8565:6485], server id = [2:8572:6492], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:16.538830Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8565:6485], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.544265Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:16.545215Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8561:6481], server id = [2:8566:6486], tablet id = 72075186224037899 2025-04-10T11:18:16.545263Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.546060Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:16.546452Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8562:6482], server id = [2:8567:6487], tablet id = 72075186224037900 2025-04-10T11:18:16.546483Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.547184Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8584:6501], server id = [2:8586:6502], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:16.547299Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8584:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.547631Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:16.549080Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8563:6483], server id = [2:8568:6488], tablet id = 72075186224037901 2025-04-10T11:18:16.549113Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.549450Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6503], server id = [2:8590:6505], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:16.549529Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6503], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.550352Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:16.551422Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8564:6484], server id = [2:8569:6489], tablet id = 72075186224037902 2025-04-10T11:18:16.551453Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.551789Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8589:6504], server id = [2:8593:6508], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:16.551877Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8589:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.552406Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:16.553672Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8565:6485], server id = [2:8572:6492], tablet id = 72075186224037903 2025-04-10T11:18:16.553703Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.554338Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8594:6509], server id = [2:8597:6512], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:16.554429Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8594:6509], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.554621Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8598:6513], server id = [2:8602:6516], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:16.554665Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8598:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:16.558339Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:16.558828Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8584:6501], server id = [2:8586:6502], tablet id = 72075186224037904 2025-04-10T11:18:16.558858Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
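[editor's note] The RunDataQuery entries in these traversal rounds (several in the statistics sections above, one more just below) all carry the same statistics save query on a single line, and the element types of its two List declarations are missing from the log text, most likely stripped as angle-bracketed markup during extraction. Reconstructed for readability, with List<Uint32> and List<String> assumed from the `.metadata/_statistics` columns being written:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed, missing in the log
    DECLARE $data AS List<String>;         -- element type assumed, missing in the log

    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);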
2025-04-10T11:18:16.559289Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:16.559715Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6503], server id = [2:8590:6505], tablet id = 72075186224037905 2025-04-10T11:18:16.559761Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.561005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:16.561605Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8589:6504], server id = [2:8593:6508], tablet id = 72075186224037906 2025-04-10T11:18:16.561633Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.562445Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:16.562732Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8594:6509], server id = [2:8597:6512], tablet id = 72075186224037907 2025-04-10T11:18:16.562760Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.563112Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:16.563159Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:16.563413Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:16.563618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:16.563961Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:16.565853Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8598:6513], server id = [2:8602:6516], tablet id = 72075186224037908 2025-04-10T11:18:16.565885Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:16.566519Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:16.600494Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8627:6537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:16.600728Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:16.600781Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8627:6537], StatRequests.size() = 1 2025-04-10T11:18:16.746808Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTYxZDVlNzUtNmMwNzc2NTAtNmVjMGY1ZDUtOTBhMDg2MTY=, TxId: 2025-04-10T11:18:16.746875Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTYxZDVlNzUtNmMwNzc2NTAtNmVjMGY1ZDUtOTBhMDg2MTY=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:16.747533Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:16.748757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:18:16.779992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.780060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:16.851257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8650:6548];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-10T11:18:17.103322Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8760:6643]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:17.103601Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:17.103652Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:17.106449Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:17.106521Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:17.106570Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:17.110958Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> YdbTableSplit::RenameTablesAndSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 3675, MsgBus: 25642 2025-04-10T11:17:57.356737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642210902781143:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:57.356908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e90/r3tmp/tmpclQiAV/pdisk_1.dat 2025-04-10T11:17:57.590705Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3675, node 1 2025-04-10T11:17:57.646444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:17:57.646475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:17:57.646484Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:17:57.646590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:17:57.690773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:17:57.690914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
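[editor's note] The force traversals in the statistics sections above request Types: TYPE_COUNT_MIN_SKETCH, i.e. per-column count-min sketches, which is what the save query shown earlier persists into `.metadata/_statistics`. As background, a count-min sketch is a fixed-size grid of counters indexed by independent hashes whose per-row minimum over-approximates an item's frequency. A minimal illustrative version, not YDB's implementation:

    #include <cstdint>
    #include <algorithm>
    #include <functional>
    #include <string>
    #include <vector>

    // Illustrative count-min sketch; width/depth trade memory for accuracy.
    class TCountMinSketch {
    public:
        TCountMinSketch(size_t width, size_t depth)
            : Width(width), Counters(depth, std::vector<uint64_t>(width, 0)) {}

        void Add(const std::string& value) {
            for (size_t row = 0; row < Counters.size(); ++row) {
                Counters[row][Hash(value, row) % Width] += 1;
            }
        }

        // Point estimate: the minimum over rows upper-bounds the true count.
        uint64_t Estimate(const std::string& value) const {
            uint64_t best = UINT64_MAX;
            for (size_t row = 0; row < Counters.size(); ++row) {
                best = std::min(best, Counters[row][Hash(value, row) % Width]);
            }
            return best;
        }

    private:
        static size_t Hash(const std::string& value, size_t seed) {
            // Simple seeded mix for illustration; real sketches use
            // pairwise-independent hash families.
            return std::hash<std::string>{}(value) ^ (seed * 0x9e3779b97f4a7c15ull);
        }

        size_t Width;
        std::vector<std::vector<uint64_t>> Counters;
    };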
2025-04-10T11:17:57.692773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25642 TClient is connected to server localhost:25642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:17:58.066048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:59.425422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642219492716388:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:59.425525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642219492716412:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:59.425591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:17:59.429899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:17:59.439127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642219492716415:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:17:59.511246Z node 1 :TX_PROXY ERROR: Actor# [1:7491642219492716466:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:17:59.752831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:17:59.852449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:00.595028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:01.976615Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715667; 2025-04-10T11:18:01.992369Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642228082660226:2968], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7491642228082659460:2968]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7491642228082660226:2968].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:01.992911Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642228082660215:2968], SessionActorId: [1:7491642228082659460:2968], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491642228082659460:2968]. isRollback=0 2025-04-10T11:18:01.993827Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ2ZDM1NTMtZjIyMzdjOC1iZGE4NjJkNS1jODM5NzdiYg==, ActorId: [1:7491642228082659460:2968], ActorState: ExecuteState, TraceId: 01jrfpqmey19g8egh727jqxc46, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491642228082660216:2968] from: [1:7491642228082660215:2968] 2025-04-10T11:18:01.993904Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642228082660216:2968] TxId: 281474976715667. Ctx: { TraceId: 01jrfpqmey19g8egh727jqxc46, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ2ZDM1NTMtZjIyMzdjOC1iZGE4NjJkNS1jODM5NzdiYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:01.994077Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ2ZDM1NTMtZjIyMzdjOC1iZGE4NjJkNS1jODM5NzdiYg==, ActorId: [1:7491642228082659460:2968], ActorState: ExecuteState, TraceId: 01jrfpqmey19g8egh727jqxc46, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-04-10T11:18:02.356600Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642210902781143:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:02.356668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 14418, MsgBus: 63750 2025-04-10T11:18:07.857196Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642251283136598:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:07.857394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e90/r3tmp/tmpvNOGCG/pdisk_1.dat 2025-04-10T11:18:07.940552Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14418, node 2 2025-04-10T11:18:07.997526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:07.997633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:07.999477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:07.999516Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:07.999523Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:07.999529Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:07.999652Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63750 TClient is connected to server localhost:63750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
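[editor's note on the ABORTED block above] This is the standard optimistic-lock outcome: transaction 281474976715667 read `/Root/KV` under serializable isolation, a competing commit touched the same keys first, and the data shard reported "Operation is aborting because locks are not valid" (code 2001), which KQP surfaces as "Transaction locks invalidated". A sketch of how two C++ SDK sessions typically provoke this; sessions, keys, and values are illustrative:

    // Sketch: two interleaved serializable transactions on the same key.
    using namespace NYdb;
    using namespace NYdb::NTable;

    void ProvokeLockInvalidation(TSession s1, TSession s2) {
        // tx1 reads Key = 1 and stays open, holding an optimistic read lock.
        auto read = s1.ExecuteDataQuery(
            "SELECT Value FROM `/Root/KV` WHERE Key = 1u;",
            TTxControl::BeginTx(TTxSettings::SerializableRW())
        ).GetValueSync();
        auto tx1 = read.GetTransaction();  // TMaybe<TTransaction>, kept open

        // A competing transaction writes the same key and commits first.
        s2.ExecuteDataQuery(
            "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"other\");",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();

        // tx1's commit now finds its lock broken.
        auto commit = tx1->Commit().GetValueSync();
        // Expected: commit.GetStatus() == EStatus::ABORTED with
        // "Transaction locks invalidated. Table: `/Root/KV`." (code 2001).
    }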
2025-04-10T11:18:08.375659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:10.170826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642264168039152:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.170871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642264168039134:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.170966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.174444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:10.183522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642264168039156:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:10.283590Z node 2 :TX_PROXY ERROR: Actor# [2:7491642264168039207:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:10.325346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.357998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.232656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:12.312917Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:18:12.313186Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:18:12.313366Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:18:12.313550Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642272757982389:2968], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7491642272757982285:2968]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7491642272757982389:2968].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:18:12.313648Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642272757982382:2968], SessionActorId: [2:7491642272757982285:2968], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
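The PRECONDITION_FAILED here, together with the duplicate-key sub-issue on the next log line, is the expected behavior of YQL INSERT, which verifies that the primary key does not already exist before writing. A minimal sketch that reproduces issue code 2012, assuming a `/Root/KV` table with columns Key and Value (the test's real schema is not shown in the log):

    INSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "a");
    -- A second INSERT with the same primary key fails with
    -- "Duplicate keys have been found.", issue code 2012:
    INSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "b");
    -- UPSERT writes blindly, without the existence check, and succeeds:
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "b");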
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7491642272757982285:2968]. isRollback=0 2025-04-10T11:18:12.313848Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGI3NDgwNjktNjU5OTIzN2ItMmFjMjE5ODktYzEzZWY4ZGQ=, ActorId: [2:7491642272757982285:2968], ActorState: ExecuteState, TraceId: 01jrfpqyh60wf7rjw5egx4n3pm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491642272757982383:2968] from: [2:7491642272757982382:2968] 2025-04-10T11:18:12.313926Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642272757982383:2968] TxId: 281474976715664. Ctx: { TraceId: 01jrfpqyh60wf7rjw5egx4n3pm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGI3NDgwNjktNjU5OTIzN2ItMmFjMjE5ODktYzEzZWY4ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:18:12.314097Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGI3NDgwNjktNjU5OTIzN2ItMmFjMjE5ODktYzEzZWY4ZGQ=, ActorId: [2:7491642272757982285:2968], ActorState: ExecuteState, TraceId: 01jrfpqyh60wf7rjw5egx4n3pm, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-10T11:18:12.367928Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGI3NDgwNjktNjU5OTIzN2ItMmFjMjE5ODktYzEzZWY4ZGQ=, ActorId: [2:7491642272757982285:2968], ActorState: ExecuteState, TraceId: 01jrfpqyjy7tsz4gbg3sc24hh8, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jrfpqyh3drd3xfsg7kytkq40, code: 2015 WAIT_INDEXATION: 0 2025-04-10T11:18:12.857709Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642251283136598:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.857784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> YdbTableSplit::SplitByLoadWithUpdates >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::MergeByNoLoadAfterSplit >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-04-10T11:15:51.755911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:51.756335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:51.756418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001040/r3tmp/tmp0Ia7sI/pdisk_1.dat 2025-04-10T11:15:52.162141Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12747, node 1 2025-04-10T11:15:52.405394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:52.405457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:52.405508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:52.405989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:52.409057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.500329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:52.500495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:52.514342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7748 2025-04-10T11:15:53.033802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:56.294121Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:56.332841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.333031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.360965Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:56.363414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.597708Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.598377Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599060Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599230Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599336Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599677Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599821Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.599918Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.600017Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.772071Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.772164Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.785374Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.912971Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:56.966505Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:56.966662Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.013680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.014790Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.015026Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.015084Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.015125Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.015174Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.015223Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.015275Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.015759Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.038804Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.038917Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.046124Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.049788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.050190Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.059911Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.078270Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.078339Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.078441Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.096710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.148063Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.148222Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:57.296916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:57.433488Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:57.531734Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:58.534114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.534302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.553490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:58.827964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:58.828176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:58.828455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:58.828546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:58.828640Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:58.828763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:58.828844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:58.828917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:58.829029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:58.829130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:58.829211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:58.829305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:58.854731Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:58.854851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=T ... ode 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:14.463271Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8441:6415], schemeshard count = 1 2025-04-10T11:18:16.497159Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:16.497213Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:18:16.497259Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:16.497342Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:16.500661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:16.516847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:16.517371Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:16.517450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:16.518182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-04-10T11:18:16.518233Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-04-10T11:18:16.518275Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:17.624099Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:17.638031Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:17.638316Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:17.638989Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8601:6498], server id = [2:8606:6503], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:17.639418Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8601:6498], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.641301Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6499], server id = [2:8607:6504], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:17.641395Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6499], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.642238Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8603:6500], server id = [2:8608:6505], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:17.642302Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8603:6500], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.643243Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6501], server id = [2:8609:6506], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:17.643303Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.643701Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8605:6502], server id = [2:8612:6509], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:17.643767Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8605:6502], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.649276Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:17.649939Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8601:6498], server id = [2:8606:6503], tablet id = 72075186224037899 2025-04-10T11:18:17.649989Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.650809Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:17.651307Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6499], server id = [2:8607:6504], tablet id = 72075186224037900 2025-04-10T11:18:17.651337Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.652046Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:17.652216Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8624:6518], server id = [2:8627:6520], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:17.652305Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8624:6518], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.653653Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8603:6500], server id = [2:8608:6505], tablet id = 72075186224037901 2025-04-10T11:18:17.653685Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.654135Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8626:6519], server id = [2:8630:6522], tablet id = 72075186224037905, status 
= OK 2025-04-10T11:18:17.654230Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8626:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.654701Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:17.655830Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6501], server id = [2:8609:6506], tablet id = 72075186224037902 2025-04-10T11:18:17.655862Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.656366Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8629:6521], server id = [2:8632:6524], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:17.656437Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8629:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.656896Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:17.657815Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8605:6502], server id = [2:8612:6509], tablet id = 72075186224037903 2025-04-10T11:18:17.657843Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.658456Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8633:6525], server id = [2:8637:6529], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:17.658521Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8633:6525], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.659684Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8638:6530], server id = [2:8642:6533], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:17.659743Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8638:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:17.662810Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:17.663242Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8624:6518], server id = [2:8627:6520], tablet id = 72075186224037904 2025-04-10T11:18:17.663273Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.663737Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:17.664228Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8626:6519], server id = [2:8630:6522], tablet id = 72075186224037905 2025-04-10T11:18:17.664259Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.665345Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:17.665686Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8629:6521], server id = [2:8632:6524], tablet id = 72075186224037906 2025-04-10T11:18:17.665716Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.666284Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:17.666558Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8633:6525], server id = [2:8637:6529], tablet id = 72075186224037907 2025-04-10T11:18:17.666583Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.666830Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:17.666881Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
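The traversal fanned out above is the scheduled background kind; the earlier "ScheduleNextTraversal. No force traversals." line refers to traversals requested explicitly. A sketch of requesting a forced statistics collection, assuming the ANALYZE statement available in recent YDB versions (the table path is hypothetical, since the log identifies the table only as LocalPathId 4):

    ANALYZE `/Root/Database/SomeColumnTable`;  -- path is illustrative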
2025-04-10T11:18:17.667065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:17.667257Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:17.667744Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:17.669876Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8638:6530], server id = [2:8642:6533], tablet id = 72075186224037908 2025-04-10T11:18:17.669907Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:17.670713Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:17.707307Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8667:6554]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:17.707550Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:17.707594Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8667:6554], StatRequests.size() = 1 2025-04-10T11:18:17.845830Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTljNDE5MDMtZDIwZDFlMzYtMjExNGMyZGYtNWEwMDdiYjI=, TxId: 2025-04-10T11:18:17.845897Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTljNDE5MDMtZDIwZDFlMzYtMjExNGMyZGYtNWEwMDdiYjI=, TxId: ...
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:17.846620Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8680:6560]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:17.846884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:17.847347Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:17.847405Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:17.850597Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:17.850668Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:17.850722Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:17.856320Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 21574, MsgBus: 65519 2025-04-10T11:18:08.576986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642257569040164:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:08.577137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ddf/r3tmp/tmpxnKs8B/pdisk_1.dat 2025-04-10T11:18:08.859980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21574, node 1 2025-04-10T11:18:08.927482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:08.927501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:08.927520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:08.927647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:08.956876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:08.956988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:08.958800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65519 TClient is connected to server localhost:65519 WaitRootIsUp 'Root'... 
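For readability, the statistics-save query from the RunDataQuery record above, reflowed with semantics unchanged (the List element types follow the parameter usage visible in the log; in the raw capture the angle-bracket type arguments were stripped):

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;
    DECLARE $data AS List<String>;

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Two rows are written because the traversal collected sketches for two columns; the table is presumably keyed by (owner_id, local_path_id, stat_type, column_tag), which makes the repeated UPSERT idempotent across traversal rounds.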
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:09.325819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.343785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.444546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.557085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:09.628435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:10.695279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642266158976562:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.695413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:10.967036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:10.990229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.014074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.036709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.062099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.091134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:11.144930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642270453944368:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:11.145002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:11.145156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642270453944373:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:11.148629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:11.157298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642270453944375:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:11.260403Z node 1 :TX_PROXY ERROR: Actor# [1:7491642270453944432:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:12.387495Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ZhZDk3NmEtMTY1NjQ5M2MtYmUwZDAzOWMtNWVmMDFkZjk=, ActorId: [1:7491642270453944684:2487], ActorState: ExecuteState, TraceId: 01jrfpqykcbswadmk7vv2n62gd, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9673, MsgBus: 14033 2025-04-10T11:18:12.990391Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642271130857137:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.990491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ddf/r3tmp/tmpBTDqYX/pdisk_1.dat 2025-04-10T11:18:13.075729Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9673, node 2 2025-04-10T11:18:13.124632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:13.124657Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:13.124665Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:13.124797Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:13.125589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:13.125705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:13.127375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14033 TClient is connected to server localhost:14033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.473870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
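The sequence ending here is the workload service bootstrapping its default resource pool: the fetch fails with NOT_FOUND, TPoolCreatorActor proposes a create and doublechecks after the transaction completes, and the concurrent creation attempt makes TX_PROXY report "path exist", which the creator accepts as success. A hedged YQL sketch of the create step (the WITH settings are illustrative, not taken from the log, and assume the CREATE RESOURCE POOL statement of newer YDB versions):

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- -1 meaning unlimited is an assumption
        QUEUE_SIZE = -1
    );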
2025-04-10T11:18:13.491313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.541710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.681590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.742119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.544363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642284015760793:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.544472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.572870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.597365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.621913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.646520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.672855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.703256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.738997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642284015761300:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.739096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.739159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642284015761305:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.741928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:15.749672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642284015761307:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:15.845697Z node 2 :TX_PROXY ERROR: Actor# [2:7491642284015761362:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:17.990663Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642271130857137:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:17.990743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:18.960173Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-10T11:18:18.961259Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642296900663774:2494], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [2:7491642288310728940:2494]Got LOCKS BROKEN for table `/Root/EightShard`. ShardID=72075186224037891, Sink=[2:7491642296900663774:2494].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:18.961929Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642296900663754:2494], SessionActorId: [2:7491642288310728940:2494], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001
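The ABORTED here, with the "locks are not valid" sub-issue on the next log line, is YDB's optimistic locking: a serializable transaction that read rows which another transaction modified and committed in the meantime fails at commit time and is expected to be retried whole by the client. An interleaving sketch in YQL, with table columns and values assumed (the test's /Root/EightShard schema is not shown in the log):

    -- transaction A reads a row, taking an optimistic lock on it:
    SELECT Data FROM `/Root/EightShard` WHERE Key = 100;
    -- transaction B upserts the same key and commits at this point,
    -- invalidating A's lock; A's write and commit then fail with code 2001:
    UPSERT INTO `/Root/EightShard` (Key, Data) VALUES (100, 1);
    COMMIT;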
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7491642288310728940:2494]. isRollback=0 2025-04-10T11:18:18.962291Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFiZTRiOWYtNmU5MmVkNjAtZDQ1ZTEzNDctMmFmNjNkOWM=, ActorId: [2:7491642288310728940:2494], ActorState: ExecuteState, TraceId: 01jrfpr4tsehv4k2c8rycknv6p, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7491642296900663755:2494] from: [2:7491642296900663754:2494] 2025-04-10T11:18:18.962408Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642296900663755:2494] TxId: 281474976710673. Ctx: { TraceId: 01jrfpr4tsehv4k2c8rycknv6p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFiZTRiOWYtNmU5MmVkNjAtZDQ1ZTEzNDctMmFmNjNkOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:18.962686Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFiZTRiOWYtNmU5MmVkNjAtZDQ1ZTEzNDctMmFmNjNkOWM=, ActorId: [2:7491642288310728940:2494], ActorState: ExecuteState, TraceId: 01jrfpr4tsehv4k2c8rycknv6p, Create QueryResponse for error on request, msg: 2025-04-10T11:18:18.962968Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-04-10T11:18:18.963170Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1744283899008 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> ReadLoad::ShouldReadIterate >> UpsertLoad::ShouldCreateTable >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> UpsertLoad::ShouldWriteKqpUpsert >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-04-10T11:15:50.247526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.247915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.247982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010e6/r3tmp/tmpzXl7bn/pdisk_1.dat 2025-04-10T11:15:50.645705Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18192, node 1 2025-04-10T11:15:51.179342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.179392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.179420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.179975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.186065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.276566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.277431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.292764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21148 2025-04-10T11:15:51.865916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.175120Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:55.211501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.211711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.240930Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:55.243391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.492506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.493227Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.493814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.493965Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.494256Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.494375Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.494500Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.494612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.494711Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.660338Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.660485Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.674063Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.830579Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.881096Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.881193Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.912888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.913049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.913312Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.913398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.913457Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.913524Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.913572Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.913621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.913988Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.941343Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.941482Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.948887Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2607] 2025-04-10T11:15:55.954920Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1914:2622] 2025-04-10T11:15:55.955557Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1914:2622], schemeshard id = 72075186224037897 2025-04-10T11:15:55.960411Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.982555Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.982614Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.982672Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:56.040311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:56.050764Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:56.050948Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:56.215661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.380883Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.478648Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.364856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.365011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.390470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.753244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.753512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.753836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.753960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.754074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.754232Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.754362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.754484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.754645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.754797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.754940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.755073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.814719Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2391:2887];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.814821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2391:2887];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-04-10T11:18:18.718496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:18.718545Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:18.718642Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:18.719578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:18.719632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:18.721424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:18.827471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:18.827620Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:18.828294Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8693:6545], server id = [2:8698:6550], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:18.828384Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8693:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.828669Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8694:6546], server id = [2:8699:6551], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:18.828717Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8694:6546], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.829511Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8695:6547], server id = [2:8700:6552], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:18.829553Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8695:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.829765Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8696:6548], server id = [2:8701:6553], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:18.829807Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8696:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.830930Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:18.831569Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8693:6545], server id = [2:8698:6550], tablet id = 72075186224037899 2025-04-10T11:18:18.831602Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.831912Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8697:6549], server id = [2:8703:6555], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:18.831965Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8697:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.832330Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:18.833031Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:18.833296Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8694:6546], server id = [2:8699:6551], tablet id = 72075186224037900 2025-04-10T11:18:18.833325Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.833613Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8706:6558], server id = [2:8708:6560], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:18.833655Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8706:6558], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.833864Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:18.834002Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8695:6547], server id = [2:8700:6552], tablet id = 72075186224037901 2025-04-10T11:18:18.834018Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.834493Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:18.834863Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8709:6561], server id = [2:8712:6564], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:18.834900Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8709:6561], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.835211Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8696:6548], server id = [2:8701:6553], tablet id = 72075186224037902 2025-04-10T11:18:18.835231Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.835701Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8711:6563], server id = [2:8713:6565], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:18.835749Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8711:6563], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.835900Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8697:6549], server id = [2:8703:6555], tablet id = 72075186224037903 2025-04-10T11:18:18.835929Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.836126Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8714:6566], server id = [2:8717:6569], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:18.836159Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8714:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.836573Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:18.836725Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8716:6568], server id = [2:8718:6570], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:18.836768Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8716:6568], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.837399Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:18.837591Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8706:6558], server id = [2:8708:6560], tablet id = 72075186224037904 2025-04-10T11:18:18.837607Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.838264Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:18.838502Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8709:6561], server id = [2:8712:6564], tablet id = 72075186224037905 2025-04-10T11:18:18.838524Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.838727Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:18.838834Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8711:6563], server id = [2:8713:6565], tablet id = 72075186224037906 2025-04-10T11:18:18.838865Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.838982Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:18.839010Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:18.839168Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:18.839483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:18.839686Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:18.841854Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8714:6566], server id = [2:8717:6569], tablet id = 72075186224037907 2025-04-10T11:18:18.841876Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.842179Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8716:6568], server id = [2:8718:6570], tablet id = 72075186224037908 2025-04-10T11:18:18.842215Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.842436Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:18.871274Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8739:6591]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:18.871521Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:18.871578Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8739:6591], StatRequests.size() = 1 2025-04-10T11:18:18.954891Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGM0YjBmZWMtYmIyYWJmYjMtNDNkZjk5MTUtNTk2NmMzYWI=, TxId: 2025-04-10T11:18:18.954952Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGM0YjBmZWMtYmIyYWJmYjMtNDNkZjk5MTUtNTk2NmMzYWI=, TxId: 2025-04-10T11:18:18.955432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:18.967899Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8754:6597] 2025-04-10T11:18:18.967988Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6513], server id = [2:8754:6597], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:18.968079Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8754:6597], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:18.968176Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8755:6598]
2025-04-10T11:18:18.968233Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8755:6598], schemeshard id = 72075186224037897 2025-04-10T11:18:18.981203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:18.981269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:19.035744Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8758:6601]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:19.036033Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:19.036073Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:19.038517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:19.038580Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:19.038628Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:19.043780Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardTxOrder::RandomPoints_DelayData [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir >> KqpTx::InvalidateOnError [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot >> TPopulatorTestWithResets::UpdateAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-04-10T11:15:52.701006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:52.701372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:52.701431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fe4/r3tmp/tmp23BDvP/pdisk_1.dat 2025-04-10T11:15:53.066671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12537, node 1 2025-04-10T11:15:53.299193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:53.299250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:53.299277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:53.299733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:53.302700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:53.391316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:53.391478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:53.405103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64161 2025-04-10T11:15:53.927664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:57.032837Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:57.071959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.072142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.100566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:57.103071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.350258Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.350859Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.351531Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.351698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.351790Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.352088Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.352192Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.352296Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.352392Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.518824Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.518957Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.532076Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.659582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:57.705239Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:57.705327Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.740252Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.741580Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.741829Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.741925Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.741989Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.742048Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.742104Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.742200Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.742749Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.765061Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.765169Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.772447Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.775902Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.776166Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.784700Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.800118Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.800179Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.800258Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.852081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.858205Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.858319Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:58.007176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:58.144356Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:58.220518Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:59.190652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.190768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.209636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.561111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:59.561373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:59.561695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:59.561848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:59.561991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:59.562193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:59.562342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:59.562482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:59.562625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:59.562770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:59.562898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:59.563062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2393:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:59.632612Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2403:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:59.632709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2403:2891];tablet_id=72075186224037900;process= ... 67:6407], schemeshard count = 1 2025-04-10T11:18:18.526482Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:18.526538Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:18:18.526576Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:18.526613Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:18.530208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:18.545362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:18.545833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:18.545898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:18.546628Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:18.546673Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-04-10T11:18:18.546715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:19.654415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:19.654486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:19.654833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:19.668175Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:19.668433Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:19.669103Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8630:6493], server id = [2:8635:6498], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:19.669449Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8630:6493], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.671053Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8631:6494], server id = [2:8636:6499], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:19.671115Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8631:6494], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.671230Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8632:6495], server id = [2:8637:6500], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:19.671260Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8632:6495], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.672128Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8633:6496], server id = [2:8638:6501], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:19.672173Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8633:6496], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.672904Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8634:6497], server id = [2:8641:6504], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:19.672969Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8634:6497], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.676750Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:19.677501Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8630:6493], server id = [2:8635:6498], tablet id = 72075186224037899 2025-04-10T11:18:19.677560Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.678820Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:19.679457Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8653:6513], server id = [2:8655:6514], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:19.679545Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8653:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.679886Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8631:6494], server id = [2:8636:6499], tablet id = 72075186224037900 2025-04-10T11:18:19.679912Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.680623Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:19.681331Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8632:6495], server id = [2:8637:6500], tablet id = 72075186224037901 2025-04-10T11:18:19.681365Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.681858Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8657:6515], server id = [2:8661:6519], tablet id = 72075186224037905, status 
= OK 2025-04-10T11:18:19.681937Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8657:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.682581Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:19.683512Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8633:6496], server id = [2:8638:6501], tablet id = 72075186224037902 2025-04-10T11:18:19.683546Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.683778Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:19.684254Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6517], server id = [2:8662:6520], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:19.684306Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8659:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.684740Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8634:6497], server id = [2:8641:6504], tablet id = 72075186224037903 2025-04-10T11:18:19.684762Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.685091Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8663:6521], server id = [2:8669:6526], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:19.685149Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8663:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.685822Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8666:6524], server id = [2:8668:6525], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:19.685867Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8666:6524], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.688067Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:19.688494Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8653:6513], server id = [2:8655:6514], tablet id = 72075186224037904 2025-04-10T11:18:19.688522Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.689880Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:19.690307Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8657:6515], server id = [2:8661:6519], tablet id = 72075186224037905 2025-04-10T11:18:19.690334Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.691116Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:19.691792Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8659:6517], server id = [2:8662:6520], tablet id = 72075186224037906 2025-04-10T11:18:19.691820Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.692390Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:19.692817Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8666:6524], server id = [2:8668:6525], tablet id = 72075186224037908 2025-04-10T11:18:19.692841Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.693212Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:19.693262Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-04-10T11:18:19.693426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:19.693633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:19.694006Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:19.696077Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8663:6521], server id = [2:8669:6526], tablet id = 72075186224037907 2025-04-10T11:18:19.696102Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.696817Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:19.732274Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8696:6549]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:19.732532Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:19.732566Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8696:6549], StatRequests.size() = 1 2025-04-10T11:18:19.847224Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Nzg3NzZhMzgtYTVkMmFhZDctYWJkMmFhYWYtMmJmZTk1MmI=, TxId: 2025-04-10T11:18:19.847296Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Nzg3NzZhMzgtYTVkMmFhZDctYWJkMmFhYWYtMmJmZTk1MmI=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:19.847845Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8709:6555]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:19.848047Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:19.848504Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:19.848564Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:19.852194Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:19.852261Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:19.852315Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:19.857773Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> TPopulatorTest::Boot [GOOD] >> TPopulatorTest::RemoveDir [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-04-10T11:18:22.383816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:22.383869Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 4178, MsgBus: 15612 2025-04-10T11:18:12.835521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642273742162185:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.835754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d17/r3tmp/tmpzbRPcb/pdisk_1.dat 2025-04-10T11:18:13.083121Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4178, node 1 2025-04-10T11:18:13.139617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:13.139637Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:13.139648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:13.139778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:13.170970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:13.171073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:13.172898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15612 TClient is connected to server localhost:15612 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.556553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.584098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.738278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.885754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.939578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.347947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642286627065862:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.348056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.617085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.641112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.667005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.705836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.730540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.757605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.791132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642286627066368:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.791231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.791249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642286627066373:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.793954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:15.802038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642286627066375:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:15.864583Z node 1 :TX_PROXY ERROR: Actor# [1:7491642286627066430:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20051, MsgBus: 30421 2025-04-10T11:18:17.524116Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642295059697207:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:17.524214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d17/r3tmp/tmpPFrnSt/pdisk_1.dat 2025-04-10T11:18:17.615288Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20051, node 2 2025-04-10T11:18:17.651163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:17.651261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:17.652762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:17.681815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:17.681851Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:17.681856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:17.681961Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30421 TClient is connected to server localhost:30421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:18.044905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.060757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:18.132940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.266582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.325500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:20.212918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642307944600871:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.213004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.236480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.260235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.283447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.305773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.328466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.355055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.429134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642307944601382:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.429225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.429272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642307944601387:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.431945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:20.440127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642307944601389:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:20.530835Z node 2 :TX_PROXY ERROR: Actor# [2:7491642307944601445:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:21.225327Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:18:21.248874Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:18:21.249056Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:18:21.249262Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642312239569053:2494], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [2:7491642312239569023:2494]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:7491642312239569053:2494].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:18:21.249884Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642312239569046:2494], SessionActorId: [2:7491642312239569023:2494], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7491642312239569023:2494]. isRollback=0 2025-04-10T11:18:21.250176Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDgyMGI4Y2ItYzkwMjMzMTctODhlYjhiNGEtYTE3ZTg5MmU=, ActorId: [2:7491642312239569023:2494], ActorState: ExecuteState, TraceId: 01jrfpr77j6wp78am72mzf4n5t, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491642312239569047:2494] from: [2:7491642312239569046:2494] 2025-04-10T11:18:21.250277Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642312239569047:2494] TxId: 281474976715671. Ctx: { TraceId: 01jrfpr77j6wp78am72mzf4n5t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDgyMGI4Y2ItYzkwMjMzMTctODhlYjhiNGEtYTE3ZTg5MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:18:21.250498Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDgyMGI4Y2ItYzkwMjMzMTctODhlYjhiNGEtYTE3ZTg5MmU=, ActorId: [2:7491642312239569023:2494], ActorState: ExecuteState, TraceId: 01jrfpr77j6wp78am72mzf4n5t, Create QueryResponse for error on request, msg: 2025-04-10T11:18:21.301481Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDgyMGI4Y2ItYzkwMjMzMTctODhlYjhiNGEtYTE3ZTg5MmU=, ActorId: [2:7491642312239569023:2494], ActorState: ExecuteState, TraceId: 01jrfpr7a8fz36z0rsy82p19vs, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-04-10T11:18:22.383568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:22.383636Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-10T11:18:22.493429Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-10T11:18:22.493537Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-10T11:18:22.495126Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.495219Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.495259Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.496005Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-10T11:18:22.496079Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-04-10T11:18:22.500486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-10T11:18:22.500586Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:18:22.501999Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-10T11:18:22.502039Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-10T11:18:22.529387Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-10T11:18:22.529470Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Successful handshake: replica# [1:12:2059] 2025-04-10T11:18:22.529532Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:22.529621Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-10T11:18:22.529663Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:15:2062] 2025-04-10T11:18:22.529706Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:22.529757Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-10T11:18:22.529778Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Successful handshake: replica# [1:18:2065] 2025-04-10T11:18:22.529797Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:22.529898Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-04-10T11:18:22.530008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-10T11:18:22.530121Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:96:2122] 2025-04-10T11:18:22.530196Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.530264Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: 
[OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:96:2122] 2025-04-10T11:18:22.530320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-04-10T11:18:22.530381Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-04-10T11:18:22.530421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.530486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-10T11:18:22.530568Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-04-10T11:18:22.530649Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-04-10T11:18:22.530678Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.530715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-04-10T11:18:22.530734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-04-10T11:18:22.530767Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-04-10T11:18:22.530804Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.530839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-04-10T11:18:22.530867Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2124] 2025-04-10T11:18:22.530884Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.530929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, 
LocalPathId: 2] }: sender# [1:98:2124] 2025-04-10T11:18:22.530955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-04-10T11:18:22.530985Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-04-10T11:18:22.531033Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-04-10T11:18:22.531057Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-10T11:18:22.531093Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-04-10T11:18:22.531135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 0 2025-04-10T11:18:22.531155Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-10T11:18:22.531197Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-04-10T11:18:22.531257Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:22.531286Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-10T11:18:22.531338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-04-10T11:18:22.531368Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-10T11:18:22.531399Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 0 2025-04-10T11:18:22.531422Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-10T11:18:22.531456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-10T11:18:22.531491Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 
72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:22.531518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-04-10T11:18:22.531535Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-10T11:18:22.531563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:22.531586Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-10T11:18:22.531626Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-10T11:18:22.531698Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-04-10T11:18:22.531759Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-04-10T11:18:22.532393Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-04-10T11:18:22.532428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-10T11:18:22.532462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-10T11:18:22.532577Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:22.532601Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-10T11:18:22.532620Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-10T11:18:22.532682Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 0 2025-04-10T11:18:22.532694Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-04-10T11:18:22.533079Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:22.533113Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 2025-04-10T11:18:22.533216Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 0 2025-04-10T11:18:22.533233Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-04-10T11:18:22.533424Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:22.533451Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestWaitNotification: OK eventTxId 100 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-04-10T11:18:22.384393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:22.384463Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-10T11:18:22.492398Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-10T11:18:22.492513Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-10T11:18:22.494137Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.494291Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.494361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 
2025-04-10T11:18:22.496096Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-10T11:18:22.496164Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-10T11:18:22.496321Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:22.496390Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:22.496435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:22.496667Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:22.496720Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.496774Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.496819Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.497062Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:22.497113Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 
2025-04-10T11:18:22.497192Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:22.497243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:22.497298Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:22.497802Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:22.498265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:22.498371Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:22.498422Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-10T11:18:22.498876Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:22.498909Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:18:22.501321Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-10T11:18:22.501379Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-10T11:18:22.501537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.501586Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.501623Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:22.502305Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-10T11:18:22.502348Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-10T11:18:2 ... 
oard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-04-10T11:18:22.510844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-04-10T11:18:22.510884Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-04-10T11:18:22.511009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:96:2122], cookie# 101 2025-04-10T11:18:22.511091Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.511132Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.511169Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.511302Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2123], cookie# 101 2025-04-10T11:18:22.511354Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-04-10T11:18:22.511419Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-04-10T11:18:22.511471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-04-10T11:18:22.511508Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-04-10T11:18:22.511799Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:98:2124], cookie# 101 2025-04-10T11:18:22.512029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 101 2025-04-10T11:18:22.512144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 101 2025-04-10T11:18:22.512182Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-10T11:18:22.512488Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 101 2025-04-10T11:18:22.512522Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-10T11:18:22.514218Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 232, preserialized size# 2 2025-04-10T11:18:22.514264Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-04-10T11:18:22.514385Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.514498Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.514534Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.514779Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 306, preserialized size# 0 2025-04-10T11:18:22.514819Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-04-10T11:18:22.514931Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-04-10T11:18:22.514986Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-04-10T11:18:22.515037Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:18:22.515134Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:96:2122], cookie# 101 2025-04-10T11:18:22.515172Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.515225Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.515260Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-04-10T11:18:22.515530Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2123], cookie# 101 2025-04-10T11:18:22.515563Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-04-10T11:18:22.515608Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-04-10T11:18:22.515651Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-04-10T11:18:22.515693Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-04-10T11:18:22.515940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 
2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2124], cookie# 101 2025-04-10T11:18:22.515986Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:96:2122], cookie# 101 2025-04-10T11:18:22.516110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2123], cookie# 101 2025-04-10T11:18:22.516157Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-04-10T11:18:22.516477Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2124], cookie# 101 2025-04-10T11:18:22.516511Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TPopulatorTest::MakeDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 6763, MsgBus: 17186 2025-04-10T11:18:01.037931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642226649370188:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:01.038049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e88/r3tmp/tmp67pJva/pdisk_1.dat 2025-04-10T11:18:01.306111Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6763, node 1 2025-04-10T11:18:01.370643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:01.370680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:01.370691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:01.370825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:01.404723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:01.404837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:01.406389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17186 TClient is connected to server localhost:17186 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:01.784493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:03.223845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642235239305442:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:03.223949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642235239305454:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:03.224057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:03.228500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:03.237590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642235239305457:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:18:03.308054Z node 1 :TX_PROXY ERROR: Actor# [1:7491642235239305508:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:03.580238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:03.679458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:04.415542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:05.526549Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-10T11:18:05.536748Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:05.536921Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:05.537079Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642243829248825:2966], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7491642243829248457:2966]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7491642243829248825:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:05.537533Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642243829248816:2966], SessionActorId: [1:7491642243829248457:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7491642243829248457:2966]. isRollback=0 2025-04-10T11:18:05.537778Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2U5OTc1YmItYzAxOTM3ZDctMzM1YzNkZjItN2M3ZDYwN2Q=, ActorId: [1:7491642243829248457:2966], ActorState: ExecuteState, TraceId: 01jrfpqqx12fn3xdfxysvfp6yv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7491642243829248817:2966] from: [1:7491642243829248816:2966] 2025-04-10T11:18:05.537850Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642243829248817:2966] TxId: 281474976710667. Ctx: { TraceId: 01jrfpqqx12fn3xdfxysvfp6yv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U5OTc1YmItYzAxOTM3ZDctMzM1YzNkZjItN2M3ZDYwN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:05.538013Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2U5OTc1YmItYzAxOTM3ZDctMzM1YzNkZjItN2M3ZDYwN2Q=, ActorId: [1:7491642243829248457:2966], ActorState: ExecuteState, TraceId: 01jrfpqqx12fn3xdfxysvfp6yv, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 2025-04-10T11:18:06.037652Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642226649370188:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:06.037732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 27265, MsgBus: 28544 2025-04-10T11:18:11.435431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642268880889373:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:11.435533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e88/r3tmp/tmpbEoZQn/pdisk_1.dat 2025-04-10T11:18:11.529148Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27265, node 2 2025-04-10T11:18:11.571318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:11.571423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:11.572734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:11.588844Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:11.588869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:11.588876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:11.589007Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28544 TClient is connected to server localhost:28544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:11.961349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:14.012646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642281765791920:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:14.012705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642281765791926:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:14.012761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:14.016738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:14.025700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642281765791934:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:14.119270Z node 2 :TX_PROXY ERROR: Actor# [2:7491642281765791985:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:14.165634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:14.235892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.109251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:16.435864Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642268880889373:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:16.435924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:16.478390Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTc0MDY2OTUtYmIwYmMzZWMtNDU2MTE2ZTctN2FlMjg1ZTc=, ActorId: [2:7491642290355735092:2968], ActorState: ExecuteState, TraceId: 01jrfpr2h91f417x60t8zsy0fz, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-04-10T11:15:03.708277Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:15:03.709203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:15:03.709253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:03.724046Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:15:03.724509Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-04-10T11:15:03.724799Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:15:03.775742Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:15:03.789062Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:15:03.789763Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:15:03.791638Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:15:03.791717Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:15:03.791768Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:15:03.792208Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:15:03.792399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:15:03.792479Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:199:2154] in generation 2 2025-04-10T11:15:03.864134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:15:03.920932Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:15:03.921134Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:15:03.921250Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-04-10T11:15:03.921290Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:15:03.921347Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:15:03.921388Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:03.921645Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.921714Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.922012Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:15:03.922125Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:15:03.922283Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:03.922322Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:15:03.922377Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:15:03.922421Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:15:03.922461Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:15:03.922510Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:15:03.922557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:15:03.922649Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.922692Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.922765Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-04-10T11:15:03.925375Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:15:03.925431Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:15:03.925519Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:15:03.925689Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:15:03.925741Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:15:03.925815Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:15:03.925864Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:03.925903Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:15:03.925957Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:15:03.925990Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:03.926329Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:15:03.926375Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:15:03.926412Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:15:03.926444Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:03.926496Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:15:03.926525Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:15:03.926565Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:15:03.926618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:03.926648Z node 1 
:TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:15:03.942885Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:15:03.942956Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:15:03.942998Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:15:03.943172Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:15:03.943229Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:15:03.943742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.943797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:15:03.943839Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-04-10T11:15:03.943982Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:15:03.944019Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:15:03.944140Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:15:03.944187Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.944251Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:15:03.944292Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:15:03.947898Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:15:03.947961Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:15:03.948107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.948140Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:15:03.948194Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:15:03.948231Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:15:03.948265Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:15:03.948305Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:15:03.948346Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:15:03.948391Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.948432Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:15:03.948502Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit LoadTxDetails 2025-04-10T11:15:03.948539Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:15:03.948719Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:15:03.948787Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:15:03.948816Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:15:03.948856Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:15:03.948886Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:15:03.948954Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:15:03.948982Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:15:03.949014Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:15:03.949048Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:15:03.949103Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:15:03.949144Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:15:03.949172Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-10T11:15:03.949204Z node 1 :TX_D ... consumer 9437184 txId 520 2025-04-10T11:18:21.316513Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 521 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-04-10T11:18:21.316543Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.316568Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 521 2025-04-10T11:18:21.316631Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-04-10T11:18:21.316659Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.316684Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-04-10T11:18:21.316816Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-04-10T11:18:21.316843Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.316867Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-04-10T11:18:21.316943Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 
2025-04-10T11:18:21.316969Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.316992Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-04-10T11:18:21.317102Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-10T11:18:21.317125Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317155Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-10T11:18:21.317244Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-04-10T11:18:21.317268Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317291Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-04-10T11:18:21.317418Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-04-10T11:18:21.317445Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317469Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-04-10T11:18:21.317569Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-04-10T11:18:21.317594Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317615Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-04-10T11:18:21.317708Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-04-10T11:18:21.317731Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317752Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-04-10T11:18:21.317832Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-04-10T11:18:21.317855Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317875Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-04-10T11:18:21.317941Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 530 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-04-10T11:18:21.317965Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.317985Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-04-10T11:18:21.318092Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-10T11:18:21.318117Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318139Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-10T11:18:21.318267Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-04-10T11:18:21.318293Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318318Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-04-10T11:18:21.318413Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-04-10T11:18:21.318441Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318464Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-04-10T11:18:21.318526Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-04-10T11:18:21.318549Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318575Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-04-10T11:18:21.318665Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-04-10T11:18:21.318689Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318711Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-04-10T11:18:21.318822Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-04-10T11:18:21.318846Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.318866Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-04-10T11:18:21.318965Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 
269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-04-10T11:18:21.318987Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.319012Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-04-10T11:18:21.319109Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-04-10T11:18:21.319135Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.319159Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-04-10T11:18:21.319246Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-10T11:18:21.319271Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.319293Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-04-10T11:18:21.338821Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:18:21.338884Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-04-10T11:18:21.338948Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-04-10T11:18:21.339018Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-10T11:18:21.339061Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:18:21.339440Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:343:2311]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-10T11:18:21.339481Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:18:21.339518Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 20 29 29 24 30 24 30 30 22 29 29 29 29 29 30 29 27 29 30 15 23 27 29 29 28 28 - 23 29 28 - - actual 20 29 29 24 30 24 30 30 22 29 29 29 29 29 30 29 27 29 30 15 23 27 29 29 28 28 - 23 29 28 - - interm 20 29 29 24 30 24 30 30 22 29 29 29 29 29 30 29 27 29 30 15 23 27 29 29 28 28 - 23 29 28 - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-04-10T11:15:51.987273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:51.987680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:51.987778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001088/r3tmp/tmp7ZRWMH/pdisk_1.dat 2025-04-10T11:15:52.345226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22875, node 1 2025-04-10T11:15:52.584909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:52.584971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:52.585003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:52.585506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:52.588410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.674200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:52.674355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:52.688098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26504 2025-04-10T11:15:53.191371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:56.401290Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:56.427925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.428087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.454777Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:56.456923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.683052Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.683523Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.683950Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684058Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684109Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684307Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684367Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684417Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.684478Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:56.846524Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.846651Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.859963Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:56.999504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:57.049559Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:57.049669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.090622Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.091804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.092026Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.092089Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.092136Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.092197Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.092258Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.092303Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.092799Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.117288Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.117473Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.127628Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.133409Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.133862Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.146937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.168434Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.168532Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.168641Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.238420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.257047Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.257220Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:57.424061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:57.572452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:57.649559Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:58.663105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.663278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:58.681772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:58.770697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:58.770914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:58.771170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:58.771275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:58.771364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:58.771451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:58.771522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:58.771603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:58.771742Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:58.771855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:58.771933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:58.772018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:58.793809Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:58.793907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 74Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7263:5315] 2025-04-10T11:18:18.782433Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7263:5315] 2025-04-10T11:18:18.818945Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:18:18.819067Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:18:18.819638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:18:18.820278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:18:18.820554Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-10T11:18:18.820590Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-10T11:18:18.820625Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-10T11:18:18.820656Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-10T11:18:18.820690Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744283898734054 2025-04-10T11:18:18.820723Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-10T11:18:18.820750Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-10T11:18:18.820820Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-10T11:18:18.820881Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:18:18.820960Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-10T11:18:18.821022Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-10T11:18:18.821091Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-10T11:18:18.821149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:18:18.821265Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:18.821949Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:18:18.822538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:18.822598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:18.822691Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:18.823663Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:18.823711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:18.825088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:18.876195Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:18.876369Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:18.876918Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7310:5346], server id = [2:7311:5347], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:18.877050Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7310:5346], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:18.878868Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:18.878967Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:18.879128Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:18.879288Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:18.879564Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:18.882130Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7310:5346], server id = [2:7311:5347], tablet id = 72075186224037899 2025-04-10T11:18:18.882190Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:18.882689Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:18.913754Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7329:5365]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:18.913934Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:18.914029Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7329:5365], StatRequests.size() = 1 2025-04-10T11:18:19.029407Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Njg2ZWI3YS1hYzgwYTVjMy1jZGU1ZDc5OC04MDFjZDI4ZQ==, TxId: 2025-04-10T11:18:19.029483Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njg2ZWI3YS1hYzgwYTVjMy1jZGU1ZDc5OC04MDFjZDI4ZQ==, TxId: 2025-04-10T11:18:19.029956Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:19.042222Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7344:5371] 2025-04-10T11:18:19.042406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7344:5371], schemeshard id = 72075186224037897 2025-04-10T11:18:19.042530Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7263:5315], server id = [2:7345:5372], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:19.042591Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7345:5372] 2025-04-10T11:18:19.042668Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7345:5372], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-10T11:18:19.055603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:19.055664Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:19.144552Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7356:5375] 2025-04-10T11:18:19.145363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2803:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:18:19.145426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2803:3226] 2025-04-10T11:18:19.145506Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:18:19.626072Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:19.626181Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:19.637028Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:18:19.637105Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:20.396136Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:20.396207Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:20.396244Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:21.646582Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:21.646740Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:21.646791Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:21.647474Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:21.660424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:21.660835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:21.660903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:21.661342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:21.674587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:21.674816Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-10T11:18:21.675384Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7428:5418], server id = [2:7429:5419], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:21.675508Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7428:5418], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:21.676861Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:21.676966Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:21.677178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:21.677501Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:21.677787Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:21.681183Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7428:5418], server id = [2:7429:5419], tablet id = 72075186224037899 2025-04-10T11:18:21.681232Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:21.681820Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:21.714223Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmM3YTJiMDItYjMyMWIxNmUtOTJmMmUzLTY4M2RlNDA4, TxId: 2025-04-10T11:18:21.714306Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmM3YTJiMDItYjMyMWIxNmUtOTJmMmUzLTY4M2RlNDA4, TxId: 2025-04-10T11:18:21.714892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:21.728554Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:21.728659Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2803:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-04-10T11:15:52.015937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:52.016219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:52.016293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00106b/r3tmp/tmph5XAfu/pdisk_1.dat 2025-04-10T11:15:52.385869Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21131, node 1 2025-04-10T11:15:52.613558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:52.613618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:52.613651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:52.614137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:52.617132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:52.702757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:52.702848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:52.716606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23547 2025-04-10T11:15:53.230186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:56.809136Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:56.850492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:56.850694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:56.880305Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:56.883075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.131729Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132153Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132571Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132665Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132713Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132933Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.132993Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.133040Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.133119Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:57.293826Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:57.293930Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:57.307219Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:57.448272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:57.495762Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:57.495873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:57.536888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:57.538238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:57.538490Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:57.538565Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:57.538627Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:57.538681Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:57.538736Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:57.538779Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:57.539223Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:57.562096Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.562254Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:57.570786Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:57.575326Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:57.575683Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:57.586191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:57.602547Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:57.602616Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:57.602700Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:57.662766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:57.669614Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:57.669746Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:57.835589Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:57.980874Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:58.057193Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:59.067748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.067874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:59.082116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:59.171379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:59.171577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:59.171828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:59.171947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:59.172049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:59.172173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:59.172255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:59.172327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:59.172461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:59.172587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:59.172661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:59.172733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:59.194631Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:59.194721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... :7128:5235], tablet id = 72075186224037894 2025-04-10T11:18:19.196457Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7252:5309] 2025-04-10T11:18:19.196515Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7252:5309] 2025-04-10T11:18:19.196692Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7253:5310], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:18:19.246407Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:18:19.246503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:18:19.247070Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:18:19.247672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:18:19.247925Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-10T11:18:19.247962Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-10T11:18:19.247988Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-10T11:18:19.248016Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-10T11:18:19.248046Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744283899179396 2025-04-10T11:18:19.248076Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-10T11:18:19.248143Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-10T11:18:19.248200Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:18:19.248281Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-10T11:18:19.248351Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-10T11:18:19.248413Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-10T11:18:19.248458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:18:19.248579Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:19.249206Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:18:19.249876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:19.249923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:19.250083Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:19.251109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:19.251177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:19.252466Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:19.314385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:19.314606Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:19.315234Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7300:5341], server id = [2:7301:5342], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:19.315350Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7300:5341], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:19.319102Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:19.319212Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:19.319395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:19.319608Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:19.319893Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:19.322731Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7300:5341], server id = [2:7301:5342], tablet id = 72075186224037899 2025-04-10T11:18:19.322780Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:19.323305Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:19.358163Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7321:5361]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:19.358440Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:19.358493Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7321:5361], StatRequests.size() = 1 2025-04-10T11:18:19.460117Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTU2MzQ1NmQtNGMyNGM2ZTMtNjUxY2E5ODctYjczMmRkNjQ=, TxId: 2025-04-10T11:18:19.460199Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTU2MzQ1NmQtNGMyNGM2ZTMtNjUxY2E5ODctYjczMmRkNjQ=, TxId: 2025-04-10T11:18:19.460778Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:19.473406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7336:5367] 2025-04-10T11:18:19.473554Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7252:5309], server id = [2:7336:5367], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:19.473607Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7336:5367], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:19.473718Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7337:5368] 2025-04-10T11:18:19.473780Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7337:5368], schemeshard id = 72075186224037897 
2025-04-10T11:18:19.486801Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:19.486877Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:19.564145Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7348:5371] 2025-04-10T11:18:19.564777Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2805:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:18:19.564842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2805:3226] 2025-04-10T11:18:19.564915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:18:20.052840Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:20.052925Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:20.782494Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:20.782575Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:20.782619Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:22.030025Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:22.030229Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:22.030340Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:22.031212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:22.044792Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:22.045238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:22.045327Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:22.045785Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:22.059461Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:22.059664Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:22.060269Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7420:5414], server id = [2:7421:5415], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:22.060393Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7420:5414], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:22.061921Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:22.062033Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:22.062235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:22.062454Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:22.062767Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:22.065754Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7420:5414], server id = [2:7421:5415], tablet id = 72075186224037899 2025-04-10T11:18:22.065804Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:22.066356Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:22.088677Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2RkMTY0ZjUtMzhjMDdlYzAtYzYwY2MxZWItYzYyNjJjMGE=, TxId: 2025-04-10T11:18:22.088752Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2RkMTY0ZjUtMzhjMDdlYzAtYzYwY2MxZWItYzYyNjJjMGE=, TxId: 2025-04-10T11:18:22.089402Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:22.124763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:22.124852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2805:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-04-10T11:18:23.648440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:23.648511Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-10T11:18:23.728822Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-04-10T11:18:23.728924Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-10T11:18:23.730434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.730529Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.730581Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.731204Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 
1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-04-10T11:18:23.731242Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-10T11:18:23.731371Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:23.731418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:23.731450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:23.731636Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:23.731729Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.731769Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.731804Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.731937Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:23.731965Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-10T11:18:23.732044Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:23.732081Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:23.732124Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:23.732460Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 
72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:23.732784Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:23.732872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:23.732898Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-10T11:18:23.733243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:23.733267Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:18:23.735402Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-04-10T11:18:23.735453Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-10T11:18:23.735577Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.735610Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.735637Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 
Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.736157Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-04-10T11:18:23.736189Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-10T11:18:23.736269Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:23.736315Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:23.736359Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:23.736399Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.736515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:23.736542Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.736570Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-04-10T11:18:23.736641Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:23.736661Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, 
pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-10T11:18:23.736690Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-10T11:18:23.736734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-10T11:18:23.736780Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-10T11:18:23.736977Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:23.737140Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-10T11:18:23.737316Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-10T11:18:23.737346Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-10T11:18:23.737450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-04-10T11:18:23.737490Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |89.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> GroupWriteTest::TwoTables [GOOD] >> KqpScanLogs::WideCombine-EnabledLogs >> KqpScanSpilling::SelfJoin >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanSpilling::HandleErrorsCorrectly >> GroupWriteTest::WithRead [GOOD] >> KqpScanSpilling::SpillingPragmaParseError >> KqpScanLogs::GraceJoin+EnabledLogs >> KqpScanLogs::GraceJoin-EnabledLogs >> KqpScanLogs::WideCombine+EnabledLogs >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 16632558717568189248 2025-04-10T11:18:18.084104Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:18:18.084214Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:18:18.112120Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:18:18.112192Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:18:18.112286Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:18:18.112329Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:18:18.116489Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-10T11:18:18.116609Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-10T11:18:18.140069Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:18.140195Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:18.145327Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 
recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:18:18.145412Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:18:24.931667Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:18:24.931749Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.931803Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.931837Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:18:24.931865Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.931907Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.931936Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:18:24.931964Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.931993Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.969417Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-10T11:18:24.969518Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-10T11:18:24.969561Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 
72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-10T11:18:24.969602Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-10T11:18:24.969641Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-04-10T11:18:24.969674Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 3484535215814676120 2025-04-10T11:18:18.647290Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:18:18.662643Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:18:18.662700Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:18:18.664378Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-10T11:18:18.675757Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:18.677601Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:18:24.983053Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:18:24.983166Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:24.983228Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-10T11:18:24.983265Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:25.081217Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-10T11:18:25.081337Z 1 
00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-04-10T11:18:23.571655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.571897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.572137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014f9/r3tmp/tmpS5UYOr/pdisk_1.dat 2025-04-10T11:18:23.953754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.000234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.043792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.043883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.055935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.146920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.468658Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-10T11:18:24.468848Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-10T11:18:24.624305Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.155083s, errors=0 2025-04-10T11:18:24.624408Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner [GOOD] >> TPDiskTest::PDiskRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-04-10T11:18:23.755556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.755837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.756045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014fc/r3tmp/tmpi3oZNa/pdisk_1.dat 2025-04-10T11:18:24.110857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.144276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.183457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.183617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.195250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.276908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.596864Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-10T11:18:24.597016Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-10T11:18:24.604763Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-04-10T11:18:24.604841Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:18:24.604887Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:18:24.604931Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:18:24.604986Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:18:24.605017Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-10T11:18:24.610207Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=N2RiNDMwMmItODE0YzMzMy0yODIzNTUzYS1kN2IzOGRlZQ== 2025-04-10T11:18:24.610300Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=NGJlZjJhZGEtOWE2M2M0OGYtYjhiY2VhYTEtZGE3NjMzMg== 2025-04-10T11:18:24.611846Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=MjMzODc1MTQtNGEyNTFhNGYtYjE2ZDliNjktNjgzMGY5ZjM= 2025-04-10T11:18:24.614892Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=YWEzYTVlMmEtYTQ0YmRmMWQtM2UyOGMyMTUtNTI4OWRjMDU= 2025-04-10T11:18:24.614966Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=NGFmNGUwZmQtNmVmYTI2ZmUtYmUxZDE1NTYtNDUzNDU5Nw== 2025-04-10T11:18:24.619866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.620005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.620198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.620441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.620509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.620575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.621047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.630752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:24.681295Z node 1 :TX_PROXY ERROR: Actor# [1:797:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:24.682352Z node 1 :TX_PROXY ERROR: Actor# [1:798:2674] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:24.683299Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:24.684031Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:24.838102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:24.838262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:24.838364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:24.838427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:24.838479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:24.873148Z node 1 :TX_PROXY ERROR: Actor# [1:898:2739] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:25.546981Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1744283905.546916s, errors=0 2025-04-10T11:18:25.547350Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1744283905546 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:18:25.561145Z node 1 :TX_PROXY ERROR: Actor# [1:971:2777] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:25.627676Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1744283905.627619s, errors=0 2025-04-10T11:18:25.628018Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1744283905627 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:18:25.641910Z node 1 :TX_PROXY ERROR: Actor# [1:1022:2799] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:25.706130Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1744283905.706082s, errors=0 2025-04-10T11:18:25.706442Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1744283905706 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:18:25.719578Z node 1 :TX_PROXY ERROR: Actor# [1:1073:2821] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:25.791637Z node 1 :TX_PROXY ERROR: Actor# [1:1123:2842] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:25.808242Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1744283905.808204s, errors=0 2025-04-10T11:18:25.808708Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1744283905808 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:18:25.857514Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1744283905.857464s, errors=0 2025-04-10T11:18:25.857833Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1744283905857 OperationsOK: 4 OperationsError: 0 } 2025-04-10T11:18:25.857886Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.253381s, oks# 20, errors# 0 2025-04-10T11:18:25.858002Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> TPDiskTest::PDiskRestart [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TPDiskTest::CommitDeleteChunks >> TPDiskTest::CommitDeleteChunks [GOOD] >> TPDiskTest::DeviceHaltTooLong >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-04-10T11:15:50.599130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:50.599480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:50.599543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0010a4/r3tmp/tmpgdfE76/pdisk_1.dat 2025-04-10T11:15:50.963099Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15296, node 1 2025-04-10T11:15:51.211904Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:51.211960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:51.211994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:51.212462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:51.215415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:51.300207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:51.300332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:51.313572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1279 2025-04-10T11:15:51.831101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:54.839392Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:54.888724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:54.888923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:54.920566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:54.923120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.162391Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.163099Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.163721Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.163867Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.164116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.164225Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.164348Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.164434Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.164517Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:55.351235Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.351358Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.364676Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:55.524665Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:55.576691Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:55.576798Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:55.616434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:55.616692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:55.616922Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:55.616997Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:55.617050Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:55.617117Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:55.617172Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:55.617225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:55.617838Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:55.640752Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.640863Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1873:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:55.647663Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2607] 2025-04-10T11:15:55.655272Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1910:2617] 2025-04-10T11:15:55.655651Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1910:2617], schemeshard id = 72075186224037897 2025-04-10T11:15:55.663399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:15:55.685214Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:55.685260Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:55.685338Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:15:55.697951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:55.705777Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:55.705920Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:55.864779Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:56.045409Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:56.112657Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:15:57.125839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2239:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.126035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:15:57.205015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:15:57.329120Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:15:57.329354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:15:57.329648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:15:57.329781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:15:57.329974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:15:57.330122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:15:57.330272Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:15:57.330406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:15:57.330533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:15:57.330649Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:15:57.330805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:15:57.330955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2852];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:15:57.360556Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:15:57.360670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... cs` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:23.145012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6985:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:23.145086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6995:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:23.145149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:23.156096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:18:23.214481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6999:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:23.432831Z node 2 :TX_PROXY ERROR: Actor# [2:7095:5218] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:23.511942Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7124:5233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:23.512204Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:23.512301Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7126:5235] 2025-04-10T11:18:23.512368Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7126:5235] 2025-04-10T11:18:23.512647Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7127:5236] 2025-04-10T11:18:23.512790Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7127:5236], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:23.512871Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:23.513013Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7126:5235], server id = [2:7127:5236], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:23.513106Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:23.513202Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7124:5233], StatRequests.size() = 1 2025-04-10T11:18:23.658370Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmM4NDIxZC1iZGU1NDNkNC1mZTRhMGQyOS0zMWMxMzI5MQ==, TxId: 2025-04-10T11:18:23.658492Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmM4NDIxZC1iZGU1NDNkNC1mZTRhMGQyOS0zMWMxMzI5MQ==, TxId: 2025-04-10T11:18:23.659129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:23.673953Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:23.674043Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:23.717822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:23.717930Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:23.804954Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7126:5235], schemeshard count = 1 2025-04-10T11:18:24.854613Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:24.854726Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:24.858198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:24.875017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:24.875497Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:24.875562Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-10T11:18:24.900484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:24.922763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-10T11:18:24.924990Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7205:5284] 2025-04-10T11:18:24.925609Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-04-10T11:18:24.926917Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7207:5285]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
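Not part of the captured output: the traversal results summarized in the dump above are persisted into the `.metadata/_statistics` table that the SELECT/UPSERT statements logged elsewhere in this run operate on. A minimal YQL sketch for reading those rows back, assuming the column set taken from the logged UPSERT (owner_id, local_path_id, stat_type, column_tag, data); the example parameter values are illustrative, taken from this run's dump:

    -- Sketch only: inspect persisted column statistics for one table.
    DECLARE $owner_id AS Uint64;       -- schemeshard id, e.g. 72075186224037897 in this run
    DECLARE $local_path_id AS Uint64;  -- table's local path id, e.g. 4 in this run
    SELECT owner_id, local_path_id, stat_type, column_tag, data
    FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;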
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-10T11:18:24.928154Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:24.928267Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:24.942122Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:26.251155Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:26.251302Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:26.251364Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:26.251960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:26.265978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:26.266513Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:26.266600Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:26.267538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:26.299354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:26.299587Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:26.300250Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7257:5314], server id = [2:7258:5315], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:26.300406Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7257:5314], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:26.304765Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:26.304860Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:26.305070Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:26.305231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:26.305473Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7257:5314], server id = [2:7258:5315], tablet id = 72075186224037899 2025-04-10T11:18:26.305516Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:26.305690Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:26.309203Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:26.345862Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7278:5334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:26.346168Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:26.346217Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7278:5334], StatRequests.size() = 1 2025-04-10T11:18:26.466642Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-10T11:18:24.000000Z 2025-04-10T11:18:26.467319Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmIwZWE1MmUtZmUzMWM0NTEtZGNjOWYxNWMtZjc5NzYwYQ==, TxId: 2025-04-10T11:18:26.467404Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmIwZWE1MmUtZmUzMWM0NTEtZGNjOWYxNWMtZjc5NzYwYQ==, TxId: 2025-04-10T11:18:26.468061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:26.482966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:26.483051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:952:2756] 2025-04-10T11:18:26.484419Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7295:5342] 2025-04-10T11:18:26.485026Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION
>> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD]
>> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD]
>> TraverseColumnShard::TraverseServerlessColumnTable [GOOD]
>> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD]
Test command err:
2025-04-10T11:18:27.528655Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-10T11:18:27.532863Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-10T11:18:27.539211Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-10T11:18:27.544650Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-10T11:18:27.546273Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-10T11:18:27.549134Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0
Sending TEvPut
Sending TEvGet
Sending TEvVGet
Sending TEvPut
Sending TEvGet
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD]
>> ReadLoad::ShouldReadIterateMoreThanRows [GOOD]
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource
>> TExternalDataSourceTest::ReadOnlyMode
>> TExternalDataSourceTest::RemovingReferencesFromDataSources
>> TExternalDataSourceTest::SchemeErrors
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet
>> TExternalDataSourceTest::DropTableTwice
>> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD]
Test command err: 2025-04-10T11:18:23.614342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.614562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.614768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001503/r3tmp/tmpBLbQHe/pdisk_1.dat 2025-04-10T11:18:23.979471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.013944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.051496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.051594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.063009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.146458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.479151Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:18:24.479396Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-10T11:18:24.621229Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.141390s, errors=0 2025-04-10T11:18:24.621337Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-04-10T11:18:28.077643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:28.077871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:18:28.078077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001503/r3tmp/tmpDVetHT/pdisk_1.dat 2025-04-10T11:18:28.381201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:28.405916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:28.442785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:28.442902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:28.454478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:28.536649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:28.795377Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:18:28.795515Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-10T11:18:28.892810Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.096923s, errors=0 2025-04-10T11:18:28.892910Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::CreateExternalDataSource >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] |89.6%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-04-10T11:16:01.962601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:01.962933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:01.962991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007f8/r3tmp/tmpgxob8x/pdisk_1.dat 2025-04-10T11:16:02.270349Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29392, node 1 2025-04-10T11:16:02.483156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:02.483204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:02.483241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:02.483636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:02.489740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:02.570663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:02.570813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:02.583804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12683 2025-04-10T11:16:03.054496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:05.781454Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:05.812046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:05.812201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:05.840167Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:05.842335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:06.067139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.067785Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.068445Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.068603Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.068691Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.069005Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.069114Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.069199Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.069284Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:06.230703Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:06.230867Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:06.243710Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:06.378339Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:06.428931Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:06.429048Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:06.468300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:06.469167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:06.469358Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:06.469411Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:06.469453Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:06.469490Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:06.469538Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:06.469574Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:06.469983Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:06.492712Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:06.492849Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:06.501172Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:06.505749Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:06.506051Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:06.516789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:16:06.535165Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:06.535243Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:06.535349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:16:06.550420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:06.556367Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:06.556494Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:06.711417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:06.904145Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:07.001429Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:07.800267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:16:08.540101Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:08.680948Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:16:08.681012Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:08.681115Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2598:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:08.683025Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2600:2948] 2025-04-10T11:16:08.683191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2600:2948], schemeshard id = 72075186224037899 2025-04-10T11:16:09.948192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2732:3252], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:09.948337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:09.969035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:16:10.215395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:10.215684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:10.216002Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:10.216157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:10.216296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:10.216416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:10.216554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:10.216691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:10.216828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3081];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11 ... 025-04-10T11:18:25.815559Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9526:7170], schemeshard count = 1 2025-04-10T11:18:27.178968Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:18:27.179048Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 218.000000s, at schemeshard: 72075186224037899 2025-04-10T11:18:27.179313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-10T11:18:27.195866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:27.955267Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:27.955332Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:18:27.955376Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-10T11:18:27.955426Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:18:27.960288Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:27.977916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:27.978607Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:27.978736Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:27.979796Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:27.994051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:27.994305Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:27.995114Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9667:7252], server id = [2:9672:7257], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:27.995499Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9667:7252], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:27.997054Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9668:7253], server id = [2:9673:7258], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:27.997131Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9668:7253], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:27.997283Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9669:7254], server id = [2:9674:7259], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:27.997349Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9669:7254], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:27.999329Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9670:7255], server id = [2:9675:7260], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:27.999403Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9670:7255], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.000331Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9671:7256], server id = [2:9677:7262], tablet id = 72075186224037909, status = OK 2025-04-10T11:18:28.000401Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9671:7256], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.006246Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:28.007115Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9667:7252], server id = [2:9672:7257], tablet id = 72075186224037905 2025-04-10T11:18:28.007172Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.008155Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:28.008991Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9668:7253], server id = [2:9673:7258], tablet id = 72075186224037906 2025-04-10T11:18:28.009027Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.009740Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:28.010009Z node 2 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:28.010695Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9691:7272], server id = [2:9694:7274], tablet id = 72075186224037910, status = OK 2025-04-10T11:18:28.010786Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9691:7272], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.011031Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9669:7254], server id = [2:9674:7259], tablet id = 72075186224037907 2025-04-10T11:18:28.011065Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.011305Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9670:7255], server id = [2:9675:7260], tablet id = 72075186224037908 2025-04-10T11:18:28.011337Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.012623Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9693:7273], server id = [2:9695:7275], tablet id = 72075186224037911, status = OK 2025-04-10T11:18:28.012711Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9693:7273], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.013684Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-10T11:18:28.014053Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9696:7276], server id = [2:9698:7278], tablet id = 72075186224037912, status = OK 2025-04-10T11:18:28.014124Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9696:7276], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.015136Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9697:7277], server id = [2:9699:7279], tablet id = 72075186224037913, status = OK 2025-04-10T11:18:28.015203Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9697:7277], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.015961Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9671:7256], server id = [2:9677:7262], tablet id = 72075186224037909 2025-04-10T11:18:28.015997Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.017659Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9703:7283], server id = [2:9705:7285], tablet id = 72075186224037914, status = OK 2025-04-10T11:18:28.017732Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9703:7283], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:28.020749Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-10T11:18:28.021771Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9691:7272], server id = [2:9694:7274], tablet id = 72075186224037910 2025-04-10T11:18:28.021805Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.022558Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-10T11:18:28.023310Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9693:7273], server id = [2:9695:7275], tablet id = 72075186224037911 2025-04-10T11:18:28.023336Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.024312Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-10T11:18:28.024999Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:9696:7276], server id = [2:9698:7278], tablet id = 72075186224037912 2025-04-10T11:18:28.025030Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.025390Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-10T11:18:28.025833Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9697:7277], server id = [2:9699:7279], tablet id = 72075186224037913 2025-04-10T11:18:28.025871Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.025966Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-10T11:18:28.026024Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:28.026275Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:28.026513Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:28.026849Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:18:28.029420Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9703:7283], server id = [2:9705:7285], tablet id = 72075186224037914 2025-04-10T11:18:28.029459Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.030069Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:28.070572Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9733:7308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:28.070869Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:28.070926Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9733:7308], StatRequests.size() = 1 2025-04-10T11:18:28.200029Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzE5MzRlMGEtYTZlMzBiOGMtOTM2MmVjZGEtMzZhMWExMTU=, TxId: 2025-04-10T11:18:28.200107Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzE5MzRlMGEtYTZlMzBiOGMtOTM2MmVjZGEtMzZhMWExMTU=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:28.200872Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9746:7314]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:28.201106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:28.202125Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:28.202216Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:28.205489Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:28.205556Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:18:28.205612Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:28.212031Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> UpsertLoad::ShouldDropCreateTable [GOOD] >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-04-10T11:18:29.200769Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:18:29.203385Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:18:29.209536Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:18:29.214949Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:18:29.216848Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:18:29.219169Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: 2025-04-10T11:15:57.853447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:57.853869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:57.853955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00086d/r3tmp/tmpBGCzJn/pdisk_1.dat 2025-04-10T11:15:58.196258Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2240, node 1 2025-04-10T11:15:58.422104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:58.422176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:58.422208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:58.422663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:58.425423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:58.508542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:58.508687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:58.522001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20978 2025-04-10T11:15:59.010813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:01.826863Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:01.862069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:01.862278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:01.890113Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:01.892299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:02.133603Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.134211Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.134856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135017Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135121Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135407Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135519Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135608Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.135698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:02.297217Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:02.297371Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:02.310694Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:02.426168Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:02.471241Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:02.471369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:02.507810Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:02.509005Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:02.509246Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:02.509313Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:02.509375Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:02.509449Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:02.509508Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:02.509564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:02.510064Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:02.533101Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:02.533215Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:02.539540Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:02.542879Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:02.543195Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:02.551563Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:02.565569Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:02.565625Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:02.565678Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:02.580148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:02.592018Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:02.592141Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:02.745430Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:02.921532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:03.018349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:03.963525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:03.963658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:03.982650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:04.300636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:04.300821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:04.301042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:04.301125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:04.301195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:04.301289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:04.301390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:04.301479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:04.301569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:04.301654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:04.301731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:04.301805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:04.321233Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:04.321336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=T ... ATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8760:6643] 2025-04-10T11:18:26.179995Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8760:6643], schemeshard id = 72075186224037897 2025-04-10T11:18:26.205465Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:26.205537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:26.261623Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8764:6646] 2025-04-10T11:18:26.262384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4085:3320] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:18:26.262465Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:4085:3320] 2025-04-10T11:18:26.262532Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:18:26.706092Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:18:26.706232Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:26.718656Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:18:26.718731Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:27.389049Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:27.389139Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:27.389191Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:28.462371Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:28.462561Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:28.462625Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:28.463645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:28.478301Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:28.478834Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:28.478930Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:28.479499Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:28.493811Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:28.494024Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-10T11:18:28.495132Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8841:6687], server id = [2:8846:6692], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:28.495277Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8841:6687], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.495603Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8842:6688], server id = [2:8847:6693], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:28.495658Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8842:6688], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.496804Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8843:6689], server id = [2:8848:6694], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:28.496896Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8843:6689], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.497748Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8844:6690], server id = [2:8849:6695], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:28.497816Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8844:6690], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.498698Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8845:6691], server id = [2:8850:6696], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:28.498798Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8845:6691], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.500146Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:28.501075Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:28.501680Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8841:6687], server id = [2:8846:6692], tablet id = 72075186224037899 2025-04-10T11:18:28.501738Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.502259Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8842:6688], server id = [2:8847:6693], tablet id = 72075186224037900 2025-04-10T11:18:28.502291Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.502823Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:28.503079Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:28.503440Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:28.503710Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8843:6689], server id = [2:8848:6694], tablet id = 72075186224037901 2025-04-10T11:18:28.503743Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.504120Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8855:6701], server id = [2:8859:6705], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:28.504217Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8855:6701], 
path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.504303Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8857:6703], server id = [2:8858:6704], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:28.504373Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8857:6703], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.504753Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8844:6690], server id = [2:8849:6695], tablet id = 72075186224037902 2025-04-10T11:18:28.504784Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.505867Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8845:6691], server id = [2:8850:6696], tablet id = 72075186224037903 2025-04-10T11:18:28.505899Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.506181Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8860:6706], server id = [2:8864:6710], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:28.506266Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8860:6706], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.506738Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8861:6707], server id = [2:8863:6709], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:28.506801Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8861:6707], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.507143Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8862:6708], server id = [2:8865:6711], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:28.507200Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8862:6708], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.508161Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:28.509112Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:28.509634Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8857:6703], server id = [2:8858:6704], tablet id = 72075186224037905 2025-04-10T11:18:28.509669Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.510238Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8855:6701], server id = [2:8859:6705], tablet id = 72075186224037904 2025-04-10T11:18:28.510274Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.510600Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:28.510801Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:28.511004Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:28.511063Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:28.511278Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8861:6707], server id = [2:8863:6709], tablet id = 72075186224037907 2025-04-10T11:18:28.511314Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.511487Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:28.511735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
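Note on the exchange above: the aggregator opens one pipe per column-shard tablet, sends a TEvStatisticsRequest to each, records every TEvStatisticsResponse, and deliberately ignores two kinds of stragglers visible in the trace: EvClientDestroyed for tablets that already answered ("Skip EvClientDestroyed") and events carrying a stale round number ("Event round N is different from the current M"). A minimal sketch of that bookkeeping follows; all types and payloads are hypothetical simplifications made up for illustration, not YDB's actor code.

    #include <cstdint>
    #include <initializer_list>
    #include <iostream>
    #include <map>
    #include <set>

    // Toy model of the aggregator's per-round bookkeeping. Types and
    // payloads are hypothetical simplifications of what the trace shows.
    struct TAggregationRound {
        uint64_t Round = 0;
        std::set<uint64_t> Pending;          // tablets still expected to answer
        std::map<uint64_t, uint64_t> Stats;  // tablet id -> toy payload

        void Start(uint64_t round, std::initializer_list<uint64_t> tablets) {
            Round = round;
            Pending = tablets;
            Stats.clear();
        }

        void OnResponse(uint64_t round, uint64_t tabletId, uint64_t payload) {
            if (round != Round) {
                return;  // "Event round N is different from the current M"
            }
            Pending.erase(tabletId);
            Stats[tabletId] = payload;
        }

        void OnClientDestroyed(uint64_t tabletId) {
            if (Pending.count(tabletId) == 0) {
                return;  // "Skip EvClientDestroyed": tablet already answered
            }
            // A real implementation would re-resolve the tablet and retry here.
        }

        bool Done() const { return Pending.empty(); }
    };

    int main() {
        TAggregationRound r;
        r.Start(4, {72075186224037899ull, 72075186224037900ull});
        r.OnResponse(4, 72075186224037899ull, 100);
        r.OnClientDestroyed(72075186224037899ull);  // skipped: already answered
        r.OnResponse(2, 72075186224037900ull, 50);  // stale round 2, dropped
        r.OnResponse(4, 72075186224037900ull, 50);
        std::cout << (r.Done() ? "aggregate and reply" : "still waiting") << "\n";
    }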
2025-04-10T11:18:28.512051Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:28.515620Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8860:6706], server id = [2:8864:6710], tablet id = 72075186224037906 2025-04-10T11:18:28.515670Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.516233Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8862:6708], server id = [2:8865:6711], tablet id = 72075186224037908 2025-04-10T11:18:28.516268Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.516704Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:28.541893Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjFjZTQ5NGYtNDc5MWQwZjAtOTBiYjkxNS1iZmNhMjNiYg==, TxId: 2025-04-10T11:18:28.541983Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjFjZTQ5NGYtNDc5MWQwZjAtOTBiYjkxNS1iZmNhMjNiYg==, TxId: 2025-04-10T11:18:28.542968Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:28.557641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:28.557740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:4085:3320] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::DropExternalDataSource >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 25720, MsgBus: 14733 2025-04-10T11:18:04.864759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642237379822526:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:04.864814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e70/r3tmp/tmpNXEE9h/pdisk_1.dat 2025-04-10T11:18:05.150366Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25720, node 1 2025-04-10T11:18:05.219244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:05.219269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:05.219278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:05.219397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:05.233929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:05.234053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:05.235845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14733 TClient is connected to server localhost:14733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
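Note on what this test asserts: KqpSnapshotIsolation::TConflictWriteOlap fails twice further down in this block. First, a SnapshotRW transaction over a row table is rejected with PRECONDITION_FAILED ("SnapshotRW can only be used with olap tables") where SUCCESS was expected. Second, a conflicting concurrent write is expected to end in ABORTED but commits with SUCCESS. The second expectation is first-committer-wins under snapshot isolation; the toy in-memory model below (hypothetical types, made up for illustration) shows the invariant the test asserts, not YDB's implementation.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    // Toy model of first-committer-wins under snapshot isolation: a commit
    // aborts if any key it writes was committed by another transaction
    // after this transaction's snapshot was taken. Hypothetical types only.
    struct TStore {
        std::map<std::string, uint64_t> CommitTs;  // key -> last commit timestamp
        uint64_t Now = 0;

        struct TTx {
            uint64_t Snapshot;
            std::map<std::string, int64_t> Writes;
        };

        TTx Begin() const { return TTx{Now, {}}; }

        bool Commit(const TTx& tx) {
            for (const auto& kv : tx.Writes) {
                auto it = CommitTs.find(kv.first);
                if (it != CommitTs.end() && it->second > tx.Snapshot) {
                    return false;  // write-write conflict -> ABORTED expected
                }
            }
            ++Now;
            for (const auto& kv : tx.Writes) {
                CommitTs[kv.first] = Now;
            }
            return true;
        }
    };

    int main() {
        TStore store;
        auto tx1 = store.Begin();
        auto tx2 = store.Begin();  // same snapshot as tx1
        tx1.Writes["k1"] = 10;
        tx2.Writes["k1"] = 20;     // conflicting write to the same key
        std::cout << "tx1: " << (store.Commit(tx1) ? "SUCCESS" : "ABORTED") << "\n";
        std::cout << "tx2: " << (store.Commit(tx2) ? "SUCCESS" : "ABORTED") << "\n";
        // Expected: tx1 SUCCESS, tx2 ABORTED - the invariant asserted at
        // kqp_snapshot_isolation_ut.cpp:92, which the trace shows failing.
    }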
2025-04-10T11:18:05.672620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:06.967447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642245969757791:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.967531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642245969757782:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.967663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:06.971127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:06.978701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642245969757796:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:18:07.070290Z node 1 :TX_PROXY ERROR: Actor# [1:7491642250264725143:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:07.279501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:07.379066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:08.078023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:09.122843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDE3YjYxYjMtNTc3ZjNlOGYtYjA0ZmM2MjQtMzRmNzBmZjM=, ActorId: [1:7491642258854668178:2968], ActorState: ExecuteState, TraceId: 01jrfpqvcy5eefqa47t5v9354a, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T11:18:09.864954Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642237379822526:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:09.865034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FE9485DED8F 18. ??:0: ?? @ 0x7FE9485DEE3F 19. ??:0: ?? @ 0x1656B028 Trying to start YDB, gRPC: 8328, MsgBus: 9813 2025-04-10T11:18:12.949205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642272024476679:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:12.949380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e70/r3tmp/tmpUyVqlf/pdisk_1.dat 2025-04-10T11:18:13.027260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8328, node 2 2025-04-10T11:18:13.071528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:13.071621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:13.073095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:13.080796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:13.080812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:13.080817Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:13.080932Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9813 TClient is connected to server localhost:9813 WaitRootIsUp 'Root'... 
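Note on reading the "with diff:" output printed by the failed assertions above and below: the framework interleaves the characters common to the expected and actual status strings with "(expected|actual)" groups for the divergent runs, e.g. "(SUCC|ABORT)E(SS|D)" for SUCCESS vs ABORTED. The sketch below reconstructs an equivalent grouping with a longest-common-subsequence walk; it is an illustration, not the unittest framework's own formatter, and may split ties slightly differently (it prints "(PRE|SU)C(ONDITION_FAIL|C)E(D|SS)" where the log shows "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)").

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    // Inline diff of two strings via longest common subsequence: common
    // characters are emitted bare, divergent runs as "(left|right)".
    std::string InlineDiff(const std::string& a, const std::string& b) {
        const size_t n = a.size(), m = b.size();
        // lcs[i][j] = LCS length of a[i..] and b[j..]
        std::vector<std::vector<size_t>> lcs(n + 1, std::vector<size_t>(m + 1, 0));
        for (size_t i = n; i-- > 0; ) {
            for (size_t j = m; j-- > 0; ) {
                lcs[i][j] = (a[i] == b[j])
                    ? lcs[i + 1][j + 1] + 1
                    : std::max(lcs[i + 1][j], lcs[i][j + 1]);
            }
        }
        std::string out;
        size_t i = 0, j = 0;
        while (i < n || j < m) {
            if (i < n && j < m && a[i] == b[j]) {
                out += a[i]; ++i; ++j;  // common character
                continue;
            }
            std::string left, right;    // divergent runs from each side
            while ((i < n || j < m) && !(i < n && j < m && a[i] == b[j])) {
                if (i < n && (j == m || lcs[i + 1][j] >= lcs[i][j + 1])) {
                    left += a[i++];
                } else {
                    right += b[j++];
                }
            }
            out += "(" + left + "|" + right + ")";
        }
        return out;
    }

    int main() {
        // Pairs taken from the assertion messages in this block:
        std::cout << InlineDiff("PRECONDITION_FAILED", "SUCCESS") << "\n";
        std::cout << InlineDiff("SUCCESS", "ABORTED") << "\n";
        // Prints "(PRE|SU)C(ONDITION_FAIL|C)E(D|SS)" and "(SUCC|ABORT)E(SS|D)".
    }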
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.439318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.639522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642284909379208:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.639589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642284909379223:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.639645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool defa ... :25.903451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7491642293499315809:2574];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.903596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7491642293499315809:2574];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.903865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642289204348262:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.903969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.904037Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7491642289204348262:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.904139Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037958;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715669; 2025-04-10T11:18:25.904196Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7491642289204348319:2537];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.904290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.904341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:7491642289204348319:2537];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.904384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.905010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7491642289204348398:2552];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.905190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[2:7491642289204348398:2552];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.905190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.905447Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037950;self_id=[2:7491642289204348250:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037950;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.905589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7491642289204348250:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037950;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.906452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7491642293499315907:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037917;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.906574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7491642293499315807:2573];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.906733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7491642293499315807:2573];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.906765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7491642293499315907:2591];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037917;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.906891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[2:7491642289204348329:2542];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037949;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[2:7491642289204348329:2542];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037949;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7491642289204348254:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7491642293499315896:2584];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037905;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907593Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7491642289204348254:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7491642293499315896:2584];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.907925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7491642289204348210:2503];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.000889s; 2025-04-10T11:18:25.908010Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;self_id=[2:7491642293499315794:2564];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.000945s; 2025-04-10T11:18:25.908059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642289204348311:2533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908092Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642293499315794:2564];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7491642293499315794:2564];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908277Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[2:7491642289204348311:2533];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037951;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642289204348240:2505];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7491642289204348210:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7491642289204348240:2505];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.908659Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7491642289204348210:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.910600Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037964;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715669; 2025-04-10T11:18:25.910747Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715669; 2025-04-10T11:18:25.912325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:18:25.912325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FE9485DED8F 18. ??:0: ?? @ 0x7FE9485DEE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.972867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.972965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.973013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.973052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.973099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.973134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.973240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.973367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-04-10T11:18:30.973711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.067252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.067352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.090645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.090917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.091049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.101852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.102109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.102962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.104065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.109645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.118097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.118199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.118265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.118385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.126216Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.283031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.283289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.283492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.283753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-04-10T11:18:31.283840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.286992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.287076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.287144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.287193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.289243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.289330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.289377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.291428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.291599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.295925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.298096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.298330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.299451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.299592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.299664Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.300108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.300179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.300373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.300480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.303762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.303819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.304094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.304149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.304526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.304583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.304696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.304737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.304783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.304820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.304893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.304947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.304990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.305030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.305108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.305164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.305210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.307495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
e DataStream was not found" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.391674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-04-10T11:18:31.394794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.395177Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-04-10T11:18:31.395300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-10T11:18:31.395527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-04-10T11:18:31.399526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.399793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-10T11:18:31.403230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.403576Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-10T11:18:31.403671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-10T11:18:31.403842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-10T11:18:31.406441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.406694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-10T11:18:31.410034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.410593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-10T11:18:31.410730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-10T11:18:31.410913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-10T11:18:31.413852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.414102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-10T11:18:31.417710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.417983Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-04-10T11:18:31.418107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-04-10T11:18:31.418251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-10T11:18:31.421060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.421322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 >> KqpScanSpilling::SelfJoin [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.970041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
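The CreateExternalDataSource rejections above (txIds 125-129) encode the validation rules the schemeshard applies: an Auth block is mandatory ("Authorization method isn't specified"), Location and Installation are each capped at 1000 characters, and the path leaf must be non-empty. Below is a minimal YQL sketch that passes all of these checks; it assumes the documented CREATE EXTERNAL DATA SOURCE syntax, reuses the name, source type, and bucket URL from the test's own propose messages, and maps AUTH_METHOD = "NONE" to the Auth { None { } } block seen in the log:

    CREATE EXTERNAL DATA SOURCE MyExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",               -- must appear in AvailableExternalDataSources
        LOCATION = "https://s3.cloud.net/my_bucket", -- at most 1000 characters, same limit as INSTALLATION
        AUTH_METHOD = "NONE"                         -- omitting auth fails with StatusSchemeError, as in txId 126
    );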
2025-04-10T11:18:30.970257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.974562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.072984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.073060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.093920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.094227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.094387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.102587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.102810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.103476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.106027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.108624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.117943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.126841Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 
2025-04-10T11:18:31.239806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.241548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.243439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.244668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.244800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.248192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.248349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.248578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.248734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.248789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.248859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.251140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.251193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.251233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.253460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.253516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.253567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.253639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.258577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.260849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.261848Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.263044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.264574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.264659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.264891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.265014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.267885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.267939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.268174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.268231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.268587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.268652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.268748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.268798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.268844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.268884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.268925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.268966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.269000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.269035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.269098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.269150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.269181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.270997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:31.271115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:31.271162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:18:31.271224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:18:31.271272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.271363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:18:31.274529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:18:31.276766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:18:31.282397Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] Bootstrap 2025-04-10T11:18:31.303303Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] Become StateWork (SchemeCache [1:273:2264]) 2025-04-10T11:18:31.305898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.307633Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-10T11:18:31.307732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-10T11:18:31.307784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-10T11:18:31.309677Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 
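The TReject Propose above (opId 101:0) shows ReplaceIfExists: true being refused while the EnableReplaceIfExistsForExternalEntities feature flag is 0. At the query level this corresponds to the OR REPLACE form; the following is a hedged sketch, assuming YQL's CREATE OR REPLACE spelling for external entities, which succeeds only on clusters where that flag is enabled:

    -- With EnableReplaceIfExistsForExternalEntities off, the schemeshard answers
    -- StatusPreconditionFailed, matching the log above; with the flag on, an
    -- existing source of the same name is replaced instead of causing an error.
    CREATE OR REPLACE EXTERNAL DATA SOURCE MyExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://s3.cloud.net/my_bucket",
        AUTH_METHOD = "NONE"
    );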
2025-04-10T11:18:31.313675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.314575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-04-10T11:18:31.315029Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:31.315274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:18:31.315317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:18:31.316892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:31.317002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:18:31.317042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:283:2274] TestWaitNotification: OK eventTxId 101 2025-04-10T11:18:31.317498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:31.317715Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 234us result status StatusPathDoesNotExist 2025-04-10T11:18:31.317926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> TExternalDataSourceTest::DropExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 
72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.970337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.077377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.077443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.096268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.096596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.096770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.110847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.111037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.111755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.112017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.115593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.118097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.126192Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.287870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.288097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.288291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.288534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.288623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.291386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.291523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.291563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.293495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.293547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.293584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.295355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.295405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.295444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:18:31.295494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.298871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.300853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.301048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.302025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.302141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.302220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.302531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.302609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.302756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.302832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.304988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.305026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.305232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.305295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.305618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.305668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.305760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.305795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.305830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T11:18:31.305874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.305935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.305978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.306011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.306044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.306109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.306161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.306193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.307709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... T_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.727247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:18:31.727326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:18:31.729714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-04-10T11:18:31.729872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-10T11:18:31.730114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.730173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.730403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:18:31.730514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.730550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:454:2412], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-10T11:18:31.730583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:454:2412], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-10T11:18:31.730949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.730995Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.731035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-04-10T11:18:31.731157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.731638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.731732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.731759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-10T11:18:31.731794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-10T11:18:31.731834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:18:31.732765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.732833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.732857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-10T11:18:31.732906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-04-10T11:18:31.732936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:18:31.732998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-04-10T11:18:31.735221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-04-10T11:18:31.735393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-04-10T11:18:31.737471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-10T11:18:31.738843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.738967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.739021Z node 1 
:FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-10T11:18:31.739149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-04-10T11:18:31.739347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:18:31.739414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:18:31.739838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Erasing txId 128 2025-04-10T11:18:31.741588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.741627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.741775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:18:31.741914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.741953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:454:2412], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-10T11:18:31.741990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:454:2412], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-10T11:18:31.742066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.742107Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-04-10T11:18:31.742257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-10T11:18:31.742293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-10T11:18:31.742335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-10T11:18:31.742368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-10T11:18:31.742403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-04-10T11:18:31.742450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-10T11:18:31.742488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-04-10T11:18:31.742520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-04-10T11:18:31.742585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:18:31.742625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-04-10T11:18:31.742659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-10T11:18:31.742689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, 
LocalPathId: 4], 3 2025-04-10T11:18:31.743643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.743746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.743783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-04-10T11:18:31.743823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T11:18:31.743863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:18:31.744930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.745015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-10T11:18:31.745046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-04-10T11:18:31.745074Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-10T11:18:31.745101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:18:31.745162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-04-10T11:18:31.750091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-10T11:18:31.750548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001802/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk9 Trying to start YDB, gRPC: 2828, MsgBus: 21236 2025-04-10T11:18:25.493835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642327797523261:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:25.493965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001802/r3tmp/tmpSPMg5F/pdisk_1.dat 2025-04-10T11:18:25.875733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:25.932574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:18:25.932690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2828, node 1 2025-04-10T11:18:25.935748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:26.059628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:26.059660Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.059667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.059788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21236 TClient is connected to server localhost:21236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:26.820656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:26.850368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:27.020819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.189963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.269879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:28.825515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642340682426922:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:28.825643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.167331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.194602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.219244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.243496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.267619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.295111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.346605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642344977394732:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.346678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.346774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642344977394737:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.350261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.358490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642344977394739:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
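
The NOT_FOUND warnings above are expected startup noise: the workload service probes for the `default` resource pool before TPoolCreatorActor has finished creating `.metadata/workload_manager/pools/default`, and the probes stop failing once the pool exists. Such a pool can also be created explicitly. A minimal YQL sketch, assuming the workload-manager DDL with illustrative settings (the pool name and both parameters are placeholders, not values from this log):

    -- Sketch: creating a resource pool by hand; the pool in the log is
    -- named "default" and is auto-created by TPoolCreatorActor instead.
    -- CONCURRENT_QUERY_LIMIT and QUEUE_SIZE values are assumed examples.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
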
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:29.457802Z node 1 :TX_PROXY ERROR: Actor# [1:7491642344977394794:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:30.494670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642327797523261:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:30.494782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:30.587330Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642349272362393:2500], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-04-10T11:18:30.587653Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWUzMmU5MjYtNzE0Y2UzZjktMzZmMjM0MWYtYzJhYzI1OA==, ActorId: [1:7491642349272362386:2496], ActorState: ExecuteState, TraceId: 01jrfprg995tqfktd36hrs0p8v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-04-10T11:18:23.478331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.478548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.478787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001502/r3tmp/tmpQUbGcu/pdisk_1.dat 2025-04-10T11:18:23.959005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.008767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.048239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.048360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.059972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.146507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.468247Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-04-10T11:18:24.469667Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-10T11:18:24.524997Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.055055s, errors=0 2025-04-10T11:18:24.525557Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-10T11:18:24.525702Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:747:2629] with id# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-10T11:18:24.526900Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-10T11:18:24.527823Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:750:2632] 2025-04-10T11:18:24.527961Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-04-10T11:18:24.528013Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-10T11:18:24.528839Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2025-04-10T11:18:24.536360Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.007441s, read# 1000 2025-04-10T11:18:24.536870Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 } 2025-04-10T11:18:24.537043Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:753:2635] 2025-04-10T11:18:24.537104Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-04-10T11:18:24.537144Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-04-10T11:18:24.537453Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:24.772587Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} finished in 0.235075s, read# 1000 2025-04-10T11:18:24.772726Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 235 OperationsOK: 1000 OperationsError: 0 } 2025-04-10T11:18:24.772830Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:756:2638] 2025-04-10T11:18:24.772878Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-04-10T11:18:24.772905Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-04-10T11:18:24.773126Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:24.840305Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} finished in 0.067128s, read# 1000 2025-04-10T11:18:24.840479Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 67 OperationsOK: 1000 OperationsError: 0 } 2025-04-10T11:18:24.840636Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:759:2641] 2025-04-10T11:18:24.840714Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called, sample# 1000 2025-04-10T11:18:24.840754Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-04-10T11:18:24.841040Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:24.844918Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} finished in 0.003122s, sampled# 1000, iter finished# 1, oks# 1000 2025-04-10T11:18:24.845091Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 1000 2025-04-10T11:18:24.845290Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} started read actor with id# [1:762:2644] 2025-04-10T11:18:24.845386Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called, will read keys# 1000 2025-04-10T11:18:25.237852Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-10T11:18:25.238103Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 392 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 31\n" } 2025-04-10T11:18:25.238334Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} finished in 0.712440s with report: { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 235 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 67 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 392 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 31\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-10T11:18:25.238829Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:747:2629] with tag# 3 2025-04-10T11:18:28.550036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:28.550235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:18:28.550468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001502/r3tmp/tmpsqzbnL/pdisk_1.dat 2025-04-10T11:18:28.822857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:28.847827Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:28.884905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:28.885049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:28.896434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:28.977485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.242483Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-04-10T11:18:29.242947Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-10T11:18:29.265175Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor finished in 0.021946s, errors=0 2025-04-10T11:18:29.265794Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-10T11:18:29.265928Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:747:2629] with id# {Tag: 0, parent: [2:738:2620], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-10T11:18:29.267216Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-10T11:18:29.267351Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:750:2632] 2025-04-10T11:18:29.267441Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-04-10T11:18:29.267478Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-10T11:18:29.267808Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2025-04-10T11:18:29.268640Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} finished in 0.000768s, read# 10 2025-04-10T11:18:29.268804Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-10T11:18:29.268913Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:753:2635] 2025-04-10T11:18:29.268961Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-04-10T11:18:29.268988Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-04-10T11:18:29.269200Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:29.271501Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 0.002260s, read# 10 2025-04-10T11:18:29.271649Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 2 OperationsOK: 10 OperationsError: 0 } 2025-04-10T11:18:29.271730Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:756:2638] 2025-04-10T11:18:29.271769Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-04-10T11:18:29.271799Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-04-10T11:18:29.272002Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:29.272574Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 0.000542s, read# 10 2025-04-10T11:18:29.272690Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-10T11:18:29.272791Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:759:2641] 2025-04-10T11:18:29.272875Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Bootstrap called, sample# 10 2025-04-10T11:18:29.272901Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-04-10T11:18:29.273087Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-10T11:18:29.273517Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.000377s, sampled# 10, iter finished# 1, oks# 10 2025-04-10T11:18:29.273605Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received keyCount# 10 2025-04-10T11:18:29.273749Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} started read actor with id# [2:762:2644] 2025-04-10T11:18:29.273811Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:747:2629], subTag: 5} Bootstrap called, will read keys# 10 2025-04-10T11:18:29.643760Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-10T11:18:29.643957Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 370 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 24\n" } 2025-04-10T11:18:29.644131Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 0.378036s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 2 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 370 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 24\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-10T11:18:29.644251Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 >> TCacheTest::Recreate >> TCacheTest::List >> TCacheTest::SystemView >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> TCacheTest::CheckSystemViewAccess >> TCacheTest::RacyRecreateAndSync >> TCacheTest::WatchRoot >> TCacheTest::MigrationCommon >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-04-10T11:16:01.737702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:01.737987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:01.738044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007ff/r3tmp/tmpCdyQpL/pdisk_1.dat 2025-04-10T11:16:02.038570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4206, node 1 2025-04-10T11:16:02.251081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:02.251142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:02.251174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:02.251630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:02.254419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:02.337472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:02.337621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:02.350437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21757 2025-04-10T11:16:02.825343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:05.407585Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:05.438067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:05.438252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:05.465254Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:05.467575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:05.689276Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.689880Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.690451Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.690606Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.690692Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.690985Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.691076Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.691155Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.691232Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:05.854403Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:05.854502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:05.867829Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:05.990738Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:06.038329Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:06.038430Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:06.073291Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:06.074589Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:06.074834Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:06.074904Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:06.074971Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:06.075074Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:06.075146Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:06.075211Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:06.075773Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:06.098801Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:06.098933Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:06.106568Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:06.111239Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:06.111515Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:06.121655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:06.140164Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:06.140242Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:06.140327Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:06.159888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:06.168393Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:06.168551Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:06.346117Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:06.536023Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:06.612524Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:07.595061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:07.595243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:07.615426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:07.720482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:07.720723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:07.721020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:07.721180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:07.721331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:07.721480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:07.721603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:07.721783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:07.721925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:07.722038Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:07.722206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:07.722345Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:07.750246Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:16:07.750340Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... pipe client id = [2:7256:5312] 2025-04-10T11:18:25.785349Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7256:5312] 2025-04-10T11:18:25.831655Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:18:25.831773Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:18:25.832499Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:18:25.833284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:18:25.833670Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-10T11:18:25.833723Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-10T11:18:25.833783Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-10T11:18:25.833827Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-10T11:18:25.833870Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744283905746975 2025-04-10T11:18:25.833908Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-10T11:18:25.833948Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-10T11:18:25.834051Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-10T11:18:25.834135Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:18:25.834391Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-10T11:18:25.834487Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-10T11:18:25.834574Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-10T11:18:25.834643Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:18:25.834828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:25.835705Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:18:25.836565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:25.836637Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:25.837348Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:25.838256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:25.838325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:25.841158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
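
The state being restored above (force traversal tables, traversal rounds, and later a TYPE_COUNT_MIN_SKETCH analyze record) is the statistics aggregator's side of an explicit ANALYZE request, which this test drives through a schemeshard reboot. A minimal YQL sketch of issuing such a request by hand; the statement form and the table path are assumptions, since the test sends the TEvAnalyze event directly rather than going through SQL:

    -- Sketch: forcing column-statistics collection for one table.
    -- The path is a placeholder; in the log the target is LocalPathId 4
    -- under schemeshard 72075186224037897.
    ANALYZE `/Root/Database/Table`;
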
Node count = 1 2025-04-10T11:18:25.919852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:25.920100Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:25.920868Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7303:5343], server id = [2:7304:5344], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:25.920996Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7303:5343], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:25.925162Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:25.925276Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:25.925489Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:25.925714Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:25.926134Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:25.929089Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7303:5343], server id = [2:7304:5344], tablet id = 72075186224037899 2025-04-10T11:18:25.929130Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:25.929700Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:25.971848Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7324:5363]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:25.972047Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:25.972085Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7324:5363], StatRequests.size() = 1 2025-04-10T11:18:26.096297Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-10T11:18:24.000000Z 2025-04-10T11:18:26.096884Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWM4YmY5YjYtNDJjNTM5NzEtYjNiZjRhMWMtYjhkMjQyMA==, TxId: 2025-04-10T11:18:26.096958Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWM4YmY5YjYtNDJjNTM5NzEtYjNiZjRhMWMtYjhkMjQyMA==, TxId: 2025-04-10T11:18:26.097685Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:26.114304Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7339:5369] 2025-04-10T11:18:26.114510Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7339:5369], schemeshard id = 72075186224037897 2025-04-10T11:18:26.114593Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7256:5312], server id = [2:7340:5370], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:26.114655Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id 
= [2:7340:5370] 2025-04-10T11:18:26.114708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7340:5370], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:26.127697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:26.127782Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:26.216825Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7351:5373] 2025-04-10T11:18:26.217611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2805:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-10T11:18:26.217672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2805:3226] 2025-04-10T11:18:26.217742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-10T11:18:26.738802Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-10T11:18:26.738905Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:18:27.520404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:27.520496Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:27.520567Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:28.800923Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:28.801096Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:28.801155Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:28.801876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:28.819426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:28.819789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:28.819872Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:28.820440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
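
The RunDataQuery statement embedded in the trace above is logged on a single line with its parameter types clipped: both `List` declarations have lost their element types, most likely to angle-bracket stripping when the log was captured. Reformatted for readability, with the assumed element types marked:

    -- Reconstruction of the logged UPSERT; List<Uint32> / List<String>
    -- are assumptions, since the captured log shows only "List".
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;
    DECLARE $data AS List<String>;

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
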
Node count = 1 2025-04-10T11:18:28.833741Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:28.833967Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-10T11:18:28.834438Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7423:5416], server id = [2:7424:5417], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:28.834584Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7423:5416], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:28.835935Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:28.836056Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:28.836457Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7423:5416], server id = [2:7424:5417], tablet id = 72075186224037899 2025-04-10T11:18:28.836502Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:28.836587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:28.836731Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:28.837065Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:28.840471Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:28.873080Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTlkZDg5OWItNDQ5YjgxZTUtODkzZmNlYzItNWIxNmY5YTg=, TxId: 2025-04-10T11:18:28.873150Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTlkZDg5OWItNDQ5YjgxZTUtODkzZmNlYzItNWIxNmY5YTg=, TxId: 2025-04-10T11:18:28.873716Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:28.898246Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:28.898339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2805:3226] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-04-10T11:18:23.752141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.752412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.752611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014f0/r3tmp/tmp2dDb8z/pdisk_1.dat 2025-04-10T11:18:24.145406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.191819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.232839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.232984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.244770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.343369Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-04-10T11:18:24.764825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:645:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.765015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.854608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:25.205346Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-04-10T11:18:25.207311Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-10T11:18:25.230165Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor finished in 0.022470s, errors=0 2025-04-10T11:18:25.230519Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:18:25.230654Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-10T11:18:25.287990Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor finished in 0.057083s, errors=0 2025-04-10T11:18:25.288096Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:755:2630] with tag# 3 2025-04-10T11:18:28.923394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:28.923620Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:18:28.923865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014f0/r3tmp/tmpDXBx2K/pdisk_1.dat 2025-04-10T11:18:29.250391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.279131Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:29.318243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:29.318380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:29.331452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:29.416256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.687615Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-04-10T11:18:29.687808Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-04-10T11:18:30.130605Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.442338s, errors=0 2025-04-10T11:18:30.130727Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 2025-04-10T11:18:30.137905Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-04-10T11:18:30.165238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:781:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:30.165410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:30.414870Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-04-10T11:18:30.434303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:847:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:30.434435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:30.447573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:30.501971Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-10T11:18:30.719361Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-04-10T11:18:30.719756Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-10T11:18:30.732287Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor finished in 0.012185s, errors=0 2025-04-10T11:18:30.732586Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-10T11:18:30.732720Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-10T11:18:30.793172Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor finished in 0.060104s, errors=0 2025-04-10T11:18:30.793289Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:940:2782] with tag# 3 |89.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:18:30.970014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.068291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:31.068394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.086629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.087020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.087219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.098615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.098950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.103046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.104355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.110928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117816Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.122173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.129999Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.269468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.269701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.269892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.270132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.270220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.272491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.272628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.272833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.272901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.272938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.272984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.274942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.275008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.275051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.276912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.276964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.277030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.277095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.280961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.283549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.283753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.284838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.285022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.285099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.285464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.285536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.285758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.285843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.288264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.288321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.288523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.288569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.288807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.288879Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.289003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.289050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.289095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-04-10T11:18:31.289129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.289166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.289208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.289261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.289299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.289382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.289423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.289457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.291839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:31.291967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:31.292010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 31.327995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.328060Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-10T11:18:31.328204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:18:31.328430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.328503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:18:31.331477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.331536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.331707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:31.331811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.331855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:18:31.331935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 
2025-04-10T11:18:31.332380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.332440Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:18:31.332556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:18:31.332608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:31.332654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:18:31.332691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:31.332730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:18:31.332775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:31.332829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:18:31.332865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:18:31.332941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:31.332983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:18:31.333025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:18:31.333079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:18:31.333715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:31.333824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:31.333864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:31.333911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:18:31.333952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.334618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:31.334729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:31.334790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:31.334832Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], 
version: 2 2025-04-10T11:18:31.334869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:31.334954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:18:31.338755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:18:31.338859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:31.339086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:18:31.339150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:18:31.339560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:31.339655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:18:31.339706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:297:2288] TestWaitNotification: OK eventTxId 101 2025-04-10T11:18:31.340181Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:31.340355Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 228us result status StatusSuccess 2025-04-10T11:18:31.340743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-10T11:18:31.345139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.345443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-10T11:18:31.345548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-04-10T11:18:31.345699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-04-10T11:18:31.355016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:31.355230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:31.355528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:18:31.355571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:18:31.356032Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:18:31.356145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:18:31.356184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:305:2296] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.969999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.075560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.075628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.091975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.092303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.092449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.104327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.104554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.105279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.105496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.108729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.118172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.125395Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.272785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.273074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.273289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.273619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.273720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.278454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.278587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.278639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.280693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.280766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.280811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.283001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.283065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.283114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.283198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-10T11:18:31.287250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.290309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.290521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.291547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.292090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.292216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.292390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.292468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.295177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.295227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.295437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.295482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.295817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.295868Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.295996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.296035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.296078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.296114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.296172Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.296218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.296277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.296318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.296384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.296431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.296469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.298584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... thId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.451446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:18:31.451490Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:18:31.451537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.453716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.453851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.453887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:18:31.453920Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:18:31.453967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:31.454059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:18:31.455524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T11:18:31.455686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-10T11:18:31.456684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.456835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: 
Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.456898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-04-10T11:18:31.457006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:31.457095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T11:18:31.457354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.457434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:31.459813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:18:31.460862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:18:31.463835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.463895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.464078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:31.464248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.464287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T11:18:31.464348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:18:31.464753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.464804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:18:31.464940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:18:31.464985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:18:31.465050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:18:31.465092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:18:31.465137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:18:31.465190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:18:31.465234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:18:31.465270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:18:31.465381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:31.465441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T11:18:31.465479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:18:31.465515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:18:31.466203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.466323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.466365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:18:31.466414Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:18:31.466464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:31.466815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:18:31.466878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:18:31.466966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.467461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.467539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:18:31.467571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:18:31.467601Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:18:31.467655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.467740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:18:31.474354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:18:31.474520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2025-04-10T11:18:31.474597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:18:31.474882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:18:31.474931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:18:31.475544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:18:31.475654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:18:31.475689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:388:2379] TestWaitNotification: OK eventTxId 104 2025-04-10T11:18:31.476248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:31.476442Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 232us result status StatusPathDoesNotExist 2025-04-10T11:18:31.476582Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/0017e9/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 21225, MsgBus: 7838 2025-04-10T11:18:25.493783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642330023638793:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:25.493890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017e9/r3tmp/tmptTMZ2W/pdisk_1.dat 2025-04-10T11:18:25.891034Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21225, node 1 2025-04-10T11:18:25.932782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:25.932931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-10T11:18:25.935884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:26.059439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:26.059465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.059472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.059600Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7838 TClient is connected to server localhost:7838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:26.834574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:26.851166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:18:26.870204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.046831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.229453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.313920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:28.860623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642342908542448:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:28.860753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.243588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.271654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.299270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.327770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.361385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.396270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.474556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347203510258:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.474641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.474858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347203510263:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.479137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.489834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642347203510265:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:29.584646Z node 1 :TX_PROXY ERROR: Actor# [1:7491642347203510322:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:30.494908Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642330023638793:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:30.494967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:30.553670Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-04-10T11:18:30.553878Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7491642351498477930:2496], task: 1 2025-04-10T11:18:30.553905Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set execution timeout 299.938522s 2025-04-10T11:18:30.558337Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\036\n\016/Root/KeyValue\020\200\202\224\204\200\200\200\200\001\030\006(\001\"\t\n\003Key\020\001 \004*\t\n\003Key\020\001 \004*\014\n\005Value\020\002 \201 0\227\247\200\200\200\200@8\001H\000R\022\tf\301V\271\006\251\367g\021\300\t\000\000\001\000\020\000X\000`\000h\000h\001x\000" } } } 2025-04-10T11:18:30.558504Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-04-10T11:18:30.558557Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: 2025-04-10T11:18:30.558652Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 0, toSend: 67108864, finished: 0 2025-04-10T11:18:30.558760Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1. Add data: 32 / 32 2025-04-10T11:18:30.558806Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1. Send data=32, closed=1, bufferActorId=[1:7491642351498477926:2496] 2025-04-10T11:18:30.558831Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642351498477930:2496], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE2MjAwOTQtNjEwYWQ0Y2QtN2I2NTllMWYtNmU3NzA2ZTY=. CustomerSuppliedId : . TraceId : 01jrfprgavb70exawx8pvwyngq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Drai ... OMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:18:31.798463Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:18:31.798480Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:18:31.798626Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.798726Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.799222Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.799295Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.799623Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:18:31.799641Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:18:31.799711Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.799765Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2025-04-10T11:18:31.799772Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:18:31.799785Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Finish input channelId: 4, from: [1:7491642355793445525:2551] 2025-04-10T11:18:31.799804Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:18:31.799810Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.799818Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. 
PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:18:31.799852Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-04-10T11:18:31.799864Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2025-04-10T11:18:31.799878Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished 2025-04-10T11:18:31.799895Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445525:2551], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. CustomerSuppliedId : . TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:18:31.799973Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. pass away 2025-04-10T11:18:31.800069Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:18:31.800144Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.800223Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.800867Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.800968Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.801017Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:18:31.801475Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.801533Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:18:31.801818Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:18:31.801862Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. 
Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-04-10T11:18:31.801871Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Tasks execution finished 2025-04-10T11:18:31.801883Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642355793445526:2552], TxId: 281474976715682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDU0NDI4YjMtYWVhYTQ4NWItMzU5NzFkOTAtNDVmNzQwMA==. TraceId : 01jrfprh033k8xfgk0cj6z8tr4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-10T11:18:31.801956Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. pass away 2025-04-10T11:18:31.802045Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:18:31.803415Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283911727, txId: 281474976715681] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.969999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.077493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.077549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.095384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.095724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.096038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.103890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.104124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.104803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-10T11:18:31.105001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.109187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.118053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.125297Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.256438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.256648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.256852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.257107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.257179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.263473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.263569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.263606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.265585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.265642Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.265679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.267471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.267514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.267556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.267622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.271469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.273187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.273375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.274371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.274487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.274543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.274873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.274948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.275104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.275212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.277486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.277535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.277722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.277767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.278104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.278288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.278328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.278364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.278403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.278459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.278523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.278562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.278600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.278669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.278705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.278738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.280477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:31.376850Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-10T11:18:31.376884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.377802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.377889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.377915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:31.377962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:18:31.377994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:31.378066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:18:31.380043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:18:31.380193Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:18:31.381212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.381387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.381451Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-04-10T11:18:31.381627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:18:31.381851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.381923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:31.383151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:18:31.383381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:18:31.384561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.384596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.384758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:31.384884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:31.384972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.385011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:18:31.385066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:18:31.385101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:18:31.385405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.385477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:18:31.385582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:18:31.385617Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:31.385651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:18:31.385706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:31.385749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:18:31.385795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:31.385835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:18:31.385886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:18:31.385951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:31.385988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:18:31.386037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:18:31.386070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-10T11:18:31.386874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.386961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.386992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:31.387053Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:18:31.387110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.388098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.388172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:31.388196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:31.388222Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:18:31.388270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:31.388337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:18:31.392146Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:18:31.392634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:31.392865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:18:31.392910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:18:31.393421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:18:31.393524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:18:31.393562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 102 2025-04-10T11:18:31.394086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:31.394346Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 272us result status StatusSuccess 2025-04-10T11:18:31.394682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: 
[1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.970005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.075479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.075548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.098138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.098555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.098743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.106440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.106680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.107435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.107651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.109741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-04-10T11:18:31.117670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.117912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.125887Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.282691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.282961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.283212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.283529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.283603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.286468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.286603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.286652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.289741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.289822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.289909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.292319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.292380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.292433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:18:31.292510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.296623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.303151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.303431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.304519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.304698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.304769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.305131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.305205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.305393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.305490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.311471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.311534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.311746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.311807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.312196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.312265Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.312379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.312442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.312490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.312546Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.312590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.312637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.312680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.312720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.312797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.312853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.312894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.315147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... end EvNotifyTxCompletion 2025-04-10T11:18:32.232876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-04-10T11:18:32.233437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-04-10T11:18:32.233561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.233611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:340:2331] 2025-04-10T11:18:32.233862Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-10T11:18:32.233965Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-10T11:18:32.234013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.234037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:340:2331] 2025-04-10T11:18:32.234131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.234199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:340:2331] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-04-10T11:18:32.234730Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.235000Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 310us result status StatusSuccess 2025-04-10T11:18:32.235390Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.236186Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.236376Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 204us result status StatusSuccess 2025-04-10T11:18:32.236632Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.237406Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.237553Z 
node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 164us result status StatusSuccess 2025-04-10T11:18:32.237989Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.238534Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.238716Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 195us result status StatusSuccess 2025-04-10T11:18:32.238967Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.239456Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.239606Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 171us result status StatusSuccess 2025-04-10T11:18:32.239850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCacheTest::Navigate >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::CookiesArePreserved >> TCacheTest::SystemView [GOOD] >> TCacheTest::TableSchemaVersion >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.970025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.970137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.970218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.970263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.973414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.073080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.073149Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.090281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.090655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.090848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.098906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.099147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.103223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.104103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.109167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.121993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.122087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.122186Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.122334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.130512Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.254702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.254994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.255245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.255540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.255626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.258531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.258717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.258947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.259037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.259080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.259129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.261017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.261080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.261126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.263091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.263300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.267514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.269484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.269661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.270507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.270615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.270678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.271007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.271065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.271202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.271275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.273172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.273224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.273445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.273499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.273880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.273928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.274053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.274091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.274133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.274202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.274245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.274293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.274331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.274373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.274440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.274496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.274539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.277627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... ation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:18:32.203401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:32.203442Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:18:32.203482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:18:32.203558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:18:32.203623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:32.203680Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-10T11:18:32.203723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:18:32.203760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:18:32.203783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:18:32.205146Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.205263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.205324Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:32.205376Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:18:32.205422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:18:32.206614Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.206715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.206754Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:32.206793Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:18:32.206827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:18:32.207511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.207586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.207614Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:32.207652Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:18:32.207685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:32.207756Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:18:32.209709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:18:32.211231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:18:32.211362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:32.211599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:18:32.211652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:18:32.212087Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:32.212185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.212223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:333:2324] TestWaitNotification: OK eventTxId 101 2025-04-10T11:18:32.212765Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.212976Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 272us result status StatusSuccess 2025-04-10T11:18:32.213287Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-10T11:18:32.216722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:32.216903Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-04-10T11:18:32.217025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-04-10T11:18:32.219628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.219826Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:18:32.220173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:18:32.220231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:18:32.220724Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:18:32.220833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.220878Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:341:2332] TestWaitNotification: OK eventTxId 103 2025-04-10T11:18:32.221533Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.221753Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 262us result status StatusSuccess 2025-04-10T11:18:32.222096Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.995045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.995157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.995207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.995253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.995301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.995339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.995413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.995505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.995846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.089734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.089858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.107281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.107594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.107776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.117040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.117345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.118163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.118544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.120879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.122006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.122064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.122268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.122327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.122380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.122483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.130254Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.257190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.257416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.257610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.257897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:31.257967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.263369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.263431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.263459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.263490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.267202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.267269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.267305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.269299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.269362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.269422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.269475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.279363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.281418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.281649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.282749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.282872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.282939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.283340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.283431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.283585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.283665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.286373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.286613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.287012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.287064Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.287157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.287190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.287238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.287306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.287345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.287389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.287425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.287457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.287524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.287569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.287601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.289349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... opId# 101:0 ProgressState 2025-04-10T11:18:32.074817Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:18:32.074858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:32.074912Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:18:32.074951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:32.074994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:18:32.075041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:18:32.075084Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:18:32.075126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:18:32.075208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:32.075251Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:18:32.075310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:18:32.075352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:18:32.076278Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.076365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.076404Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:32.076477Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:18:32.076553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:32.077341Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.077417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:18:32.077447Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:18:32.077474Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:18:32.077515Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:32.077594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:18:32.094208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:18:32.094865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:32.095136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:18:32.095188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:18:32.095655Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:32.095786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.095833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:303:2294] TestWaitNotification: OK eventTxId 101 2025-04-10T11:18:32.096412Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.096612Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 249us result status StatusSuccess 2025-04-10T11:18:32.096951Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-10T11:18:32.100477Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:32.100750Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-04-10T11:18:32.100833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-04-10T11:18:32.100991Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-10T11:18:32.108028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:32.108233Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:32.108562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:18:32.108616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:18:32.109038Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:18:32.109162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.109210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:311:2302] TestWaitNotification: OK eventTxId 102 2025-04-10T11:18:32.109754Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.109960Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 229us result status StatusSuccess 
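In the txId 102 trace just above, a second CREATE for the already existing path /MyRoot/MyExternalDataSource comes back as StatusAlreadyExists ("path exist, request accepts it"), and the describe result below confirms that the original definition survives: Location is still https://s3.cloud.net/my_bucket, not my_new_bucket. A hedged YQL equivalent of that second request:

CREATE EXTERNAL DATA SOURCE MyExternalDataSource WITH (
    SOURCE_TYPE = "ObjectStorage",
    LOCATION = "https://s3.cloud.net/my_new_bucket",  -- has no effect: the path already exists
    AUTH_METHOD = "NONE"
);

The trace also logs the feature flag EnableReplaceIfExistsForExternalEntities as enabled while the request carries FailOnExist: false; since the request does not ask for replacement, the existing object is kept, and changing its definition would presumably require an explicit CREATE OR REPLACE EXTERNAL DATA SOURCE.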
2025-04-10T11:18:32.110369Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-04-10T11:16:03.969428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: { <main>
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:03.969750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:03.969813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007bd/r3tmp/tmpakIcLx/pdisk_1.dat 2025-04-10T11:16:04.268146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9082, node 1 2025-04-10T11:16:04.487044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:04.487087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:04.487116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:04.487464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:04.489539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:04.576056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:04.576163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:04.590591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17939 2025-04-10T11:16:05.134350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:07.711017Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:07.744308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:07.744471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:07.772481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:07.774883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:07.991384Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.991878Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992319Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992429Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992483Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992699Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992763Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992816Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:07.992865Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:08.153260Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:08.153396Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:08.166689Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:08.286544Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:08.332269Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:08.332362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:08.369803Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:08.370889Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:08.371117Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:08.371184Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:08.371237Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:08.371292Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:08.371355Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:08.371413Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:08.371850Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:08.394087Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:08.394242Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:08.400844Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:08.404118Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:08.404353Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:08.412335Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:08.425587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:08.425637Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:08.425690Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:08.439535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:08.445107Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:08.445210Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:08.609929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:08.785810Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:08.862281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:09.830333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2245:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:09.830480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:09.850495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:10.125154Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:10.125341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:10.125583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:10.125663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:10.125755Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:10.125839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:10.125929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:10.126001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:10.126080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:10.126171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:10.126247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:10.126316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:10.160778Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2890];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:10.160858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2890];tablet_id=72075186224037900;process=T ... [72075186224037894] Subscribed for config changes 2025-04-10T11:18:29.053086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:29.053188Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:29.053338Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:18:29.054452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:29.054514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:29.056842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:29.120339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:29.120482Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:29.121285Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8658:6516], server id = [2:8663:6521], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:29.121712Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8658:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.122070Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6517], server id = [2:8664:6522], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:29.122114Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8659:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.123564Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8660:6518], server id = [2:8665:6523], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:29.123616Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8660:6518], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.124083Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8661:6519], server id = [2:8666:6524], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:29.124124Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8661:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.125609Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8662:6520], server id = [2:8669:6527], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:29.125668Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8662:6520], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.131594Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:29.132722Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8658:6516], server id = [2:8663:6521], tablet id = 72075186224037899 2025-04-10T11:18:29.132772Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.133373Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:29.134130Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8659:6517], server id = [2:8664:6522], 
tablet id = 72075186224037900 2025-04-10T11:18:29.134179Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.134653Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:29.135232Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8682:6536], server id = [2:8683:6537], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:29.135313Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8682:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.135806Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8660:6518], server id = [2:8665:6523], tablet id = 72075186224037901 2025-04-10T11:18:29.135842Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.136032Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:29.137097Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8684:6538], server id = [2:8687:6540], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:29.137153Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8684:6538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.137595Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8661:6519], server id = [2:8666:6524], tablet id = 72075186224037902 2025-04-10T11:18:29.137628Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.138510Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:29.138689Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8686:6539], server id = [2:8689:6542], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:29.138758Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8686:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.140133Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8662:6520], server id = [2:8669:6527], tablet id = 72075186224037903 2025-04-10T11:18:29.140163Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.140664Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8688:6541], server id = [2:8692:6545], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:29.140735Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8688:6541], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.141542Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8694:6547], server id = [2:8698:6550], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:29.141601Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8694:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:29.144881Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:29.145653Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8682:6536], server id = [2:8683:6537], tablet id = 72075186224037904 2025-04-10T11:18:29.145690Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.147295Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:29.147936Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8684:6538], server id = [2:8687:6540], tablet id = 72075186224037905 
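
The STATISTICS entries in this stretch trace one traversal round of the aggregator tablet (72075186224037894): it fans a TEvStatisticsRequest out to every column shard of the table, collects one TEvStatisticsResponse per shard, and deliberately ignores the pipe teardown for shards that have already answered — that is what the repeated "Skip EvClientDestroyed" lines mean, since losing a client right after a successful response is expected. The following is a minimal C++ sketch of that fan-out/collect bookkeeping, not YDB's actual actor code; the names TTraversalRound, Start, OnResponse, and OnClientDestroyed are hypothetical.

    // Illustrative sketch of the fan-out/collect pattern seen in the log above.
    // Not YDB's real statistics aggregator; all names here are invented.
    #include <cstdint>
    #include <iostream>
    #include <set>
    #include <vector>

    struct TTraversalRound {
        std::uint64_t Round = 0;              // matches "Round: N" in the log
        std::set<std::uint64_t> Pending;      // shards we still expect a response from
        std::set<std::uint64_t> Answered;     // shards that have already responded

        void Start(std::uint64_t round, const std::vector<std::uint64_t>& tablets) {
            Round = round;
            Pending.insert(tablets.begin(), tablets.end());
            for (std::uint64_t t : Pending)
                std::cout << "TEvStatisticsRequest send, tablet id = " << t << "\n";
        }

        void OnResponse(std::uint64_t tabletId) {
            if (Pending.erase(tabletId)) {
                Answered.insert(tabletId);
                std::cout << "Received TEvStatisticsResponse TabletId: " << tabletId << "\n";
                if (Pending.empty())
                    std::cout << "Send aggregate statistics response, Round: " << Round << "\n";
            }
        }

        // A pipe teardown after a successful response is harmless; the log prints
        // "Skip EvClientDestroyed" for exactly this case. Only a shard that
        // disconnects *before* responding would need to be redistributed.
        void OnClientDestroyed(std::uint64_t tabletId) {
            if (Answered.count(tabletId))
                std::cout << "Skip EvClientDestroyed, tablet id = " << tabletId << "\n";
            else
                std::cout << "Tablet " << tabletId << " lost before responding; reschedule\n";
        }
    };

    int main() {
        TTraversalRound round;
        round.Start(3, {72075186224037899ull, 72075186224037900ull});
        round.OnResponse(72075186224037899ull);
        round.OnClientDestroyed(72075186224037899ull); // skipped: already answered
        round.OnResponse(72075186224037900ull);        // completes the round
        round.OnClientDestroyed(72075186224037900ull); // also skipped
    }

Under this reading, the TraverseColumnTableAggrStatUnavailableNode test later in the output exercises the other branch: a shard lost mid-round, forcing a new round with a fresh tablet distribution.
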
2025-04-10T11:18:29.147981Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.148399Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:29.149132Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8686:6539], server id = [2:8689:6542], tablet id = 72075186224037906 2025-04-10T11:18:29.149166Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.150067Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:29.150438Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8688:6541], server id = [2:8692:6545], tablet id = 72075186224037907 2025-04-10T11:18:29.150468Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.150969Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:29.151027Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:29.151268Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:29.151475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:29.151898Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:29.155144Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8694:6547], server id = [2:8698:6550], tablet id = 72075186224037908 2025-04-10T11:18:29.155188Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:29.156090Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:29.193558Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8724:6572]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:29.193847Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:29.193888Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8724:6572], StatRequests.size() = 1 2025-04-10T11:18:29.354679Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjE4ZjZkOGItYTZjYTRjMzAtOTgyMzI3Ny03NGRmZDkzYg==, TxId: 2025-04-10T11:18:29.354774Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjE4ZjZkOGItYTZjYTRjMzAtOTgyMzI3Ny03NGRmZDkzYg==, TxId: 2025-04-10T11:18:29.355453Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:29.369669Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8739:6578] 2025-04-10T11:18:29.369840Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8610:6484], server id = [2:8739:6578], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:29.370039Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8739:6578], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:29.370242Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, 
pipe server id = [2:8740:6579] 2025-04-10T11:18:29.370327Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8740:6579], schemeshard id = 72075186224037897 2025-04-10T11:18:29.395344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:29.395440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:29.483834Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8743:6582]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:29.484324Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:29.484401Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:29.488159Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:29.488236Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:29.488302Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:29.494430Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:18:30.971553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:30.971669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.971734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:30.971789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:30.971844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:30.971877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:30.971946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:30.972036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:30.972399Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:31.074561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:18:31.074623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:31.091848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:31.092103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:31.092299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:31.103783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:31.104025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:31.104764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.104954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:31.108712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.116698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.117529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:31.117617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.117735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:31.117895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.131107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:18:31.271143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:31.271351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.271592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:31.271821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
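
The FLAT_TX_SCHEMESHARD entries around this point walk a suboperation through its lifecycle: TTxOperationPropose accepts the transaction (StatusAccepted), and successive ProgressState calls then advance it through the numeric states printed as "Change state for txid" — 2 -> 3 -> 128 -> 240 — handing off to the coordinator at 128 and publishing to the scheme board at 240. Below is a minimal C++ sketch of that progression, assuming the phase names from the adjacent log messages (TCreateParts, TConfigureParts, TPropose, TDone) map to those numbers; this is an inference from the log, not schemeshard's real source.

    // Illustrative sketch of the suboperation state machine implied by the
    // "Change state for txid ... 2 -> 3 -> 128 -> 240" log lines. The enum
    // values mirror the printed numbers; the names are assumptions taken from
    // the neighbouring log messages, not from schemeshard's actual code.
    #include <iostream>

    enum class ETxState : int {
        CreateParts    = 2,    // create shards if any ("no shards to create, do next state")
        ConfigureParts = 3,    // configure the created parts
        Propose        = 128,  // wait for the coordinator to plan the tx
        Done           = 240,  // publish paths and finish the operation
    };

    ETxState NextState(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done;
            case ETxState::Done:           return ETxState::Done;
        }
        return ETxState::Done; // unreachable; silences compiler warnings
    }

    int main() {
        ETxState s = ETxState::CreateParts;
        while (s != ETxState::Done) {
            ETxState next = NextState(s);
            std::cout << "Change state " << static_cast<int>(s)
                      << " -> " << static_cast<int>(next) << "\n";
            s = next;
        }
    }

Running this prints the same three transitions seen in the test output (2 -> 3, 3 -> 128, 128 -> 240); the DropExternalDataSource test below shows the shortened variant where a drop goes straight from 128 to 240 once the coordinator plans it.
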
2025-04-10T11:18:31.271885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.274051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.274264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:31.274458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.274531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:31.274577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:31.274626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:31.276670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.276734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:31.276795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:31.278501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.278608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.278683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.282821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:31.284973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:31.285169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:31.286090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:31.286312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.286617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:31.286696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:31.286878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:31.286986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:31.291462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:31.291734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:31.291789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:31.292142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:31.292247Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:31.292369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.292409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.292452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:31.292504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.292544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:31.292586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:31.292623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:31.292668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:31.292734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:31.292778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:31.292812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:31.294587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
Id: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.167017Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:32.167056Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-10T11:18:32.167094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:32.168201Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.168275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.168300Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:32.168356Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:18:32.168390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:32.168452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:18:32.169558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:18:32.169672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:18:32.170689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:32.170822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:32.170879Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-10T11:18:32.170966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:32.171050Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:18:32.171215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:32.171280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:32.172964Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:18:32.173750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:18:32.174982Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:32.175040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:32.175173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:32.175308Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:32.175354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:18:32.175391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:18:32.175575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:18:32.175620Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:18:32.175713Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:18:32.175750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:32.175789Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:18:32.175820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:32.175882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:18:32.175916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:18:32.175949Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:18:32.175983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:18:32.176049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:18:32.176088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:18:32.176121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:18:32.176148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:18:32.176531Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.176611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.176639Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:32.176670Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:18:32.176699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:32.176974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:18:32.177009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:18:32.177060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:18:32.177344Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.177399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:18:32.177423Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:18:32.177448Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:18:32.177473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:32.177534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:18:32.180799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:18:32.181912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:18:32.181985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:32.182205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:18:32.182248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:18:32.182608Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:18:32.182701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:18:32.182749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:331:2322] TestWaitNotification: 
OK eventTxId 102 2025-04-10T11:18:32.183241Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:32.183395Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 192us result status StatusPathDoesNotExist 2025-04-10T11:18:32.183550Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-04-10T11:16:07.913275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:07.913622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:07.913681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00075a/r3tmp/tmpzvxwnX/pdisk_1.dat 2025-04-10T11:16:08.212752Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6242, node 1 2025-04-10T11:16:08.416883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:08.416927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:08.416950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:08.417257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:08.419326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:08.497024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:08.497174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:08.509874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27460 2025-04-10T11:16:08.980591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:11.633764Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:11.663297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:11.663436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:11.690940Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:11.692838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:11.909952Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.910418Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.910897Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911018Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911082Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911304Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911402Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911459Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.911513Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.074375Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:12.074493Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:12.087625Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:12.202672Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:12.244886Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:12.244994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:12.277359Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:12.278254Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:12.278441Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:12.278506Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:12.278548Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:12.278588Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:12.278625Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:12.278669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:12.279057Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:12.300662Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.300806Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.307348Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:12.311308Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:12.311658Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:12.321108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:12.336831Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:12.336916Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:12.336973Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:12.352527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:12.358236Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:12.358345Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:12.503377Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:12.696009Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:12.792625Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:13.757512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:13.757616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:13.775746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:13.999367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:13.999532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:13.999784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:13.999875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:13.999952Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:14.000074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:14.000150Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:14.000228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:14.000337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:14.000415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:14.000483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:14.000563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:14.023052Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:14.023166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=T ... 892629Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:30.892735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:30.893779Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:30.908872Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:30.909204Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:30.910089Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6484], server id = [2:8573:6489], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:30.910717Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8568:6484], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.911324Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8569:6485], server id = [2:8574:6490], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:30.911396Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8569:6485], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.913082Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8570:6486], server id = [2:8575:6491], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:30.913155Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8570:6486], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.915312Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8571:6487], server id = [2:8576:6492], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:30.915395Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8571:6487], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.916426Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8572:6488], server id = [2:8577:6493], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:30.916504Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8572:6488], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.923410Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:30.924623Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8568:6484], server id = [2:8573:6489], tablet id = 72075186224037899 2025-04-10T11:18:30.924694Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.925501Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:30.925976Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8569:6485], server id = [2:8574:6490], tablet id = 72075186224037900 2025-04-10T11:18:30.926008Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.926716Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8591:6504], server id = [2:8593:6505], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:30.926808Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client 
id = [2:8591:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.927497Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:30.928641Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8570:6486], server id = [2:8575:6491], tablet id = 72075186224037901 2025-04-10T11:18:30.928675Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.929607Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:30.930467Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8594:6506], server id = [2:8597:6508], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:30.930562Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8594:6506], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.931054Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8571:6487], server id = [2:8576:6492], tablet id = 72075186224037902 2025-04-10T11:18:30.931085Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.931866Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:30.932981Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6507], server id = [2:8601:6512], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:30.933070Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6507], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.933562Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8572:6488], server id = [2:8577:6493], tablet id = 72075186224037903 2025-04-10T11:18:30.933594Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.934388Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6511], server id = [2:8606:6516], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:30.934477Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.936189Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8603:6514], server id = [2:8609:6519], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:30.936259Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8603:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.936910Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:30.938935Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8591:6504], server id = [2:8593:6505], tablet id = 72075186224037904 2025-04-10T11:18:30.938974Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.940922Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:30.941490Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8594:6506], server id = [2:8597:6508], tablet id = 72075186224037905 2025-04-10T11:18:30.941529Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.943642Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:30.944409Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6507], server id = [2:8601:6512], tablet id = 72075186224037906 2025-04-10T11:18:30.944445Z node 2 
:STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.945017Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:30.945487Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6511], server id = [2:8606:6516], tablet id = 72075186224037907 2025-04-10T11:18:30.945520Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.945926Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:30.945977Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:30.946928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:30.947115Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:30.947480Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8603:6514], server id = [2:8609:6519], tablet id = 72075186224037908 2025-04-10T11:18:30.947508Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:30.947851Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:30.995172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:30.995417Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:18:30.995980Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8622:6526], server id = [2:8623:6527], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:30.996076Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8622:6526], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:30.997400Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:30.997499Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:30.997713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:30.997932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:30.999016Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:31.001371Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8622:6526], server id = [2:8623:6527], tablet id = 72075186224037900 2025-04-10T11:18:31.001411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:31.002139Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:31.053988Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8641:6545]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:31.054262Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:31.054335Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8641:6545], StatRequests.size() = 1 2025-04-10T11:18:31.186385Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTcxODA3M2MtMmZmMTNjMjYtN2RhMzdhNS1jOWQ5ZmY4Zg==, TxId: 2025-04-10T11:18:31.186489Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTcxODA3M2MtMmZmMTNjMjYtN2RhMzdhNS1jOWQ5ZmY4Zg==, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:31.187396Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8655:6551]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:31.187690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:31.188347Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:31.188445Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:31.192868Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:31.192948Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:31.193018Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:31.202591Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TCacheTest::CookiesArePreserved [GOOD] >> TCacheTest::SysLocks [GOOD] |89.7%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> TCacheTest::TableSchemaVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-04-10T11:16:05.656545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:05.656834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:05.656906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007ae/r3tmp/tmp3Ho6Hq/pdisk_1.dat 2025-04-10T11:16:05.974522Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18423, node 1 2025-04-10T11:16:06.186306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:06.186364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:06.186399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:06.186909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:06.189736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:06.277235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:06.277431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:06.291072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20743 2025-04-10T11:16:06.778585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:09.365294Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:09.390613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:09.390753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:09.419046Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:09.420900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:09.657128Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.657548Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.657946Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658039Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658375Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658431Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.658487Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:09.820847Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:09.820956Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:09.833773Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:09.954338Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:09.996236Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:09.996317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:10.029445Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:10.030407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:10.030591Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:10.030666Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:10.030722Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:10.030767Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:10.030809Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:10.030847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:10.031215Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:10.051814Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:10.051911Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:10.057684Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:10.060974Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:10.061210Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:10.068674Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:10.084767Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:10.084839Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:10.084905Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:10.100560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:10.108450Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:10.108590Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:10.277941Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:10.442910Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:10.552317Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:11.564815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:11.564921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:11.584333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:11.858899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:11.859087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:11.859370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:11.859496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:11.859613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:11.859719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:11.859789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:11.859898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:11.860018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:11.860132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:11.860224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:11.860302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:11.884001Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:11.884090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process= ... :32.282260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:32.283259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:32.343295Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:32.343452Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-04-10T11:18:32.344095Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8575:6489], server id = [2:8582:6496], tablet id = 72075186224037903 2025-04-10T11:18:32.344136Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.344430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6554], server id = [2:8673:6559], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:32.344518Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6554], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.344640Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6555], server id = [2:8674:6560], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:32.344672Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.346383Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8670:6556], server id = [2:8675:6561], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:32.346458Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8670:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.346840Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6557], server id = [2:8676:6562], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:32.346906Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6557], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.347572Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8672:6558], server id = [2:8678:6564], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:32.347615Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8672:6558], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.347940Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:32.349205Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:32.349499Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6554], server id = [2:8673:6559], tablet id = 72075186224037899 2025-04-10T11:18:32.349525Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.349991Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:32.350487Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6555], server id = [2:8674:6560], tablet id = 72075186224037900 2025-04-10T11:18:32.350510Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
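The exchange above and just below — the aggregator tablet 72075186224037894 resolving tablet distribution, then fanning TEvStatisticsRequest out to column shards 72075186224037899..72075186224037908 and collecting TEvStatisticsResponse for round 3 — is one complete statistics traversal of the column table under test. For reference, such a traversal can also be forced by hand; a minimal sketch, assuming the YQL ANALYZE statement for column-table statistics and a hypothetical table path (neither is taken from this log):

    -- Asks the statistics aggregator to re-traverse the column table
    -- and rebuild its per-column statistics. The path is illustrative.
    ANALYZE `/Root/Database/ColumnTable`;
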
2025-04-10T11:18:32.350832Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:32.350998Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6556], server id = [2:8675:6561], tablet id = 72075186224037901 2025-04-10T11:18:32.351015Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.351204Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8681:6567], server id = [2:8685:6571], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:32.351264Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8681:6567], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.351354Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6557], server id = [2:8676:6562], tablet id = 72075186224037902 2025-04-10T11:18:32.351372Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.352127Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8684:6570], server id = [2:8688:6574], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:32.352174Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8684:6570], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.352668Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8686:6572], server id = [2:8690:6576], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:32.352706Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8686:6572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.353082Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8689:6575], server id = [2:8692:6577], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:32.353124Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8689:6575], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.354124Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:32.354528Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8681:6567], server id = [2:8685:6571], tablet id = 72075186224037904 2025-04-10T11:18:32.354551Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.355092Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:32.355622Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8672:6558], server id = [2:8678:6564], tablet id = 72075186224037903 2025-04-10T11:18:32.355654Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.356106Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:32.356398Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:32.356518Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8696:6581], server id = [2:8698:6583], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:32.356557Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8696:6581], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:32.356680Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8684:6570], server id = [2:8688:6574], tablet id = 72075186224037905 2025-04-10T11:18:32.356700Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.357051Z node 2 :STATISTICS DEBUG: 
Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:32.357208Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8686:6572], server id = [2:8690:6576], tablet id = 72075186224037906 2025-04-10T11:18:32.357226Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.357482Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8689:6575], server id = [2:8692:6577], tablet id = 72075186224037907 2025-04-10T11:18:32.357501Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.357577Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:32.357612Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:32.357795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:32.357954Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:32.358193Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:32.360720Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8696:6581], server id = [2:8698:6583], tablet id = 72075186224037908 2025-04-10T11:18:32.360759Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:32.361501Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:32.401938Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8716:6601]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:32.402244Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:32.402329Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8716:6601], StatRequests.size() = 1 2025-04-10T11:18:32.533323Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:03.000000Z, event interval end# 2025-04-10T11:18:30.000000Z 2025-04-10T11:18:32.534256Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjZjODQwMmItZjljZTM1MGQtOGZlYjBiNzgtZTc3MTE4OGQ=, TxId: 2025-04-10T11:18:32.534334Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjZjODQwMmItZjljZTM1MGQtOGZlYjBiNzgtZTc3MTE4OGQ=, TxId: 2025-04-10T11:18:32.535024Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:32.547735Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8731:6607] 2025-04-10T11:18:32.547862Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8595:6505], server id = [2:8732:6608], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:32.547982Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8732:6608] 2025-04-10T11:18:32.548085Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8731:6607], schemeshard id = 72075186224037897 
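The RunDataQuery text logged just above (and once earlier in this section) is the query the aggregator uses to persist the collected statistics into `.metadata/_statistics`. As printed, `DECLARE $column_tags AS List;` and `DECLARE $data AS List;` are not valid YQL — the log renderer has evidently swallowed the angle-bracketed element types, presumably treating them as markup. A readable reconstruction, with the element types an assumption rather than something recoverable from this log:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed
    DECLARE $data AS List<String>;         -- element type assumed

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

The TStatService::LoadStatistics step a few lines below reads these rows back; a hand-written equivalent of that lookup, with the constants taken from the PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ] and ColumnTag[ 1 ] fields it logs, would be:

    SELECT data
    FROM `.metadata/_statistics`
    WHERE owner_id = 72075186224037897
      AND local_path_id = 4
      AND stat_type = 2
      AND column_tag = 1;
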
2025-04-10T11:18:32.548177Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8732:6608], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:32.561674Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:32.561749Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:32.637706Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8735:6611]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:32.638113Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:32.638209Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:32.640984Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:32.641042Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:32.641102Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:32.646762Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-04-10T11:18:34.128980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.129029Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.370388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:34.399738Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.401217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add 
transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:34.437558Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:18:34.447269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-10T11:18:34.772147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.772235Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.828524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-04-10T11:18:34.131377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.131427Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.389247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-10T11:18:34.401374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:34.408117Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.408827Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-10T11:18:34.414199Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:205:2195], for# user1@builtin, access# DescribeSchema 2025-04-10T11:18:34.415193Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:211:2201], for# user1@builtin, access# 2025-04-10T11:18:34.729995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.730061Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.813814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-10T11:18:34.820107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:34.827062Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 >> TCacheTest::PathBelongsToDomain [GOOD] >> TCacheTest::MigrationCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-04-10T11:18:34.129058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.129103Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.368892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-10T11:18:34.727611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.727664Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.780371Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-10T11:18:34.811201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.977960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-04-10T11:18:34.453620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.453666Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.608945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:34.627290Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.629761Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:34.671976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:18:34.693423Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-10T11:18:34.941350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.941404Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.994217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:35.007768Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 23422, MsgBus: 21397 2025-04-10T11:18:13.101411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642276970504724:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:13.101483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d12/r3tmp/tmpZxFGGr/pdisk_1.dat 2025-04-10T11:18:13.414044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:13.432962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:13.433087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-10T11:18:13.436901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23422, node 1 2025-04-10T11:18:13.490473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:13.490505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:13.490513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:13.490704Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21397 TClient is connected to server localhost:21397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:13.957211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:13.977808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:14.110366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:14.268885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:14.322518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:15.539415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642285560441107:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.539570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.760509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.782831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.805386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.825991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.846652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.873199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:15.907691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642285560441614:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.907750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.907811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642285560441619:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:15.910635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:15.918548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642285560441621:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:18:15.975367Z node 1 :TX_PROXY ERROR: Actor# [1:7491642285560441674:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6803, MsgBus: 18950 2025-04-10T11:18:18.065935Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642300337055599:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:18.065992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d12/r3tmp/tmpoORHUs/pdisk_1.dat 2025-04-10T11:18:18.175735Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6803, node 2 2025-04-10T11:18:18.213394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:18.213510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:18.215519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:18.234975Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:18.235003Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:18.235008Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:18.235099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18950 TClient is connected to server localhost:18950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:18.540776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.547660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
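The repeated "Resource pool default not found or you don't have access permissions" warnings around this point are the workload service bootstrapping itself: TPoolFetcherActor looks up /Root/.metadata/workload_manager/pools/default before TPoolCreatorActor has finished creating it, the ESchemeOpCreateResourcePool operation then lands, and the subsequent "path exist, request accepts it" issue raised on the doublechecking retry is benign. For comparison, creating a resource pool explicitly looks roughly like the sketch below; the pool name and limits are illustrative placeholders, not values from this run, and the WITH settings assume the resource-pool options available in recent YDB releases:

    -- Hypothetical pool; CONCURRENT_QUERY_LIMIT and QUEUE_SIZE are placeholders.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
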
2025-04-10T11:18:18.592349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.741467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:18.801015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:20.445277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642308926991974:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.445360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.481377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.508473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.545939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.576578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.602790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.633191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:20.670087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642308926992480:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.670185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.670241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642308926992485:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:20.673963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:20.684567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642308926992488:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:20.783067Z node 2 :TX_PROXY ERROR: Actor# [2:7491642308926992542:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:23.066428Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642300337055599:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:23.066506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:33.170182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:33.170213Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:33.987454Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642364761568326:2646], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jrfprkgp5x6xjkfa7nz3j1e5. SessionId : ydb://session/3?node_id=2&id=YTIwYWEyNzYtODg3OWI2YTEtOTU4N2RkMjYtZWIyNWQyYTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744283901367/18446744073709551615 shard 72075186224037888 with lowWatermark v1744283901563/18446744073709551615 (node# 2 state# Ready) } } 2025-04-10T11:18:33.988010Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642364761568326:2646], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jrfprkgp5x6xjkfa7nz3j1e5. SessionId : ydb://session/3?node_id=2&id=YTIwYWEyNzYtODg3OWI2YTEtOTU4N2RkMjYtZWIyNWQyYTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1744283901367/18446744073709551615 shard 72075186224037888 with lowWatermark v1744283901563/18446744073709551615 (node# 2 state# Ready) } }. 2025-04-10T11:18:33.988599Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642364761568327:2647], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YTIwYWEyNzYtODg3OWI2YTEtOTU4N2RkMjYtZWIyNWQyYTg=. TraceId : 01jrfprkgp5x6xjkfa7nz3j1e5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491642364761568322:2494], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:18:33.989094Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTIwYWEyNzYtODg3OWI2YTEtOTU4N2RkMjYtZWIyNWQyYTg=, ActorId: [2:7491642313221960120:2494], ActorState: ExecuteState, TraceId: 01jrfprkgp5x6xjkfa7nz3j1e5, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-04-10T11:18:34.844047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.844111Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:35.013399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:35.036051Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-10T11:18:35.313169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.313230Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:35.372580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-10T11:18:35.379156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:35.386290Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 
TestModificationResult got TxId: 102, wait until txId: 102 2025-04-10T11:18:35.397101Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:226:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:35.397419Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:228:2206], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-04-10T11:18:34.130690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.130733Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.377257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-04-10T11:18:34.408818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:18:34.408909Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:18:34.409156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-10T11:18:34.781392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.781469Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.843758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-04-10T11:18:34.901918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.901995Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-04-10T11:18:34.932810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.941986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:241:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:241:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:252:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:258:2067] recipient: [2:241:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-10T11:18:34.983410Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 
TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-10T11:18:35.107358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T11:18:35.289919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:422:2337] sender: [2:423:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [2:422:2337] sender: [2:426:2067] recipient: [2:24:2071] 2025-04-10T11:18:35.337153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.337215Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-10T11:18:35.359931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:18:35.360010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:18:35.360449Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-10T11:18:35.360609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 
TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:18:35.382507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-10T11:18:35.382887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:510:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:512:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:514:2067] recipient: [2:513:2407] Leader for TabletID 72057594046678944 is [2:515:2408] sender: [2:516:2067] recipient: [2:513:2407] 2025-04-10T11:18:35.445226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.445323Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:515:2408] sender: [2:543:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> TCacheTest::MigrationLostMessage >> KqpSinkMvcc::OltpMultiSinks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-04-10T11:16:08.071309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:08.071650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:08.071716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00074a/r3tmp/tmp9po5cn/pdisk_1.dat 2025-04-10T11:16:08.367662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1987, node 1 2025-04-10T11:16:08.573383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:08.573445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:08.573477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:08.573956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:08.580903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:08.664545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:08.664679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:08.678760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15399 2025-04-10T11:16:09.167185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:11.595801Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:11.620565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:11.620704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:11.647282Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:11.649029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:11.861713Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.862213Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.862679Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.862796Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.862845Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.863044Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.863101Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.863160Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:11.863210Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.021308Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:12.021462Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:12.034118Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:12.139665Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:12.185860Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:12.185958Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:12.218200Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:12.219162Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:12.219378Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:12.219453Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:12.219512Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:12.219561Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:12.219608Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:12.219658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:12.220065Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:12.242408Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.242576Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.249411Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:12.253848Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:12.254214Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:12.262693Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:12.277230Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:12.277286Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:12.277383Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:12.292628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:12.305476Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:12.305610Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:12.504871Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:12.659220Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:12.736511Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:13.742060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:13.742197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:13.763934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:14.092858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:14.093050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:14.093332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:14.093437Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:14.093524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:14.093645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:14.093733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:14.093814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:14.093906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:14.093998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:14.094075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:14.094170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:14.118798Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:14.118899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=T ... id = 2, client id = [2:8564:6483], server id = [2:8569:6488], tablet id = 72075186224037900 2025-04-10T11:18:35.889112Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.890214Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:35.891002Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8565:6484], server id = [2:8570:6489], tablet id = 72075186224037901 2025-04-10T11:18:35.891038Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.891546Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8589:6504], server id = [2:8591:6505], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:35.891638Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8589:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:35.892508Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:35.893713Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8566:6485], server id = [2:8571:6490], tablet id = 72075186224037902 2025-04-10T11:18:35.893748Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.894208Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8593:6507], server id = [2:8595:6509], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:35.894296Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8593:6507], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:35.894788Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:35.895836Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8567:6486], server id = [2:8572:6491], tablet id = 72075186224037903 2025-04-10T11:18:35.895869Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.896306Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6510], server id = [2:8599:6513], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:35.896388Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:35.898132Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6514], server id = [2:8604:6517], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:35.898235Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:35.900382Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:35.900822Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8585:6502], server id = [2:8588:6503], tablet id = 72075186224037904 2025-04-10T11:18:35.900859Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.902859Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:35.903519Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8589:6504], server id = [2:8591:6505], tablet id 
= 72075186224037905 2025-04-10T11:18:35.903561Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.904783Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:35.905554Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8593:6507], server id = [2:8595:6509], tablet id = 72075186224037906 2025-04-10T11:18:35.905595Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.905838Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:35.906323Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6510], server id = [2:8599:6513], tablet id = 72075186224037907 2025-04-10T11:18:35.906361Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.906588Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:35.906651Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:35.906881Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:35.907147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:35.907624Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:35.910960Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6514], server id = [2:8604:6517], tablet id = 72075186224037908 2025-04-10T11:18:35.911005Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:35.911906Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:35.958472Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8629:6538]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:35.958792Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:35.958872Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8629:6538], StatRequests.size() = 1 ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... 
waiting for 3rd TEvResolveKeySetResult (done) 2025-04-10T11:18:36.061679Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8440:6415], server id = [2:8441:6416], tablet id = 72075186224037894 2025-04-10T11:18:36.061822Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8635:6541] 2025-04-10T11:18:36.061887Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8635:6541] 2025-04-10T11:18:36.062093Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:8636:6542], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:18:36.130108Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:18:36.130276Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:18:36.131079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:18:36.132164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:18:36.132525Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-10T11:18:36.132582Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-10T11:18:36.132630Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-10T11:18:36.132676Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-10T11:18:36.132731Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1744283915838174 2025-04-10T11:18:36.132774Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-10T11:18:36.132816Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-10T11:18:36.132915Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-10T11:18:36.132979Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:18:36.133093Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-10T11:18:36.133179Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:18:36.133269Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:18:36.133382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:18:36.133619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:36.134818Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:18:36.135792Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:36.135866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:36.136017Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-10T11:18:36.246174Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8684:6571] 2025-04-10T11:18:36.246305Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8635:6541], server id = [2:8684:6571], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:36.246425Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8684:6571], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-10T11:18:36.246545Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8685:6572] 2025-04-10T11:18:36.246598Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8685:6572], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-04-10T11:18:36.301672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:36.301789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:36.304839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:18:36.323905Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWNmZDQwYTQtNzgwZGYzOTItN2VkYjEwMTUtMmUzN2NmODg=, TxId: 2025-04-10T11:18:36.323996Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWNmZDQwYTQtNzgwZGYzOTItN2VkYjEwMTUtMmUzN2NmODg=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:36.324649Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8693:6576]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:36.324846Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:36.324881Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:36.327361Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:36.327414Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:36.327461Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:36.332507Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TraverseColumnShard::TraverseColumnTable [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::EraseRowsShouldSuccess >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 11733, MsgBus: 27844 2025-04-10T11:18:15.394515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642284124475320:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:15.394652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d0c/r3tmp/tmp92B0JR/pdisk_1.dat 2025-04-10T11:18:15.662885Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11733, node 1 2025-04-10T11:18:15.720543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:15.720572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:15.720579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:15.720709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:15.756682Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:15.756822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:15.758681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27844 TClient is connected to server localhost:27844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:16.169011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:17.592297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642292714410574:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:17.592475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642292714410585:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:17.592607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:17.596631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:17.605322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642292714410588:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:18:17.664576Z node 1 :TX_PROXY ERROR: Actor# [1:7491642292714410639:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:17.873653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:17.985910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:18.827347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 2025-04-10T11:18:20.394329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642284124475320:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:20.394393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 13486, MsgBus: 2429 2025-04-10T11:18:26.067807Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642331644930758:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:26.068600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d0c/r3tmp/tmpdkms6G/pdisk_1.dat 2025-04-10T11:18:26.205002Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13486, node 2 2025-04-10T11:18:26.237959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:26.238070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:26.239411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:26.264052Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:26.264079Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.264087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.264223Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2429 TClient is connected to server localhost:2429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:26.801043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:26.809940Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:18:29.381651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642344529833302:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.381737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.381939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642344529833314:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.385814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.394755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642344529833316:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:29.489088Z node 2 :TX_PROXY ERROR: Actor# [2:7491642344529833367:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:29.539751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.575892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:30.715236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:31.572610Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642331644930758:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:31.649406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-04-10T11:16:08.671121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:08.671370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:08.671414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000717/r3tmp/tmphCOf9Q/pdisk_1.dat 2025-04-10T11:16:08.943237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26661, node 1 2025-04-10T11:16:09.136503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:09.136561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:09.136587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:09.137030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:09.143368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:09.222722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:09.222836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:09.235520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17496 2025-04-10T11:16:09.699076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:12.041476Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:12.066418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:12.066548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:12.093037Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:12.095084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:12.320723Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.321231Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.321789Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.321916Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.321974Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.322180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.322258Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.322336Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.322416Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:12.486195Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:12.486308Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:12.499546Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:12.638006Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:12.686041Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:12.686180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:12.720487Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:12.721632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:12.721851Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:12.721941Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:12.721999Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:12.722050Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:12.722097Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:12.722175Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:12.722565Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:12.742697Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.742810Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:12.748788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:12.752744Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:12.753066Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:12.762610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:12.778140Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:12.778223Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:12.778282Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:12.791209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:12.796832Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:12.796953Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:12.963529Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:13.089522Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:13.165060Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:14.182681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:14.182841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:14.210677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:14.568855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:14.569056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:14.569357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:14.569451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:14.569556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:14.569660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:14.569766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:14.569859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:14.569954Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:14.570081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:14.570192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:14.570284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:14.594770Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:14.594865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:35.315757Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:35.385435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:35.385527Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:35.460647Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8445:6420], schemeshard count = 1 2025-04-10T11:18:37.534085Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:37.534137Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:18:37.534195Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:18:37.534229Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:37.537080Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:37.552734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:37.553227Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:37.553330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:37.554013Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:37.567820Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:37.568036Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:37.568713Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8568:6487], server id = [2:8573:6492], tablet id = 72075186224037899, status = OK 2025-04-10T11:18:37.569131Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8568:6487], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.569585Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8569:6488], server id = [2:8574:6493], tablet id = 72075186224037900, status = OK 2025-04-10T11:18:37.569651Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8569:6488], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.571136Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8570:6489], server id = [2:8575:6494], tablet id = 72075186224037901, status = OK 2025-04-10T11:18:37.571205Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8570:6489], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.572416Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8571:6490], server id = [2:8576:6495], tablet id = 72075186224037902, status = OK 2025-04-10T11:18:37.572483Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8571:6490], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.573432Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8572:6491], server id = [2:8579:6498], tablet id = 72075186224037903, status = OK 2025-04-10T11:18:37.573493Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8572:6491], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.579201Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:18:37.579976Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8568:6487], server id = [2:8573:6492], tablet id = 72075186224037899 2025-04-10T11:18:37.580022Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.580978Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:18:37.581698Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8569:6488], server id = [2:8574:6493], tablet id = 72075186224037900 2025-04-10T11:18:37.581732Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.582635Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:18:37.583451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8591:6507], server id = [2:8595:6509], tablet id = 72075186224037904, status = OK 2025-04-10T11:18:37.583548Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8591:6507], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.584066Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8570:6489], server id = [2:8575:6494], tablet id = 72075186224037901 2025-04-10T11:18:37.584100Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.585045Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:18:37.585569Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8594:6508], server id = [2:8597:6511], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:37.585651Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8594:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.586109Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8571:6490], server id = [2:8576:6495], tablet id = 72075186224037902 2025-04-10T11:18:37.586142Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.587099Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:18:37.587676Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6510], server id = [2:8598:6512], tablet id = 72075186224037906, status = OK 2025-04-10T11:18:37.587748Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.588205Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8572:6491], server id = [2:8579:6498], tablet id = 72075186224037903 2025-04-10T11:18:37.588237Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.589156Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6514], server id = [2:8602:6516], tablet id = 72075186224037907, status = OK 2025-04-10T11:18:37.589225Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.589525Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6518], server id = [2:8607:6520], tablet id = 72075186224037908, status = OK 2025-04-10T11:18:37.589580Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6518], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:18:37.594334Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:18:37.594858Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8591:6507], server id = [2:8595:6509], tablet id = 72075186224037904 2025-04-10T11:18:37.594894Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.595763Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:37.596153Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8594:6508], server id = [2:8597:6511], tablet id = 72075186224037905 2025-04-10T11:18:37.596185Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.596944Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:18:37.597431Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6510], server id = [2:8598:6512], tablet id = 72075186224037906 2025-04-10T11:18:37.597464Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.597912Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:18:37.598382Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6514], server id = [2:8602:6516], tablet id = 72075186224037907 2025-04-10T11:18:37.598415Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.599017Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:18:37.599073Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-04-10T11:18:37.599270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:37.599459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:37.599869Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:37.601946Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6518], server id = [2:8607:6520], tablet id = 72075186224037908 2025-04-10T11:18:37.601981Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:37.602644Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:37.639355Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8634:6543]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:37.639633Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:37.639684Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8634:6543], StatRequests.size() = 1 2025-04-10T11:18:37.774544Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTdhZTg1MmMtYmVhNDlkMjktMjA0NWQ5OTAtODVlNWI5MTA=, TxId: 2025-04-10T11:18:37.774614Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTdhZTg1MmMtYmVhNDlkMjktMjA0NWQ5OTAtODVlNWI5MTA=, TxId: ...
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-10T11:18:37.775402Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8649:6549]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:37.775645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:37.776145Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:37.776223Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:37.780086Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:37.780153Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:37.780213Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:37.786661Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TCacheTest::MigrationUndo [GOOD] >> AnalyzeColumnshard::AnalyzeServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-04-10T11:18:38.052447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:38.052518Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:38.201776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-04-10T11:18:38.253855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:38.253917Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-04-10T11:18:38.284018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:18:38.293866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: [1:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:243:2217] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:243:2217] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:253:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409547 is [1:255:2221] sender: [1:258:2067] recipient: [1:243:2217] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-10T11:18:38.327893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:289:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:255:2221] sender: [1:290:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-10T11:18:38.408173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: [1:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: [1:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:343:2289] sender: [1:344:2067] recipient: [1:336:2285] Leader for TabletID 72075186233409548 is [1:343:2289] sender: [1:345:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T11:18:38.578755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2334] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2334] Leader for TabletID 72075186233409549 is [0:0:0] sender: 
[1:422:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:422:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:424:2067] recipient: [1:416:2334] 2025-04-10T11:18:38.621220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:38.621289Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:451:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-10T11:18:38.672818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:18:38.672868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:18:38.673150Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-10T11:18:38.673282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:18:38.690814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-10T11:18:38.691190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-10T11:18:38.731803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 
72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-04-10T11:18:38.835208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:620:2503] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:620:2503] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:628:2507] sender: [1:629:2067] recipient: [1:620:2503] Leader for TabletID 72075186233409550 is [1:628:2507] sender: [1:630:2067] recipient: [1:24:2071] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-04-10T11:18:39.311855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:39.311914Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:39.364729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-10T11:18:39.370583Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:174:2170], Recipient [2:70:2109]: NActors::TEvents::TEvPoison 2025-04-10T11:18:39.371253Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-04-10T11:18:39.377356Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received ev ... 
: 2] was 2 2025-04-10T11:18:39.975957Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-04-10T11:18:39.975977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:18:39.975999Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-10T11:18:39.976020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-10T11:18:39.976160Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:39.976343Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.976543Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:18:39.976885Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.976999Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.977420Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.977512Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.977749Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.977883Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.977971Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.978268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.978367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.978591Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.978830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.978997Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.979077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.979197Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:39.979447Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:18:39.981104Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:18:39.981267Z node 2 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:18:39.982383Z 
node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [2:514:2401], Recipient [2:514:2401]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:18:39.982448Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:18:39.983003Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:39.983058Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:39.983330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:39.983397Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:39.983451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:39.983488Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:18:39.983620Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:530:2401], Recipient [2:514:2401]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:18:39.983676Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:18:39.983711Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:40.017048Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:40.017261Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:382:2319] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:18:40.017566Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:543:2419], recipient# [2:542:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:18:40.018058Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:40.018212Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:391:2322] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:18:40.018485Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:545:2421], recipient# [2:544:2420], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:18:40.018952Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:40.019069Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:400:2325] DomainOwnerId: 72057594046678944 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:18:40.019269Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:547:2423], recipient# [2:546:2422], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } |89.7%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::UpdatePartitionLocation >> TPersQueueTest::SetupLockSession2 >> TPersQueueTest::WriteExisting >> TopicService::OneConsumer_TheRangesDoNotOverlap >> TPersQueueTest::ReadFromSeveralPartitions >> TPersQueueTest::BadTopic >> DemoTx::Scenario_1 |89.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-04-10T11:15:54.408421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:54.408808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:54.408880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ef0/r3tmp/tmpE3xFoE/pdisk_1.dat 2025-04-10T11:15:54.811875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23640, node 1 2025-04-10T11:15:55.074513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:55.074578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:55.074614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:55.075150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:55.078012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.177077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.177228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.191428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61592 2025-04-10T11:15:55.719059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:58.496166Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:58.524726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:58.524901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:58.552654Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:58.555410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:58.801332Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.801731Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802177Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802281Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802346Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802639Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802705Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802756Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.802808Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:58.966386Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:58.966506Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:58.979225Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.091461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:59.134277Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:59.134385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:59.170885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:59.171804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:59.171979Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:59.172035Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:59.172091Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:59.172144Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:59.172196Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:59.172238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:59.172663Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:15:59.193990Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.194123Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:15:59.201630Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:15:59.205832Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:15:59.206213Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:15:59.215224Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:15:59.232308Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:15:59.232374Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:15:59.232452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:15:59.250534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:15:59.258280Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:15:59.258405Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:15:59.428323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:15:59.559019Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:15:59.634837Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:00.448924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:16:01.208121Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:01.355486Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:16:01.355534Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:01.355620Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2595:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:16:01.356449Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2597:2948] 2025-04-10T11:16:01.356533Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2597:2948], schemeshard id = 72075186224037899 2025-04-10T11:16:02.621864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2728:3249], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.622037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:02.643398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:16:02.744614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:02.744801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:02.745058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:02.745200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:02.745304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:02.745469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:02.745592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:02.745675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:02.745767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2815:3050];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11 ... 32.733360Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:18:34.551422Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-10T11:18:34.551528Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 196.000000s, at schemeshard: 72075186224037897 2025-04-10T11:18:34.551942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-10T11:18:34.571572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:35.979652Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:35.979735Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-10T11:18:35.979776Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-10T11:18:35.979819Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:18:35.979857Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:35.980356Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:18:35.984062Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:35.988696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8040:5940], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:35.988793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8050:5945], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:35.988914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:36.001457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:18:36.061923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8054:5948], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:36.295697Z node 2 :TX_PROXY ERROR: Actor# [2:8154:5997] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:36.382541Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:8183:6012]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:36.382908Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:36.383006Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8185:6014] 2025-04-10T11:18:36.383093Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8185:6014] 2025-04-10T11:18:36.383553Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8186:6015] 2025-04-10T11:18:36.383678Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8185:6014], server id = [2:8186:6015], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:36.383813Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8186:6015], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:36.383897Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:36.384062Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:36.384157Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:8183:6012], StatRequests.size() = 1 2025-04-10T11:18:36.517021Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDU3MWEwOGYtOTYyMmZlNGMtMmI2YWFiZjktZDQ5MGUzNWU=, TxId: 2025-04-10T11:18:36.517098Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDU3MWEwOGYtOTYyMmZlNGMtMmI2YWFiZjktZDQ5MGUzNWU=, TxId: 2025-04-10T11:18:36.517653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:36.532078Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:36.532152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:18:36.599891Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:36.599973Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:36.665029Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8185:6014], schemeshard count = 1 2025-04-10T11:18:36.977006Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:18:36.977092Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037899 2025-04-10T11:18:36.977338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-10T11:18:36.991004Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:18:37.939800Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:37.939896Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-10T11:18:37.942723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:37.959316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:37.959959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:37.960033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-04-10T11:18:37.973402Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:38.006544Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-10T11:18:38.007714Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-10T11:18:38.007820Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-10T11:18:38.022232Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:18:39.427238Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:39.427359Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-10T11:18:39.427397Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:18:39.427992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:18:39.442196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:18:39.442737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:18:39.442823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:18:39.443961Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:18:39.458322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:18:39.458617Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:18:39.459305Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8320:6103], server id = [2:8321:6104], tablet id = 72075186224037905, status = OK 2025-04-10T11:18:39.459440Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8320:6103], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:18:39.463527Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:18:39.463654Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:18:39.463907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:18:39.464135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:18:39.464596Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:18:39.467201Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8320:6103], server id = [2:8321:6104], tablet id = 72075186224037905 2025-04-10T11:18:39.467266Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:18:39.468039Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:18:39.508093Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8341:6123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:39.508385Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:39.508441Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8341:6123], StatRequests.size() = 1 2025-04-10T11:18:39.654407Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzE0YjFkODgtNjRlNzZlZTItZjZkYTdlODMtYWU2ZWMwMzQ=, TxId: 2025-04-10T11:18:39.654476Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzE0YjFkODgtNjRlNzZlZTItZjZkYTdlODMtYWU2ZWMwMzQ=, TxId: 2025-04-10T11:18:39.655037Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:39.670916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:18:39.670996Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
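For reference, the statistics upsert that TQueryBase runs in the log entry above, laid out as standalone YQL for readability. This is a sketch only: the element types of the two List declarations were stripped in this capture, so List<Uint32> and List<String> below are assumptions inferred from the Uint32 column tags and the serialized statistics values being written.

DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List<Uint32>; -- element type assumed, lost in capture
DECLARE $data AS List<String>;        -- element type assumed, lost in capture
UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
VALUES
    ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
    ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);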
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3314:3400] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TOlap::StoreStatsQuota [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStatsQuota [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:16:49.168403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:16:49.168516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:49.168552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:16:49.168576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:16:49.168609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:16:49.168629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:16:49.168667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:16:49.168721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:16:49.168956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:16:49.226002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:16:49.226046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:49.236917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:16:49.237120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:16:49.237258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:16:49.243364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:16:49.243568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:16:49.244022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.244374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:16:49.245929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.247130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:49.247180Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.247310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:16:49.247340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:49.247369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:16:49.247491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.252841Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:16:49.340316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:16:49.340519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.340680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:16:49.340848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:16:49.340884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.343046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.343160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:16:49.343283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.343318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:16:49.343367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:16:49.343404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:16:49.344995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.345043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:16:49.345068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:16:49.346501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.346543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:16:49.346584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.346615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.349592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:16:49.351282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:16:49.351438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:16:49.352209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:16:49.352385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:16:49.352416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.352609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:16:49.352658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:16:49.353174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:16:49.353284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:16:49.355224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:16:49.355260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:16:49.355379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:16:49.355406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:16:49.355598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:16:49.355646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:16:49.355720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:49.355751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.355785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:16:49.355820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.355845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:16:49.355871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:16:49.355902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:16:49.355930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:16:49.355973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:16:49.355997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:16:49.356019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:16:49.357546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:49.357632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:16:49.357659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d=18446744073709551615;;current_snapshot_ts=5000006; 2025-04-10T11:18:41.472385Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:18:41.472444Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:41.472490Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:41.472611Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:18:41.600548Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:18:41.780447Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:18:41.790927Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-04-10T11:18:41.791065Z node 2 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-04-10T11:18:41.791130Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:18:41.791181Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:18:41.791261Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:18:41.791333Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-10T11:18:41.791413Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-04-10T11:18:41.791458Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:18:41.791525Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:41.791570Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:41.791681Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:18:41.922781Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:18:42.076697Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:18:42.090682Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-04-10T11:18:42.090892Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-04-10T11:18:42.090965Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:18:42.091054Z node 2 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-04-10T11:18:42.091186Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:18:42.091288Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:18:42.091398Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-10T11:18:42.091490Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-04-10T11:18:42.091548Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:18:42.091613Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:42.091669Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:18:42.091799Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:18:42.092106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:18:42.092926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:18:42.093104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=OlapStore, is column=0, is olap=1, RowCount 0, DataSize 0 2025-04-10T11:18:42.093218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: OLAP store contains 1 tables. 
2025-04-10T11:18:42.093439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Aggregated stats for pathId 3: RowCount 0, DataSize 0 2025-04-10T11:18:42.094030Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:42.094113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:18:42.095285Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:18:42.099377Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:42.099449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:336:2312], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-10T11:18:42.099573Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 276us result status StatusSuccess 2025-04-10T11:18:42.100129Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1000000 data_size_soft_quota: 900000 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:42.101033Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-04-10T11:18:42.101543Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046678944, LocalPathId: 2]; >> 
DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase |89.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::SchemeshardRestart >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-04-10T11:18:23.896550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:23.896782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:23.896972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001505/r3tmp/tmpWg4ruB/pdisk_1.dat 2025-04-10T11:18:24.225776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.264193Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:24.302461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:24.302581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:24.314189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:24.394979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:24.705461Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-04-10T11:18:24.707011Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-10T11:18:24.732635Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.025311s, errors=0 2025-04-10T11:18:24.732988Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-04-10T11:18:24.733126Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-04-10T11:18:24.734409Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-10T11:18:24.734561Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started fullscan actor# [1:750:2632] 2025-04-10T11:18:24.734663Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 100 2025-04-10T11:18:24.734709Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-04-10T11:18:24.735556Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-04-10T11:18:24.736709Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.000974s, sampled# 100, iter finished# 1, oks# 100 2025-04-10T11:18:24.736872Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 100 2025-04-10T11:18:24.737211Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started# 10 actors each with inflight# 1 2025-04-10T11:18:24.737296Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called 2025-04-10T11:18:24.737370Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737425Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called 2025-04-10T11:18:24.737449Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737476Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called 2025-04-10T11:18:24.737498Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737524Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called 2025-04-10T11:18:24.737559Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737601Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} Bootstrap called 2025-04-10T11:18:24.737646Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737680Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} Bootstrap called 2025-04-10T11:18:24.737715Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737758Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} Bootstrap called 2025-04-10T11:18:24.737782Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737809Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} Bootstrap called 2025-04-10T11:18:24.737832Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737857Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} Bootstrap called 2025-04-10T11:18:24.737892Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.737924Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} Bootstrap called 2025-04-10T11:18:24.737945Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-10T11:18:24.741608Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} session: ydb://session/3?node_id=1&id=NTQwYjVkZWQtY2UyZTZjYjItY2U2ZjE5ZjUtYjkyN2JlOTQ= 2025-04-10T11:18:24.741910Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} session: ydb://session/3?node_id=1&id=N2M0NzA2ZTQtZTgwYmJmMGUtNDcyOWMxN2UtODA2YTA2MGY= 2025-04-10T11:18:24.743669Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} session: ydb://session/3?node_id=1&id=ODQ2NWFhMTYtYWU0MjUzOWUtY2UwOWQxNzYtNDIzNDY1Zjg= 2025-04-10T11:18:24.745234Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} session: ydb://session/3?node_id=1&id=ZjYyYThkNTUtZjNiYjJiZTQtN2RiYzU2MTYtZjI1YzEwYmY= 2025-04-10T11:18:24.746767Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} session: ydb://session/3?node_id=1&id=NmMwN2U1MDMtOGI0MGE2Yy1mMDExOTlkOC1iNzg4NWFkMQ== 2025-04-10T11:18:24.748252Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} session: ydb://session/3?node_id=1&id=NjcyOWZiZjctYjIwZGJkMjUtNDQ5NTRjNmUtZTZjZDQ3MmU= 2025-04-10T11:18:24.749719Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} session: ydb://session/3?node_id=1&id=OWI1ODBjMTQtYTU5NWRhZDctOGFjZTUwNzAtMjU0YjA3NTM= 2025-04-10T11:18:24.751338Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} session: ydb://session/3?node_id=1&id=ODQ2OTMyYjMtYWI5MmJmYjktNmU2Y2UyNzctM2EwYjNkMmQ= 2025-04-10T11:18:24.754188Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} session: ydb://session/3?node_id=1&id=NWQwZjVhZGQtNjEwZjRmNC1jN2RhZDJmNS1lNGU1ODQ2MA== 2025-04-10T11:18:24.754287Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} session: ydb://session/3?node_id=1&id=OTZjMWNjZTItYzE3OGJlMzUtZjYyMTg1MmUtY2I2ZWU2ODI= 2025-04-10T11:18:24.759237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:775:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:813:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:814:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:815:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:816:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2693], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:24.759822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:820:2696], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... 2025-04-10T11:18:36.345634Z node 2 :TX_PROXY ERROR: Actor# [2:848:2724] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.346027Z node 2 :TX_PROXY ERROR: Actor# [2:849:2725] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.346415Z node 2 :TX_PROXY ERROR: Actor# [2:856:2726] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.347245Z node 2 :TX_PROXY ERROR: Actor# [2:861:2728] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.347710Z node 2 :TX_PROXY ERROR: Actor# [2:862:2729] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.348337Z node 2 :TX_PROXY ERROR: Actor# [2:869:2730] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:36.481843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.481979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2706], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:831:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:832:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2710], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2711], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:839:2715], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2720], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.482645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:857:2727], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:36.518565Z node 2 :TX_PROXY ERROR: Actor# [2:986:2822] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:36.968015Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.658280s, errors=0 2025-04-10T11:18:36.968183Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 4 { Tag: 4 DurationMs: 658 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:36.982463Z node 2 :TX_PROXY ERROR: Actor# [2:1915:3144] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:37.332839Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 1.025352s, errors=0 2025-04-10T11:18:37.333110Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 2 { Tag: 2 DurationMs: 1025 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:37.347214Z node 2 :TX_PROXY ERROR: Actor# [2:2822:3450] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:37.822395Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 9} finished in 1.507601s, errors=0 2025-04-10T11:18:37.822676Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 9 { Tag: 9 DurationMs: 1507 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:37.837942Z node 2 :TX_PROXY ERROR: Actor# [2:3729:3756] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:38.337620Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 8} finished in 2.024072s, errors=0 2025-04-10T11:18:38.338013Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 8 { Tag: 8 DurationMs: 2024 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:38.353534Z node 2 :TX_PROXY ERROR: Actor# [2:4636:4062] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:38.974603Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 11} finished in 2.657528s, errors=0 2025-04-10T11:18:38.974976Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 11 { Tag: 11 DurationMs: 2657 OperationsOK: 100 
OperationsError: 0 } 2025-04-10T11:18:38.992002Z node 2 :TX_PROXY ERROR: Actor# [2:5543:4368] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:39.602857Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 6} finished in 3.291727s, errors=0 2025-04-10T11:18:39.603217Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 6 { Tag: 6 DurationMs: 3291 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:39.620744Z node 2 :TX_PROXY ERROR: Actor# [2:6450:4674] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:40.303044Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 7} finished in 3.990565s, errors=0 2025-04-10T11:18:40.303574Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 7 { Tag: 7 DurationMs: 3990 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:40.321847Z node 2 :TX_PROXY ERROR: Actor# [2:7357:4980] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:41.050106Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 10} finished in 4.733083s, errors=0 2025-04-10T11:18:41.050631Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 10 { Tag: 10 DurationMs: 4733 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:41.074259Z node 2 :TX_PROXY ERROR: Actor# [2:8264:5286] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:41.779404Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 5.471779s, errors=0 2025-04-10T11:18:41.779674Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 3 { Tag: 3 DurationMs: 5471 OperationsOK: 100 OperationsError: 0 } 2025-04-10T11:18:41.799998Z node 2 :TX_PROXY ERROR: Actor# [2:9171:5592] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:42.737728Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 5} finished in 6.427898s, errors=0 2025-04-10T11:18:42.738323Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 5 { Tag: 5 DurationMs: 6427 OperationsOK: 100 OperationsError: 0 } 
2025-04-10T11:18:42.738413Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 6.434640s, oks# 1000, errors# 0 2025-04-10T11:18:42.738821Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable |89.8%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TableCreation::SimpleTableCreation >> KqpProxy::InvalidSessionID >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> ScriptExecutionsTest::RunCheckLeaseStatus >> TableCreation::MultipleTablesCreation >> TableCreation::ConcurrentTableCreation >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-04-10T11:18:19.751414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642303222583894:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:19.752870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d9f/r3tmp/tmpZivQc1/pdisk_1.dat 2025-04-10T11:18:20.095765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23228, node 1 2025-04-10T11:18:20.128292Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:20.128340Z node 
1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:20.135654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:20.135729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:20.145405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:20.258002Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:20.258026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:20.258031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:20.258123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:20.676645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27860 2025-04-10T11:18:22.185630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107486843:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.185797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.530918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.685265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487015:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.685372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.706256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:18:22.771057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487115:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.771214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.771649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487123:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.771808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487140:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487138:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487147:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487139:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487145:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487142:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.772511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487154:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.773267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487165:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.773334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.773334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487171:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.773420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487180:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.777092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642316107487240:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.777202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId ... ydb://session/3?node_id=1&id=NjVlYzdkYWItMmFkZjIwMGEtYjgzZWZmMjEtZTU1MDg4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.726285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724976. Ctx: { TraceId: 01jrfprw91c010hkv8qxxqm30s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMyMmQ5ZDUtNjQ0ZjNhOWYtNTQ0MDY5M2ItMjE5MTFmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.726693Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724978. Ctx: { TraceId: 01jrfprw8z2qetmnbmykp7y09v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFmNmY2NWEtMjY4N2EzYWUtNzhhYWJkMy1hNjJmNmM4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.728635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724977. Ctx: { TraceId: 01jrfprw922kvayefmesmb4874, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM5YTZkOTAtOGU2YWY5OTgtNGI0Yjc1NzQtNDBlOGZjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.728838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724979. Ctx: { TraceId: 01jrfprw948e98zvq1z0ncsez0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA5YzRiOGQtMmU0MDYzLWQ2ZmRjNDc1LTI2M2ZkMGMz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.732383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724981. Ctx: { TraceId: 01jrfprw9812vhwagtzkpfrrnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVkNDQwYjktNjI2MDFlMzYtNWM5Y2U1MmItYzQwZTc5MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.732429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724982. Ctx: { TraceId: 01jrfprw9973dy7dfrea5zk0bs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhYjUwZTQtYmUwOGU1YTItZDhkZjkwNWMtZjRjMWE3ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.735141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724980. Ctx: { TraceId: 01jrfprw991bd94qn0ze3x90mg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhNTNjMTAtNDM3YjJhZGMtZDI0NmY0YjktMmJkYjIwZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.737966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724983. Ctx: { TraceId: 01jrfprw9b25r9566frvm62yyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5MjZlNTYtYjhiMGIwOTEtYzIxZDU5YjktYzc2YjNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.740648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724984. Ctx: { TraceId: 01jrfprw9c77mcqawvrgjk0na4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YyODk1ZjUtOTM2NDkyMjktMjliNjM1ZWYtY2E4YjFiOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.741789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724985. 
Ctx: { TraceId: 01jrfprw9ef3bkxmbf34kfmf8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzdkYWItMmFkZjIwMGEtYjgzZWZmMjEtZTU1MDg4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.757084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724987. Ctx: { TraceId: 01jrfprwa1f1pyeg5eajmyshty, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMyMmQ5ZDUtNjQ0ZjNhOWYtNTQ0MDY5M2ItMjE5MTFmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.757088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724986. Ctx: { TraceId: 01jrfprwa253ymwx867wq63skw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM5YTZkOTAtOGU2YWY5OTgtNGI0Yjc1NzQtNDBlOGZjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-10T11:18:42.759026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724989. Ctx: { TraceId: 01jrfprwa287tjnmf76c0524dm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFmNmY2NWEtMjY4N2EzYWUtNzhhYWJkMy1hNjJmNmM4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.759954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724988. Ctx: { TraceId: 01jrfprwa2e58ha8bddgb34pw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA5YzRiOGQtMmU0MDYzLWQ2ZmRjNDc1LTI2M2ZkMGMz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.762913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724990. Ctx: { TraceId: 01jrfprwa37y2ex5tafh0xeys9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVkNDQwYjktNjI2MDFlMzYtNWM5Y2U1MmItYzQwZTc5MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.764059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724993. Ctx: { TraceId: 01jrfprwa5dqzww3wah12teqn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YyODk1ZjUtOTM2NDkyMjktMjliNjM1ZWYtY2E4YjFiOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.764274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724994. Ctx: { TraceId: 01jrfprwa8frwkr23hhjxpp3k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzdkYWItMmFkZjIwMGEtYjgzZWZmMjEtZTU1MDg4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.766210Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724991. Ctx: { TraceId: 01jrfprwa3bns89s72vax49nk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5MjZlNTYtYjhiMGIwOTEtYzIxZDU5YjktYzc2YjNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.766320Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724992. Ctx: { TraceId: 01jrfprwa3749q2pww7xh7zfyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhYjUwZTQtYmUwOGU1YTItZDhkZjkwNWMtZjRjMWE3ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-04-10T11:18:42.773467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724995. 
Ctx: { TraceId: 01jrfprwa5aex3w0hbarvdrrg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhNTNjMTAtNDM3YjJhZGMtZDI0NmY0YjktMmJkYjIwZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.778097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724996. Ctx: { TraceId: 01jrfprwaq7xtw6hathd3c81se, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMyMmQ5ZDUtNjQ0ZjNhOWYtNTQ0MDY5M2ItMjE5MTFmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:18:42.785726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724998. Ctx: { TraceId: 01jrfprwaz7b2rpxxz37c4bj6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhYjUwZTQtYmUwOGU1YTItZDhkZjkwNWMtZjRjMWE3ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.785730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724999. Ctx: { TraceId: 01jrfprwaz94nxw0w4mfdmgrd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5MjZlNTYtYjhiMGIwOTEtYzIxZDU5YjktYzc2YjNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.788728Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724997. Ctx: { TraceId: 01jrfprwazdbfd6c20vcc934kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM5YTZkOTAtOGU2YWY5OTgtNGI0Yjc1NzQtNDBlOGZjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.793406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725000. Ctx: { TraceId: 01jrfprwb40yqh2bycdc02rvnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA5YzRiOGQtMmU0MDYzLWQ2ZmRjNDc1LTI2M2ZkMGMz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.794482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725001. Ctx: { TraceId: 01jrfprwb226b1vahn7vxpskvz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFmNmY2NWEtMjY4N2EzYWUtNzhhYWJkMy1hNjJmNmM4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.795344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725002. Ctx: { TraceId: 01jrfprwb26sz34xmfckztkecm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzdkYWItMmFkZjIwMGEtYjgzZWZmMjEtZTU1MDg4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.803949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725003. 
Ctx: { TraceId: 01jrfprwb29v4bavns15kapz7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVkNDQwYjktNjI2MDFlMzYtNWM5Y2U1MmItYzQwZTc5MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:42.804626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725004. Ctx: { TraceId: 01jrfprwb7e2cjdjqmvzch8y04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YyODk1ZjUtOTM2NDkyMjktMjliNjM1ZWYtY2E4YjFiOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> KqpProxy::PingNotExistedSession >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-10T11:18:41.973943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:41.974081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:41.974258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e45/r3tmp/tmpueVSfs/pdisk_1.dat 2025-04-10T11:18:42.431620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.478470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.522898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.523036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.535724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.633996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.688645Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.688906Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.726452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.726541Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.728645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.728707Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.728777Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.729704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.729815Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.729877Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.740533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.777059Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.778852Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.778982Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.779032Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.779078Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.779111Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.780428Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.780540Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.780608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.780661Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.780754Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.780798Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.780907Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.782055Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.783136Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.783255Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.785115Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.796245Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.797143Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:42.954332Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:42.958897Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:42.958969Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.959735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.959798Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:42.959862Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:42.960132Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:42.960304Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:42.961001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.961073Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:42.963796Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:42.965347Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:42.966806Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:42.966850Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.967318Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:42.967385Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.968002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.968062Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.968104Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:42.968164Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:42.968209Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:42.968306Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.972011Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.975072Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:42.975146Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:42.975683Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.004784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.004900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.004973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.015101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.020895Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.173220Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.176977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.259090Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwhr7gr3hmdjaaj1f5bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU4NzdmNzMtYjg0YzAxZGUtZWRlNjRjMjgtMjlkZTM4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.083673Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.089467Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.089681Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:47.818292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:47.844854Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-10T11:18:47.845095Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:47.891085Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:47.891222Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:47.892907Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:47.892997Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:47.893060Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:47.893435Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:47.893570Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:47.893650Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-10T11:18:47.904421Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:47.904505Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:47.904601Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:47.904705Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-10T11:18:47.904745Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:47.904786Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:47.904818Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:47.905162Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:47.905253Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:47.905364Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:47.905406Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:47.905443Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:47.905487Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:47.905569Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:47.905977Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:47.906398Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:47.906484Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:47.908206Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:47.918774Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:47.918885Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:48.070237Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:48.070788Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:48.070838Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.071514Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.071565Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:48.071613Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:48.071848Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:48.071993Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:48.072702Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.072771Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:48.073168Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:48.073568Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.074968Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-04-10T11:18:48.075001Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.075447Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:48.075494Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.076040Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.076076Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.076115Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:48.076159Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:48.076192Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:48.076264Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.076959Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.078790Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:48.078839Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:48.079270Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:48.082697Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-10T11:18:48.082796Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.103798Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.103863Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.104144Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-10T11:18:48.105894Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-04-10T11:18:48.106050Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.106237Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.106278Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.106467Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-04-10T11:18:48.107936Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-04-10T11:18:48.108065Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 
2025-04-10T11:18:48.108184Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.108221Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.108393Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-04-10T11:18:48.109741Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-04-10T11:18:48.109853Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.109965Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.110018Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.110213Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-04-10T11:18:48.111581Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-04-10T11:18:48.111702Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.111886Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.111924Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.112077Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-04-10T11:18:48.113534Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] 2025-04-10T11:18:48.113660Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.113778Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.113810Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.113942Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> GroupWriteTest::ByTableName [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-04-10T11:18:41.977793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:41.977989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:41.978131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ebb/r3tmp/tmpy3mSSi/pdisk_1.dat 2025-04-10T11:18:42.430809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.499296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.542405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.542554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.555501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.640813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.685245Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.685527Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.744035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.744144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.745752Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.745836Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.745910Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.746316Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.746446Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.746518Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.757324Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.794829Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.795046Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.795167Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.795210Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.795241Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.795274Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.795716Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.795816Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.795878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.795927Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.795973Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.796013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.796143Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.796613Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.796857Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.796952Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.799435Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.811609Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.811724Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:42.966770Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:42.971366Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:42.971419Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.972088Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.972149Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:42.972207Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:42.972407Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:42.972521Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:42.973355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.973431Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:42.976162Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:42.976561Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:42.977893Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:42.977939Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.978405Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:42.978472Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.979018Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.979050Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.979092Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:42.979141Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:42.979174Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:42.979246Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.983224Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.984925Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:42.984971Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:42.985448Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.006102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.006282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.006375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.013262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.019236Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.179824Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.183237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.268916Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwhrcng2s9mj0batx38b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJiMDM4ODYtZDQ3NTUyYTMtYzFmYWY1MzUtMTdiNjJiMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.083553Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.086092Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.086333Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-10T11:18:48.085594Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.085628Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.085659Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.085724Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:48.086019Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.086215Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:48.086272Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:48.087510Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.098077Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.098212Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:48.246049Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:48.246554Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:48.246619Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.247223Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.247260Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:48.247290Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:48.247475Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:48.247630Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:48.248232Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.248330Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:48.248721Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:48.249113Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.250608Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:48.250656Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.251268Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:48.251340Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.252100Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.252139Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.252226Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:48.252287Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:48.252333Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:48.252418Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.253256Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.255084Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:48.255131Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:48.255485Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:48.261720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.261796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.261850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.266405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:48.271735Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.419101Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.421154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:48.454348Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:48.503853Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps1p42khs4v05fdrpbr2e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTk1ODUwN2EtNDAxMGZlMTgtM2QwZjE4YjAtOTU4ZGQ0OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:48.505576Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:48.505872Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:48.505978Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-10T11:18:48.516635Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.519507Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:48.520174Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.531086Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.531163Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.531407Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:48.531446Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:48.531692Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.531748Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.531791Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.531852Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.531936Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:48.532810Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.533115Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.533281Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.533335Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.533402Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:48.533620Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.533677Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
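The repeated KQP_WORKLOAD_SERVICE warnings above are the benign first-use race for the default resource pool: fetchers see NOT_FOUND, a creator is scheduled, and the "path exist, request accepts it" error from the losing proposer is treated as success after the doublechecking retry. A sketch of that create-if-absent pattern under assumed names (fetch_pool, create_pool, and the exception types are hypothetical stand-ins, not the actual YDB API):

    import time

    class NotFound(Exception): pass
    class AlreadyExists(Exception): pass

    def ensure_default_pool(fetch_pool, create_pool, retries=3, backoff=0.1):
        """Create-if-absent with retry, mirroring the log sequence:
        NOT_FOUND -> create -> 'path exist' accepted -> re-fetch."""
        for attempt in range(retries):
            try:
                return fetch_pool("default")
            except NotFound:
                pass
            try:
                create_pool("default")
            except AlreadyExists:
                pass  # another actor won the race; request accepts it
            time.sleep(backoff * (attempt + 1))  # scheduled retry
        return fetch_pool("default")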
2025-04-10T11:18:48.534250Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:48.534464Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:48.534601Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:48.534642Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:48.554816Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:48.554873Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:48.555182Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.555208Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.555236Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:48.555329Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.555372Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.555402Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-04-10T11:18:41.969852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:41.970056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:41.970243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea7/r3tmp/tmpp9DOKH/pdisk_1.dat 2025-04-10T11:18:42.430710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.507529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.554124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.554275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.565765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.662427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.724521Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.724752Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.823426Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.823532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.825087Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.825161Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.825241Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.825587Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.825698Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.825764Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.836486Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.863147Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.863341Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.863467Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.863508Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.863539Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.863570Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.863950Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.864041Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.864119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.864164Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.864207Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.864247Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.864346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.864732Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.864961Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.865041Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.866607Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.877385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.877499Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.030601Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.033817Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.033860Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.034702Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.034770Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.034833Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.035064Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.035216Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.035854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.035921Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.037819Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.038334Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.039688Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.039730Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.040195Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.040271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.040889Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.040926Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.040974Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.041022Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.041066Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.041148Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.049877Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.051978Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.052033Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.052505Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.060503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.060600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.060690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.065350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.070214Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.227246Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.230002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.296190Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.044702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwkj1ytvqb928qbtwc20, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjY3NDQ5MGYtZmNjMjZhNDItMzhhMjMxODQtMjNiZjk1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.088250Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.088823Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.089029Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 4-10T11:18:44.219599Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:44.219651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:44.219704Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:44.220061Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:44.220146Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:44.220876Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-10T11:18:44.221179Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:44.221338Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-10T11:18:44.221401Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-04-10T11:18:44.224634Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:44.224710Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-10T11:18:44.225254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:44.225325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:44.225372Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:44.225540Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:44.225636Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:44.225699Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:47.901100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:47.901335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:18:47.901563Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea7/r3tmp/tmpMvDUb4/pdisk_1.dat 2025-04-10T11:18:48.158826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.184516Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:48.221261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:48.221385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:48.232604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:48.313913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.334960Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-10T11:18:48.335221Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:48.371946Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:48.372053Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:48.373627Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:48.373691Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:48.373748Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:48.374030Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:48.374142Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:48.374234Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-10T11:18:48.384858Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:48.384939Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:48.385035Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:48.385112Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-10T11:18:48.385146Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.385180Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:48.385219Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.385537Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:48.385623Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:48.385698Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.385742Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.385787Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.385834Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.385908Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:48.386354Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.386542Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:48.386604Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:48.388015Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.398723Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.398852Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:48.547786Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:48.548470Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:48.548540Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.549321Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.549370Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:48.549413Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:48.549702Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:48.549824Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:48.550486Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.550558Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:48.550958Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:48.551373Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:48.552896Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:48.552960Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.553627Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:48.553702Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.554480Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.554527Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.554570Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:48.554634Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:48.554683Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:48.554767Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.555590Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.557682Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:48.557737Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:48.558253Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:48.562937Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-04-10T11:18:48.563054Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-04-10T11:18:48.563221Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 17224877922653775843 2025-04-10T11:18:17.892838Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-10T11:18:17.924815Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-10T11:18:17.924891Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-10T11:18:17.928394Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received 
TEvBlockResult {Status# OK} 2025-04-10T11:18:17.944774Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:17.947410Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-10T11:18:43.666651Z 8 00h01m24.010512s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2716 2025-04-10T11:18:49.430081Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2025-04-10T11:18:49.430217Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:49.430270Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2025-04-10T11:18:49.430308Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-10T11:18:49.489005Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-10T11:18:49.489115Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-04-10T11:18:42.555281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.555447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.555569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e98/r3tmp/tmpwUohAD/pdisk_1.dat 2025-04-10T11:18:42.944491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.984268Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.032593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.032748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.044201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.129209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:43.180337Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:43.180616Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.225366Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.225482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.228438Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:43.228535Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:43.228632Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:43.229006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.229145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.229217Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:43.241040Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.289922Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.290143Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.291589Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:43.291643Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.291682Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.291720Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.292197Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.292308Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.292372Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.292423Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.292466Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.292510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.292617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:43.293077Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.293334Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.293425Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.295179Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.306724Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.306826Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.459523Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.464220Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.464286Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.465105Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.465180Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.465244Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.465528Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.465677Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.466333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.466412Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.468218Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.468647Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
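The ConditionalEraseRowsShouldEraseOnTimestamp64 run that follows scans the table (WaitForStreamClearance, then ReadTableScan) and erases rows whose Timestamp64 column falls at or before the TTL cutoff. A toy version of the row predicate, assuming the column stores signed microseconds since the Unix epoch (an assumption about the encoding, not taken from this log):

    # Toy predicate for a conditional erase over a Timestamp64 column.
    # Assumes signed microseconds since the Unix epoch; the datashard
    # applies the real check during the ReadTableScan seen below.
    def rows_to_erase(rows, cutoff_us):
        return [key for key, ts_us in rows if ts_us <= cutoff_us]

    rows = [(1, -5), (2, 1_000_000), (3, 2_000_000)]  # (key, Timestamp64)
    print(rows_to_erase(rows, cutoff_us=1_000_000))   # [1, 2]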
2025-04-10T11:18:43.469735Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.469784Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.470238Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.470307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.470936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.470977Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.471026Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.471080Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.471121Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.471182Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.474041Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.476300Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.476371Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.476945Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.485095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.485203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.485271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.489914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.496578Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.659568Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.663085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.761072Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.142997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprx0tezxqyq4nw639g3kj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMyMmQ0NzUtNDAxMGI3ZjYtMWI4ODI4Yi1mNzk3YzFhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.150428Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.151002Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.151287Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-10T11:18:48.325148Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.325177Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.325216Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.325290Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:48.325651Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.325826Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:48.325882Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:48.327217Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.337756Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.337850Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:48.485335Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:48.485799Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:48.485840Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.486432Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.486470Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:48.486505Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:48.486675Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:48.486791Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:48.487312Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.487361Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:48.487660Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:48.487964Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.489096Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:48.489132Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.489672Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:48.489742Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.490455Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.490498Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.490553Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:48.490612Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:48.490656Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:48.490752Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.491577Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.493583Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:48.493634Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:48.494134Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:48.500847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.500933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.500999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.505105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:48.509474Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.657048Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.660061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:48.694707Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:48.773870Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps1xk592d6pkds0nmjx6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJmYzZjMDYtZmJhZDhkZWYtZGYxMTRjZjctZTI0NjM5MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:48.779886Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:48.780292Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:48.780473Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T11:18:48.791322Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.795167Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:48.796162Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:48.807449Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:48.807537Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.807794Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:48.807852Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:48.808161Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.808212Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.808295Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.808373Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.808467Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:48.809461Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.809831Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.810028Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.810072Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.810129Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:48.810401Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.810485Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-10T11:18:48.811128Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:48.811383Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:48.811544Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:48.811595Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:48.835141Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:48.835208Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:48.835543Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.835574Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:48.835604Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:48.835698Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.835751Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.835794Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> YdbTableSplit::SplitByLoadWithReads [GOOD]
>> TableCreation::ConcurrentTableCreation [GOOD]
>> TableCreation::ConcurrentMultipleTablesCreation
|89.8%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|89.8%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TableCreation::SimpleTableCreation [GOOD]
>> TableCreation::SimpleUpdateTable
>> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD]
>> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD]
>> TableCreation::ConcurrentUpdateTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD]
Test command err:
2025-04-10T11:18:42.169113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.169331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.169513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e6f/r3tmp/tmpfMbClK/pdisk_1.dat 2025-04-10T11:18:42.546535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.586966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.631498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.631646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.643169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.728759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.771514Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.771823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.823249Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.823423Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.825393Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.825483Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.825574Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.826028Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.826187Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.826269Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.838681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.869764Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.870027Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.870198Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.870253Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.870297Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.870340Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.870860Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.870984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.871065Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.871123Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.871184Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.871236Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.871360Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.871849Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.872169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.872263Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.874165Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.885353Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.885496Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.038955Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.042807Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.042887Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.043903Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.043991Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.044082Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.044403Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.044649Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.045564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.045662Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.049775Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.050368Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.053871Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.053939Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.054586Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.054685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.055689Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.055737Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.055798Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.055869Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.055933Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.056042Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.060287Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.062962Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.063048Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.063713Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.074857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.075004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.075101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.081340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.088750Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.244553Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.248733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.326788Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwm01jctgdzs2nnd76ej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc2MjZlZTctMzRmMDViZDAtMzUzZDEzMS1hMjBiODQzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.085512Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.086007Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.086313Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-10T11:18:48.603281Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.603326Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:48.603372Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.603495Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:48.603968Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:48.604224Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:48.604307Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:48.606204Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.616920Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:48.617027Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:48.766943Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:48.767658Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:48.767725Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.768521Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.768576Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:48.768628Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:48.768912Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:48.769077Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:48.769919Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:48.770014Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:48.770522Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:48.771028Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:48.772871Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:48.772929Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.773702Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:48.773776Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.774773Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:48.774837Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:48.774911Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:48.774976Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:48.775033Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:48.775134Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:48.776172Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.778908Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:48.778984Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:48.779595Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:48.788263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.788384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:746:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.788486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:48.794324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:48.800489Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.951062Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:48.954355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:48.989345Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:49.076454Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps26j68eng44qsnvj97h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2QyZjQwZjMtZjhiNjA2YmUtZGY3Mjk4MTQtN2Q5NGFjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:49.079571Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:49.080127Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:49.080350Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-10T11:18:49.094881Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:49.099210Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:49.100329Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:49.111773Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:49.111853Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:49.112137Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:49.112214Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:49.112522Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:49.112576Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:49.112634Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:49.112710Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:49.112832Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:49.113850Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:49.114332Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:49.114537Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:49.114576Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:49.114620Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:49.114885Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:49.114967Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-10T11:18:49.115691Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:49.115951Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:49.116127Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:49.116183Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:49.141542Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:49.141625Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:49.142121Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:49.142193Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:49.142244Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:49.142408Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:49.142485Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:49.142543Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD]
Test command err:
2025-04-10T11:16:13.993537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:16:13.993865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:16:13.993920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00068a/r3tmp/tmpNaTBO2/pdisk_1.dat 2025-04-10T11:16:14.311953Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26299, node 1 2025-04-10T11:16:14.550860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:14.550914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:14.550940Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:14.551345Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:16:14.553622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:16:14.647838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:14.647964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:14.661033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15360 2025-04-10T11:16:15.172195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:16:17.805003Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:16:17.833417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:17.833589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:17.860742Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:16:17.863186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:18.086844Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.087365Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.087802Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.087929Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.087985Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.088203Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.088265Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.088319Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.088371Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:16:18.250318Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:18.250454Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:18.264070Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:18.403082Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:18.451726Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:16:18.451857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:16:18.494613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:16:18.495750Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:16:18.495985Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:16:18.496061Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:16:18.496121Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:16:18.496175Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:16:18.496233Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:16:18.496300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:16:18.496814Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:18.520999Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:18.521133Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:18.529197Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:18.533771Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:18.534132Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:18.544538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:18.561991Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:18.562068Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:18.562166Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:18.580569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:18.588283Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:18.588420Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:18.741737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:18.916512Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:18.993817Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:20.021220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.021364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.039353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:20.464483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.464635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.465876Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2555:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:16:20.466072Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:16:20.466165Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2557:3133] 2025-04-10T11:16:20.466240Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2557:3133] 2025-04-10T11:16:20.466688Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2558:2994] 2025-04-10T11:16:20.466927Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2557:3133], server id = [2:2558:2994], tablet id = 72075186224037894, status = OK 2025-04-10T11:16:20.467134Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2558:2994], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:16:20.467192Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:16:20.467416Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:16:20.467472Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2555:3131], StatRequests.size() = 1 2025-04-10T11:16:20.483637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2562:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.483778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.484203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2567:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:20.489023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:16:20.663518Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:16:20.663591Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:16:20.737852Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2557:3133], schemeshard count = 1 2025-04-10T11:16:21.084294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:18:39.860740Z node 2 :TX_PROXY ERROR: Actor# [2:7214:5021] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:39.903200Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7243:5036]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:39.903479Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:18:39.903590Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7245:5038] 2025-04-10T11:18:39.903659Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7245:5038] 2025-04-10T11:18:39.904148Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7246:5039] 2025-04-10T11:18:39.904253Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7245:5038], server id = [2:7246:5039], tablet id = 72075186224037894, status = OK 2025-04-10T11:18:39.904329Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7246:5039], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:18:39.904371Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:18:39.904443Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:39.904502Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7243:5036], StatRequests.size() = 1 2025-04-10T11:18:40.052842Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDYwYjJlMGYtOTEwMGZlNy0yOTg4NDVmMi04NDljNmI2Zg==, TxId: 2025-04-10T11:18:40.052934Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDYwYjJlMGYtOTEwMGZlNy0yOTg4NDVmMi04NDljNmI2Zg==, TxId: 2025-04-10T11:18:40.053510Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:40.070457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:40.070516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:40.117596Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:18:40.117692Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:18:40.209782Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7245:5038], schemeshard count = 1 2025-04-10T11:18:41.377390Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:41.377507Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:18:41.377569Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:42.694794Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:18:42.717765Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:42.717945Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:18:42.717985Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:42.718398Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:42.721034Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:42.737363Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTNkMzRjMGUtM2Q2NDhkMGUtOTQxNzYyNTEtN2ExMGRhMjc=, TxId: 2025-04-10T11:18:42.737441Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTNkMzRjMGUtM2Q2NDhkMGUtOTQxNzYyNTEtN2ExMGRhMjc=, TxId: 2025-04-10T11:18:42.737927Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:42.753397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:18:42.753468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3117:3322] 2025-04-10T11:18:44.202080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:44.202179Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-10T11:18:44.202219Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:45.516847Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:18:45.518363Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:18:45.518698Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:18:45.540463Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:45.540550Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:18:45.540588Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:45.540945Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:18:45.543771Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:45.555255Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzY5NmRlZTMtMTQ2YjRlOTgtZDg1MmQ3NTQtNWIyNTU5OGQ=, TxId: 2025-04-10T11:18:45.555323Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzY5NmRlZTMtMTQ2YjRlOTgtZDg1MmQ3NTQtNWIyNTU5OGQ=, TxId: 2025-04-10T11:18:45.555835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:45.569754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:18:45.569818Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:18:46.974906Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:18:46.974987Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-10T11:18:46.975052Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:18:48.269576Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:18:48.269708Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-10T11:18:48.269746Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:48.270178Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:18:48.272895Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:18:48.282374Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjUzOGM4MS05NmIyZThlOS0yNWU5NmEyMy00MDk2ZTQwNw==, TxId: 2025-04-10T11:18:48.282444Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjUzOGM4MS05NmIyZThlOS0yNWU5NmEyMy00MDk2ZTQwNw==, TxId: 2025-04-10T11:18:48.283046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:18:48.296728Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:18:48.296796Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3117:3322] 2025-04-10T11:18:48.297291Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7569:5220]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:48.300494Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:48.300558Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:48.304438Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:18:48.304507Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:48.304574Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:48.307118Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-10T11:18:48.307455Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2025-04-10T11:18:48.307806Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7599:5232]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:18:48.310434Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:48.310487Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:18:48.310883Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:18:48.310939Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:18:48.310988Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:18:48.313417Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-10T11:18:48.313837Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3
>> TableCreation::MultipleTablesCreation [GOOD]
>> TableCreation::CreateOldTable
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard
>> KqpProxy::NoLocalSessionExecution
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD]
Test command err:
2025-04-10T11:18:19.751474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642304441705133:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:19.751560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d81/r3tmp/tmpLu0QJK/pdisk_1.dat 2025-04-10T11:18:20.117803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25146, node 1 2025-04-10T11:18:20.124944Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-10T11:18:20.138574Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:20.163878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:20.164039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:20.179410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:20.258252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:20.258276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:20.258284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:20.258411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:20.703728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28193 2025-04-10T11:18:22.332180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608074:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.332315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.557824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.707580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608245:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.707658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.727152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:18:22.798705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608351:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.798822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.798992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608371:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.799069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608374:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.799076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608372:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.799117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608373:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.799163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608375:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.799248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608376:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.800928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608416:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.801519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608414:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.801604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.801674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608429:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.802342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608443:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.802395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.802574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608447:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.803164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642317326608467:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.803208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.803237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId ... n/3?node_id=1&id=NTVhMzVjOGMtMWRlYzBiNDctNmNiODUxMTgtZTNlN2RkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.681388Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731917. Ctx: { TraceId: 01jrfps13w2r53x3y1tmgpnd3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJjZmZiYmUtMTE0YTQwYzEtMmZmNjM1OTQtZDRiNzdlYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.683015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731918. Ctx: { TraceId: 01jrfps13z3v26c661nddfrhte, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0ZmRlY2ItNWY4MzM0ZTAtMTVkNWRlYmEtYzNiMjEzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.687238Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731919. Ctx: { TraceId: 01jrfps1414gx1wbzjy2przxjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I3YWI5YjItYjIyN2Q2MmMtN2ZkNjNiYi04ZDNmZmFkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.694837Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731920. Ctx: { TraceId: 01jrfps140eeggzn4b8rz6zrk2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg1Y2Q4NzgtNmFiMWVjYmYtYTRjYmY5MWEtZTlkOWFlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.695575Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731921. Ctx: { TraceId: 01jrfps140e4jrgh59bmnyh3wx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzNzM2YTEtYmNlNTBiYTktY2MzNWU1NGQtNmNiNWUzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.696170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731922. Ctx: { TraceId: 01jrfps1413eyyyf7dy3fmz7ec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgyOTg1OTQtNTUyMGU1MDEtNDgyNTQ5OTgtNGYyMjVkZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.701748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731923. Ctx: { TraceId: 01jrfps14710ztkqeep4mt1rs9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQxNjIzMDgtZWQzZDhmYzMtZjk0NzNkZDAtZmJiODUxOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.701810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731928. Ctx: { TraceId: 01jrfps14b5jywmh1z4d148nsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhMzVjOGMtMWRlYzBiNDctNmNiODUxMTgtZTNlN2RkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.702282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731924. Ctx: { TraceId: 01jrfps148fdyy3sbx11935ng6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJjZTU0YzEtOTk4NjhlMzMtMWYwOTg1OWMtYjg5NzdkNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.702383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731927. 
Ctx: { TraceId: 01jrfps14a44sacm0n1kkdd19e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0ZmRlY2ItNWY4MzM0ZTAtMTVkNWRlYmEtYzNiMjEzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.702813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731926. Ctx: { TraceId: 01jrfps14a3hs6eyxt1my9hq4m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJjZmZiYmUtMTE0YTQwYzEtMmZmNjM1OTQtZDRiNzdlYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.705486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731925. Ctx: { TraceId: 01jrfps14a8mw2fk564mxc9by3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1ZTJmNjAtODRlOWE4OTgtNjkyMzE1NzktOWY3M2UzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.710473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731929. Ctx: { TraceId: 01jrfps14v8jvdnz55ycx4rnap, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgyOTg1OTQtNTUyMGU1MDEtNDgyNTQ5OTgtNGYyMjVkZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.710508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731930. Ctx: { TraceId: 01jrfps14qa8x6gj4sv22jzcga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg1Y2Q4NzgtNmFiMWVjYmYtYTRjYmY5MWEtZTlkOWFlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.711088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731932. Ctx: { TraceId: 01jrfps14rcc1h0mdgfryeeef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzNzM2YTEtYmNlNTBiYTktY2MzNWU1NGQtNmNiNWUzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.711164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731931. Ctx: { TraceId: 01jrfps14rf0ry4m99g41y1j9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I3YWI5YjItYjIyN2Q2MmMtN2ZkNjNiYi04ZDNmZmFkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.793564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731933. Ctx: { TraceId: 01jrfps17d30sz4vrd0teyp34t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQxNjIzMDgtZWQzZDhmYzMtZjk0NzNkZDAtZmJiODUxOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.793610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731934. Ctx: { TraceId: 01jrfps17d6c1fwawtqyesbz20, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJjZTU0YzEtOTk4NjhlMzMtMWYwOTg1OWMtYjg5NzdkNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.794712Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731935. Ctx: { TraceId: 01jrfps17d46an9d3snmr1vcph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhMzVjOGMtMWRlYzBiNDctNmNiODUxMTgtZTNlN2RkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:18:47.807494Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731937. Ctx: { TraceId: 01jrfps17v43ryrnxhc16ggmar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJjZmZiYmUtMTE0YTQwYzEtMmZmNjM1OTQtZDRiNzdlYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.807509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731938. Ctx: { TraceId: 01jrfps17v6rfpp458bjy3vaq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0ZmRlY2ItNWY4MzM0ZTAtMTVkNWRlYmEtYzNiMjEzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.808108Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731939. Ctx: { TraceId: 01jrfps17v2d5yrfx6k8p2dpc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I3YWI5YjItYjIyN2Q2MmMtN2ZkNjNiYi04ZDNmZmFkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.808166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731936. Ctx: { TraceId: 01jrfps17vfk55e6vhepg9fvgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg1Y2Q4NzgtNmFiMWVjYmYtYTRjYmY5MWEtZTlkOWFlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.808701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731943. Ctx: { TraceId: 01jrfps17vc08jr7qppzt4y8jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJjZTU0YzEtOTk4NjhlMzMtMWYwOTg1OWMtYjg5NzdkNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.809156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731940. Ctx: { TraceId: 01jrfps17vdvsfryjj01jj8zft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQxNjIzMDgtZWQzZDhmYzMtZjk0NzNkZDAtZmJiODUxOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.809255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731942. Ctx: { TraceId: 01jrfps17v263b5zgntdkj5pt1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzNzM2YTEtYmNlNTBiYTktY2MzNWU1NGQtNmNiNWUzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.809757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731941. Ctx: { TraceId: 01jrfps17v8bgk823t9x6zbrs3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgyOTg1OTQtNTUyMGU1MDEtNDgyNTQ5OTgtNGYyMjVkZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:18:47.817668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731944. Ctx: { TraceId: 01jrfps18069pprffm249gtkrj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhMzVjOGMtMWRlYzBiNDctNmNiODUxMTgtZTNlN2RkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:47.817892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731945. Ctx: { TraceId: 01jrfps180eh90n8bztpmzpbyj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1ZTJmNjAtODRlOWE4OTgtNjkyMzE1NzktOWY3M2UzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-10T11:18:42.182060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.182287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.182449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e2d/r3tmp/tmpTxkw3Q/pdisk_1.dat 2025-04-10T11:18:42.567138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.611301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.655099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.655244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.666561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.748067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.801038Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.801280Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.856150Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.856270Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.857870Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.857954Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.858028Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.858402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.858520Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.858584Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.869261Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.901439Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.901646Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.901776Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.901820Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.901853Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.901891Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.902346Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.902453Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.902530Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.902593Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.902642Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.902685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.902792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.903211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.903437Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.903527Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.905191Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.915834Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.915935Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.072683Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.080660Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.080737Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.081734Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.081810Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.081864Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.082138Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.082332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.083072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.083145Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.085185Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.085660Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.087727Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.087776Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.088292Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.088371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.089093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.089131Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.089185Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.089245Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.089296Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.089401Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.093522Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.095811Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.095874Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.096433Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.109522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.109633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.109715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.115153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.121295Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.272196Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.275461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.346485Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwn36p7h8ve28pyhew67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M2YjQ2NDctOWU5MTMzOWQtNzhiNzkxMzItNTA4NTA4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.084991Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.085599Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.085843Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 24037893 2025-04-10T11:18:50.035686Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1250:3030], serverId# [2:1251:3031], sessionId# [0:0:0] 2025-04-10T11:18:50.035912Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1250:3030], serverId# [2:1251:3031], sessionId# [0:0:0] 2025-04-10T11:18:50.037420Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1255:3035], serverId# [2:1256:3036], sessionId# [0:0:0] 2025-04-10T11:18:50.037625Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1255:3035], serverId# [2:1256:3036], sessionId# [0:0:0] 2025-04-10T11:18:50.039142Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1260:3040], serverId# [2:1261:3041], sessionId# [0:0:0] 2025-04-10T11:18:50.039363Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1260:3040], serverId# [2:1261:3041], sessionId# [0:0:0] 2025-04-10T11:18:50.041791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:50.045837Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-10T11:18:50.045959Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:50.046181Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:18:50.046902Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-10T11:18:50.047048Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-10T11:18:50.047567Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-10T11:18:50.086714Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1283:3060] 2025-04-10T11:18:50.086951Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:50.098024Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:50.098197Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:50.099747Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-10T11:18:50.099836Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-10T11:18:50.099892Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-10T11:18:50.100238Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:50.100376Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:50.100443Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1299:3060] in generation 1 2025-04-10T11:18:50.121967Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:50.122067Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-04-10T11:18:50.122224Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:50.122318Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1301:3070] 2025-04-10T11:18:50.122357Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-10T11:18:50.122393Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-04-10T11:18:50.122425Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:18:50.122916Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-04-10T11:18:50.123043Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-04-10T11:18:50.123130Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-10T11:18:50.123167Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:50.123209Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-04-10T11:18:50.123247Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-10T11:18:50.123338Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1282:3059], serverId# [2:1288:3062], sessionId# [0:0:0] 2025-04-10T11:18:50.123897Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:18:50.124117Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-04-10T11:18:50.124202Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-04-10T11:18:50.124839Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-10T11:18:50.135928Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:18:50.136063Z node 2 :TX_DATASHARD DEBUG: 
72075186224037894 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:50.298258Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1311:3080], serverId# [2:1313:3082], sessionId# [0:0:0] 2025-04-10T11:18:50.298837Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-04-10T11:18:50.298875Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:18:50.298988Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-10T11:18:50.299023Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:50.299061Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-04-10T11:18:50.299275Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-04-10T11:18:50.299375Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:50.300031Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-10T11:18:50.300092Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-04-10T11:18:50.300405Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:50.300737Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:50.302542Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-04-10T11:18:50.302589Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:18:50.303571Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-04-10T11:18:50.303637Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-10T11:18:50.304972Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-10T11:18:50.305009Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-10T11:18:50.305041Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-04-10T11:18:50.305088Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:50.305128Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-10T11:18:50.305229Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:18:50.305891Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-10T11:18:50.305959Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:50.306001Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-10T11:18:50.306129Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-10T11:18:50.306207Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-10T11:18:50.306255Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:50.306441Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:18:50.306772Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-04-10T11:18:50.307577Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-04-10T11:18:50.307630Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-10T11:18:50.311695Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-04-10T11:18:50.311852Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-04-10T11:18:50.313019Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-04-10T11:18:50.313227Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-04-10T11:18:50.315109Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] 2025-04-10T11:18:50.315369Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> DemoTx::Scenario_1 [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList >> TPQCDTest::TestUnavailableWithoutBoth >> TPQCDTest::TestPrioritizeLocalDatacenter >> 
TPQCDTest::TestUnavailableWithoutNetClassifier >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TableCreation::SimpleUpdateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-04-10T11:18:42.505416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.505663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.505886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e31/r3tmp/tmpLEPbPS/pdisk_1.dat 2025-04-10T11:18:42.938548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.979762Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.020818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.020957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.035463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.123291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:43.173173Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:43.173593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.227434Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.227574Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.229415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:43.229519Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:43.229593Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:43.229982Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.230113Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.230212Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:43.240885Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.261224Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.261481Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.261591Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:43.261628Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.261657Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.261705Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.262184Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.262328Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.262396Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.262453Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.262498Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.262542Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.262632Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:43.263040Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.263339Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.263446Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.265138Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.275922Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.276027Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.427715Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.440051Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.440146Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.441185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.441247Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.441318Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.441594Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.441803Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.442698Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.442800Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.451514Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.452124Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.453854Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.453916Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.454668Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.454769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.455720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.455767Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.455824Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.455898Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.455949Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.456060Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.460846Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.463633Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.463721Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.464358Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.475189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.475377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.475568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.481918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.490074Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.658187Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.661807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.777157Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.147203Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprx0gbr12fth87f28dae1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhjMjg0NWItOTlmMDNiZmUtZDYxYWM2ZTgtN2U5YjljZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.154751Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.155285Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.155533Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-10T11:18:52.731064Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.731111Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:52.731161Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.731282Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:52.731807Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:52.732080Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:52.732172Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:52.734173Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.744936Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:52.745067Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:52.895258Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:52.895995Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:52.896057Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.897048Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.897107Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:52.897158Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:52.897417Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:52.897567Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:52.898457Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.898528Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:52.899035Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:52.899466Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.901019Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:52.901072Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.901610Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:52.901700Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.902473Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.902529Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:52.902579Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:52.902646Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:52.902696Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:52.902793Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.904274Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.906325Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:52.906392Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:52.906571Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:52.915819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.915922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.915998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.921618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:52.928408Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.086640Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.089784Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:53.125328Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:53.246693Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps67hcz598gppf41zkcmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmNiOWM3OWQtZmRlOTc5ODktYThjZTI3NDItNTM1MTY3YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.252377Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:53.252836Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:53.253052Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T11:18:53.264120Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.268527Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.269957Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:53.281753Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:53.281838Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.282181Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.282240Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:53.282555Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.282613Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.282668Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.282728Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.282821Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.283896Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:53.284311Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:53.284517Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.284564Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.284617Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:53.284901Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.284993Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-10T11:18:53.285676Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:53.286016Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:53.286173Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:53.286236Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:53.323075Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.323157Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:53.323652Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.323698Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.323740Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:53.323876Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.323942Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.323994Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-04-10T11:18:42.337645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.337873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.338053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e44/r3tmp/tmpoKi0Sr/pdisk_1.dat 2025-04-10T11:18:42.776546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.816329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.859235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.859389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.874496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.956256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.996850Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.997150Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.047342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.047449Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.049087Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:43.049159Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:43.049229Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:43.049625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.049771Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.049844Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:43.060548Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.097411Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.097636Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.097771Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:43.097816Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.097853Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.097893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.098363Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.098483Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.098557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.098612Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.098652Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.098699Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.098818Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:43.099313Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.099544Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.099636Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.101010Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.114664Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.114767Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.267893Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.272412Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.272469Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.273327Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.273382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.273440Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.273674Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.273819Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.274914Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.274983Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.277030Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.277511Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.278930Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.278972Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.279434Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.279505Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.280197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.280234Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.280280Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.280335Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.280381Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.280476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.292563Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.294898Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.294956Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.295491Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.303970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.304075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.304157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.309258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.314732Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.462886Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.465084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.539970Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwv55hspch3g8nyktjz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE1M2I2MjQtYWQ2NjYxYzUtNDFjNDljZjQtODMxM2Y2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.106297Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.106944Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.107284Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=8 ... 025-04-10T11:18:52.779110Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.779153Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:52.779199Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.779294Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:52.779738Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:52.779953Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:52.780033Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:52.781825Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.792553Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:52.792666Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:52.942406Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:52.943041Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:52.943098Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.944230Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.944287Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:52.944335Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:52.944590Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:52.944737Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:52.945483Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.945565Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:52.946013Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:52.946462Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.948072Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:52.948123Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.948649Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:52.948734Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.949507Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.949555Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:52.949628Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:52.949694Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:52.949740Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:52.949825Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.951323Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.953421Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:52.953497Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:52.953677Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:52.962360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.962471Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.962560Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.967815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:52.974378Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.126905Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.130061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:53.167310Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:53.254991Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps690b60hrnqf0gfr5n68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODA3MWI4MmYtYThhZDkwYjEtZmI2YTcwMGUtNzkwOTNmNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.257746Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:53.258200Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:53.258426Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T11:18:53.270745Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.278762Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.279928Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:53.294838Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:53.294939Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.295226Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.295275Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:53.295596Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.295648Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.295699Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.295757Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.295854Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.296858Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:53.297240Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:53.297443Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.297496Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.297554Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:53.297820Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.297888Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-10T11:18:53.298585Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:53.298843Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:53.299014Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:53.299073Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:53.372251Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.372317Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:53.372745Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.372782Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.372820Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:53.372941Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.373006Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.373044Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-04-10T11:18:42.334277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.334541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.334722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e81/r3tmp/tmp3Hkk6F/pdisk_1.dat 2025-04-10T11:18:42.704694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.742911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.784499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.784649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.796235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.879231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.924021Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.924282Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.972533Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.972683Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.974396Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.974480Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.974571Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.974978Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.975138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.975219Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.986003Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.018010Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.018379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.018519Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:43.018567Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.018607Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.018645Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.019103Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.019250Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.019339Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.019399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.019471Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.019513Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.019640Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:43.020144Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.020359Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.020437Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.022243Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.033074Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.033185Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.188278Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.200173Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.200251Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.201272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.201359Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.201418Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.201668Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.201843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.202924Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.203011Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.211835Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.212257Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:18:43.213676Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.213720Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.214116Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.214231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.214878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.214917Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.214964Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.215017Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.215057Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.215132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.218667Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.221132Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.221200Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.221776Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.231198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.231308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.231407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.238767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.244802Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.412534Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.421257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.503471Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwrwc6hr3mn04mzpwytq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM2NDQwMTUtZTIzMjRjNmEtYzkwOWJkNTctNmZmZTE1OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.083558Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.085073Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.085388Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=7 ... 025-04-10T11:18:52.874273Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.874320Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:52.874367Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.874473Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:52.874982Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:52.875217Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:52.875308Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:52.877261Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.888015Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:52.888135Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:53.046841Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:53.047549Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:53.047609Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.048570Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.048630Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:53.048682Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:53.048962Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:53.049099Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:53.049886Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.049955Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:53.050478Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:53.050932Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.052575Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:53.052628Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.053153Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:53.053236Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.054087Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.054140Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:53.054219Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:53.054297Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:53.054370Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:53.054472Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.056044Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.058140Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:53.058237Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:53.058420Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:53.067657Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.067750Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.067827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.073491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:53.080439Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.242029Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.245525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:53.280839Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:53.364762Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps6c9a7wphn33mpgexg3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODBhNThiZi0yNmNmNGQxYy1mZmI2ZWUwMC1mOTQxNDJlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.367809Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:53.368300Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:53.368474Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-04-10T11:18:53.379477Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.384198Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.385383Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:53.396761Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:53.396844Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.397120Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.397162Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:53.397491Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.397547Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.397608Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.397695Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.397789Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.398844Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:53.399226Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:53.399451Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.399495Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.399540Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:53.399800Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.399873Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-10T11:18:53.400544Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:53.400800Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:53.400921Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:53.400967Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:53.455273Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.455344Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:53.455771Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.455807Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.455849Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:53.455980Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.456044Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.456092Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-04-10T11:18:42.398605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.398863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.399032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e17/r3tmp/tmpgzVsv6/pdisk_1.dat 2025-04-10T11:18:42.814818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.862893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.902480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.902625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.914101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.007955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:43.073073Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-10T11:18:43.073346Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.117322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.117591Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.119490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:43.119575Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:43.119645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:43.120022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.120508Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-04-10T11:18:43.120719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.129702Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.129783Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:721:2586] in generation 1 2025-04-10T11:18:43.131440Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.131579Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.132722Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:18:43.132779Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:18:43.132837Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:18:43.133075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.133275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.133326Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:729:2588] in generation 1 2025-04-10T11:18:43.134610Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-04-10T11:18:43.134752Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.143896Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.144045Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.145607Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:18:43.145686Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:18:43.145737Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:18:43.146064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.146202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.146267Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-04-10T11:18:43.157351Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.195721Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.195974Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.196122Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:743:2617] 2025-04-10T11:18:43.196168Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.196208Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.196248Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.196348Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.196409Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:18:43.196494Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.196552Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:744:2618] 2025-04-10T11:18:43.196576Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:18:43.196613Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:18:43.196639Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:18:43.196849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.196878Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:18:43.196932Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.196977Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:745:2619] 2025-04-10T11:18:43.196994Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:18:43.197008Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:18:43.197022Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:18:43.197436Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.197548Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.197610Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:18:43.197661Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:18:43.197769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.197818Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.197870Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.197918Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.197965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:43.197987Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.198007Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:18:43.198031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:43.198059Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-10T11:18:43.198112Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-10T11:18:43.198248Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:707:2596], sessionId# [0:0:0] 2025-04-10T11:18:43.198332Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:715:2603], sessionId# [0:0:0] 2025-04-10T11:18:43.198392Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:18:43.198428Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.198454Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-10T11:18:43.198485Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:18:43.198995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.199248Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.199343Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.199820Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:43.200030Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:18:43.200087Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:18:43.202284Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.202368Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:43.213182Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.213279Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.213395Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:43.213423Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.257031Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-04-10T11:18:43.257286Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:43.257494Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:52.492696Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.493664Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.493721Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:52.493774Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:52.494029Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:52.494209Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:52.494921Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:52.494999Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:52.495486Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:52.495942Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:52.497589Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:52.497646Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.498196Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:52.498272Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.499017Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:52.499081Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 
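The workload-manager noise a few entries below (TPoolFetcherActor NOT_FOUND warnings, a "Scheduled retry ... doublechecking" message, then a TX_PROXY error saying "path exist, request accepts it") is a benign bootstrap race: several actors try to create the default resource pool concurrently, and whoever loses the race treats the already-existing path as success. A client-side rendition of the same idempotent-create idea, sketched with the Python SDK; catching ydb.issues.SchemeError is an assumption about how the "path exist" status surfaces, and the table is hypothetical:

import ydb

def create_table_if_absent(session):
    # Idempotent bootstrap: a concurrent creator may win the race, in which
    # case the existing path is accepted rather than reported as a failure.
    try:
        session.execute_scheme(
            "CREATE TABLE `/Root/example` (key Uint64, value Utf8, PRIMARY KEY (key));"
        )
    except ydb.issues.SchemeError:
        # Assumed mapping of the "path exist" outcome; verify against the SDK.
        pass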
2025-04-10T11:18:52.499133Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:52.499212Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:52.499268Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:52.499367Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:52.500902Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.502785Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:52.502857Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:52.503019Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:52.510662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.510746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.510806Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.556142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:52.562637Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.711694Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:52.714818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:52.749811Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:52.820217Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps5txbyst713y0sad3n7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2Q4YjExMTgtYjM1ZTczNjMtYTk5YzIwNzItZTc2NzlhZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:52.822892Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:52.823251Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:52.823413Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-10T11:18:52.834350Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.065162Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfps65nedvbqwztpzaw30bp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTcwMzRmY2YtNzdkZTgyMzEtMmMwNWYyYjgtZDMxYjFiYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.068213Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-04-10T11:18:53.077211Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# [0:0:0] 2025-04-10T11:18:53.078347Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:53.089820Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:53.089899Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.089977Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-10T11:18:53.090744Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-10T11:18:53.090821Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.091032Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.091078Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-04-10T11:18:53.091298Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.091348Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.091398Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.091455Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.091616Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# 
[0:0:0] 2025-04-10T11:18:53.156524Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfps6d48pmncymm3fyy6aps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTcwMzRmY2YtNzdkZTgyMzEtMmMwNWYyYjgtZDMxYjFiYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.159404Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-10T11:18:53.159584Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-10T11:18:53.171526Z node 3 :TX_DATASHARD INFO: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-04-10T11:18:53.171841Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:53.172064Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:18:53.172151Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.172429Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:917:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:917:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:18:53.173026Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:910:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-04-10T11:18:53.173466Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTcwMzRmY2YtNzdkZTgyMzEtMmMwNWYyYjgtZDMxYjFiYzc=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jrfps6d48pmncymm3fyy6aps, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:911:2693] from: [3:910:2693] 2025-04-10T11:18:53.173732Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:911:2693] TxId: 281474976715662. Ctx: { TraceId: 01jrfps6d48pmncymm3fyy6aps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTcwMzRmY2YtNzdkZTgyMzEtMmMwNWYyYjgtZDMxYjFiYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:18:53.174063Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTcwMzRmY2YtNzdkZTgyMzEtMmMwNWYyYjgtZDMxYjFiYzc=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jrfps6d48pmncymm3fyy6aps, Create QueryResponse for error on request, msg:
>> TopicService::OneConsumer_TheRangesOverlap
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD]
>> TableCreation::ConcurrentMultipleTablesCreation [GOOD]
>> TableCreation::CreateOldTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD]
Test command err: 2025-04-10T11:18:42.084500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.084748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.084950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8c/r3tmp/tmpK0gLNS/pdisk_1.dat 2025-04-10T11:18:42.467459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.501393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.542236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.542368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.554096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.640970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.695529Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:18:42.695788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.769345Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.769469Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.771184Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.771275Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.771344Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.771681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.771799Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.771864Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:18:42.782713Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.832191Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.832379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.832499Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:18:42.832536Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.832571Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.832606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.832979Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.833070Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.833122Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.833168Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.833213Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.833261Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.833378Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:42.833757Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.833977Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.834070Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.835593Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.846361Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.846462Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:42.996335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:43.001046Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:43.001123Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.002313Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.002383Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:43.002435Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:43.002649Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:43.002819Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:43.003552Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.003631Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:43.005778Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:43.006459Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
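Returning to the ConditionalEraseRowsShouldBreakLocks output above: it ends with STATUS_LOCKS_BROKEN, the issue "Transaction locks invalidated. Table: `/Root/table-1`" (code 2001), and an ABORTED QueryResponse. That is the intended behavior, not a test failure: the background erase touched rows an open optimistic transaction had read, so the commit was rejected, and the client is expected to rerun the whole transaction. A hedged sketch of that retry loop with the Python SDK (the table schema and values are illustrative):

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def read_then_write(session):
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute("SELECT value FROM `/Root/table-1` WHERE key = 1;")
    # If a conditional erase (or any concurrent write) invalidates the locks
    # taken by the read above, this commit fails with issue code 2001 and
    # the call raises an ABORTED error.
    tx.execute(
        "UPSERT INTO `/Root/table-1` (key, value) VALUES (1, 42);",
        commit_tx=True,
    )

# retry_operation_sync reruns the callee in a fresh transaction on ABORTED,
# which is the standard remedy for broken-lock failures.
pool.retry_operation_sync(read_then_write)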
2025-04-10T11:18:43.007924Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:43.007970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.008518Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:43.008596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.009270Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.009324Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.009374Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:43.009432Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:43.009484Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:43.009588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.013583Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.015694Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:43.015751Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:43.016344Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:43.025232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.025358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.025427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:43.030629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:43.035703Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.190453Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.193532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:43.269069Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:44.045286Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfprwjf8sqedaspz5nh4s0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFhZTNkYzgtYTM2ZmExYTctNTBmYTdlODQtOGQ4YTQwNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:44.083903Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:44.087006Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:44.087227Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-04-10T11:18:53.179737Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.179777Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.179823Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.179911Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-10T11:18:53.180426Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:53.180648Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:53.180735Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:53.182578Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.194706Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:53.194836Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:53.345992Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:18:53.346651Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:18:53.346704Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.347588Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.347637Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:53.347683Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:18:53.347965Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:18:53.348109Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:18:53.348830Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.348902Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:18:53.349400Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:53.349809Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.351486Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:18:53.351537Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.352014Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:18:53.352087Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.352720Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.352767Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:53.352815Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:53.352875Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:18:53.352927Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:18:53.353007Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.354758Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.356601Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:18:53.356668Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:18:53.356826Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:18:53.366871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.366988Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.367072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.372592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:18:53.378761Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.536966Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:53.539736Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:18:53.573881Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:53.661531Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfps6nm0y5d2km6pz680sg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzk5ODdlMGItNjQwN2Q5NzctOWQ3NTI4MjQtYTBmMTgzZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:53.663868Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:18:53.664224Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:18:53.664402Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T11:18:53.675335Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.679297Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.680509Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-10T11:18:53.691886Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-10T11:18:53.691965Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:53.692225Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.692263Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-10T11:18:53.692529Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.692575Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.692619Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:53.692691Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.692779Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-04-10T11:18:53.693738Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:53.694125Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:53.694368Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.694414Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.694473Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:18:53.694769Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.694839Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
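The scan entries that follow ("Got quota for read table scan ... MessageQuota: 1", "Send response data ... PendingAcks: 1, MessageQuota: 0", "Got stream data ack", "Finish scan") trace the flow control of a ReadTable scan: the shard sends a chunk only while it holds message quota, and each client acknowledgment replenishes that quota. A sketch of the consuming side, assuming the Python SDK's read_table API; the table path is the one these tests use:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def scan_table(session):
    # Each result set consumed here corresponds to one "Send response data"
    # chunk in the shard log; advancing the iterator acks the chunk, which
    # restores the shard's MessageQuota so the next chunk may be sent.
    rows = 0
    for result_set in session.read_table("/Root/table-1"):
        rows += len(result_set.rows)
    return rows

total = pool.retry_operation_sync(scan_table)
print("rows scanned:", total)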
2025-04-10T11:18:53.695399Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:18:53.695639Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:18:53.695729Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-10T11:18:53.695764Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-10T11:18:53.723477Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:18:53.723547Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-10T11:18:53.723960Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:53.723988Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:18:53.724019Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-10T11:18:53.724121Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:53.724175Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:53.724216Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DemoTx::Scenario_2 >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> TableCreation::ConcurrentUpdateTable [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> TPersQueueTest::WriteExisting [GOOD] >> TPersQueueTest::WriteExistingBigValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-04-10T11:18:46.322545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642420303400306:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.322659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ab2/r3tmp/tmpQ8fNww/pdisk_1.dat 2025-04-10T11:18:46.817814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:46.820047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:46.820127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:46.826327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21762 TServer::EnableGrpc on GrpcPort 10992, node 1 2025-04-10T11:18:47.142844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:47.142867Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-10T11:18:47.142873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:47.143001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:18:47.371697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:49.092368Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.094163Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.095341Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:49.095374Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:49.095399Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.095440Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.095508Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.095553Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.095917Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.095950Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.098116Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.098137Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:49.098195Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:49.098502Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.098524Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:49.098538Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:49.098629Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.098643Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. 
Creating table 2025-04-10T11:18:49.098680Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:49.103188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-10T11:18:49.106071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.107443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.112728Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:49.112729Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:49.112781Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-10T11:18:49.112782Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-10T11:18:49.112846Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:49.112874Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-10T11:18:49.207137Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-10T11:18:49.236473Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-10T11:18:49.260685Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-10T11:18:49.271814Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-10T11:18:49.316523Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-10T11:18:49.334883Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:49.335526Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 6144f49a-15a2af8a-a276850b-717ca59c, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:49.362391Z node 1 :KQP_PROXY DEBUG: Request has 18444999789780.189258s seconds to be completed 2025-04-10T11:18:49.365477Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=MWFkNWYzZDctYTdiNzZkZjEtYzY2NWI3YjMtMzdlNTQwNGE=, workerId: [1:7491642433188302873:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:18:49.365658Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:49.366694Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 6144f49a-15a2af8a-a276850b-717ca59c, RunDataQuery:
-- TCreateScriptOperationQuery::OnRunQuery
DECLARE $database AS Text;
DECLARE $execution_id AS Text;
DECLARE $run_script_actor_id AS Text;
DECLARE $execution_status AS Int32;
DECLARE $execution_mode AS Int32;
DECLARE $query_text AS Text;
DECLARE $syntax AS Int32;
DECLARE $meta AS JsonDocument;
DECLARE $lease_duration AS Interval;
DECLARE $execution_meta_ttl AS Interval;
UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at)
VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl);
UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at)
VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl);
2025-04-10T11:18:49.367253Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MWFkNWYzZDctYTdiNzZkZjEtYzY2NWI3YjMtMzdlNTQwNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7491642433188302873:2333] 2025-04-10T11:18:49.367300Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491642433188302876:2468] 2025-04-10T11:18:49.369221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642433188302875:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.369226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642433188302886:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.369361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.372821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:49.380664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642433188302890:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:49.467471Z node 1 :TX_PROXY ERROR: Actor# [1:7491642433188302932:2500] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges ... :2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.912745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642448883766398:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.912805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:53.915546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:53.922437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642448883766404:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-10T11:18:54.015406Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453178733743:2497] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:54.127607Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7491642448883766388:2334], selfId: [2:7491642435998863834:2272], source: [2:7491642448883766387:2333] 2025-04-10T11:18:54.127791Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWUxZmFhZDItZjlmY2IxZWMtMzRlYmRhN2UtMzUwY2UyNGE=, TxId: 2025-04-10T11:18:54.127821Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWUxZmFhZDItZjlmY2IxZWMtMzRlYmRhN2UtMzUwY2UyNGE=, TxId: 2025-04-10T11:18:54.127832Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 28dfa96c-4989135e-6ca9da9a-5db9a544. Result: SUCCESS. Issues: 2025-04-10T11:18:54.129861Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjJkMTI3MjgtN2ViMDQ1OTQtOTRiZDlkZDItY2U4OGNjYQ==, workerId: [2:7491642453178733800:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.130002Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.130082Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OWUxZmFhZDItZjlmY2IxZWMtMzRlYmRhN2UtMzUwY2UyNGE=, workerId: [2:7491642448883766387:2333], local sessions count: 1 2025-04-10T11:18:54.130234Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjJkMTI3MjgtN2ViMDQ1OTQtOTRiZDlkZDItY2U4OGNjYQ==, CurrentExecutionId: 28dfa96c-4989135e-6ca9da9a-5db9a544, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7491642453178733800:2351] 2025-04-10T11:18:54.130269Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7491642453178733802:2527] 2025-04-10T11:18:54.144657Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps7e07gfzzwmxeem4hzqf", Request has 18444999789775.406981s seconds to be completed 2025-04-10T11:18:54.146719Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps7e07gfzzwmxeem4hzqf", Created new session, sessionId: ydb://session/3?node_id=2&id=NDRmYWIzZmMtNTYyZTA1M2MtMzU2Mjk2M2UtYjc0M2M4YTk=, workerId: [2:7491642453178733816:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.146874Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jrfps7e07gfzzwmxeem4hzqf 2025-04-10T11:18:54.149481Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-10T11:18:54.149504Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-10T11:18:54.149541Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-04-10T11:18:54.152529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-10T11:18:54.154417Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-10T11:18:54.154463Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-10T11:18:54.184793Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, Bootstrap. Database: /dc-1 2025-04-10T11:18:54.184978Z node 2 :KQP_PROXY DEBUG: Request has 18444999789775.366661s seconds to be completed 2025-04-10T11:18:54.186581Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-10T11:18:54.186979Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NDE0MmJmYzQtMzcxZGQ3MWYtZWJmN2RlZGMtNGMzYmNmOTQ=, workerId: [2:7491642453178733907:2366], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-10T11:18:54.187114Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.187206Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7491642448883766384:2462], selfId: [2:7491642435998863834:2272], source: [2:7491642453178733800:2351] 2025-04-10T11:18:54.187334Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:18:54.187636Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDE0MmJmYzQtMzcxZGQ3MWYtZWJmN2RlZGMtNGMzYmNmOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7491642453178733907:2366] 2025-04-10T11:18:54.187674Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7491642453178733910:2593] 2025-04-10T11:18:54.249584Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.250180Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-04-10T11:18:54.250239Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-10T11:18:54.251850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:54.252634Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.252677Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Subscribe on create table tx: 281474976715666 2025-04-10T11:18:54.284474Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-04-10T11:18:54.351478Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7491642453178733909:2367], selfId: [2:7491642435998863834:2272], source: [2:7491642453178733907:2366] 2025-04-10T11:18:54.351693Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDE0MmJmYzQtMzcxZGQ3MWYtZWJmN2RlZGMtNGMzYmNmOTQ=, TxId: 2025-04-10T11:18:54.351719Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDE0MmJmYzQtMzcxZGQ3MWYtZWJmN2RlZGMtNGMzYmNmOTQ=, TxId: 2025-04-10T11:18:54.351897Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 28dfa96c-4989135e-6ca9da9a-5db9a544, start saving rows range [0; 1) 2025-04-10T11:18:54.351969Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, Bootstrap. Database: /dc-1 2025-04-10T11:18:54.351975Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDE0MmJmYzQtMzcxZGQ3MWYtZWJmN2RlZGMtNGMzYmNmOTQ=, workerId: [2:7491642453178733907:2366], local sessions count: 2 2025-04-10T11:18:54.352102Z node 2 :KQP_PROXY DEBUG: Request has 18444999789775.199525s seconds to be completed 2025-04-10T11:18:54.353977Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YzZlMDhhYjEtMTU4N2ExNjYtY2M5NmQ0MzktODExNzQwMTM=, workerId: [2:7491642453178733969:2376], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-10T11:18:54.354102Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.354608Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 28dfa96c-4989135e-6ca9da9a-5db9a544, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-10T11:18:54.355067Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YzZlMDhhYjEtMTU4N2ExNjYtY2M5NmQ0MzktODExNzQwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7491642453178733969:2376] 2025-04-10T11:18:54.355114Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7491642453178733971:2633] 2025-04-10T11:18:54.360680Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-10T11:18:54.403580Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDRmYWIzZmMtNTYyZTA1M2MtMzU2Mjk2M2UtYjc0M2M4YTk=, workerId: [2:7491642453178733816:2361], local sessions count: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-04-10T11:18:46.555481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642417740838964:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.560120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a9c/r3tmp/tmptSN5W3/pdisk_1.dat 2025-04-10T11:18:47.038182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:47.056142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:47.056237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:47.059713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19724 TServer::EnableGrpc on GrpcPort 15903, node 1 2025-04-10T11:18:47.318970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:47.318991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:47.318998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:47.319116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:18:47.452076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:49.333616Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.335876Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.337192Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:49.337228Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
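The updater flow in this output follows one fixed pattern per table: describe the path, create the table on PathErrorUnknown, subscribe on the schema transaction, then doublecheck by re-describing until the column diff is empty, alter-adding any missing columns (as with col4/col5 above). Below is a minimal sketch of that create-then-verify loop, assuming a toy in-memory scheme shard; every name in it is a hypothetical stand-in, not the real YDB API.

```python
# Toy in-memory scheme shard: path -> set of column names.
SCHEMESHARD = {}

def describe_table(path):
    # None plays the role of "Describe result: PathErrorUnknown".
    return SCHEMESHARD.get(path)

def propose_create_table(path, columns):
    # "Creating table" followed by "Subscribe on create table tx".
    SCHEMESHARD[path] = set(columns)

def propose_alter_table(path, new_columns):
    # "Adding columns. New columns: ..." via an alter operation.
    SCHEMESHARD[path] |= set(new_columns)

def ensure_table(path, wanted):
    existing = describe_table(path)
    if existing is None:
        propose_create_table(path, wanted)
    elif not set(wanted) <= existing:
        propose_alter_table(path, set(wanted) - existing)
    # "Doublechecking...": re-describe and compare; the real actor retries
    # here because a concurrent updater on another node may own the tx.
    diff = set(wanted) - (describe_table(path) or set())
    assert not diff                  # "Column diff is empty, finishing"

ensure_table("/dc-1/.test/test_table", ["col1", "col2", "col3"])
ensure_table("/dc-1/.test/test_table", ["col1", "col2", "col3", "col4", "col5"])
```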
2025-04-10T11:18:49.337253Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.337292Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.337368Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.337405Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.337600Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.337642Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.338836Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.338859Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:49.338911Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:49.339093Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.339113Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:49.339136Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:49.339374Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.339385Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:18:49.339409Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:49.343184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T11:18:49.345314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.347092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.360039Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:49.360039Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:49.360102Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-10T11:18:49.360103Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-10T11:18:49.360163Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:49.360178Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-10T11:18:49.479561Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-10T11:18:49.507256Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-10T11:18:49.510674Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-10T11:18:49.571515Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:49.572821Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-10T11:18:49.579590Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-10T11:18:49.580220Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cb7f35bd-afd2b1d-53692531-7fb27f58, Bootstrap. Database: /dc-1 2025-04-10T11:18:49.588485Z node 1 :KQP_PROXY DEBUG: Request has 18444999789779.963157s seconds to be completed 2025-04-10T11:18:49.591377Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=M2E5MDZlODktMWVkNWQ0OGQtM2MyYjAyZTMtNGJlYzc4MTQ=, workerId: [1:7491642430625741576:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:18:49.591553Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:49.592363Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cb7f35bd-afd2b1d-53692531-7fb27f58, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-10T11:18:49.592817Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=M2E5MDZlODktMWVkNWQ0OGQtM2MyYjAyZTMtNGJlYzc4MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7491642430625741576:2333] 2025-04-10T11:18:49.592849Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491642430625741578:2465] 2025-04-10T11:18:49.594582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642430625741587:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.594590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642430625741579:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.594788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.597884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:49.605648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642430625741593:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:49.679519Z node 1 :TX_PROXY ERROR: Actor# [1:7491642430625741635:2497] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" ... test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715682 } 2025-04-10T11:18:54.449720Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Subscribe on create table tx: 281474976715682 2025-04-10T11:18:54.449772Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545364:2686] txid# 281474976715692, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.449865Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545329:2657] txid# 281474976715683, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.449945Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715681 Issues { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715682 } 2025-04-10T11:18:54.449960Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. 
Subscribe on create table tx: 281474976715682 2025-04-10T11:18:54.450024Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545373:2695] txid# 281474976715701, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450186Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545368:2690] txid# 281474976715696, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450393Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545311:2639] txid# 281474976715666, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450512Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545352:2680] txid# 281474976715686, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450586Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545317:2645] txid# 281474976715672, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450619Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545365:2687] txid# 281474976715693, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450694Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545367:2689] txid# 281474976715695, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450723Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545363:2685] txid# 281474976715691, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.450861Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715687 Issues { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 11 PathCreateTxId: 281474976715698 } 2025-04-10T11:18:54.450862Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715679 Issues { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715682 } 2025-04-10T11:18:54.450881Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Subscribe on create table tx: 281474976715682 2025-04-10T11:18:54.450885Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Subscribe on create table tx: 281474976715698 2025-04-10T11:18:54.450950Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545315:2643] txid# 281474976715670, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451003Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545374:2696] txid# 281474976715702, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451080Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715699 Issues { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 11 PathCreateTxId: 281474976715698 } 2025-04-10T11:18:54.451100Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Subscribe on create table tx: 281474976715698 2025-04-10T11:18:54.451115Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715680 Issues { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715682 } 2025-04-10T11:18:54.451135Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. 
Subscribe on create table tx: 281474976715682 2025-04-10T11:18:54.451177Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545369:2691] txid# 281474976715697, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451208Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545319:2647] txid# 281474976715674, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451303Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545320:2648] txid# 281474976715675, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451346Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715688 Issues { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 11 PathCreateTxId: 281474976715698 } 2025-04-10T11:18:54.451379Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Subscribe on create table tx: 281474976715698 2025-04-10T11:18:54.451462Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715676 Issues { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715682 } 2025-04-10T11:18:54.451490Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. 
Subscribe on create table tx: 281474976715682 2025-04-10T11:18:54.451495Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545323:2651] txid# 281474976715677, issues: { message: "Check failed: path: \'/dc-1/.test/test_table0\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:18:54.451610Z node 2 :TX_PROXY ERROR: Actor# [2:7491642453799545361:2683] txid# 281474976715689, issues: { message: "Check failed: path: \'/dc-1/.test/test_table1\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-04-10T11:18:46.361190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642417890807294:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.361242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001aca/r3tmp/tmpukilEh/pdisk_1.dat 2025-04-10T11:18:46.873921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:46.874013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:46.875677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:46.878697Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8309 TServer::EnableGrpc on GrpcPort 26575, node 1 2025-04-10T11:18:47.115931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:47.115969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:47.115981Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:47.116116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
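The flood of "path exists but creating right now" errors earlier in this output is the expected outcome of many updaters racing to create the same path: exactly one CreateTable proposal is accepted, and every loser receives the winner's PathCreateTxId and subscribes to that transaction instead of failing (hence the repeated "Subscribe on create table tx: 281474976715682"). Below is a minimal concurrency sketch of that convergence, again against a toy scheme shard with hypothetical names, not the real API.

```python
import threading

class SchemeShardSim:
    """Toy stand-in for the scheme shard's create-table bookkeeping."""
    def __init__(self):
        self.lock = threading.Lock()
        self.create_tx = {}          # path -> (tx_id, completion event)
        self.next_tx = 281474976715663

    def propose_create(self, path):
        with self.lock:
            if path in self.create_tx:
                # "path exists but creating right now": return PathCreateTxId
                tx_id, done = self.create_tx[path]
                return "already_creating", tx_id, done
            self.next_tx += 1
            done = threading.Event()
            self.create_tx[path] = (self.next_tx, done)
            return "accepted", self.next_tx, done

def updater(shard, path, results, i):
    status, tx_id, done = shard.propose_create(path)
    if status == "accepted":
        done.set()                   # pretend the create transaction completed
    done.wait()                      # "Subscribe on create table tx"
    results[i] = tx_id               # every updater ends up on the same tx

shard = SchemeShardSim()
results = [None] * 8
threads = [threading.Thread(target=updater,
                            args=(shard, "/dc-1/.test/test_table0", results, i))
           for i in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert len(set(results)) == 1        # all subscribed to one create transaction
```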
2025-04-10T11:18:47.393982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:47.411752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:49.450507Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.452253Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.453185Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:49.453225Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:49.453260Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.453334Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.453414Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.453553Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.453619Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.454296Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.454326Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:49.454345Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.454391Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:49.454923Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.454941Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:49.454963Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:49.455207Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:49.455219Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:18:49.455238Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:49.458647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T11:18:49.460115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.462093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:49.466722Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:49.466740Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:49.466774Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-10T11:18:49.466774Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-10T11:18:49.466817Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:49.466861Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-10T11:18:49.581664Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-10T11:18:49.608157Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-10T11:18:49.644654Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-10T11:18:49.654785Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-10T11:18:49.708074Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:49.739244Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-10T11:18:49.739726Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 6f469c69-c4c4a7fd-5746f0ec-32f350e3, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:49.756442Z node 1 :KQP_PROXY DEBUG: Request has 18444999789779.795208s seconds to be completed 2025-04-10T11:18:49.759417Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=MjNkYTI5OGYtYzVkZWNmMzgtZmY4NTlhNDEtZDM5M2ZmNDc=, workerId: [1:7491642430775710073:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:18:49.759608Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:49.760561Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 6f469c69-c4c4a7fd-5746f0ec-32f350e3, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-10T11:18:49.761143Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MjNkYTI5OGYtYzVkZWNmMzgtZmY4NTlhNDEtZDM5M2ZmNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7491642430775710073:2333] 2025-04-10T11:18:49.761188Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491642430775710075:2467] 2025-04-10T11:18:49.763385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642430775710076:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.763390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642430775710087:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.763478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.766746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:49.774939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642430775710090:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:49.867330Z node 1 :TX_PROXY ERROR: Actor# [1:7491642430775710132:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/po ... UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-10T11:18:54.402436Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWMzMGIxNmYtZGZlZjkxMDItYTZmMDVmMjktYmQwZjdlMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7491642451800165912:2333] 2025-04-10T11:18:54.402485Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7491642451800165914:2465] 2025-04-10T11:18:54.403556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642451800165915:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:54.403558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642451800165923:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:54.403621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:54.406632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:54.415270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642451800165929:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-10T11:18:54.514299Z node 2 :TX_PROXY ERROR: Actor# [2:7491642451800165971:2497] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:54.703639Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7491642451800165913:2334], selfId: [2:7491642438915263341:2267], source: [2:7491642451800165912:2333] 2025-04-10T11:18:54.703863Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWMzMGIxNmYtZGZlZjkxMDItYTZmMDVmMjktYmQwZjdlMmY=, TxId: 2025-04-10T11:18:54.703899Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWMzMGIxNmYtZGZlZjkxMDItYTZmMDVmMjktYmQwZjdlMmY=, TxId: 2025-04-10T11:18:54.703915Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: bf68d892-ab8b77ed-935eaec0-84ff2888. Result: SUCCESS. Issues: 2025-04-10T11:18:54.706625Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjBlMWM2ZDktMTA0YmMzZTMtYTk0Y2M4M2MtZjI0NzNhOGQ=, workerId: [2:7491642451800166030:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.706764Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.710637Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MWMzMGIxNmYtZGZlZjkxMDItYTZmMDVmMjktYmQwZjdlMmY=, workerId: [2:7491642451800165912:2333], local sessions count: 1 2025-04-10T11:18:54.710959Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjBlMWM2ZDktMTA0YmMzZTMtYTk0Y2M4M2MtZjI0NzNhOGQ=, CurrentExecutionId: bf68d892-ab8b77ed-935eaec0-84ff2888, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7491642451800166030:2351] 2025-04-10T11:18:54.711019Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7491642451800166033:2530] 2025-04-10T11:18:54.744851Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps80rbtjd4zqh0cc0t5ge", Request has 18444999789774.806800s seconds to be completed 2025-04-10T11:18:54.747181Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps80rbtjd4zqh0cc0t5ge", Created new session, sessionId: ydb://session/3?node_id=2&id=M2I3NjE4MWItYjY0NWNhZDAtNDkwY2UzYzctZDFmMGI1MjM=, workerId: [2:7491642451800166047:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.747389Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jrfps80rbtjd4zqh0cc0t5ge 2025-04-10T11:18:54.752372Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-10T11:18:54.752394Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-10T11:18:54.752428Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-04-10T11:18:54.755613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-10T11:18:54.757834Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-10T11:18:54.757880Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-10T11:18:54.778662Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, Bootstrap. Database: /dc-1 2025-04-10T11:18:54.779147Z node 2 :KQP_PROXY DEBUG: Request has 18444999789774.772488s seconds to be completed 2025-04-10T11:18:54.781062Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NGIwNTExMWItZDNhMGFmNzgtMTk3ODZhYjktYWRmYTI2MjQ=, workerId: [2:7491642451800166113:2364], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-10T11:18:54.781240Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.781344Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7491642451800165909:2463], selfId: [2:7491642438915263341:2267], source: [2:7491642451800166030:2351] 2025-04-10T11:18:54.781575Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:18:54.781858Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NGIwNTExMWItZDNhMGFmNzgtMTk3ODZhYjktYWRmYTI2MjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7491642451800166113:2364] 2025-04-10T11:18:54.781897Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7491642451800166115:2571] 2025-04-10T11:18:54.792782Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-10T11:18:54.871622Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.872124Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-10T11:18:54.901366Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=M2I3NjE4MWItYjY0NWNhZDAtNDkwY2UzYzctZDFmMGI1MjM=, workerId: [2:7491642451800166047:2361], local sessions count: 2 2025-04-10T11:18:54.989525Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7491642451800166114:2365], selfId: [2:7491642438915263341:2267], source: [2:7491642451800166113:2364] 2025-04-10T11:18:54.989706Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGIwNTExMWItZDNhMGFmNzgtMTk3ODZhYjktYWRmYTI2MjQ=, TxId: 2025-04-10T11:18:54.989738Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGIwNTExMWItZDNhMGFmNzgtMTk3ODZhYjktYWRmYTI2MjQ=, TxId: 2025-04-10T11:18:54.989911Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: bf68d892-ab8b77ed-935eaec0-84ff2888, start saving rows range [0; 1) 2025-04-10T11:18:54.990000Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, Bootstrap. Database: /dc-1 2025-04-10T11:18:54.990244Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NGIwNTExMWItZDNhMGFmNzgtMTk3ODZhYjktYWRmYTI2MjQ=, workerId: [2:7491642451800166113:2364], local sessions count: 1 2025-04-10T11:18:54.990283Z node 2 :KQP_PROXY DEBUG: Request has 18444999789774.561344s seconds to be completed 2025-04-10T11:18:54.991780Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZTQ1OWI0Y2MtY2VmYmY4MjItNzAzMjA2MGMtZGY2MWRkMDE=, workerId: [2:7491642451800166184:2377], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.991925Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.992217Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: bf68d892-ab8b77ed-935eaec0-84ff2888, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<...>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-10T11:18:54.992590Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZTQ1OWI0Y2MtY2VmYmY4MjItNzAzMjA2MGMtZGY2MWRkMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s.
Send request to target, requestId: 10, targetId: [2:7491642451800166184:2377] 2025-04-10T11:18:54.992630Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7491642451800166186:2618] >> TYardTest::TestLogWriteCutUnequal [GOOD] >> TYardTest::TestLogMultipleWriteRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-04-10T11:18:42.107661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.107862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.108037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5a/r3tmp/tmpzIdOjd/pdisk_1.dat 2025-04-10T11:18:42.488038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.531592Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.570855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.571000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.582871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.664328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.720777Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-10T11:18:42.721005Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.771807Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.772074Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.773651Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.773738Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.773803Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.774122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.774554Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-04-10T11:18:42.774736Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.783142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.783265Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:721:2586] in generation 1 2025-04-10T11:18:42.784717Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.784874Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.786101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:18:42.786189Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:18:42.786256Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:18:42.786515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.786696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.786735Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:729:2588] in generation 1 2025-04-10T11:18:42.788073Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-04-10T11:18:42.788251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.801699Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.801804Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.803804Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:18:42.803892Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:18:42.803939Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:18:42.804270Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.804387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.804446Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-04-10T11:18:42.815738Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.837529Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:42.837697Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.837798Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:743:2617] 2025-04-10T11:18:42.837831Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:42.837866Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:42.837901Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:42.837979Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.838010Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:18:42.838062Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.838112Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:744:2618] 2025-04-10T11:18:42.838139Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:18:42.838254Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:18:42.838291Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:18:42.838499Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:42.838528Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:18:42.838582Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:42.838631Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:745:2619] 2025-04-10T11:18:42.838651Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:18:42.838671Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:18:42.838693Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:18:42.839045Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:42.839133Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:42.839186Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:18:42.839238Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:18:42.839340Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:42.839398Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.839449Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:42.839495Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:42.839555Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:42.839592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.839614Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:18:42.839641Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:42.839673Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-10T11:18:42.839726Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-10T11:18:42.839844Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:707:2596], sessionId# [0:0:0] 2025-04-10T11:18:42.839919Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:715:2603], sessionId# [0:0:0] 2025-04-10T11:18:42.839950Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:18:42.839975Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:42.839996Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-10T11:18:42.840026Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:18:42.840553Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:42.840782Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:42.840888Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:42.841364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:42.841550Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:18:42.841611Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:18:42.843871Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:42.843956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:42.856556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:42.856661Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:42.856741Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:42.856796Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:42.901001Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-04-10T11:18:42.901228Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:42.901397Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-10T11:18:54.830540Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-10T11:18:54.831030Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715663 2025-04-10T11:18:54.831113Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-10T11:18:54.831893Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715663 2025-04-10T11:18:54.831947Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-10T11:18:54.833380Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715663 at state Ready 2025-04-10T11:18:54.844700Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715663 2025-04-10T11:18:54.845246Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-04-10T11:18:54.848377Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:18:54.848450Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-04-10T11:18:54.848855Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:18:54.848889Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-04-10T11:18:54.851764Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:18:54.851809Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-04-10T11:18:54.893039Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:18:54.893107Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037889, table# 8, finished edge# 0, front# 0 2025-04-10T11:18:54.894177Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:18:54.894227Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-10T11:18:54.894689Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715663 2025-04-10T11:18:54.894908Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2025-04-10T11:18:54.894975Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2025-04-10T11:18:54.895016Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2025-04-10T11:18:54.895055Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2025-04-10T11:18:54.895335Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2025-04-10T11:18:54.895587Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2025-04-10T11:18:54.895631Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2025-04-10T11:18:54.895671Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2025-04-10T11:18:54.895707Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2025-04-10T11:18:54.895838Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2025-04-10T11:18:54.896474Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2025-04-10T11:18:54.896668Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-04-10T11:18:54.896789Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-04-10T11:18:54.897090Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1234:2944], serverId# [3:1235:2945], sessionId# [0:0:0] 2025-04-10T11:18:54.897140Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1233:2943], serverId# [3:1236:2946], sessionId# [0:0:0] 2025-04-10T11:18:54.897313Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-04-10T11:18:54.898031Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-04-10T11:18:54.899649Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715663 
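An aside on the BorrowSnapshot accounting in the lines above: the "total snapshot size" the source shard prints is one running counter across every per-table snapshot it borrows for the split, not a per-destination figure. The first borrow pass over tables 3, 4, 7, 8 and 1001 accumulates 12 + 12 + 12 + 12 + 146 = 194; the second pass continues from that value and adds 12 + 12 + 12 + 12 + 155, ending at 397. The sketch below reproduces that arithmetic; the names are hypothetical and this illustrates only the counter's behavior, not the actual datashard split code.

#include <cstdint>
#include <iostream>
#include <vector>

int main() {
    // Per-table snapshot sizes taken from the log: one borrow pass per
    // destination shard, over tables 3, 4, 7, 8 and 1001.
    std::vector<uint64_t> firstPass{12, 12, 12, 12, 146};
    std::vector<uint64_t> secondPass{12, 12, 12, 12, 155};
    uint64_t total = 0;  // a single counter, never reset between passes
    for (uint64_t size : firstPass) total += size;
    std::cout << "after first pass: " << total << "\n";   // prints 194
    for (uint64_t size : secondPass) total += size;
    std::cout << "after second pass: " << total << "\n";  // prints 397
    return 0;
}

Note that the per-destination payloads reported afterwards (size 215 to 72075186224037891 and size 221 to 72075186224037892) are larger than the raw per-pass sums of 194 and 203, presumably due to snapshot framing; the log does not break that difference down.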
2025-04-10T11:18:54.899818Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-10T11:18:54.900084Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:54.900183Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:18:54.900259Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1239:2949] 2025-04-10T11:18:54.900302Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-10T11:18:54.900352Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-10T11:18:54.900386Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:18:54.900509Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2025-04-10T11:18:54.901204Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-10T11:18:54.901254Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:18:54.901359Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:18:54.901396Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:54.901436Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-10T11:18:54.901476Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:18:54.901874Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1234:2944], serverId# [3:1235:2945], sessionId# [0:0:0] 2025-04-10T11:18:54.901953Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715663 2025-04-10T11:18:54.902042Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-04-10T11:18:54.902109Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:54.902203Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-10T11:18:54.902259Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1241:2951] 2025-04-10T11:18:54.902283Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-10T11:18:54.902326Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-10T11:18:54.902352Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:18:54.902448Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2025-04-10T11:18:54.903101Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-10T11:18:54.903151Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:18:54.903300Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 
2000 2025-04-10T11:18:54.903362Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:18:54.903479Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:18:54.903512Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:54.903547Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-10T11:18:54.903589Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:18:54.903865Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1233:2943], serverId# [3:1236:2946], sessionId# [0:0:0] 2025-04-10T11:18:54.904207Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-10T11:18:54.904249Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-10T11:18:54.927905Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715663 2025-04-10T11:18:54.931637Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-10T11:18:54.934958Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-10T11:18:54.935031Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-10T11:18:54.935181Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-04-10T11:18:54.935586Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:54.935647Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-04-10T11:18:54.935909Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2025-04-10T11:18:54.935990Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:18:54.936048Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-04-10T11:18:46.294527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642419902865462:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.296544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001afb/r3tmp/tmp3W0qVL/pdisk_1.dat 2025-04-10T11:18:46.726177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:46.756750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:46.757040Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:46.766928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25484 TServer::EnableGrpc on GrpcPort 62420, node 1 2025-04-10T11:18:47.092427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:47.092454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:47.092461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:47.092556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:18:47.344952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:48.830780Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:48.832104Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:48.833183Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:48.833212Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:48.833239Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:48.833266Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:48.833360Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.833412Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.833456Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.833800Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.834136Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:48.834164Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:48.834172Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
Describe result: PathErrorUnknown 2025-04-10T11:18:48.834180Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:48.834191Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:48.834197Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:48.834253Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:48.834257Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:18:48.834276Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:48.838277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T11:18:48.840399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.841414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.845610Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:48.845609Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:48.845642Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-10T11:18:48.845642Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-10T11:18:48.845687Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:48.845712Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-10T11:18:48.954878Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-10T11:18:48.975254Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-10T11:18:48.985058Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-10T11:18:49.010101Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:49.044401Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-10T11:18:49.055605Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-10T11:18:49.057832Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ad44be09-e5ccb4f4-c4ae4c08-267a105f, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:49.067914Z node 1 :KQP_PROXY DEBUG: Request has 18444999789780.483741s seconds to be completed 2025-04-10T11:18:49.079391Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=N2EzOTg2MjgtNDQwZDM2ZTctMjQwZTMwZDAtNzBlYmZjOGE=, workerId: [1:7491642432787768240:2332], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:18:49.079639Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:49.083253Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ad44be09-e5ccb4f4-c4ae4c08-267a105f, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-10T11:18:49.086398Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=N2EzOTg2MjgtNDQwZDM2ZTctMjQwZTMwZDAtNzBlYmZjOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7491642432787768240:2332] 2025-04-10T11:18:49.086442Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491642432787768244:2464] 2025-04-10T11:18:49.088638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642432787768256:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.088642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642432787768243:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.088711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.094019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:49.101721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642432787768258:2338], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:49.197939Z node 1 :TX_PROXY ERROR: Actor# [1:7491642432787768300:2496] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges ... 25-04-10T11:18:54.528528Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715668 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.528543Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-10T11:18:54.528551Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715670 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.528559Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-10T11:18:54.528649Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715672 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.528668Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-10T11:18:54.528736Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715669 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.528744Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-10T11:18:54.528743Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-10T11:18:54.528749Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-10T11:18:54.541186Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715673. Doublechecking... 2025-04-10T11:18:54.579632Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.580485Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.580961Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.585788Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.590287Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.590610Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.602236Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.607739Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.611821Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7491642451697228544:2365], selfId: [2:7491642434517358494:2271], source: [2:7491642451697228542:2364] 2025-04-10T11:18:54.612002Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTA4OTFjMjYtZDNiMGNkOTYtZDQ1YzFmZjktN2M5YjI1NDE=, TxId: 2025-04-10T11:18:54.612041Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTA4OTFjMjYtZDNiMGNkOTYtZDQ1YzFmZjktN2M5YjI1NDE=, TxId: 2025-04-10T11:18:54.612201Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, start saving rows range [0; 1) 2025-04-10T11:18:54.612282Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:54.612679Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTA4OTFjMjYtZDNiMGNkOTYtZDQ1YzFmZjktN2M5YjI1NDE=, workerId: [2:7491642451697228542:2364], local sessions count: 2 2025-04-10T11:18:54.612723Z node 2 :KQP_PROXY DEBUG: Request has 18444999789774.938904s seconds to be completed 2025-04-10T11:18:54.615127Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWRjNDc4YmQtZjEyNDdjN2QtYmY0NDQ0MDUtMzNkMmZhZWU=, workerId: [2:7491642451697228710:2376], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-10T11:18:54.615315Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.615670Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<...>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-10T11:18:54.616031Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWRjNDc4YmQtZjEyNDdjN2QtYmY0NDQ0MDUtMzNkMmZhZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7491642451697228710:2376] 2025-04-10T11:18:54.616056Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7491642451697228712:2703] 2025-04-10T11:18:54.622668Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-10T11:18:54.630821Z node 2 :KQP_PROXY DEBUG: Table test_table updater.
Column diff is empty, finishing 2025-04-10T11:18:54.663280Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZTQxNDNkODAtMTA1YjIyNDAtMWIxNzI5MzMtZjVkYTZmZDE=, workerId: [2:7491642451697228477:2361], local sessions count: 2 2025-04-10T11:18:54.771993Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7491642451697228711:2377], selfId: [2:7491642434517358494:2271], source: [2:7491642451697228710:2376] 2025-04-10T11:18:54.772320Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWRjNDc4YmQtZjEyNDdjN2QtYmY0NDQ0MDUtMzNkMmZhZWU=, TxId: 2025-04-10T11:18:54.772377Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWRjNDc4YmQtZjEyNDdjN2QtYmY0NDQ0MDUtMzNkMmZhZWU=, TxId: 2025-04-10T11:18:54.772508Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, result part successfully saved 2025-04-10T11:18:54.772532Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, reply SUCCESS, issues: 2025-04-10T11:18:54.772609Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MWRjNDc4YmQtZjEyNDdjN2QtYmY0NDQ0MDUtMzNkMmZhZWU=, workerId: [2:7491642451697228710:2376], local sessions count: 1 2025-04-10T11:18:54.772718Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, Bootstrap. Database: /dc-1 2025-04-10T11:18:54.772827Z node 2 :KQP_PROXY DEBUG: Request has 18444999789774.778802s seconds to be completed 2025-04-10T11:18:54.774553Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OWU2MWJhZGUtZTVlNTQyMDgtMzJlMGYzMTQtZWJkMjdlMmI=, workerId: [2:7491642451697228749:2388], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:54.774694Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:54.774944Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: ffbd5929-56ce61bd-d3c94a5-8bb7df5a, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-10T11:18:54.775231Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OWU2MWJhZGUtZTVlNTQyMDgtMzJlMGYzMTQtZWJkMjdlMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7491642451697228749:2388] 2025-04-10T11:18:54.775265Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7491642451697228751:2721] >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TPQCDTest::TestRelatedServicesAreRunning >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogContinuityPersistence >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2025-04-10T11:18:42.503046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.503295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.503529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e76/r3tmp/tmpQu3bAJ/pdisk_1.dat 2025-04-10T11:18:42.897427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.942757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.988121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.988277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.002668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.094658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:43.165686Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-10T11:18:43.166011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.212779Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.213050Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.214783Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:43.214846Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:43.214901Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:43.215248Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.215666Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-04-10T11:18:43.215870Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.222946Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.223029Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:721:2586] in generation 1 2025-04-10T11:18:43.224320Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.224420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.225599Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:18:43.225647Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:18:43.225691Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:18:43.225923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.226090Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.226128Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:729:2588] in generation 1 2025-04-10T11:18:43.227553Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-04-10T11:18:43.227758Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.236870Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.237003Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.238614Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:18:43.238695Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:18:43.238741Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:18:43.239075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.239202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.239271Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-04-10T11:18:43.250508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.278804Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.279066Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.279237Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:743:2617] 2025-04-10T11:18:43.279294Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.279343Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.279393Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.279499Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.279539Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:18:43.279604Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.279662Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:744:2618] 2025-04-10T11:18:43.279691Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:18:43.279729Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:18:43.279766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:18:43.280006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.280039Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:18:43.280111Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.280221Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:745:2619] 2025-04-10T11:18:43.280248Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:18:43.280272Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:18:43.280296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:18:43.280763Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.280875Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.280935Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:18:43.280991Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:18:43.281107Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.281166Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.281230Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.281288Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.281370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:43.281411Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.281438Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:18:43.281470Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:43.281506Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-10T11:18:43.281563Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-10T11:18:43.281692Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:707:2596], sessionId# [0:0:0] 2025-04-10T11:18:43.281776Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:715:2603], sessionId# [0:0:0] 2025-04-10T11:18:43.281812Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:18:43.281842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.281867Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-10T11:18:43.281902Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:18:43.282706Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.283001Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.283103Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.283608Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:43.283807Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:18:43.283926Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:18:43.286371Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.286467Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:43.297420Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.297538Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.297646Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:43.297687Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.341943Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-04-10T11:18:43.342214Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:43.342403Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... egularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:18:56.198757Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Handle TEvDataShard::TEvEraseRowsRequest 2025-04-10T11:18:56.198927Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Propose tx: txId# 281474976715662, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-04-10T11:18:56.199037Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Propose tx: txId# 281474976715662, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-04-10T11:18:56.199101Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Propose tx: txId# 281474976715662, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-04-10T11:18:56.199435Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:56.199618Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037888 2025-04-10T11:18:56.199941Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:56.200214Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037890 2025-04-10T11:18:56.200527Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:56.200629Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037889 2025-04-10T11:18:56.212205Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:18:56.212311Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:56.212455Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-10T11:18:56.212527Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 
72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-04-10T11:18:56.212687Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2025-04-10T11:18:56.212884Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:56.213029Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:18:56.213066Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-10T11:18:56.213131Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2025-04-10T11:18:56.213188Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-04-10T11:18:56.213226Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-04-10T11:18:56.213258Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2025-04-10T11:18:56.213287Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Register plan: txId# 281474976715662, minStep# 1503, maxStep# 31503 2025-04-10T11:18:56.227789Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-10T11:18:56.229066Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-10T11:18:56.231924Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1108:2846] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2025-04-10T11:18:56.232194Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-04-10T11:18:56.232259Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-04-10T11:18:56.232559Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-10T11:18:56.232619Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-10T11:18:56.232917Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:56.232974Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:56.233025Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-10T11:18:56.233127Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:56.233964Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-04-10T11:18:56.254115Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1119:2856] 2025-04-10T11:18:56.254433Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:56.259359Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:56.259897Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:56.262029Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: 
QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:56.262099Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:56.262160Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:56.262538Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:56.263316Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:56.263380Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:1134:2856] in generation 2 2025-04-10T11:18:56.275173Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:56.275319Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-10T11:18:56.275467Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:18:56.275835Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1137:2864] 2025-04-10T11:18:56.275898Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:56.275963Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:18:56.276010Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:56.276296Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-10T11:18:56.276524Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-10T11:18:56.277798Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:56.277919Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:56.278232Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1502 2025-04-10T11:18:56.278301Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:56.278393Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:56.278691Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:56.278766Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:18:56.278826Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-04-10T11:18:56.278892Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:56.279028Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-10T11:18:56.279094Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-04-10T11:18:56.279158Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-04-10T11:18:56.279384Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1502 2025-04-10T11:18:56.279461Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 
281474976715661 2025-04-10T11:18:56.279546Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1502 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:18:56.279606Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1502:281474976715661 at 72075186224037889 2025-04-10T11:18:56.279662Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:18:56.279725Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1502 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:18:56.279842Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-10T11:18:56.279880Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-04-10T11:18:56.279925Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-04-10T11:18:56.280076Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-04-10T11:18:56.280267Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2025-04-10T11:18:56.280325Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1502 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-10T11:18:56.280361Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1502:281474976715661 at 72075186224037890 2025-04-10T11:18:56.280394Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:18:56.280442Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1502 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-10T11:18:56.280527Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 >> TYardTest::TestLogContinuityPersistence [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange >> KqpCost::Range >> KqpCost::ScanScriptingRangeFullScan-SourceRead >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> 
ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-04-10T11:18:54.851047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642454524569563:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.851092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001585/r3tmp/tmpqHC1IA/pdisk_1.dat 2025-04-10T11:18:55.230881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18687, node 1 2025-04-10T11:18:55.280935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:55.281046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:55.282381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:55.416121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:55.416153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:55.416166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:55.416303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:57.559977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467409472158:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.560212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.560320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467409472170:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.572581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:57.591824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642467409472172:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-10T11:18:57.683522Z node 1 :TX_PROXY ERROR: Actor# [1:7491642467409472233:2356] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:58.091401Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642467409472250:2372], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.091674Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2MwMTE1ZWUtM2FmNGM1ZmQtMmUyZmQxNjItY2I3MzYwMjA=, ActorId: [1:7491642467409472156:2361], ActorState: ExecuteState, TraceId: 01jrfpsarm0gfv8knwtrdqakfy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.114077Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> KqpCost::OlapPointLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-04-10T11:18:54.849204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642453648049290:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.849388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001596/r3tmp/tmp36d7aO/pdisk_1.dat 2025-04-10T11:18:55.263937Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:55.279916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 3179, node 1 2025-04-10T11:18:55.279988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:55.282911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:55.414058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001596/r3tmp/yandexAuMRCw.tmp 2025-04-10T11:18:55.414083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001596/r3tmp/yandexAuMRCw.tmp 2025-04-10T11:18:55.414930Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001596/r3tmp/yandexAuMRCw.tmp 2025-04-10T11:18:55.415125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:57.624568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466532951886:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.624671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466532951863:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.624876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.630302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:57.649910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642466532951892:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-10T11:18:57.792907Z node 1 :TX_PROXY ERROR: Actor# [1:7491642466532951953:2358] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:58.111807Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642466532951974:2376], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.112122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWZiNGEwMDItZjVmNWI1OWEtNTcwNjdmZGYtZTFmMmM2ZjQ=, ActorId: [1:7491642466532951861:2363], ActorState: ExecuteState, TraceId: 01jrfpsatnadn7pz1w1ne262w5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.137902Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::IndexLookup+useSink |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-04-10T11:18:48.903113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642426617162948:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:48.903307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a8b/r3tmp/tmpRhFO8C/pdisk_1.dat 2025-04-10T11:18:49.272044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:49.292357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:49.292493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:49.309635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17534, node 1 2025-04-10T11:18:49.362806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:49.362831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:49.362837Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:49.362969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61638 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:49.589882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:51.566640Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:51.569472Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:18:51.570007Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7491642430912131194:2317], trace_id: 01jrfps31f2n3z1xvejfzkevsm 2025-04-10T11:18:51.570260Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-04-10T11:18:51.570315Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:51.570374Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-10T11:18:51.570403Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:51.570462Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:18:51.570610Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:51.570652Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:51.570672Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:51.570723Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-04-10T11:18:51.572845Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:51.572999Z node 1 :KQP_PROXY DEBUG: TraceId: "01jrfps31f2n3z1xvejfzkevsm", Forwarded response to sender actor, requestId: 2, sender: [1:7491642430912131194:2317], selfId: [1:7491642426617163202:2281], source: [1:7491642426617163202:2281] 2025-04-10T11:18:52.642945Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491642444280153045:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:52.643088Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a8b/r3tmp/tmptQcMbj/pdisk_1.dat 2025-04-10T11:18:52.726328Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:52.776588Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:52.776669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:52.778230Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15428 TServer::EnableGrpc on GrpcPort 32592, node 4 2025-04-10T11:18:52.909755Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:52.909785Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:52.909793Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:52.909921Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:18:52.954977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:55.148177Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.149066Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:55.151416Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.151480Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.151510Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:55.151532Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:55.151552Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.151843Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.154743Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:55.154761Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:55.154799Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:55.154919Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:18:55.154927Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:55.154941Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:55.154973Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:55.154977Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:18:55.155002Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:55.159970Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-04-10T11:18:55.161562Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:55.163272Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:55.168958Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:55.168999Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:55.169021Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715658 2025-04-10T11:18:55.169038Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-04-10T11:18:55.169398Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:55.169424Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2025-04-10T11:18:55.306052Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-04-10T11:18:55.368739Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-04-10T11:18:55.370019Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-10T11:18:55.384038Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:55.430028Z node 4 ... 8:58.172617Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:58.172761Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-10T11:18:58.173066Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7491642470049958138:2444] 2025-04-10T11:18:58.173107Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7491642470049958140:2662] 2025-04-10T11:18:58.178613Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7491642470049958139:2445], selfId: [4:7491642444280153265:2269], source: [4:7491642470049958138:2444] 2025-04-10T11:18:58.178848Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, TxId: 01jrfpsbbx4qn42rpde1zawz3q 2025-04-10T11:18:58.179373Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:18:58.180052Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 21, targetId: [4:7491642470049958138:2444] 2025-04-10T11:18:58.180099Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7491642470049958161:2667] 2025-04-10T11:18:58.191955Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7491642470049958160:2451], selfId: [4:7491642444280153265:2269], source: [4:7491642470049958138:2444] 2025-04-10T11:18:58.192292Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, TxId: 2025-04-10T11:18:58.192370Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, TxId: 2025-04-10T11:18:58.192450Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 56ceaab6-2c6fa43b-807e3349-e8fe3f. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-04-10T11:18:58.192593Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, successfully finalized script execution operation 2025-04-10T11:18:58.192616Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, reply success 2025-04-10T11:18:58.192847Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NTNhNzYwYjQtNzI0NDA2MTgtNzllMWRmNmQtOTRmZWZlNjI=, workerId: [4:7491642470049958138:2444], local sessions count: 1 2025-04-10T11:18:58.203127Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfpsbctd8dpdfk0jhv46ppq, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YjY5YzJkODYtM2MyNmVkNS1kZmFiNWZjMy03OWE4NTAwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7491642461460023264:2361] 2025-04-10T11:18:58.203162Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7491642470049958186:2674] 2025-04-10T11:18:58.402917Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:58.714773Z node 4 :KQP_PROXY DEBUG: TraceId: "01jrfpsbctd8dpdfk0jhv46ppq", Forwarded response to sender actor, requestId: 22, sender: [4:7491642470049958185:2456], selfId: [4:7491642444280153265:2269], source: [4:7491642461460023264:2361] 2025-04-10T11:18:58.718805Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, Bootstrap. Database: /dc-1 2025-04-10T11:18:58.719040Z node 4 :KQP_PROXY DEBUG: Request has 18444999789770.832596s seconds to be completed 2025-04-10T11:18:58.720949Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, workerId: [4:7491642470049958241:2470], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:58.721086Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:58.722313Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-10T11:18:58.724762Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [4:7491642470049958241:2470] 2025-04-10T11:18:58.724807Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7491642470049958243:2703] 2025-04-10T11:18:58.926435Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7491642470049958242:2471], selfId: [4:7491642444280153265:2269], source: [4:7491642470049958241:2470] 2025-04-10T11:18:58.926889Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, TxId: 01jrfpsc39arr1dga8a4bxyzg1 2025-04-10T11:18:58.926999Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, TxId: 01jrfpsc39arr1dga8a4bxyzg1 2025-04-10T11:18:58.927039Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get lease info, Rollback transaction: 01jrfpsc39arr1dga8a4bxyzg1 2025-04-10T11:18:58.928223Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7491642470049958241:2470] 2025-04-10T11:18:58.928256Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7491642470049958269:2715] 2025-04-10T11:18:58.929068Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7491642470049958268:2478], selfId: [4:7491642444280153265:2269], source: [4:7491642470049958241:2470] 2025-04-10T11:18:58.929189Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 56ceaab6-2c6fa43b-807e3349-e8fe3f, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-04-10T11:18:58.929421Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=N2EyOTc4M2UtMTkzNmVjNTktMTNmYjI2ZTgtZDlmNWNhZTc=, workerId: [4:7491642470049958241:2470], local sessions count: 1 2025-04-10T11:18:58.937543Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YjY5YzJkODYtM2MyNmVkNS1kZmFiNWZjMy03OWE4NTAwMg==, workerId: [4:7491642461460023264:2361], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-04-10T11:18:46.285865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642419711028431:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.286594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ad8/r3tmp/tmpKZiLcH/pdisk_1.dat 2025-04-10T11:18:46.730110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:46.740560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:46.741054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:46.767255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23178 TServer::EnableGrpc on GrpcPort 18435, node 1 2025-04-10T11:18:47.090537Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:47.090560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:47.090577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:47.090707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:18:47.372235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:48.823751Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:48.825605Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:48.828929Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:48.828967Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:48.828996Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:48.829035Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:48.829157Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.829205Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.829271Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.829299Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:48.829848Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-10T11:18:48.829878Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-10T11:18:48.829915Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-10T11:18:48.830173Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-10T11:18:48.830184Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-10T11:18:48.830198Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-10T11:18:48.830248Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-10T11:18:48.830252Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-10T11:18:48.830274Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-04-10T11:18:48.834084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-10T11:18:48.836067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.837490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.842012Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-10T11:18:48.842050Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-10T11:18:48.842063Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-04-10T11:18:48.842096Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-10T11:18:48.842167Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-10T11:18:48.842178Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-10T11:18:48.960439Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-10T11:18:48.980899Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-10T11:18:48.985025Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-10T11:18:49.051647Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-10T11:18:49.051653Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-10T11:18:49.073635Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-10T11:18:49.074060Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: b75a01a7-1d2778c7-2750609f-e023c978, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:49.083523Z node 1 :KQP_PROXY DEBUG: Request has 18444999789780.468117s seconds to be completed 2025-04-10T11:18:49.086499Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Y2FkNWE2OGItMmJiZjJjY2ItNzYyM2Q2N2UtZTYzYjk5ZWM=, workerId: [1:7491642432595931213:2332], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:18:49.086683Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:49.087445Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: b75a01a7-1d2778c7-2750609f-e023c978, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-10T11:18:49.087911Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Y2FkNWE2OGItMmJiZjJjY2ItNzYyM2Q2N2UtZTYzYjk5ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7491642432595931213:2332] 2025-04-10T11:18:49.087948Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491642432595931216:2464] 2025-04-10T11:18:49.089728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642432595931222:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.089740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642432595931215:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.089850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.093960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-10T11:18:49.104003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642432595931231:2338], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:49.197011Z node 1 :TX_PROXY ERROR: Actor# [1:7491642432595931273:2497] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges ... supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:18:57.399069Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OTZiNjEyYjktYjhmNjI0M2UtNDQ3YmE2Ni0yMTVmYzU0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7491642463549890277:2395] 2025-04-10T11:18:57.399101Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7491642467844857643:2606] 2025-04-10T11:18:57.506631Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, Bootstrap. Database: /dc-1 2025-04-10T11:18:57.506849Z node 2 :KQP_PROXY DEBUG: Request has 18444999789772.044792s seconds to be completed 2025-04-10T11:18:57.509021Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, workerId: [2:7491642467844857662:2420], database: /dc-1, longSession: 1, local sessions count: 4 2025-04-10T11:18:57.509182Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:57.509529Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-10T11:18:57.509838Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 18, targetId: [2:7491642467844857662:2420] 2025-04-10T11:18:57.509876Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7491642467844857664:2619] 2025-04-10T11:18:57.722503Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642446370019960:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:57.759934Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:57.766024Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7491642467844857663:2421], selfId: [2:7491642446370020168:2267], source: [2:7491642467844857662:2420] 2025-04-10T11:18:57.766248Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, TxId: 01jrfpsaz06hwws30p5hh4rkbr 2025-04-10T11:18:57.766384Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:18:57.766744Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 19, targetId: [2:7491642467844857662:2420] 2025-04-10T11:18:57.766778Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7491642467844857694:2634] 2025-04-10T11:18:57.814078Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7491642467844857642:2413], selfId: [2:7491642446370020168:2267], source: [2:7491642463549890277:2395] 2025-04-10T11:18:57.814790Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: d9fb3c6a-e5d5c95d-6727dbb9-3a7ae7a1, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTZiNjEyYjktYjhmNjI0M2UtNDQ3YmE2Ni0yMTVmYzU0OA==, TxId: 2025-04-10T11:18:57.814874Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: d9fb3c6a-e5d5c95d-6727dbb9-3a7ae7a1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTZiNjEyYjktYjhmNjI0M2UtNDQ3YmE2Ni0yMTVmYzU0OA==, TxId: 2025-04-10T11:18:57.814885Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: d9fb3c6a-e5d5c95d-6727dbb9-3a7ae7a1. SUCCESS. 
Issues: 2025-04-10T11:18:57.815872Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTZiNjEyYjktYjhmNjI0M2UtNDQ3YmE2Ni0yMTVmYzU0OA==, workerId: [2:7491642463549890277:2395], local sessions count: 3 2025-04-10T11:18:57.816090Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzAzY2ViMmYtOTk2NTM1MjEtNThlZmJlYzYtZDQ2NzY4ZDc=, workerId: [2:7491642463549890153:2351], local sessions count: 2 2025-04-10T11:18:57.936103Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7491642467844857692:2428], selfId: [2:7491642446370020168:2267], source: [2:7491642467844857662:2420] 2025-04-10T11:18:57.936337Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, TxId: 2025-04-10T11:18:57.936419Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, TxId: 2025-04-10T11:18:57.936727Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NWQxMTBiNDgtYTFiNjljNDEtNWI5ODZhZGMtOTgxYjA4OWQ=, workerId: [2:7491642467844857662:2420], local sessions count: 1 2025-04-10T11:18:57.943882Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfpsb4qajytj2h1fd0ptqtx, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MjI3MDBiN2EtZTc3MGRlYzEtOGM3NTU3YzEtYTIzNzg3MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7491642463549890168:2361] 2025-04-10T11:18:57.943921Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7491642467844857745:2650] 2025-04-10T11:18:58.455749Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfpsb4qajytj2h1fd0ptqtx", Forwarded response to sender actor, requestId: 20, sender: [2:7491642467844857744:2440], selfId: [2:7491642446370020168:2267], source: [2:7491642463549890168:2361] 2025-04-10T11:18:58.467250Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-04-10T11:18:58.467334Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, Bootstrap. 
Database: /dc-1 2025-04-10T11:18:58.467572Z node 2 :KQP_PROXY DEBUG: Request has 18444999789771.084063s seconds to be completed 2025-04-10T11:18:58.469553Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZmIyNjE2NzQtYWY4Y2EwOGMtOTNhODY2ODgtMWNlY2I1OWE=, workerId: [2:7491642472139825091:2453], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:18:58.469680Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:58.469978Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-10T11:18:58.470306Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjE2NzQtYWY4Y2EwOGMtOTNhODY2ODgtMWNlY2I1OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7491642472139825091:2453] 2025-04-10T11:18:58.470342Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7491642472139825093:2675] 2025-04-10T11:18:58.509971Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:58.946182Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7491642472139825092:2454], selfId: [2:7491642446370020168:2267], source: [2:7491642472139825091:2453] 2025-04-10T11:18:58.946426Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjE2NzQtYWY4Y2EwOGMtOTNhODY2ODgtMWNlY2I1OWE=, TxId: 2025-04-10T11:18:58.946597Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmIyNjE2NzQtYWY4Y2EwOGMtOTNhODY2ODgtMWNlY2I1OWE=, TxId: 2025-04-10T11:18:58.946678Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: fbae35d8-f1301eaf-d913edaf-17aa2d6d, reply success 2025-04-10T11:18:58.946894Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZmIyNjE2NzQtYWY4Y2EwOGMtOTNhODY2ODgtMWNlY2I1OWE=, workerId: [2:7491642472139825091:2453], local sessions count: 1 2025-04-10T11:18:58.953915Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MjI3MDBiN2EtZTc3MGRlYzEtOGM3NTU3YzEtYTIzNzg3MGU=, workerId: [2:7491642463549890168:2361], local sessions count: 0 >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestLogWriteLsnConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 
2025-04-10T11:18:54.869756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642453461491637:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.869824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156e/r3tmp/tmp529a9W/pdisk_1.dat 2025-04-10T11:18:55.246469Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:55.278677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:55.278799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3135, node 1 2025-04-10T11:18:55.280954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:55.418068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00156e/r3tmp/yandexXThvUh.tmp 2025-04-10T11:18:55.418096Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00156e/r3tmp/yandexXThvUh.tmp 2025-04-10T11:18:55.418310Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00156e/r3tmp/yandexXThvUh.tmp 2025-04-10T11:18:55.418468Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24333 PQClient connected to localhost:3135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:55.934055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:55.987585Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:56.011059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:56.030919Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T11:18:57.688508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466346394235:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.688573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466346394251:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.688679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.692642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:18:57.707562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642466346394264:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:57.806868Z node 1 :TX_PROXY ERROR: Actor# [1:7491642466346394329:2389] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:58.151219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.155516Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642466346394344:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.157467Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODJkNWNiMjctOTI3ZGUyMTktMzIxMzUwNmYtM2JkODU1NGY=, ActorId: [1:7491642466346394232:2331], ActorState: ExecuteState, TraceId: 01jrfpsaw33mxp6yksm228b87j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.167865Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:58.259531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.350530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:58.645172Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpsbm0bvgxnqtb3tvnybtr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBjNjVmMC1jZjJhZmZjMi0xODIxOGYwMi03YjMzNWQ5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-04-10T11:18:54.870606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642453969184716:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.870655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001559/r3tmp/tmpw9H3vS/pdisk_1.dat 2025-04-10T11:18:55.210096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:55.279126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:55.279229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:55.281060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23787, node 1 2025-04-10T11:18:55.414123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001559/r3tmp/yandexKiyomt.tmp 2025-04-10T11:18:55.414179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001559/r3tmp/yandexKiyomt.tmp 2025-04-10T11:18:55.415033Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001559/r3tmp/yandexKiyomt.tmp 2025-04-10T11:18:55.415231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9432 PQClient connected to localhost:23787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:55.956156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:55.986859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:55.994737Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:18:57.670923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466854087331:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.670923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642466854087339:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.671038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.674568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:18:57.685159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642466854087345:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:57.758244Z node 1 :TX_PROXY ERROR: Actor# [1:7491642466854087410:2389] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:58.083981Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642466854087425:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.089674Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzExMTk2MDgtNThjODcwMjItMmMyNjRlNzYtNWNiNTc2OGI=, ActorId: [1:7491642466854087328:2331], ActorState: ExecuteState, TraceId: 01jrfpsavq40q5bxv8qm6d4ec5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.092407Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:58.130927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.254633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.338582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:58.645286Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpsbjt2jeedstvpgm2fjrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE1OGQ1YzctYmM1ZTQ0OTctODBjYjUzMzQtMTJlNTA4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-04-10T11:18:42.281236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.281495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.281698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e5e/r3tmp/tmpajnQZJ/pdisk_1.dat 2025-04-10T11:18:42.677109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.724001Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.772115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.772265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.787383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.871152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.924171Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-10T11:18:42.924450Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.969680Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.969899Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.971465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.971553Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.971611Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.971918Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.972346Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-04-10T11:18:42.972569Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.990353Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.990448Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:721:2586] in generation 1 2025-04-10T11:18:42.992021Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.992156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.993691Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:18:42.993763Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:18:42.993834Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:18:43.002389Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.002733Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.002807Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:729:2588] in generation 1 2025-04-10T11:18:43.004646Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-04-10T11:18:43.004842Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.020783Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.020903Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.022899Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:18:43.022976Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:18:43.023026Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:18:43.023351Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.023491Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.023558Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-04-10T11:18:43.034626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.085852Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.086075Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.086279Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:743:2617] 2025-04-10T11:18:43.086337Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.086379Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.086423Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.086539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.086577Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:18:43.086644Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.086726Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:744:2618] 2025-04-10T11:18:43.086758Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:18:43.086811Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:18:43.086842Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:18:43.087074Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.087106Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:18:43.087167Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.087221Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:745:2619] 2025-04-10T11:18:43.087247Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:18:43.087273Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:18:43.087296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:18:43.087772Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.087888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.087945Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:18:43.088005Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:18:43.088124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.088175Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.088227Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.088291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.088348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:43.088374Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.088399Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:18:43.088426Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:43.088459Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-10T11:18:43.088517Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-10T11:18:43.088642Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:707:2596], sessionId# [0:0:0] 2025-04-10T11:18:43.088723Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:715:2603], sessionId# [0:0:0] 2025-04-10T11:18:43.088760Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:18:43.088787Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.088815Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-10T11:18:43.088844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:18:43.089456Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.089730Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.089830Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.090368Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:43.090586Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:18:43.090654Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:18:43.093021Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.093109Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:43.104142Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.104267Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.104392Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:43.104434Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.150195Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-04-10T11:18:43.150438Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:43.150622Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... [2000:281474976715662] at 72075186224037888 for LoadAndWaitInRS 2025-04-10T11:19:01.254363Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.254509Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715662 2025-04-10T11:19:01.254566Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-04-10T11:19:01.254624Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-10T11:19:01.254714Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:19:01.254758Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:19:01.254794Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715662] at 72075186224037890 for LoadAndWaitInRS 2025-04-10T11:19:01.255042Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.266369Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:19:01.266473Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1108:2846], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:19:01.266564Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-04-10T11:19:01.266622Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:19:01.266745Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-10T11:19:01.266806Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] HandlePlan 
TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2025-04-10T11:19:01.266899Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:19:01.266938Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1108:2846], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:19:01.266981Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-04-10T11:19:01.267003Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:19:01.267079Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2025-04-10T11:19:01.267126Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1108:2846] Reply: txId# 281474976715662, status# OK, error# 2025-04-10T11:19:01.267252Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-04-10T11:19:01.267432Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-10T11:19:01.267479Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-10T11:19:01.267584Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:19:01.267621Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.267655Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:19:01.267725Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:19:01.268021Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-04-10T11:19:01.269162Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:19:01.269526Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:19:01.269715Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:19:01.269764Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.269813Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2025-04-10T11:19:01.270058Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.270122Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:19:01.270888Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2025-04-10T11:19:01.271128Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:19:01.271265Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 
0 2025-04-10T11:19:01.271327Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2025-04-10T11:19:01.275464Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-10T11:19:01.275517Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037889 2025-04-10T11:19:01.275960Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:19:01.275995Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.276038Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2025-04-10T11:19:01.276157Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.276211Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:19:01.276256Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:19:01.279068Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:19:01.279410Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:19:01.279591Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:19:01.279641Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.279693Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-04-10T11:19:01.279931Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.279994Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:19:01.280611Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-04-10T11:19:01.280848Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:19:01.280972Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-04-10T11:19:01.281019Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-04-10T11:19:01.317830Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:19:01.317913Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-04-10T11:19:01.318718Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:19:01.318771Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.318817Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-04-10T11:19:01.318957Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.319027Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:19:01.319081Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:19:01.322118Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:19:01.322526Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-10T11:19:01.322726Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:19:01.322779Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.322840Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-04-10T11:19:01.323087Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.323161Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:19:01.323858Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-04-10T11:19:01.324105Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:19:01.324253Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-04-10T11:19:01.324305Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-04-10T11:19:01.368454Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-10T11:19:01.368549Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-04-10T11:19:01.369088Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:19:01.369136Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:01.369183Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2025-04-10T11:19:01.369338Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:01.369412Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:19:01.369465Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-04-10T11:18:52.289413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642445948123297:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:52.289552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:52.319401Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642443088113088:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:52.319461Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a66/r3tmp/tmpuxD4vY/pdisk_1.dat 2025-04-10T11:18:52.640519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:52.640715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:52.673870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:52.675043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:52.676263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:52.682995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:52.683111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:52.696284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4387 2025-04-10T11:18:55.303058Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.320091Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:55.330595Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:55.330641Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:55.330678Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.330728Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:55.330853Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.330881Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.331558Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.331810Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.619012Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.620311Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:55.631762Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZWFmNDc1M2UtODdiYzMxNTUtOGFjMDg5MDctZWU4YTg4ZWI=, workerId: [2:7491642455973015287:2308], database: , longSession: 1, local sessions count: 1 2025-04-10T11:18:55.631820Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:55.632015Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:18:55.633088Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 
2025-04-10T11:18:55.633126Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.633179Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.633275Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:55.633289Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:55.633344Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.633531Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZWFmNDc1M2UtODdiYzMxNTUtOGFjMDg5MDctZWU4YTg4ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-04-10T11:18:55.633604Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7491642458833025985:2475] 2025-04-10T11:18:55.633939Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:55.634058Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZWFmNDc1M2UtODdiYzMxNTUtOGFjMDg5MDctZWU4YTg4ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7491642455973015287:2308] 2025-04-10T11:18:55.634085Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7491642455973015288:2119] 2025-04-10T11:18:55.636538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642455973015289:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.636647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.636567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642458833025984:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.636686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.945800Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps8wjb3jyk20yrh3zqkb2", Created new session, sessionId: ydb://session/3?node_id=2&id=ZjUwZDFiNWUtZWVlNWM5MTYtYWMzMjM5NjktZDQwNDg5OGU=, workerId: [2:7491642455973015300:2311], database: , longSession: 0, local sessions count: 2 2025-04-10T11:18:55.945956Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfps8wjb3jyk20yrh3zqkb2, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjUwZDFiNWUtZWVlNWM5MTYtYWMzMjM5NjktZDQwNDg5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7491642455973015300:2311] 2025-04-10T11:18:55.945977Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7491642455973015301:2122] 2025-04-10T11:18:55.946603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642455973015307:2315], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.948249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642455973015302:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.948300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:55.958919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:55.994783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642455973015309:2316], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:18:56.096098Z node 2 :TX_PROXY ERROR: Actor# [2:7491642460267982633:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:56.153147Z node 2 :KQP_PROXY DEBUG: TraceId: "01jrfps8wjb3jyk20yrh3zqkb2", Forwarded response to sender actor, requestId: 4, sender: [2:7491642455973015299:2310], selfId: [2:7491642443088113302:2278], source: [2:7491642455973015300:2311] 2025-04-10T11:18:56.153533Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjUwZDFiNWUtZWVlNWM5MTYtYWMzMjM5NjktZDQwNDg5OGU=, workerId: [2:7491642455973015300:2311], local sessions count: 1 2025-04-10T11:18:56.159876Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7491642445948123559:2281], selfId: [2:7491642443088113302:2278], source: [2:7491642455973015287:2308] 2025-04-10T11:18:56.160152Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7491642445948124003:2443], selfId: [1:7491642445948123559:2281], source: [2:7491642443088113302:2278] 2025-04-10T11:18:57.511465Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642466172428824:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:57.511546Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a66/r3tmp/tmpZYaL9p/pdisk_1.dat 2025-04-10T11:18:57.649549Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:57.675468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:57.675573Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:57.678644Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9809, node 3 2025-04-10T11:18:57.730821Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:57.730853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:57.730862Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:57.731047Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localh ... 057332169:2358] 2025-04-10T11:19:00.550613Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7491642479057332171:2959] 2025-04-10T11:19:00.551073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642479057332172:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:00.551158Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:00.551398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642479057332177:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:00.554518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480 2025-04-10T11:19:00.591529Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642479057332179:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T11:19:00.655231Z node 3 :TX_PROXY ERROR: Actor# [3:7491642479057332247:3016] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:00.884548Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7491642479057332170:2359], selfId: [3:7491642466172429045:2280], source: [3:7491642479057332169:2358] 2025-04-10T11:19:00.884835Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=M2ZhNTUyNWMtNWZjZTM4ZDUtNmE5YzQwNjktZmYzZjg5YjA=, TxId: 2025-04-10T11:19:00.884907Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=M2ZhNTUyNWMtNWZjZTM4ZDUtNmE5YzQwNjktZmYzZjg5YjA=, TxId: 2025-04-10T11:19:00.884932Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 60e4a1b1-649834b3-a6888c2-fec80cc7. Result: SUCCESS. Issues: 2025-04-10T11:19:00.887752Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MzIzOTdlNGQtMWQ4MDQxNDItOWM5NzQ2ZjAtOThmM2JjODU=, workerId: [3:7491642479057332333:2376], database: /Root, longSession: 1, local sessions count: 2 2025-04-10T11:19:00.887917Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:19:00.888586Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfpsddb91dsadzyfjnbrcga, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MzIzOTdlNGQtMWQ4MDQxNDItOWM5NzQ2ZjAtOThmM2JjODU=, CurrentExecutionId: 60e4a1b1-649834b3-a6888c2-fec80cc7, CustomerSuppliedId: 01jrfpsddb91dsadzyfjnbrcga, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7491642479057332333:2376] 2025-04-10T11:19:00.888637Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7491642479057332337:3063] 2025-04-10T11:19:00.888689Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=M2ZhNTUyNWMtNWZjZTM4ZDUtNmE5YzQwNjktZmYzZjg5YjA=, workerId: [3:7491642479057332169:2358], local sessions count: 1 2025-04-10T11:19:00.922615Z node 3 :KQP_PROXY DEBUG: TraceId: "01jrfpse1qcwa99jp14w1n7a24", Request has 18444999789768.629036s seconds to be completed 2025-04-10T11:19:00.924868Z node 3 :KQP_PROXY DEBUG: TraceId: "01jrfpse1qcwa99jp14w1n7a24", Created new session, sessionId: ydb://session/3?node_id=3&id=MzE5NjJiNTktNTgwMjY3MmQtZDdkNTAzOGEtZTI4MmUyYjA=, workerId: [3:7491642479057332349:2382], database: /Root, longSession: 1, local sessions count: 2 2025-04-10T11:19:00.925037Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jrfpse1qcwa99jp14w1n7a24 2025-04-10T11:19:00.941675Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfpse2d0rwt9jr39r77r1gc, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MzE5NjJiNTktNTgwMjY3MmQtZDdkNTAzOGEtZTI4MmUyYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7491642479057332349:2382] 2025-04-10T11:19:00.941724Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7491642479057332352:3069] 2025-04-10T11:19:00.971333Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, Bootstrap. Database: /Root 2025-04-10T11:19:00.971569Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491642479057332368:3078], for# user@builtin, access# DescribeSchema 2025-04-10T11:19:00.971598Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7491642479057332368:3078], for# user@builtin, access# DescribeSchema 2025-04-10T11:19:00.971926Z node 3 :KQP_PROXY DEBUG: Request has 18444999789768.579705s seconds to be completed 2025-04-10T11:19:00.973912Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NTY2M2Q0MDQtMWZmMTM3MzEtNjZjZmE1YzEtNWQ2NjA1YTc=, workerId: [3:7491642479057332369:2387], database: /Root, longSession: 1, local sessions count: 3 2025-04-10T11:19:00.974060Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:19:00.974574Z node 3 :KQP_PROXY DEBUG: TraceId: "01jrfpsddb91dsadzyfjnbrcga", Forwarded response to sender actor, requestId: 7, sender: [3:7491642479057332166:2957], selfId: [3:7491642466172429045:2280], source: [3:7491642479057332333:2376] 2025-04-10T11:19:00.974803Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-10T11:19:00.975209Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NTY2M2Q0MDQtMWZmMTM3MzEtNjZjZmE1YzEtNWQ2NjA1YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 11, targetId: [3:7491642479057332369:2387] 2025-04-10T11:19:00.975255Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7491642479057332371:3079] 2025-04-10T11:19:00.986430Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491642479057332353:2384], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:00.988323Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzE5NjJiNTktNTgwMjY3MmQtZDdkNTAzOGEtZTI4MmUyYjA=, ActorId: [3:7491642479057332349:2382], ActorState: ExecuteState, TraceId: 01jrfpse2d0rwt9jr39r77r1gc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:00.988648Z node 3 :KQP_PROXY DEBUG: TraceId: "01jrfpse2d0rwt9jr39r77r1gc", Forwarded response to sender actor, requestId: 9, sender: [3:7491642479057332351:2383], selfId: [3:7491642466172429045:2280], source: [3:7491642479057332349:2382] 2025-04-10T11:19:01.220539Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 11, sender: [3:7491642479057332370:2388], selfId: [3:7491642466172429045:2280], source: [3:7491642479057332369:2387] 2025-04-10T11:19:01.220829Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NTY2M2Q0MDQtMWZmMTM3MzEtNjZjZmE1YzEtNWQ2NjA1YTc=, TxId: 2025-04-10T11:19:01.220851Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NTY2M2Q0MDQtMWZmMTM3MzEtNjZjZmE1YzEtNWQ2NjA1YTc=, TxId: 2025-04-10T11:19:01.221050Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 60e4a1b1-649834b3-a6888c2-fec80cc7, start saving rows range [0; 1) 2025-04-10T11:19:01.221123Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, Bootstrap. Database: /Root 2025-04-10T11:19:01.221133Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=NTY2M2Q0MDQtMWZmMTM3MzEtNjZjZmE1YzEtNWQ2NjA1YTc=, workerId: [3:7491642479057332369:2387], local sessions count: 2 2025-04-10T11:19:01.221264Z node 3 :KQP_PROXY DEBUG: Request has 18444999789768.330366s seconds to be completed 2025-04-10T11:19:01.223605Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MjM3ZjQzYzgtNGQwMGFhMGQtNThhMWEzMGEtYTNjM2E3YTk=, workerId: [3:7491642483352299705:2397], database: /Root, longSession: 1, local sessions count: 3 2025-04-10T11:19:01.223801Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:19:01.224209Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 60e4a1b1-649834b3-a6888c2-fec80cc7, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-10T11:19:01.224702Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjM3ZjQzYzgtNGQwMGFhMGQtNThhMWEzMGEtYTNjM2E3YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 13, targetId: [3:7491642483352299705:2397] 2025-04-10T11:19:01.224753Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [3:7491642483352299707:3099] 2025-04-10T11:19:01.247053Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-04-10T11:18:34.129093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.129141Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.373360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-10T11:18:34.391889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:174:2170], Recipient [1:70:2109]: NActors::TEvents::TEvPoison 2025-04-10T11:18:34.392608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-04-10T11:18:34.400671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:18:34.419542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:18:34.421595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:34.421738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:34.421777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:34.421820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:34.421858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:34.421912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:34.421980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-10T11:18:34.422050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:34.422515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:34.440925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:34.442294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:18:34.442624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:34.442794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:34.442989Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:180:2172]: TSystem::Undelivered 2025-04-10T11:18:34.443025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-04-10T11:18:34.443056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.443086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:34.443342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-04-10T11:18:34.443389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:34.444419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:34.444563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.445236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.445805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.445960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:18:34.446699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.446830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.446941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.447130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.447247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.447414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.447717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.447898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.448979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 
72057594046678944 2025-04-10T11:18:34.449098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.450192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.450325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.450427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.450655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.451294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.451450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.451722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.451944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.452020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.452099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.453195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:18:34.454778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:18:34.454942Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:18:34.455515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:180:2172], Recipient [1:180:2172]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:18:34.455564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:18:34.456329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:34.456386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:34.456571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:34.456618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:34.456663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:34.456719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:18:34.456956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:193:2172], Recipient [1:180:2172]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:18:34.457005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:18:34.457050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is 
[1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-04-10T11:18:34.478971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:210:2189], Recipient [1:180:2172]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-10T11:18:34.479045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:18:34.593147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:34.593460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:18:34.593574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:18:34.593729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:18:34.593936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:18:34.594050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:34.594097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 7205759404667894 ... 
057594046678944, status: OK, at schemeshard: 72075186233409549 2025-04-10T11:18:35.137568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125003, Sender [1:423:2337], Recipient [1:492:2384]: NKikimrScheme.TEvSyncTenantSchemeShard DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T11:18:35.137605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-04-10T11:18:35.137686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T11:18:35.137778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:18:35.137817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:18:35.137900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:423:2337], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:18:35.137978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:18:35.138052Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:18:35.593916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.593976Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:35.647076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-04-10T11:18:35.692292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.692355Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-04-10T11:18:35.720267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:18:35.725948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:241:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:241:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:252:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:258:2067] recipient: [2:241:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-10T11:18:35.741616Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:255:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait 
txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-10T11:18:35.785319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-10T11:18:35.930220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:422:2337] sender: [2:423:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [2:422:2337] sender: [2:426:2067] recipient: [2:24:2071] 2025-04-10T11:18:35.971037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.971097Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-10T11:18:35.988905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:18:35.988962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-10T11:18:35.989258Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-10T11:18:35.989422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-10T11:18:36.006911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-10T11:18:36.007299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 
TestModificationResults wait txId: 108 2025-04-10T11:18:36.058247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:553:2067] recipient: [2:550:2440] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:553:2067] recipient: [2:550:2440] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:557:2444] sender: [2:558:2067] recipient: [2:550:2440] Leader for TabletID 72075186233409550 is [2:557:2444] sender: [2:559:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-04-10T11:18:38.397515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:38.397607Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:38.460798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:38.460860Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-04-10T11:18:54.849758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642454697866214:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.849871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156d/r3tmp/tmpKYL7AZ/pdisk_1.dat 2025-04-10T11:18:55.244908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:55.277766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:55.278341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11273, node 1 2025-04-10T11:18:55.282783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:55.414058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:55.414087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:55.414094Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:55.414258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25076 PQClient connected to localhost:11273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:55.942116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:55.958824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:57.807261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467582768842:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.807261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467582768834:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.807376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.815777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:18:57.819274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467582768878:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.819395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:57.831403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642467582768848:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:18:58.042364Z node 1 :TX_PROXY ERROR: Actor# [1:7491642467582768906:2388] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:58.130977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.140087Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642471877736220:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.140386Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjU2ODZlZjEtMjJkNDRjZmEtYWRjNWIzNjMtZjgwNzI3Yjg=, ActorId: [1:7491642467582768831:2331], ActorState: ExecuteState, TraceId: 01jrfpsazz9ztdc79k7kph69gc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.150350Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:58.260867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.344168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:58.645193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpsbk4bpkc9rzw2f7zfywy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FhMWNhODgtYTFkNjBiODItZWRkNzE5MzItOGZhZGRhY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:18:59.849409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642454697866214:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:59.849525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpCost::IndexLookupAndTake-useSink >> KqpCost::OltpWriteRow+isSink >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-04-10T11:18:57.532954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642466097349925:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:57.533148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001553/r3tmp/tmpeqpYk4/pdisk_1.dat 2025-04-10T11:18:57.862545Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22272, node 1 2025-04-10T11:18:57.949107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001553/r3tmp/yandexitJN6K.tmp 2025-04-10T11:18:57.949139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001553/r3tmp/yandexitJN6K.tmp 2025-04-10T11:18:57.949585Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001553/r3tmp/yandexitJN6K.tmp 2025-04-10T11:18:57.949777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:57.955215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:57.955359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:57.957126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32299 PQClient connected to localhost:22272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:18:58.247919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
waiting...
2025-04-10T11:19:00.450744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642478982252524:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:00.450856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:00.453420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642478982252560:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:00.464940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-10T11:19:00.486315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642478982252562:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-10T11:19:00.770347Z node 1 :TX_PROXY ERROR: Actor# [1:7491642478982252629:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:19:00.807242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:19:00.953166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:19:00.994057Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642478982252637:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:00.995830Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2IyZmRhNWMtNTcxZGJkN2EtNmVmYTY4MzMtYjY3MDU4NjE=, ActorId: [1:7491642478982252521:2328], ActorState: ExecuteState, TraceId: 01jrfpsdjd99gdb89axhgdp6wk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:00.998105Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:01.040654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:19:01.276188Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpse7yfajkw2w5yzctjr8w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNlMDdmM2EtNzI4NmMxMmYtODc5ZGJmMzgtMzhlYTNlMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:19:02.534450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642466097349925:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.534554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> TPQCDTest::TestDiscoverClusters [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-04-10T11:18:34.725208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:34.725269Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:34.878436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:18:34.907567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:18:34.909665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:18:34.957091Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-10T11:18:35.416364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:35.416446Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-10T11:18:35.475835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 
1, wait until txId: 1 >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink >> KqpCost::OlapWriteRow |90.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] >> KqpCost::Range [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink >> TopicService::DifferentConsumers_TheRangesOverlap >> KqpCost::OlapRangeFullScan |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-04-10T11:18:42.236558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:42.236759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:18:42.236930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e1f/r3tmp/tmpach1os/pdisk_1.dat 2025-04-10T11:18:42.688992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.727405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:42.771226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:42.771358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:42.782889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:42.871914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:42.928403Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-04-10T11:18:42.928638Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.980879Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.981178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.982876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:18:42.982946Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:18:42.982999Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:18:42.983329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.983758Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-04-10T11:18:42.983968Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:42.992459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.992580Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:721:2586] in generation 1 2025-04-10T11:18:42.994697Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:42.994873Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:42.996244Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:18:42.996377Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:18:42.996448Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:18:42.996739Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:42.996942Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:42.996983Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:729:2588] in generation 1 2025-04-10T11:18:42.998234Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-04-10T11:18:42.998410Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.007416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.007533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:18:43.008883Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:18:43.008947Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:18:43.008990Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:18:43.009291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:18:43.009415Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:18:43.009483Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-04-10T11:18:43.023071Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.065592Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:18:43.065814Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.065938Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:743:2617] 2025-04-10T11:18:43.065987Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:18:43.066021Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:18:43.066057Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:18:43.066226Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.066269Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:18:43.066332Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.066384Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:744:2618] 2025-04-10T11:18:43.066409Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:18:43.066456Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:18:43.066482Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:18:43.066707Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:18:43.066736Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:18:43.066783Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:18:43.066846Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:745:2619] 2025-04-10T11:18:43.066870Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:18:43.066890Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:18:43.066911Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:18:43.067311Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:18:43.067412Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:18:43.067458Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:18:43.067522Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:18:43.067610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:18:43.067641Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.067674Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:18:43.067752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:18:43.067789Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:18:43.067804Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.067820Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:18:43.067849Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:18:43.067870Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-10T11:18:43.067906Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-10T11:18:43.067988Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:707:2596], sessionId# [0:0:0] 2025-04-10T11:18:43.068044Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:715:2603], sessionId# [0:0:0] 2025-04-10T11:18:43.068075Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:18:43.068093Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:18:43.068106Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-10T11:18:43.068123Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:18:43.068528Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:18:43.068787Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:18:43.068897Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:18:43.069391Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:18:43.069537Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:18:43.069576Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:18:43.071909Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:18:43.071999Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:18:43.083212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:18:43.083326Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.083420Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:18:43.083484Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:18:43.127340Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-04-10T11:18:43.127576Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-10T11:18:43.127750Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 80, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-10T11:19:03.801972Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-10T11:19:03.802077Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-10T11:19:03.802377Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:03.802564Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:19:03.802601Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:19:03.802635Z node 3 :TX_DATASHARD DEBUG: Found ready operation [2500:281474976715666] in PlanQueue unit at 72075186224037893 2025-04-10T11:19:03.802789Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037893 loaded tx from db 2500:281474976715666 keys extracted: 0 2025-04-10T11:19:03.802906Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:19:03.815432Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-04-10T11:19:03.816073Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:19:03.827429Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-04-10T11:19:03.827537Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:19:03.827616Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2025-04-10T11:19:03.827700Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:19:03.827775Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1441:3072], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:19:03.827916Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-04-10T11:19:03.827995Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:19:03.828444Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1441:3072] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2025-04-10T11:19:03.828972Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2025-04-10T11:19:03.829084Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-04-10T11:19:03.829200Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-04-10T11:19:03.829326Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-04-10T11:19:03.829682Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1241:2954], at tablet# 72075186224037891 2025-04-10T11:19:03.829750Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-04-10T11:19:03.829832Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:19:03.829875Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:19:03.829920Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2025-04-10T11:19:03.830389Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:03.831016Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-04-10T11:19:03.843195Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:19:03.843307Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1441:3072], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:19:03.843411Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-04-10T11:19:03.843471Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-04-10T11:19:03.843617Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1441:3072] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2025-04-10T11:19:03.843674Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1441:3072] Reply: txId# 281474976715666, status# OK, error# 2025-04-10T11:19:03.843841Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2025-04-10T11:19:03.844204Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-10T11:19:03.844258Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-04-10T11:19:03.844465Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-04-10T11:19:03.844504Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-04-10T11:19:03.844615Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-04-10T11:19:03.844654Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-04-10T11:19:03.844808Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1436:3068], serverId# [3:1437:3069], sessionId# [0:0:0] 2025-04-10T11:19:03.844885Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:19:03.844924Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:03.844962Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-10T11:19:03.846136Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-04-10T11:19:03.846598Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-04-10T11:19:03.846850Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:19:03.846908Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.846967Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2025-04-10T11:19:03.847253Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.847340Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:19:03.848098Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-04-10T11:19:03.848258Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-04-10T11:19:03.849830Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-04-10T11:19:03.849885Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037893 2025-04-10T11:19:03.850070Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-10T11:19:03.850113Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.850178Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 
2025-04-10T11:19:03.850313Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:03.850370Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-10T11:19:03.850424Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-10T11:19:03.851649Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-10T11:19:03.851961Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-10T11:19:03.852151Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:19:03.852190Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.852233Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2025-04-10T11:19:03.852430Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.852489Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:19:03.853008Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-04-10T11:19:03.853128Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-04-10T11:19:03.889783Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-10T11:19:03.889851Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037892 2025-04-10T11:19:03.890196Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:19:03.890249Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:19:03.890298Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2025-04-10T11:19:03.890420Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:19:03.890480Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:19:03.890518Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2025-04-10T11:18:54.851361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642454250158739:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:54.851424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157e/r3tmp/tmpgMea7n/pdisk_1.dat 2025-04-10T11:18:55.264197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:55.294602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:55.294681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-10T11:18:55.296244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25534, node 1 2025-04-10T11:18:55.413988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00157e/r3tmp/yandexAIxtzJ.tmp 2025-04-10T11:18:55.414020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00157e/r3tmp/yandexAIxtzJ.tmp 2025-04-10T11:18:55.414971Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00157e/r3tmp/yandexAIxtzJ.tmp 2025-04-10T11:18:55.415157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23320 PQClient connected to localhost:25534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:56.013141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:18:57.815386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467135061348:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:18:57.815391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467135061357:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:18:57.815645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:18:57.816218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642467135061365:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:18:57.816296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:18:57.819817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-10T11:18:57.832490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642467135061362:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-10T11:18:58.054288Z node 1 :TX_PROXY ERROR: Actor# [1:7491642467135061418:2389] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:18:58.131069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:18:58.180350Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642471430028734:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:58.182511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2U0YzAzMjEtNDEwOTU3ODctMmI1YTU0ZTMtZDJlOTI3NDA=, ActorId: [1:7491642467135061345:2331], ActorState: ExecuteState, TraceId: 01jrfpsazp2pvzkz84bg05770n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:58.184951Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:58.269158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:58.352050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:58.645158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpsbk71gawt9v3jydf03zh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQ4MzZhYzctYWJiZDczZDktNmJjOGRjZmYtOWEyY2RhZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:59.839732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jrfpscsm9qzpq91fhm0c9cn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA4MzRiZTgtMzVkMjAzZmUtZmI2NWZkNzUtNDI3MzZhYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:18:59.854564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642454250158739:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:59.854688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:01.076567Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jrfpse0a0aaemdpm003m72d1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJmNWJlZTMtZjdlMTk4ZmMtMTllMzNmM2ItNDRiZTQ4YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:19:02.382234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfpsf9zfztz1s15nrbp3jsd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ0MjRjZWItMTcwZWZkNWYtODM2MjI1MDItMzVhMGJiOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:19:03.779817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfpsgnq19bw3q0ewrd61zyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU5Mzg3NTAtZTlhMjRhYTEtYmFjYTRkZTEtM2UyMTQxZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] |90.0%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::QuerySeviceRangeFullScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 14822, MsgBus: 17920 2025-04-10T11:18:59.036739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642476975323779:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:59.038424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bad/r3tmp/tmpONLPK1/pdisk_1.dat 2025-04-10T11:18:59.400767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:59.444167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:59.444831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:59.447898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14822, node 1 2025-04-10T11:18:59.612409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:59.612434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:59.612456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:59.612606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17920 TClient is connected to server localhost:17920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:00.309577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.334768Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:00.346607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.500662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.681855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.778103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.265731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642489860227388:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:02.265943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:02.791519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.831365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.871084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.902424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.933999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.967600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:19:03.080975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642494155195199:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.081077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.081159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642494155195204:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.086502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:19:03.101287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642494155195206:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:03.177625Z node 1 :TX_PROXY ERROR: Actor# [1:7491642494155195263:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:04.035157Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642476975323779:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:04.035277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:04.881583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283944893, txId: 281474976710671] shutting down >> DemoTx::Scenario_2 [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 4543, MsgBus: 2849 2025-04-10T11:18:59.042642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642473108036530:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:59.043117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f4c/r3tmp/tmpMTgyzz/pdisk_1.dat 2025-04-10T11:18:59.449381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:59.449501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:59.451023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:59.460571Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4543, node 1 2025-04-10T11:18:59.617337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:59.617360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:59.617367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:59.617493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2849 TClient is connected to server localhost:2849 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:00.309427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.346512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.513016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:00.664312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:19:00.730219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.176771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642485992940077:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:02.176874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:02.789334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.869771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.902416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.933105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:19:02.967484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:19:03.012568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:19:03.116149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642490287907895:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.116242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.116510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642490287907900:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:19:03.121376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:19:03.134232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642490287907902:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:19:03.198141Z node 1 :TX_PROXY ERROR: Actor# [1:7491642490287907958:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:19:04.042495Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642473108036530:2165];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:19:04.042603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD]
>> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD]
>> KqpCost::OlapRange [GOOD]
>> KqpCost::IndexLookup+useSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD]
Test command err:
Trying to start YDB, gRPC: 25415, MsgBus: 61464
2025-04-10T11:18:59.995324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642474149126462:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:18:59.995547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ba1/r3tmp/tmpAR7kj7/pdisk_1.dat
2025-04-10T11:19:00.313594Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25415, node 1
2025-04-10T11:19:00.376960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:19:00.376984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:19:00.376991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:19:00.377115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:19:00.379093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:19:00.379214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:19:00.380887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:61464
TClient is connected to server localhost:61464
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:00.989765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.017498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:01.029468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.152970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.296465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.384986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.090733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642491328997428:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.090875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.370905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.444904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.485902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.517015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.559247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.632771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.702545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642491328997947:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.702654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.702865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642491328997952:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.706203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:03.715389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642491328997954:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:03.809738Z node 1 :TX_PROXY ERROR: Actor# [1:7491642491328998009:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:04.825651Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-10T11:19:06.013594Z, after 1.188500s 2025-04-10T11:19:04.826496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:19:04.880026Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-10T11:19:04.995201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642474149126462:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:04.995283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:05.058021Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7491642499918932898:2488] 2025-04-10T11:19:05.058076Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7491642495623965566:2488] 2025-04-10T11:19:05.074326Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1744283945117:281474976710671 created 2025-04-10T11:19:05.074888Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-10T11:19:05.074937Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-10T11:19:05.074953Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-10T11:19:05.075267Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-10T11:19:05.075438Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T11:19:05.075485Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-10T11:19:05.075584Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. 
Shard resolve complete, resolved shards: 1 2025-04-10T11:19:05.075618Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-10T11:19:05.075660Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-04-10T11:19:05.075677Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:05.075731Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2 ... lfId: [1:7491642499918932918:2497], TxId: 281474976710672, task: 1, CA Id [1:7491642499918932915:2497]. dropping batch for read #0 2025-04-10T11:19:05.096535Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932918:2497], TxId: 281474976710672, task: 1, CA Id [1:7491642499918932915:2497]. effective maxinflight 1 sorted 1 2025-04-10T11:19:05.096546Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932918:2497], TxId: 281474976710672, task: 1, CA Id [1:7491642499918932915:2497]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-10T11:19:05.096560Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932918:2497], TxId: 281474976710672, task: 1, CA Id [1:7491642499918932915:2497]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-10T11:19:05.096757Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932915:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:19:05.096770Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932915:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:19:05.096792Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-10T11:19:05.096831Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. 
Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-10T11:19:05.096861Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7491642499918932915:2497] 2025-04-10T11:19:05.096922Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:19:05.097153Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:19:05.097180Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932915:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:19:05.097216Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932915:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:19:05.097239Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-04-10T11:19:05.097253Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932915:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:19:05.097467Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-04-10T11:19:05.097595Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:19:05.097833Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:19:05.098174Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7491642495623965566:2488], seqNo: 1, nRows: 1 2025-04-10T11:19:05.098370Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491642499918932915:2497], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 19499 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1183 FinishTimeMs: 1744283945097 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 163 BuildCpuTimeUs: 1020 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744283945096 CreateTimeMs: 1744283945077 } MaxMemoryUsage: 1048576 } 2025-04-10T11:19:05.098440Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491642499918932915:2497] 2025-04-10T11:19:05.098497Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491642499918932916:2498], 2025-04-10T11:19:05.121633Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7491642499918932919:2498] 2025-04-10T11:19:05.121778Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:19:05.121859Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T11:19:05.122264Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-10T11:19:05.122316Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-10T11:19:05.122417Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-10T11:19:05.122513Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:19:05.122690Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642499918932916:2498], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=. TraceId : 01jrfpshvx50jsfbgdg6qkf961. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [1:7491642499918932911:2488] TaskId: 2 Stats: CpuTimeUs: 15202 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 724 FinishTimeMs: 1744283945121 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 215 BuildCpuTimeUs: 509 HostName: "ghrun-x5vqckr7qy" NodeId: 1 CreateTimeMs: 1744283945079 } MaxMemoryUsage: 1048576 2025-04-10T11:19:05.122950Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:19:05.123239Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491642499918932916:2498], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 15202 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 724 FinishTimeMs: 1744283945121 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 215 BuildCpuTimeUs: 509 HostName: "ghrun-x5vqckr7qy" NodeId: 1 CreateTimeMs: 1744283945079 } MaxMemoryUsage: 1048576 } 2025-04-10T11:19:05.123296Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491642499918932916:2498] 2025-04-10T11:19:05.123440Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:19:05.123492Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642499918932911:2488] TxId: 281474976710672. Ctx: { TraceId: 01jrfpshvx50jsfbgdg6qkf961, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVlMWUyZDctZDZiZmZkYTYtZjgwYzNjMDktZTE1ODU2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.034701s ReadRows: 1 ReadBytes: 20 ru: 23 rate limiter was not found force flag: 1 2025-04-10T11:19:05.124804Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283945117, txId: 281474976710671] shutting down |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] >> KqpCost::PointLookup |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 1473, MsgBus: 29509 2025-04-10T11:19:00.606838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642479940363615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:00.606987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b9b/r3tmp/tmp1jJxJs/pdisk_1.dat 2025-04-10T11:19:01.014535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1473, node 1 2025-04-10T11:19:01.058137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:01.058496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:01.067042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:01.103888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:01.103909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:01.103918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:01.104056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29509 TClient is connected to server localhost:29509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:19:01.572587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.590380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:01.607569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.764934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.965098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.045222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.813318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642492825267264:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.813485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.178341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.218483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.297528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.376346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.405522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.451359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.547754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642497120235084:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.547835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642497120235089:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.547843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.552000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:04.560562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642497120235091:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:04.620463Z node 1 :TX_PROXY ERROR: Actor# [1:7491642497120235143:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:05.609816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642479940363615:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:05.609898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 64804, MsgBus: 23426 2025-04-10T11:19:00.690208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642478154176412:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:00.690329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b8f/r3tmp/tmpIsdGD5/pdisk_1.dat 2025-04-10T11:19:01.122455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:01.137493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:01.137606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:01.140002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64804, node 1 2025-04-10T11:19:01.224986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:01.225011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:01.225020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:01.225199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23426 TClient is connected to server localhost:23426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:19:01.780371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.807176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.957850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.125836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:02.202494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.967533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642491039080054:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.967699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.284535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.316971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.355832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.387136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.421942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.469290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.584617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642495334047865:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.584690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.584830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642495334047870:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.589648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:04.604384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642495334047872:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:04.673626Z node 1 :TX_PROXY ERROR: Actor# [1:7491642495334047927:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:05.693816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642478154176412:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:05.693946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:06.201861Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283946216, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-04-10T11:18:46.320753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642419997817759:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:46.321276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ac5/r3tmp/tmpBrSTVG/pdisk_1.dat 2025-04-10T11:18:46.771248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:46.809819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:46.809910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:46.812138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20062 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:18:47.069855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:49.227594Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.229835Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.231148Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-10T11:18:49.233607Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:18:49.233645Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:49.233675Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:49.233726Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:18:49.233849Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.233896Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.233986Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7491642424292785467:2282], selfId: [1:7491642419997817855:2278], source: [1:7491642419997817855:2278] 2025-04-10T11:18:49.234008Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.234028Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:18:49.234685Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-10T11:18:49.234777Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7491642424292785467:2282], selfId: [1:7491642419997817855:2278], source: [1:7491642419997817855:2278] 2025-04-10T11:18:49.235026Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-04-10T11:18:49.235111Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7491642424292785467:2282], selfId: [1:7491642419997817855:2278], source: [1:7491642419997817855:2278] 2025-04-10T11:18:49.236835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642432882720098:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:49.236948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:52.942168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:18:52.942426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:18:52.942672Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ac5/r3tmp/tmpUBB5ZY/pdisk_1.dat 2025-04-10T11:18:53.271717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:18:53.308578Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:18:53.308671Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:18:53.308981Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:53.333727Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:293:2338], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:53.335808Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-10T11:18:53.335963Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:18:53.336146Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:293:2338], cacheItem# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:18:53.336339Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
2025-04-10T11:18:53.336427Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:18:53.336527Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:293:2338], cacheItem# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:18:53.336746Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:624:2534], recipient# [2:302:2346], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:53.348989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:53.349156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:53.363615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:53.445979Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 500 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" E ... 
to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BSC_STAT_PROCESSOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NIcNodeCache::TIcNodeCacheServiceActor Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NBsController::TBlobStorageController::TSelfHealActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-04-10T11:19:01.291307Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-04-10T11:19:01.291386Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=M2ZmZDAzNGMtZTBmMzNmMjctNTFhM2FjYmItMzk4ZWJmMTE= status: TIMEOUT round: 
0 2025-04-10T11:19:01.291549Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2ZmZDAzNGMtZTBmMzNmMjctNTFhM2FjYmItMzk4ZWJmMTE=, ActorId: [2:1132:2935], ActorState: ExecuteState, TraceId: 01jrfpsdjj91bk5xghgkn6yd8s, Create QueryResponse for error on request, msg: Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER 2025-04-10T11:19:01.291849Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:593:2518], selfId: [2:57:2104], source: [2:1132:2935] Send scheduled event back 2025-04-10T11:19:01.292014Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1134:2937], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-04-10T11:19:00.436624Z 2025-04-10T11:19:01.292112Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1134:2937], owner: [2:287:2332], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: deb511c6-234b2509-5ef7f715-cdfe2f9b Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-04-10T11:19:02.174286Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642487346863476:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.174352Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ac5/r3tmp/tmpWNJ3ZE/pdisk_1.dat 2025-04-10T11:19:02.337392Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:02.360918Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:02.361027Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:02.363776Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31287, node 3 2025-04-10T11:19:02.449191Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:02.449217Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:02.449228Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:02.449404Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:02.696124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.578282Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:19:05.579257Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:19:05.583721Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 
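The records above show how a timed-out query surfaces in this log: KQP_PROXY fires TEvPrivate::TEvOnRequestTimeout, the session replies to the sender with status TIMEOUT, and KQP_COMPILE_ACTOR reports the compilation timeout together with the offending query text. Below is a minimal sketch for pulling such records out of a saved copy of output like this; the marker list and the file argument are assumptions based on this excerpt, not part of the test harness.

import re
import sys

# Substrings that mark the timeout sequence shown above; this list is an
# assumption inferred from the excerpt, not an exhaustive set of KQP markers.
TIMEOUT_MARKERS = (
    "TEvPrivate::TEvOnRequestTimeout",
    "status: TIMEOUT",
    "Compilation timeout",
    "Query compilation timed out",
)

# ISO-8601 timestamps as used by the log records above.
TS_RE = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")

def timeout_records(path):
    """Yield (timestamp, line) pairs for lines mentioning a timeout."""
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            if any(marker in line for marker in TIMEOUT_MARKERS):
                ts = TS_RE.search(line)
                yield (ts.group(0) if ts else "?", line.rstrip())

if __name__ == "__main__":
    # Usage (file name assumed): python timeout_records.py ya_test_output.txt
    for ts, line in timeout_records(sys.argv[1]):
        print(ts, line[:200])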
2025-04-10T11:19:05.583766Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:19:05.583814Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:19:05.583895Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:19:05.583915Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:19:05.588014Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
>> DemoTx::Scenario_3
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 32756, MsgBus: 15364 2025-04-10T11:19:00.677057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642478464738869:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:00.677114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b8e/r3tmp/tmpM8Usox/pdisk_1.dat 2025-04-10T11:19:01.090729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32756, node 1 2025-04-10T11:19:01.142911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:01.146610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:01.150355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:01.172296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:01.172318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:01.172326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:01.172464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15364 TClient is connected to server localhost:15364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
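The long runs of "Captured TEvents::TSystem::Wakeup to <ACTOR>" earlier in this section are printed once per event captured by the test actor runtime, so the same targets repeat hundreds of times. The following sketch compresses such runs into per-target counts when reading a saved copy of this output; the file argument and script name are assumptions for illustration.

import re
import sys
from collections import Counter

# Matches capture lines as they appear in the output above, e.g.
# "Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER"
WAKEUP_RE = re.compile(r"Captured TEvents::TSystem::Wakeup to (\S+)")

def wakeup_counts(path):
    """Count captured wakeup events per target actor or service."""
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            counts.update(WAKEUP_RE.findall(line))
    return counts

if __name__ == "__main__":
    for target, n in wakeup_counts(sys.argv[1]).most_common():
        print(f"{n:6d}  {target}")

Run as, for example: python wakeup_counts.py ya_test_output.txt (file name assumed). A plain substring scan would also work; the regex keeps the target name so the noise collapses into one line per actor.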
2025-04-10T11:19:01.698407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.719454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:01.729689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.918102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.093629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.180910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.770955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642491349642538:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.771113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.070657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.106341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.143860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.177807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.210527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.279580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.359946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642495644610354:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.360049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.360298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642495644610359:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.363614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:04.375206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642495644610361:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:04.469660Z node 1 :TX_PROXY ERROR: Actor# [1:7491642495644610416:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:05.449931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:05.679870Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642478464738869:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:05.679944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD]
Test command err:
Trying to start YDB, gRPC: 4151, MsgBus: 32582 2025-04-10T11:18:59.038732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642476872180952:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:59.039450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f32/r3tmp/tmpqasKzr/pdisk_1.dat 2025-04-10T11:18:59.452011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:59.470886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:59.471023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:59.474649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4151, node 1 2025-04-10T11:18:59.612420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:59.612446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:59.612454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:59.612600Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32582 TClient is connected to server localhost:32582 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:00.312590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.346433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.521392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.684710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:00.765110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:02.326547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642489757084610:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:02.326655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:02.789332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:02.832677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:02.862739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:02.933427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:02.970492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.044060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.122255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642494052052429:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.122422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.122532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642494052052434:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.126472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:03.137716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642494052052436:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:03.192879Z node 1 :TX_PROXY ERROR: Actor# [1:7491642494052052489:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:04.042262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642476872180952:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:04.042347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:04.586098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.783457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:04.783457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7491642498347020205:2500];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:04.783743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:04.783745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7491642498347020205:2500];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:04.785652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:04.785666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7491642498347020205:2500];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:04.785862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:19:04.786678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7491642498347020205:2500];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:19:04.786847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7491642498347020205:2500];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:19:04.786871Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:19:04.788998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:19:04.789190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:19:04.789331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:19:04.789463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7491642498347020207:2501];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:19:04.789602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72 ... 22;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:04.991735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:04.991817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:04.991844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:04.991897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:04.991917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:04.991952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:04.991981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:04.992409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:04.992445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-04-10T11:19:04.992585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:04.992609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:04.992713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:04.992740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:04.992877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:04.992901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:04.992985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:04.993007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:05.006114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:19:05.006200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:19:05.006296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:19:05.006332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:19:05.006480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:05.006515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:05.006614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:05.006659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:05.006719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:05.006754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:05.006803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:05.006831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:05.007320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:05.007366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:05.007667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:05.007710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:05.007832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:05.007861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:05.008020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:05.008045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:05.008147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:05.008171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:05.051923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.051958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.058711Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.065637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.070015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.071639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.076969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.077575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.082602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.083698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.527321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:05.527746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:05.528813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7491642498347020199:2499];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037923; 2025-04-10T11:19:05.529157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; |90.1%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> KqpCost::AAARangeFullScan [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 29135, MsgBus: 18393 2025-04-10T11:19:00.527004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642479577758130:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:00.527070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b9f/r3tmp/tmp6KuT1C/pdisk_1.dat 2025-04-10T11:19:00.894444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:00.943801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:00.943881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29135, node 1 2025-04-10T11:19:00.947185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:01.019645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:01.019667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:01.019677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:01.020196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18393 TClient is connected to server localhost:18393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:01.544750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.578399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.753546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
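Chunks in this part of the log follow a fixed shape: ">> Suite::Test [STATUS]" verdict markers, "------- [TM] ..." headers for chunks that carry test command output, and the stderr stream after "Test command err:". Below is a sketch that tallies the plain-text verdict markers; it only approximates the run's real summary (any machine-readable report the run produces is authoritative), and the regex is inferred from this excerpt rather than from a documented format.

import re
import sys
from collections import Counter

# ">> Suite::Test [STATUS]" markers as they appear in this output.
VERDICT_RE = re.compile(r">> (\S+) \[(\w+)\]")

def verdicts(path):
    """Yield (test_name, status) pairs from plain-text ya output."""
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            yield from VERDICT_RE.findall(line)

if __name__ == "__main__":
    by_status = Counter(status for _, status in verdicts(sys.argv[1]))
    for status, n in by_status.most_common():
        print(f"{status}: {n}")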
2025-04-10T11:19:01.889892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:01.967766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.515652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642492462661738:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.515808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:03.814638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.854600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.888962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.940089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.982998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.016849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.061420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642496757629546:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.061502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.061733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642496757629551:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:04.065692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:04.080958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642496757629553:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:04.151048Z node 1 :TX_PROXY ERROR: Actor# [1:7491642496757629606:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:05.261959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:05.490120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:05.498484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:05.498918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:05.499046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:19:05.499164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:19:05.499294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:19:05.499421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:19:05.499534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:19:05.499654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:19:05.499771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:19:05.499878Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:19:05.499990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642501052597335:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:19:05.546492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642501052597333:2497];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:05.546575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642501052597333:2497];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:05.551019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642501052597333:2497];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:05.551333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642501052597333:2497];tablet_id=72075186224037922;process=TTxInitSchema::Execute; ... ;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:05.767065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:05.767156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:05.767189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:05.767251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:05.767279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:05.767325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:05.767353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:05.767770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:05.767810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:05.767984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:05.768012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:05.768130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:05.768160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:05.768334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:19:05.768339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:05.768368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:19:05.768368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:05.768452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:19:05.768468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:05.768480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:19:05.768493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:05.768634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:05.768662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:05.768745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:05.768775Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:05.768838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:05.768863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:05.768909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:05.768934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:05.769401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:05.769441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:05.769605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:05.769634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:05.769756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:05.769786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:05.769949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:05.769976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:05.770073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:05.770095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:05.857802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.860168Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.865389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.870589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.872021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.878522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.878696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.885408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.892271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:05.900770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:06.094771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:06.095331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7491642501052597337:2499];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037923;receive=72075186224037928; 2025-04-10T11:19:06.095649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:06.096190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2 >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::Cache >> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin >> KqpCost::OltpWriteRow-isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command 
err: Trying to start YDB, gRPC: 15155, MsgBus: 24259 2025-04-10T11:19:02.573082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642488527583122:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.575057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b79/r3tmp/tmpRepHSG/pdisk_1.dat 2025-04-10T11:19:03.020062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:03.023266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:03.023385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:03.027611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15155, node 1 2025-04-10T11:19:03.120733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:03.120764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:03.120778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:03.120931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24259 TClient is connected to server localhost:24259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:03.662425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.698329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.841378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:04.013493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
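Note: the records above follow a fixed shape — an ISO-8601 timestamp, then "node <n> :<COMPONENT> <LEVEL>:", then the message — so a capture like this can be summarized mechanically. Below is a minimal sketch of such a tally, assuming that shape holds throughout; the ya_log.txt name is taken from the --log-file flag of this run, and the script itself is illustrative, not part of the test harness.

import re
from collections import Counter

# Matches the "<timestamp> node <n> :<COMPONENT> <LEVEL>:" prefix seen in the records above.
record = re.compile(r'\d{4}-\d{2}-\d{2}T[\d:.]+Z node \d+ :(\w+) (WARN|ERROR|NOTICE|INFO|DEBUG):')

counts = Counter()
with open('ya_log.txt', encoding='utf-8', errors='replace') as fh:
    for line in fh:
        counts.update(record.findall(line))  # a wrapped line may carry several records

for (component, level), n in counts.most_common():
    print(f'{component:24} {level:6} {n}')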
2025-04-10T11:19:04.102025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.033646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642505707454081:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.033801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.433094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.463674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.504940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.545211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.616583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.709130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.772367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642505707454597:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.772453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.773147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642505707454602:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.777762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:06.789868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642505707454604:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:06.888652Z node 1 :TX_PROXY ERROR: Actor# [1:7491642505707454660:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:07.573128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642488527583122:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:07.573229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PONOS {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Group (-∞, +∞)","Name (-∞, +∞)"],"Reverse":false,"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":1}],"E-Rows":"No estimate","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[2,19]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":930,"Max":930,"Min":930,"History":[2,930]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[2,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Test","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":20,"Max":20,"Min":20}}],"BaseTimeMs":1744283948262,"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"CpuTimeUs":{"Count":1,"Sum":1180,"Max":1180,"Min":1180,"History":[2,1180]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[2,192]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[2,192]},"WaitTimeUs":{"Count":1,"Sum":940,"Max":940,
"Min":940,"History":[2,940]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Merge","SortColumns":["Group (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[3,19]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":1514,"Max":1514,"Min":1514,"History":[3,1514]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[3,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"ResultRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Tasks":1,"ResultBytes":{"Count":1,"Sum":19,"Max":19,"M ... Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":1,"A-SelfCpu":1.089,"A-Cpu":2.269,"A-Size":19,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 6610 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 5868 affected_shards: 1 } compilation { duration_us: 222380 cpu_time_us: 218714 } process_cpu_time_us: 349 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[2,19]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":930,\"Max\":930,\"Min\":930,\"History\":[2,930]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576]},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1744283948262,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":1180,\"Max\":1180,\"Min\":1180,\"History\":[2,1180]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[2,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[2,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":940,\"Max\":940,\"Min\":940,\"History\":[2,940]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node 
Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"RESULT\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":1514,\"Max\":1514,\"Min\":1514,\"History\":[3,1514]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":1000,\"BaseTimeMs\":1744283948262,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":1089,\"Max\":1089,\"Min\":1089,\"History\":[3,1089]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1469,\"Max\":1469,\"Min\":1469,\"History\":[3,1469]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":222380,\"CpuTimeUs\":218714},\"ProcessCpuTimeUs\":349,\"TotalDurationUs\":237848,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":372},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 
5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.18,\"A-Cpu\":1.18,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.089,\"A-Cpu\":2.269,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'559) \'(\'\"_id\" \'\"ea051beb-5b01dc82-aa4c823d-d888e209\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'572) \'(\'\"_id\" \'\"906b4033-250d1874-bfb3bb24-fc11ec6f\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 237848 total_cpu_time_us: 224931 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":9},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744283948\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"e033d977-e0215f50-8247fc5c-b40f66e4\",\"version\":\"1.0\"}" >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> KqpCost::OltpWriteRow+isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28247, MsgBus: 14273 2025-04-10T11:19:02.618801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642489724046602:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.618872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b6b/r3tmp/tmp3wdnwe/pdisk_1.dat 2025-04-10T11:19:03.024805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:03.038266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:03.038359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28247, node 1 2025-04-10T11:19:03.054729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-10T11:19:03.114552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:03.114575Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:03.114585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:03.114760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14273 TClient is connected to server localhost:14273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:03.617559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.644948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.794567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:03.949209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:19:04.037615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.812521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642502608950276:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:05.812621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.097323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.134236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.171505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.210214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.245981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.323602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.391871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642506903918089:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.392035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.392295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642506903918094:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.397097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:06.414952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642506903918096:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:06.475860Z node 1 :TX_PROXY ERROR: Actor# [1:7491642506903918149:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:07.588597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.619595Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642489724046602:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:07.619744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:07.634058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.683242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10920, MsgBus: 30856 2025-04-10T11:19:02.737688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642486412416746:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.742341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b74/r3tmp/tmpbuoOWQ/pdisk_1.dat 2025-04-10T11:19:03.155150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:03.156015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:03.156116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:03.159538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10920, node 1 2025-04-10T11:19:03.236940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:03.236987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:03.237003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:03.237136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30856 TClient is connected to server localhost:30856 WaitRootIsUp 'Root'... 
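Note: the bare "path rows bytes" pairs that close a KqpCost block above — "/Root/Join1_2 1 19" and "/Root/Join1_1 8 136" — are most plausibly per-table read totals printed by the cost test before it asserts on them. That is an inference, not documented behaviour, but it matches the table_access { reads { rows: ... bytes: ... } } counters in the stats dumps elsewhere in this run (e.g. "reads { rows: 1 bytes: 8 }" alongside a trailing "1 8" line). The recurring "Resource pool default not found" WARN / "Scheduled retry" / "doublechecking" / "path exist, request accepts it" sequence likewise appears to be routine warm-up: the first query races the lazy creation of the default workload-manager pool and resolves once the pool path exists.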
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:03.869715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.900595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:04.103515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:04.285520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:04.376022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.368709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642503592287708:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.368880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.720804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.770897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.824482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.925797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.980354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.027354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.092748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642507887255517:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.092843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.093230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642507887255522:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.097450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:07.113421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:19:07.114190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642507887255524:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:07.199330Z node 1 :TX_PROXY ERROR: Actor# [1:7491642507887255580:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:07.742167Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642486412416746:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:07.742233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:08.342891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] >> KqpCost::IndexLookupAndTake-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 21197, MsgBus: 6643 2025-04-10T11:19:02.246103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642488431232557:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:02.246557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b85/r3tmp/tmpas0y9A/pdisk_1.dat 2025-04-10T11:19:02.613269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:02.627561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:02.627626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:02.630480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21197, node 1 2025-04-10T11:19:02.718733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:02.718754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:02.718761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:02.718871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6643 TClient is connected to server localhost:6643 WaitRootIsUp 'Root'... 
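Note: the query_phases / compilation / process_cpu_time_us blocks in the OltpWriteRow output below reconcile arithmetically with the reported totals: for the first statement, 723 + 1400 + 57780 + 1145 = 61048 = total_cpu_time_us, and the same rule (phase CPU + compilation CPU + process CPU = total CPU) holds for the KqpCost::AAARangeFullScan dump above: 5868 + 218714 + 349 = 224931. A minimal sketch of that check follows, assuming a single QueryStats text dump saved to a file; the file name is illustrative, and the aggregation rule is inferred from these dumps rather than from documentation.

import re

text = open('query_stats.txt').read()  # one QueryStats text dump, illustrative name

# cpu_time_us fields inside query_phases and compilation; the lookbehinds skip
# the separate process_cpu_time_us and total_cpu_time_us fields.
phase_cpu = sum(int(v) for v in re.findall(r'(?<!total_)(?<!process_)cpu_time_us: (\d+)', text))
process   = sum(int(v) for v in re.findall(r'process_cpu_time_us: (\d+)', text))
total     = int(re.search(r'total_cpu_time_us: (\d+)', text).group(1))

assert phase_cpu + process == total  # e.g. 723 + 1400 + 57780 + 1145 == 61048 below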
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:03.226695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.239832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.256444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:03.413403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.578332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:03.670284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.344810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642501316136042:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:05.344954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:05.855176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:05.911283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:05.985514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.054709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.106632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.172603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:06.284546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642505611103852:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.284620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.284962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642505611103857:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:06.289040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:06.301074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642505611103859:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:06.358094Z node 1 :TX_PROXY ERROR: Actor# [1:7491642505611103917:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:07.245505Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642488431232557:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:07.245616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:07.717142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 723 cpu_time_us: 723 } query_phases { duration_us: 5715 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1400 affected_shards: 1 } compilation { duration_us: 62319 cpu_time_us: 57780 } process_cpu_time_us: 1145 total_duration_us: 71000 total_cpu_time_us: 61048 query_phases { duration_us: 675 cpu_time_us: 675 } query_phases { duration_us: 3174 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1265 affected_shards: 1 } compilation { duration_us: 85082 cpu_time_us: 74082 } process_cpu_time_us: 1110 total_duration_us: 92340 total_cpu_time_us: 77132 2025-04-10T11:19:08.365664Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642514201039010:2529], TxId: 281474976715676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDA0ZjM0YzgtOTRlYmE4YjctZDZhZjllOTItMmZkZmE3OGU=. CustomerSuppliedId : . TraceId : 01jrfpsn184dwxqe7agv6e2ah8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:19:08.366088Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642514201039011:2530], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfpsn184dwxqe7agv6e2ah8. SessionId : ydb://session/3?node_id=1&id=ZDA0ZjM0YzgtOTRlYmE4YjctZDZhZjllOTItMmZkZmE3OGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491642514201039007:2490], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:19:08.366586Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDA0ZjM0YzgtOTRlYmE4YjctZDZhZjllOTItMmZkZmE3OGU=, ActorId: [1:7491642509906071472:2490], ActorState: ExecuteState, TraceId: 01jrfpsn184dwxqe7agv6e2ah8, Create QueryResponse for error on request, msg: query_phases { duration_us: 988 cpu_time_us: 988 } query_phases { duration_us: 9561 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 7410 affected_shards: 1 } query_phases { duration_us: 45739 cpu_time_us: 46406 } compilation { duration_us: 233094 cpu_time_us: 224935 } process_cpu_time_us: 2081 total_duration_us: 293304 total_cpu_time_us: 281820 query_phases { duration_us: 695 cpu_time_us: 695 } query_phases { duration_us: 2464 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 2820 affected_shards: 1 } query_phases { duration_us: 1207 cpu_time_us: 1537 } query_phases { duration_us: 4234 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1321 affected_shards: 1 } compilation { duration_us: 232040 cpu_time_us: 227476 } process_cpu_time_us: 1554 total_duration_us: 244275 total_cpu_time_us: 235403 query_phases { duration_us: 773 cpu_time_us: 773 } query_phases { duration_us: 10646 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 10638 affected_shards: 1 } query_phases { duration_us: 1224 cpu_time_us: 835 affected_shards: 1 } compilation { duration_us: 272423 cpu_time_us: 267595 } process_cpu_time_us: 1557 total_duration_us: 293155 total_cpu_time_us: 281398 query_phases { duration_us: 649 cpu_time_us: 649 } query_phases { duration_us: 6148 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6066 affected_shards: 1 } query_phases { duration_us: 3621 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1292 affected_shards: 1 } compilation { duration_us: 184183 cpu_time_us: 180046 } process_cpu_time_us: 1418 total_duration_us: 200421 total_cpu_time_us: 189471 query_phases { duration_us: 534 cpu_time_us: 534 } query_phases { duration_us: 2701 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1006 affected_shards: 1 } compilation { duration_us: 93431 cpu_time_us: 89841 } process_cpu_time_us: 868 total_duration_us: 97984 total_cpu_time_us: 92249 query_phases { duration_us: 652 cpu_time_us: 652 } query_phases { duration_us: 3031 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 951 affected_shards: 1 } compilation { duration_us: 63092 cpu_time_us: 59518 } process_cpu_time_us: 1072 total_duration_us: 68254 total_cpu_time_us: 62193 >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> TYardTest::TestLogLatency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17353, MsgBus: 19342 2025-04-10T11:19:03.614974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642491754366074:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:03.618126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/000b5a/r3tmp/tmp81Jhbo/pdisk_1.dat 2025-04-10T11:19:03.989106Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:04.020750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:04.020865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:04.024723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17353, node 1 2025-04-10T11:19:04.084290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:04.084317Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:04.084334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:04.084499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19342 TClient is connected to server localhost:19342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:04.887389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:04.924692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.073170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.303516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.412352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.104489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642508934237034:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.104604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.418872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.454131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.485091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.523503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.554063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.654684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.725904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642508934237556:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.725994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.726296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642508934237561:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.730895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:07.742745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642508934237563:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:07.841723Z node 1 :TX_PROXY ERROR: Actor# [1:7491642508934237618:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:08.617900Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642491754366074:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:08.618004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:08.834568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> TExtSubDomainTest::DeclareAndLs >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDrop >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::GenericCases >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 17138, MsgBus: 11870 2025-04-10T11:19:04.884875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642498657500115:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:04.885064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b37/r3tmp/tmp3Ntmk3/pdisk_1.dat 2025-04-10T11:19:05.387652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:05.392219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:05.392476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:05.397044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17138, node 1 2025-04-10T11:19:05.475119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:05.475145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:05.475168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:05.475327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11870 TClient is connected to server localhost:11870 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:06.144601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.164324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:06.182479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.392343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.556854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.649400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:08.484409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642515837371004:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.484547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.842537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.911468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.952742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.022098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.051650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.086421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.137067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642520132338818:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.137141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.137521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642520132338823:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.141095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:09.150287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642520132338825:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:09.227871Z node 1 :TX_PROXY ERROR: Actor# [1:7491642520132338878:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:09.886609Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642498657500115:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:09.886693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:10.216643Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-10T11:19:10.894822Z, after 0.679049s 2025-04-10T11:19:10.216726Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:19:10.266474Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-10T11:19:10.410442Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7491642524427306468:2489] 2025-04-10T11:19:10.410501Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7491642524427306436:2489] 2025-04-10T11:19:10.414728Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1744283950458:281474976710671 created 2025-04-10T11:19:10.414920Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-10T11:19:10.414947Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-10T11:19:10.415000Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-10T11:19:10.415205Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-10T11:19:10.415354Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T11:19:10.415396Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-10T11:19:10.415577Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. 
Shard resolve complete, resolved shards: 1 2025-04-10T11:19:10.415621Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-10T11:19:10.415659Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-04-10T11:19:10.415693Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:10.415755Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZm ... 1:19:10.447052Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:128;Recv TEvScanData from ShardID==[1:7491642524427306490:2050];ScanId=1;Finished=1;Lock=;BrokenLocks=; 2025-04-10T11:19:10.447125Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:517;action=got EvScanData;rows=3;finished=1;exceeded=0;scan=1;packs_to_send=0;from=[1:7491642524427306490:2050];shards remain=0;in flight scans=1;in flight shards=1;delayed_for_seconds_by_ratelimiter=0;tablet_id=72075186224037914;locks=0;broken locks=0; 2025-04-10T11:19:10.447166Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.cpp:39;event=receive_data;actor_id=[1:7491642524427306490:2050];count_chunks=1; 2025-04-10T11:19:10.447189Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:261;event=on_receive;compute_shard_id=NO_VALUE_OPTIONAL; 2025-04-10T11:19:10.447216Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:208;event=add_data_to_compute;rows=3; 2025-04-10T11:19:10.447245Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:178;event=send_data_to_compute;space=8388608;queue=1;compute_actor_id=[1:7491642524427306483:2496];rows=3; 2025-04-10T11:19:10.447271Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:162;event=stop_scanner; 2025-04-10T11:19:10.447292Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:39;event=scan_ack_on_finished;actor_id=[1:7491642524427306490:2050]; 2025-04-10T11:19:10.447309Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306487:2498]. EVLOGKQP:0/0/3/3 2025-04-10T11:19:10.447336Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037914;stop_shard=1; 2025-04-10T11:19:10.447365Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=[1:7491642524427306490:2050];message=;final_flag=1; 2025-04-10T11:19:10.447458Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306487:2498]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2025-04-10T11:19:10.447491Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:406;event=wait_all_scanner_finished;scans=0; 2025-04-10T11:19:10.447559Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306487:2498]. 
EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2025-04-10T11:19:10.447817Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:174 :TEvSendData: [1:7491642524427306487:2498]/[1:7491642524427306483:2496] 2025-04-10T11:19:10.448103Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306483:2496], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:19:10.448144Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:204 :TEvFetcherFinished: [1:7491642524427306487:2498] 2025-04-10T11:19:10.448226Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-10T11:19:10.448257Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306485:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2025-04-10T11:19:10.448300Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7491642524427306483:2496] 2025-04-10T11:19:10.448357Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306485:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:19:10.448516Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-04-10T11:19:10.448552Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306483:2496], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:19:10.448679Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-04-10T11:19:10.448685Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306485:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:19:10.448795Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:19:10.448869Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491642524427306483:2496], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3995 Tasks { TaskId: 1 CpuTimeUs: 1011 FinishTimeMs: 1744283950448 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 158 BuildCpuTimeUs: 853 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-x5vqckr7qy" NodeId: 1 CreateTimeMs: 1744283950417 } MaxMemoryUsage: 1048576 } 2025-04-10T11:19:10.449049Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491642524427306483:2496] 2025-04-10T11:19:10.449100Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:19:10.449117Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491642524427306485:2497], 2025-04-10T11:19:10.449288Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7491642524427306436:2489], seqNo: 1, nRows: 1 2025-04-10T11:19:10.451684Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7491642524427306486:2497] 2025-04-10T11:19:10.451748Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306485:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:19:10.451813Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T11:19:10.451831Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-10T11:19:10.451847Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491642524427306485:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfpsq4cepk9n09ct6f48w3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. 
All channels and sinks finished 2025-04-10T11:19:10.451941Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-10T11:19:10.452046Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:19:10.452236Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-10T11:19:10.452399Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491642524427306485:2497], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 26470 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 769 FinishTimeMs: 1744283950451 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 259 BuildCpuTimeUs: 510 HostName: "ghrun-x5vqckr7qy" NodeId: 1 CreateTimeMs: 1744283950418 } MaxMemoryUsage: 1048576 } 2025-04-10T11:19:10.452446Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491642524427306485:2497] 2025-04-10T11:19:10.452562Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:19:10.452619Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491642524427306478:2489] TxId: 281474976710672. Ctx: { TraceId: 01jrfpsq4cepk9n09ct6f48w3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgzMzZmMWUtZTM4MDA0ZTQtMWFhMzJkODUtNjIyYWZmNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.030465s ReadRows: 3 ReadBytes: 96 ru: 20 rate limiter was not found force flag: 1 2025-04-10T11:19:10.453494Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744283950458, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 8637, MsgBus: 8811 2025-04-10T11:19:03.923229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642490414939002:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:03.923292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b52/r3tmp/tmpz7ZuUG/pdisk_1.dat 2025-04-10T11:19:04.364728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:04.368150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:04.368270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:04.373206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8637, node 1 2025-04-10T11:19:04.506938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:04.506967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:04.506976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:04.507133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8811 TClient is connected to server localhost:8811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:05.120455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
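Note: the KqpCost::ScanQueryRangeFullScan block above ends with the executer's accounting line — ComputeTime 0.030465s, ReadRows 3, ReadBytes 96, ru 20 — which is the figure this suite asserts: the request-unit ("ru") charge is derived from the rows and bytes the query phases touch on /Root/Test. Below is a minimal C++ SDK sketch of issuing the same kind of scan query; it is a sketch under assumptions, not part of the test: the endpoint is illustrative (only the /Root/Test path appears in the log), and the end-of-stream handling follows the SDK's usual scan-query pattern.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:17138")   // illustrative; mirrors the test's gRPC port above
            .SetDatabase("/Root"));
        TTableClient client(driver);

        // Scan queries stream results in parts; server-side, the KQP executer
        // accounts ReadRows/ReadBytes per stat interval and derives "ru" from them.
        auto it = client.StreamExecuteScanQuery("SELECT * FROM Test;").GetValueSync();

        while (true) {
            auto part = it.ReadNext().GetValueSync();
            if (!part.IsSuccess()) {
                // EOS marks the normal end of the stream; anything else is a real error.
                if (!part.EOS()) {
                    std::cerr << part.GetIssues().ToString().c_str() << "\n";
                }
                break;
            }
            if (part.HasResultSet()) {
                auto rs = part.ExtractResultSet();
                std::cout << "rows in part: " << rs.RowsCount() << "\n";
            }
        }
        driver.Stop(true);
        return 0;
    }

The read-rows/read-bytes the server logs for such a query should line up with the per-table figures the cost tests compare against their expected values.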
2025-04-10T11:19:05.168238Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:05.177429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.426189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.588448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.673086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.628208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642507594809955:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.628314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.910944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.945320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:07.980252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.012751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.080990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.144414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.237938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642511889777773:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.238008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.238233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642511889777778:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.242089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:08.252376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642511889777780:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:08.323107Z node 1 :TX_PROXY ERROR: Actor# [1:7491642511889777836:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:08.924795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642490414939002:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:08.924904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:09.372270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow+isSink [GOOD] Test command err: Trying to start YDB, gRPC: 20483, MsgBus: 5929 2025-04-10T11:19:04.114750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642495256102444:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:04.130547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b49/r3tmp/tmpRYNzON/pdisk_1.dat 2025-04-10T11:19:04.696094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:04.696226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:04.699970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:04.708187Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20483, node 1 2025-04-10T11:19:04.877147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:04.877171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:04.877179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:04.877272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5929 TClient is connected to server localhost:5929 WaitRootIsUp 'Root'... 
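Note: the KqpCost::OltpWriteRow output that starts just above drives plain INSERT statements into keys that already exist, so the STATUS_CONSTRAINT_VIOLATION / "Duplicate keys have been found." (code 2012) lines further down — like the earlier "Conflict with existing key., code: 2012" — are the expected outcome: in YQL, INSERT fails on a key conflict, while UPSERT (and REPLACE) are blind writes that overwrite without error. A minimal C++ SDK sketch of that contrast follows; it is an illustration under assumptions: the endpoint and the Key/Value column names are invented for the example, and only the /Root/TestTable path and the error text come from this log.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:20483")   // illustrative; mirrors the gRPC port above
            .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.GetSession().GetValueSync().GetSession();
        auto tx = TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx();

        // Repeating this INSERT for an existing key fails the whole query with
        // PRECONDITION_FAILED / "Duplicate keys have been found." (code 2012).
        auto ins = session.ExecuteDataQuery(
            R"(INSERT INTO TestTable (Key, Value) VALUES (1u, "one");)", tx).GetValueSync();
        if (!ins.IsSuccess()) {
            std::cout << ins.GetIssues().ToString().c_str() << "\n";
        }

        // UPSERT is a blind write: on conflict it overwrites instead of failing,
        // which is why the UPSERT-based phases in the log report updates with no error.
        auto ups = session.ExecuteDataQuery(
            R"(UPSERT INTO TestTable (Key, Value) VALUES (1u, "one");)", tx).GetValueSync();

        driver.Stop(true);
        return ups.IsSuccess() ? 0 : 1;
    }

This is also why the test's query_phases summaries mix failed INSERT attempts (no table_access updates recorded) with successful update and delete phases against the same table.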
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:05.589390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.616133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:05.630426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:05.845087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.059980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.168658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.923559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642508141006111:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:07.923722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.228098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.260200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.285086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.313598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.388349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.444767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:08.520486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642512435973924:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.520592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.520632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642512435973929:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:08.524849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:08.539435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642512435973931:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:08.595480Z node 1 :TX_PROXY ERROR: Actor# [1:7491642512435973984:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:09.102391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642495256102444:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:09.102492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:09.444327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 4386 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1799 affected_shards: 1 } compilation { duration_us: 67328 cpu_time_us: 64203 } process_cpu_time_us: 557 total_duration_us: 73560 total_cpu_time_us: 66559 query_phases { duration_us: 5237 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1382 affected_shards: 1 } compilation { duration_us: 56333 cpu_time_us: 53520 } process_cpu_time_us: 602 total_duration_us: 65137 total_cpu_time_us: 55504 2025-04-10T11:19:09.834221Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=5; 2025-04-10T11:19:09.845953Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:19:09.846307Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:19:09.846520Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642516730941766:2497], Table: `/Root/TestTable` ([72057594046644480:16:1]), SessionActorId: [1:7491642516730941570:2497]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037919, Sink=[1:7491642516730941766:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:19:09.847195Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642516730941759:2497], SessionActorId: [1:7491642516730941570:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491642516730941570:2497]. isRollback=0 2025-04-10T11:19:09.847472Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTE3Mzk0MjktNGIwMDg4ZGQtZGE1NzVhMWYtMWUxMDNlYzU=, ActorId: [1:7491642516730941570:2497], ActorState: ExecuteState, TraceId: 01jrfpspnvb5qnhytpcadwfs9s, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491642516730941760:2497] from: [1:7491642516730941759:2497] 2025-04-10T11:19:09.847578Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491642516730941760:2497] TxId: 281474976710675. Ctx: { TraceId: 01jrfpspnvb5qnhytpcadwfs9s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE3Mzk0MjktNGIwMDg4ZGQtZGE1NzVhMWYtMWUxMDNlYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:19:09.847961Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTE3Mzk0MjktNGIwMDg4ZGQtZGE1NzVhMWYtMWUxMDNlYzU=, ActorId: [1:7491642516730941570:2497], ActorState: ExecuteState, TraceId: 01jrfpspnvb5qnhytpcadwfs9s, Create QueryResponse for error on request, msg: query_phases { duration_us: 18170 cpu_time_us: 1406 affected_shards: 1 } compilation { duration_us: 71577 cpu_time_us: 68482 } process_cpu_time_us: 813 total_duration_us: 91669 total_cpu_time_us: 70701 query_phases { duration_us: 4274 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1792 affected_shards: 1 } compilation { duration_us: 59485 cpu_time_us: 56455 } process_cpu_time_us: 569 total_duration_us: 66804 total_cpu_time_us: 58816 query_phases { duration_us: 4140 cpu_time_us: 1730 affected_shards: 1 } compilation { duration_us: 91356 cpu_time_us: 88529 } process_cpu_time_us: 578 total_duration_us: 97396 total_cpu_time_us: 90837 query_phases { duration_us: 4592 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1719 affected_shards: 1 } compilation { duration_us: 94843 cpu_time_us: 91612 } process_cpu_time_us: 540 total_duration_us: 101187 total_cpu_time_us: 93871 query_phases { duration_us: 4261 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1703 affected_shards: 1 } compilation { duration_us: 48473 cpu_time_us: 45903 } process_cpu_time_us: 453 total_duration_us: 55318 total_cpu_time_us: 48059 query_phases { duration_us: 4185 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1520 affected_shards: 1 } compilation { duration_us: 52327 cpu_time_us: 49463 } process_cpu_time_us: 466 total_duration_us: 58597 total_cpu_time_us: 51449 >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] >> TYardTest::TestLogLatency [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep >> KqpCost::IndexLookup-useSink [GOOD] >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest >> TYardTest::TestMultiYardFirstRecordToKeep [GOOD] >> TYardTest::TestLogOverwriteRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10662, MsgBus: 26147 2025-04-10T11:19:05.253658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642501290069254:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:05.253740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b2e/r3tmp/tmpTW87dd/pdisk_1.dat 2025-04-10T11:19:05.991644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:05.999615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:05.999726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:06.007739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10662, node 1 2025-04-10T11:19:06.127262Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:06.127302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:06.127318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:06.127477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26147 TClient is connected to server localhost:26147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:06.866914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:06.887167Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:06.902184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.065217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.273069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.378178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:09.237044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642518469940213:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.237152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.587318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.618992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.652157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.684991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.723142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.797771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.888014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642518469940737:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.888100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.888189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642518469940742:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.892043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:09.904109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642518469940744:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:09.996054Z node 1 :TX_PROXY ERROR: Actor# [1:7491642518469940799:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:10.253618Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642501290069254:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:10.253717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:10.850708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> KqpCost::OlapRangeFullScan [GOOD] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 >> Compression::WriteRAW >> KqpCost::PointLookup [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14395, MsgBus: 16061 2025-04-10T11:19:06.110606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642507082065282:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:06.110762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b00/r3tmp/tmp1ze9eW/pdisk_1.dat 2025-04-10T11:19:06.622628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:06.622747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:06.624508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:06.651686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14395, node 1 2025-04-10T11:19:06.762793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:06.762814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:06.762820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:06.762961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16061 TClient is connected to server localhost:16061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:07.471628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.502339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.662233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.836920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
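The FLAT_TX_SCHEMESHARD records and interleaved "waiting..." markers above are the fixture-setup phase of the cost test: each ESchemeOpCreateTable suboperation is one CREATE TABLE schema transaction that the test client blocks on before proceeding. A minimal client-side sketch of the same pattern, using the YDB Python SDK, follows. It is an illustration only, not the test's actual code: the endpoint reuses the GrpcPort printed above, the column layout is an assumption, and only the table name SecondaryKeys and the index name Index appear in the log itself (see "/Root/SecondaryKeys/Index/indexImplTable").

import ydb

# Endpoint/database assumed for illustration; the test binds a random
# gRPC port on localhost ("TServer::EnableGrpc on GrpcPort 14395").
driver = ydb.Driver(endpoint="grpc://localhost:14395", database="/Root")
driver.wait(timeout=5)  # block until discovery succeeds, as the test harness does

with ydb.SessionPool(driver) as pool:
    def create_fixture(session):
        # One schema transaction: the server logs a matching
        # ESchemeOpCreateTable suboperation, and the caller waits on it.
        session.execute_scheme(
            "CREATE TABLE SecondaryKeys ("
            "    Key Uint32,"
            "    Fk Uint32,"
            "    Value Utf8,"
            "    PRIMARY KEY (Key),"
            "    INDEX `Index` GLOBAL ON (Fk)"
            ");"
        )
    pool.retry_operation_sync(create_fixture)

driver.stop()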
2025-04-10T11:19:07.918982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.748950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642519966968930:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.749077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.144922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.225579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.261248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.297557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.332388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.382745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.465758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642524261936748:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.465852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.465884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642524261936753:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.469596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:10.478027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642524261936755:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:10.540090Z node 1 :TX_PROXY ERROR: Actor# [1:7491642524261936807:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:11.110852Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642507082065282:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:11.110925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:11.595251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-10T11:19:14.072048Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072080Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.072769Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.073927Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.088787Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.089843Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.099320Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.099346Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.099627Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.099969Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.100773Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.101475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.101811Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.102488Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-10T11:19:14.103854Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.103879Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.103900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.110815Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.114425Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.114593Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.114911Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.122518Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.128639Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.128788Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.128854Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T11:19:14.130231Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.130263Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.130295Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.130878Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.131610Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.131764Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.132040Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-10T11:19:14.132996Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.133183Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T11:19:14.133572Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-10T11:19:14.133803Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T11:19:14.133923Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.133959Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.134006Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.134184Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.134220Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:14.134246Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T11:19:14.134271Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.134422Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.134516Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T11:19:14.134537Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-10T11:19:14.134556Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.134635Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.134664Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T11:19:14.134684Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T11:19:14.134702Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.134819Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-10T11:19:14.136278Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.136303Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.136345Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.136681Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.138585Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.138761Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.139018Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-10T11:19:14.140927Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.141867Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T11:19:14.142240Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-10T11:19:14.142441Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T11:19:14.142560Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.142597Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.142620Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:14.142654Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T11:19:14.142700Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.142939Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.143020Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T11:19:14.143041Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-10T11:19:14.143064Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T11:19:14.143086Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T11:19:14.143109Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.143272Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-04-10T11:19:14.144583Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.144610Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.144631Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.145052Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.145650Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.145781Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.145982Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. 
Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.147499Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.148318Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.148594Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-10T11:19:14.148703Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T11:19:14.148941Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.148974Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.148996Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-10T11:19:14.149015Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-10T11:19:14.149062Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-10T11:19:14.149086Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-10T11:19:14.149219Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T11:19:14.149392Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 15861, MsgBus: 6318 2025-04-10T11:19:06.311876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642503198762925:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:06.313689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b1d/r3tmp/tmpLz5OLC/pdisk_1.dat 2025-04-10T11:19:06.897034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:06.900044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:06.907588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:06.909949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15861, node 1 2025-04-10T11:19:07.053505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:07.053525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:07.053537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:07.053623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6318 TClient is connected to server localhost:6318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:07.759624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.795332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.969511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
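The OlapRangeFullScan block that begins above later issues an ESchemeOpCreateColumnTable, so the /Root/TestTable it scans is a column-store table served by TX_COLUMNSHARD tablets (the normalizer records further down). As a hedged sketch only, not the test's actual DDL, creating such a table from a client comes down to a STORE = COLUMN clause; the column set here is an assumption, and note that column tables require NOT NULL primary-key columns.

import ydb

# Endpoint assumed for illustration, reusing the GrpcPort printed above
# ("Trying to start YDB, gRPC: 15861").
driver = ydb.Driver(endpoint="grpc://localhost:15861", database="/Root")
driver.wait(timeout=5)

with ydb.SessionPool(driver) as pool:
    def create_olap_table(session):
        # STORE = COLUMN selects the column-shard (OLAP) engine, which is
        # why the subsequent records come from TX_COLUMNSHARD tablets.
        session.execute_scheme(
            "CREATE TABLE `/Root/TestTable` ("
            "    Key Uint64 NOT NULL,"
            "    Value String,"
            "    PRIMARY KEY (Key)"
            ") WITH (STORE = COLUMN);"
        )
    pool.retry_operation_sync(create_olap_table)

driver.stop()

The per-phase stats at the end of that block (table_access with reads { rows: 2 bytes: 72 }) are the kind of numbers a cost test of this sort would assert against.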
2025-04-10T11:19:08.140515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:08.222536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:09.772718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642516083666563:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.772876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.122642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.200026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.267126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.307821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.340642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.374868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.446707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642520378634380:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.446785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.447076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642520378634385:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.450323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:10.459019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642520378634387:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:10.545669Z node 1 :TX_PROXY ERROR: Actor# [1:7491642520378634443:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:11.313327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642503198762925:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:11.313431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:11.669001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.821578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:11.821782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:11.822056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:11.822367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:19:11.822505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:19:11.822623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:19:11.822751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:19:11.822885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:19:11.823078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:19:11.823219Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:19:11.823336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:19:11.823445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642524673602154:2498];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:19:11.823984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7491642524673602178:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:11.824085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7491642524673602178:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:11.824346Z node 1 :TX_COLUMN ... MNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:11.996569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:11.996642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:11.996673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:11.996727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:11.996754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:11.996797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:11.996833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:11.998087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:11.998136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:11.998335Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:11.998375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:11.998512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:11.998546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:11.998680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:11.998707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:11.998794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:11.998830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:11.999332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:19:11.999373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:19:11.999481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:19:11.999518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:19:11.999656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:19:11.999699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:19:11.999775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:19:11.999810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:19:11.999868Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:19:11.999896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:19:11.999937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:19:11.999963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:12.001171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:12.001268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:12.001464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:12.001500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:12.001572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:12.001597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:12.001703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:12.001734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:12.001802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:12.001828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:12.047106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.047204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.051913Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.053610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.056810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.060073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.061433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.065609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.066082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.070239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:12.200710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:12.200710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:12.201416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; query_phases { duration_us: 254831 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 119919 } compilation { duration_us: 442305 cpu_time_us: 438280 } process_cpu_time_us: 441 total_duration_us: 702697 total_cpu_time_us: 558640 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-10T11:19:14.059195Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.059233Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.059258Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.059864Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T11:19:14.067327Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:19:14.067449Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.070073Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.070097Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.070117Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.070539Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.071138Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-10T11:19:14.071210Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072165Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072187Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072241Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.072686Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-10T11:19:14.072729Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.072758Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.073663Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-10T11:19:14.074635Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.074711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.074766Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.075176Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
<main>: Error: Failed to establish connection to server. Attempts done: 1 2025-04-10T11:19:14.075211Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.075224Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.075283Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-10T11:19:14.076813Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T11:19:14.076837Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T11:19:14.076865Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.077222Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.077756Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.090877Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T11:19:14.091856Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.093220Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-10T11:19:14.100071Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-10T11:19:14.100358Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.100402Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.100423Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:14.100969Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-10T11:19:14.100994Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-10T11:19:14.101010Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-10T11:19:14.101027Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-10T11:19:14.101045Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-10T11:19:14.101101Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-10T11:19:14.101130Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-10T11:19:14.101147Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-10T11:19:14.101162Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-10T11:19:14.102596Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-10T11:19:14.102668Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-10T11:19:14.102726Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-10T11:19:14.102749Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-10T11:19:14.102794Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-10T11:19:14.102808Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-10T11:19:14.102823Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-10T11:19:14.102848Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-10T11:19:14.102864Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-10T11:19:14.102880Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-10T11:19:14.102915Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-10T11:19:14.102938Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-10T11:19:14.102952Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-10T11:19:14.102988Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-10T11:19:14.103005Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-10T11:19:14.103023Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-10T11:19:14.103038Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-10T11:19:14.103062Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-10T11:19:14.103081Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-10T11:19:14.103098Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-10T11:19:14.103170Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-10T11:19:14.103189Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-10T11:19:14.103204Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-10T11:19:14.103218Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-10T11:19:14.103244Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-10T11:19:14.103260Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-10T11:19:14.103286Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-10T11:19:14.103307Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-10T11:19:14.103323Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-10T11:19:14.103349Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-10T11:19:14.103369Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-10T11:19:14.103386Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-10T11:19:14.103400Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-10T11:19:14.103424Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-10T11:19:14.103444Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-10T11:19:14.103472Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-10T11:19:14.103486Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-10T11:19:14.103506Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-10T11:19:14.103567Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-10T11:19:14.108488Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-10T11:19:14.108656Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-10T11:19:14.108693Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-10T11:19:14.108725Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-10T11:19:14.108751Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-10T11:19:14.108777Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-10T11:19:14.108795Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-10T11:19:14.108811Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-10T11:19:14.108846Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-10T11:19:14.108886Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-10T11:19:14.108904Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-10T11:19:14.108920Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-10T11:19:14.108955Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-10T11:19:14.108978Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-10T11:19:14.108994Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-10T11:19:14.109007Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-10T11:19:14.109024Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-10T11:19:14.109072Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-10T11:19:14.109091Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-10T11:19:14.109109Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-10T11:19:14.109134Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-10T11:19:14.109157Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-10T11:19:14.109172Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-10T11:19:14.109188Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-10T11:19:14.109203Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-10T11:19:14.109219Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-10T11:19:14.109236Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-10T11:19:14.109255Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-10T11:19:14.109283Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-10T11:19:14.109316Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-10T11:19:14.109332Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-10T11:19:14.109348Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-10T11:19:14.109364Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-10T11:19:14.109429Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-10T11:19:14.109446Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-10T11:19:14.109464Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-10T11:19:14.109487Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-10T11:19:14.109509Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-10T11:19:14.109524Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-10T11:19:14.109539Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-10T11:19:14.109559Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-10T11:19:14.109578Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-10T11:19:14.109595Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-10T11:19:14.109609Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-10T11:19:14.109637Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-10T11:19:14.109715Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-10T11:19:14.109738Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-10T11:19:14.109755Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-10T11:19:14.109773Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-10T11:19:14.109789Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-10T11:19:14.109805Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-10T11:19:14.109855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-10T11:19:14.109999Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-10T11:19:14.113725Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.113757Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.113845Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.114457Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T11:19:14.114926Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.115077Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.115758Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.216508Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.216749Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T11:19:14.216810Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.216850Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T11:19:14.216911Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T11:19:14.417677Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-10T11:19:14.520949Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T11:19:14.521996Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-10T11:19:14.526218Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-10T11:19:14.527506Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.527539Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.527559Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.527917Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.546557Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.546754Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.550276Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.652010Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.652388Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-10T11:19:14.652454Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.652493Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-10T11:19:14.652572Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-10T11:19:14.652660Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-10T11:19:14.658417Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-10T11:19:14.658639Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-10T11:19:14.662204Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16964, MsgBus: 23982 2025-04-10T11:19:08.171867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642515193700718:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:08.171989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac1/r3tmp/tmpvKWvDL/pdisk_1.dat 2025-04-10T11:19:08.526585Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16964, node 1 2025-04-10T11:19:08.580892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:08.581379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:08.588540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:08.665681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:08.665705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:08.665717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:08.665824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23982 TClient is connected to server localhost:23982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:09.232539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:09.265359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:09.455550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:09.625905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:09.710404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.520670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642528078604380:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:11.520838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:11.902530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.931146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.963465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.992213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:12.022068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:12.090542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:12.170862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642532373572193:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:12.170968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642532373572198:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:12.170971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:12.175173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:12.189448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642532373572200:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:12.290824Z node 1 :TX_PROXY ERROR: Actor# [1:7491642532373572257:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:13.173616Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642515193700718:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:13.173710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TExtSubDomainTest::DeclareAndLs [GOOD] >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::PatchPartPutError >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TVPatchTests::PatchPartOk >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TVPatchTests::PatchPartOk [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-04-10T11:19:12.463041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642531018569286:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.463098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ab/r3tmp/tmpV7H4f7/pdisk_1.dat 2025-04-10T11:19:13.017408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.017537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.023387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.093758Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9383 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:13.295850Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531018569533:2104] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.295910Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535313537108:2258] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.296074Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531018569557:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.296131Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642531018569557:2117], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.296934Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.305959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569225:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535313537113:2259] 2025-04-10T11:19:13.306032Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531018569225:2049] Subscribe: subscriber# [1:7491642535313537113:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.306103Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569231:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535313537115:2259] 2025-04-10T11:19:13.306118Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531018569231:2055] Subscribe: subscriber# [1:7491642535313537115:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.306252Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569228:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535313537114:2259] 2025-04-10T11:19:13.306273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537113:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531018569225:2049] 2025-04-10T11:19:13.306303Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531018569228:2052] Subscribe: subscriber# [1:7491642535313537114:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.306335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537115:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531018569231:2055] 2025-04-10T11:19:13.306371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537114:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531018569228:2052] 2025-04-10T11:19:13.306401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535313537110:2259] 2025-04-10T11:19:13.306443Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569225:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535313537113:2259] 2025-04-10T11:19:13.306449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535313537112:2259] 2025-04-10T11:19:13.306467Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569231:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535313537115:2259] 2025-04-10T11:19:13.306482Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569228:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535313537114:2259] 2025-04-10T11:19:13.306501Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642535313537109:2259][/dc-1] Set up state: owner# [1:7491642531018569557:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.306716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535313537111:2259] 2025-04-10T11:19:13.306805Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642535313537109:2259][/dc-1] Path was already updated: owner# [1:7491642531018569557:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.306880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537113:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537110:2259], cookie# 1 2025-04-10T11:19:13.306908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537114:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: 
/dc-1 }: sender# [1:7491642535313537111:2259], cookie# 1 2025-04-10T11:19:13.306925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537115:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537112:2259], cookie# 1 2025-04-10T11:19:13.306960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569225:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537113:2259], cookie# 1 2025-04-10T11:19:13.306989Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569228:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537114:2259], cookie# 1 2025-04-10T11:19:13.307041Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569231:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537115:2259], cookie# 1 2025-04-10T11:19:13.307097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537113:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531018569225:2049], cookie# 1 2025-04-10T11:19:13.307114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537114:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531018569228:2052], cookie# 1 2025-04-10T11:19:13.307159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537115:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531018569231:2055], cookie# 1 2025-04-10T11:19:13.307206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535313537110:2259], cookie# 1 2025-04-10T11:19:13.307242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.307260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535313537111:2259], cookie# 1 2025-04-10T11:19:13.307346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.307381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535313537112:2259], cookie# 1 2025-04-10T11:19:13.307427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Unexpected sync response: sender# [1:7491642535313537112:2259], cookie# 1 2025-04-10T11:19:13.357935Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531018569557:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.358585Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531018569557:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... leKind: 0 Created: 1 CreateStep: 1744283953573 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-10T11:19:13.591662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:19:13.591702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-10T11:19:13.591711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7491642535313537184:2298] 2025-04-10T11:19:13.591863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:19:13.591892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:19:13.591923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:19:13.592121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:19:13.592136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:19:13.592168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:19:13.592332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-10T11:19:13.592366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-10T11:19:13.592611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:19:13.592674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 2025-04-10T11:19:13.593684Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531018569533:2104] Handle TEvNavigate describe path /dc-1 2025-04-10T11:19:13.593719Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535313537201:2321] HANDLE EvNavigateScheme /dc-1 2025-04-10T11:19:13.593802Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531018569557:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.593864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642531018569557:2117], cookie# 4 2025-04-10T11:19:13.593907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537113:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537110:2259], cookie# 4 2025-04-10T11:19:13.593922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537114:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537111:2259], cookie# 4 2025-04-10T11:19:13.593937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537115:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537112:2259], cookie# 4 2025-04-10T11:19:13.593959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569228:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537114:2259], cookie# 4 2025-04-10T11:19:13.594026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569231:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537115:2259], cookie# 4 2025-04-10T11:19:13.594055Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537114:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7491642531018569228:2052], cookie# 4 2025-04-10T11:19:13.594089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537115:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7491642531018569231:2055], cookie# 4 2025-04-10T11:19:13.594137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7491642535313537111:2259], cookie# 4 2025-04-10T11:19:13.594173Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.594188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 
}: sender# [1:7491642535313537112:2259], cookie# 4 2025-04-10T11:19:13.594203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.594254Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531018569557:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:13.594350Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531018569557:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642535313537109:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953573 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:13.594409Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642531018569557:2117], cacheItem# { Subscriber: { Subscriber: [1:7491642535313537109:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953573 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-10T11:19:13.594448Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531018569225:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535313537113:2259], cookie# 4 2025-04-10T11:19:13.594526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535313537113:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7491642531018569225:2049], cookie# 4 2025-04-10T11:19:13.594552Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642535313537202:2322], recipient# [1:7491642535313537201:2321], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:13.594561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7491642535313537110:2259], cookie# 4 2025-04-10T11:19:13.594573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535313537109:2259][/dc-1] Unexpected sync response: sender# [1:7491642535313537110:2259], cookie# 4 2025-04-10T11:19:13.594581Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535313537201:2321] HANDLE EvNavigateKeySetResult 
TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:13.594699Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535313537201:2321] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:13.595182Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535313537201:2321] Handle TEvDescribeSchemeResult Forward to# [1:7491642535313537200:2320] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953573 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953573 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-04-10T11:19:16.330581Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.334777Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:19:16.334890Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:19:16.335146Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-10T11:19:16.336070Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.336351Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-10T11:19:16.337209Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-10T11:19:16.337318Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-10T11:19:16.338181Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-04-10T11:19:16.338253Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-10T11:19:16.338339Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-04-10T11:19:12.463042Z node 1 :METADATA_PROVIDER WARN: 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-04-10T11:19:12.463042Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642531025396330:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.463095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001397/r3tmp/tmpVYvRrb/pdisk_1.dat 2025-04-10T11:19:13.039714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.039844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.041882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.073295Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20777 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:13.330822Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531025396578:2104] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.330890Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535320364153:2259] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.331073Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531025396603:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.331120Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642531025396603:2117], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.331403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.333759Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396270:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535320364158:2260] 2025-04-10T11:19:13.333840Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531025396270:2049] Subscribe: subscriber# [1:7491642535320364158:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.333905Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396273:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535320364159:2260] 2025-04-10T11:19:13.333926Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531025396273:2052] Subscribe: subscriber# [1:7491642535320364159:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.333949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396276:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535320364160:2260] 2025-04-10T11:19:13.333964Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531025396276:2055] Subscribe: subscriber# [1:7491642535320364160:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.334033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364158:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path:
/dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531025396270:2049] 2025-04-10T11:19:13.334091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364159:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531025396273:2052] 2025-04-10T11:19:13.334137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364160:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531025396276:2055] 2025-04-10T11:19:13.334214Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396270:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535320364158:2260] 2025-04-10T11:19:13.334236Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396273:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535320364159:2260] 2025-04-10T11:19:13.334249Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396276:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535320364160:2260] 2025-04-10T11:19:13.334331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535320364155:2260] 2025-04-10T11:19:13.334367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535320364156:2260] 2025-04-10T11:19:13.334477Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642535320364154:2260][/dc-1] Set up state: owner# [1:7491642531025396603:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.334637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535320364157:2260] 2025-04-10T11:19:13.334714Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642535320364154:2260][/dc-1] Path was already updated: owner# [1:7491642531025396603:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.334786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364158:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364155:2260], cookie# 1 2025-04-10T11:19:13.334804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364159:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364156:2260], cookie# 1 2025-04-10T11:19:13.334818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364160:2260][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364157:2260], cookie# 1 2025-04-10T11:19:13.337340Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396270:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364158:2260], cookie# 1 2025-04-10T11:19:13.337402Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396273:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364159:2260], cookie# 1 2025-04-10T11:19:13.337440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396276:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364160:2260], cookie# 1 2025-04-10T11:19:13.337486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364158:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531025396270:2049], cookie# 1 2025-04-10T11:19:13.337504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364159:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531025396273:2052], cookie# 1 2025-04-10T11:19:13.337516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364160:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531025396276:2055], cookie# 1 2025-04-10T11:19:13.337568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535320364155:2260], cookie# 1 2025-04-10T11:19:13.337607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-10T11:19:13.337654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535320364156:2260], cookie# 1 2025-04-10T11:19:13.337685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-10T11:19:13.337711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535320364157:2260], cookie# 1 2025-04-10T11:19:13.337726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Unexpected sync response: sender# [1:7491642535320364157:2260], cookie# 1 2025-04-10T11:19:13.386997Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531025396603:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams {
Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.387383Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531025396603:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... SyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-10T11:19:13.603541Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531025396603:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642535320364217:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953636 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:13.603642Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642531025396603:2117], cacheItem# { Subscriber: { Subscriber: [1:7491642535320364217:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953636 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:19:13.603830Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642535320364224:2303], recipient# [1:7491642535320364216:2301], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:13.603871Z node 1 :TX_PROXY INFO: Actor# 
[1:7491642535320364216:2301] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-10T11:19:13.609426Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531025396578:2104] Handle TEvNavigate describe path /dc-1 2025-04-10T11:19:13.609476Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535320364226:2305] HANDLE EvNavigateScheme /dc-1 2025-04-10T11:19:13.609561Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531025396603:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.609630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642531025396603:2117], cookie# 4 2025-04-10T11:19:13.609664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364158:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364155:2260], cookie# 4 2025-04-10T11:19:13.609684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364159:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364156:2260], cookie# 4 2025-04-10T11:19:13.609709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364160:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364157:2260], cookie# 4 2025-04-10T11:19:13.609723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396270:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364158:2260], cookie# 4 2025-04-10T11:19:13.609726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396273:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364159:2260], cookie# 4 2025-04-10T11:19:13.609756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531025396276:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535320364160:2260], cookie# 4 2025-04-10T11:19:13.609768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364159:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531025396273:2052], cookie# 4 2025-04-10T11:19:13.609785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364158:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531025396270:2049], cookie# 4 2025-04-10T11:19:13.609798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535320364160:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531025396276:2055], cookie# 4 2025-04-10T11:19:13.609824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642535320364156:2260], cookie# 4 2025-04-10T11:19:13.609842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Sync is 
in progress: cookie# 4, size# 3, half# 1, successes# 1, failures# 0 2025-04-10T11:19:13.609869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642535320364155:2260], cookie# 4 2025-04-10T11:19:13.609889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-10T11:19:13.609923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642535320364157:2260], cookie# 4 2025-04-10T11:19:13.609955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535320364154:2260][/dc-1] Unexpected sync response: sender# [1:7491642535320364157:2260], cookie# 4 2025-04-10T11:19:13.610016Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531025396603:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:13.610125Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531025396603:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642535320364154:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953622 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:13.610308Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642531025396603:2117], cacheItem# { Subscriber: { Subscriber: [1:7491642535320364154:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953622 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-10T11:19:13.610481Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642535320364227:2306], recipient# [1:7491642535320364226:2305], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:13.610529Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535320364226:2305] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:13.610593Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535320364226:2305] SEND to# 72057594046644480 shardToRequest
NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:13.611281Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535320364226:2305] Handle TEvDescribeSchemeResult Forward to# [1:7491642535320364225:2304] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953622 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953622 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283953636 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-04-10T11:19:16.338305Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.339227Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-10T11:19:16.339300Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sent found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-10T11:19:16.339529Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-10T11:19:16.339624Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:19:16.339791Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyond the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm
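
FullPatchTestXorDiffFasterVGetResult above fails the patch because the incoming diff does not fit inside the 32-byte blob part (DiffStart# 100, DiffEnd# 96, BlobPartSize# 32). The following is a sketch of that kind of bounds check, producing a rejection message of the same shape; TDiff and ValidateDiff are invented names for illustration, not the real check in skeleton_vpatch_actor.cpp.

#include <cstdint>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

// Invented for illustration: one diff range within a blob part.
struct TDiff {
    uint32_t Start;  // first byte the diff touches within the part
    uint32_t End;    // one past the last byte the diff touches
};

// Returns an error message if the diff does not fit inside a blob part of
// blobPartSize bytes; a fitting diff returns nullopt and may be applied.
std::optional<std::string> ValidateDiff(size_t index, const TDiff& diff,
                                        uint32_t blobPartSize) {
    if (diff.Start > diff.End || diff.End > blobPartSize ||
        diff.Start > blobPartSize) {
        std::ostringstream reason;
        reason << "The diff at index " << index
               << " went beyond the blob part; DiffStart# " << diff.Start
               << " DiffEnd# " << diff.End
               << " BlobPartSize# " << blobPartSize;
        return reason.str();
    }
    return std::nullopt;
}

int main() {
    // The exact values from the failing trace above: rejected.
    if (auto err = ValidateDiff(0, TDiff{100, 96}, 32)) {
        std::cout << *err << "\n";
    }
    // A diff that fits within the 32-byte part: accepted.
    if (!ValidateDiff(0, TDiff{0, 16}, 32)) {
        std::cout << "diff fits, would be applied\n";
    }
    return 0;
}

Rejecting the diff up front is what lets the actor answer the patch request with Status# ERROR instead of corrupting the stored part.
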
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-04-10T11:19:16.552508Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.553940Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:19:16.554068Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sent found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:19:16.554302Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-10T11:19:16.554364Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.554607Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-10T11:19:16.554688Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-10T11:19:16.554763Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-10T11:19:16.554919Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-04-10T11:19:16.554965Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-10T11:19:16.555023Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-04-10T11:19:16.330616Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.333078Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:19:16.333171Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sent found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-10T11:19:16.333260Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-10T11:19:16.628153Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:16.628670Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:19:16.628767Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sent found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv
NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:19:16.629077Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-10T11:19:16.629169Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-10T11:19:16.629247Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-10T11:19:14.083922Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.083947Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.083974Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.090478Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.091133Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.103703Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.104743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.105641Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.106054Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.106311Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-10T11:19:14.106438Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.106526Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.106566Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-10T11:19:14.106604Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T11:19:14.106625Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T11:19:14.108252Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.108299Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.108417Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.108959Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T11:19:14.109365Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.109485Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.109728Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-10T11:19:14.110543Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.110768Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T11:19:14.111071Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-10T11:19:14.111334Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T11:19:14.111457Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.111493Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.111526Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.112131Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.112173Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:14.112193Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T11:19:14.112211Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.112333Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.112399Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-10T11:19:14.112419Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2025-04-10T11:19:14.112435Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.112502Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.112527Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-10T11:19:14.112557Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-10T11:19:14.112583Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:14.112677Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-10T11:19:14.113804Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.113839Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.113868Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.117542Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.118101Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.118270Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.118954Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-10T11:19:14.119861Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:14.120057Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-10T11:19:14.120396Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2025-04-10T11:19:14.120615Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-10T11:19:14.120829Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.120882Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.120998Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.121047Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:14.121069Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.121139Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.121178Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:14.121198Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-10T11:19:14.121282Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-04-10T11:19:14.121324Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-10T11:19:14.121341Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:16.459880Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). 
Partition stream id: 1 2025-04-10T11:19:16.539639Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T11:19:16.539681Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T11:19:16.539761Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:16.540264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:16.546551Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:16.546797Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-10T11:19:16.550404Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-10T11:19:16.666341Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-10T11:19:16.667521Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:16.669849Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:19:16.673845Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:19:16.674794Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-10T11:19:16.681291Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-10T11:19:16.683292Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-10T11:19:16.684993Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-10T11:19:16.686116Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-10T11:19:16.698181Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-10T11:19:16.699099Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-10T11:19:16.699182Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-10T11:19:16.699376Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-10T11:19:16.710050Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-04-10T11:19:16.716267Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.716330Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.716363Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:16.716708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:16.717819Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:16.717968Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.718258Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-10T11:19:16.718695Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-10T11:19:16.719936Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.719969Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.720024Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:16.720328Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:16.720788Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:16.720933Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.721479Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:16.721658Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:16.721807Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:16.721862Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T11:19:16.722031Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-04-10T11:19:12.463041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642531948892103:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.463095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013a9/r3tmp/tmpjwJLYx/pdisk_1.dat 2025-04-10T11:19:13.217490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:13.226944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.227070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.232787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5118 WaitRootIsUp 'dc-1'... 
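
The scheme-board subscriber traces in this section (in TExtSubDomainTest::DeclareAndLs above, and in the sync rounds that continue below) all follow one pattern: the main subscriber fans a versioned request out to 3 replicas, counts replies, logs "Sync is in progress" at successes# 1, declares "Sync is done" once successes# 2 exceeds half# 1, and tags any reply arriving after completion as "Unexpected sync response". A minimal standalone sketch of that majority count follows; TSyncRound is an invented class for illustration, not the real NKikimr subscriber.

#include <cstddef>
#include <iostream>

// Invented for illustration: one sync round over N replicas.
class TSyncRound {
public:
    explicit TSyncRound(std::size_t replicas) : Half(replicas / 2) {}

    // Feed one replica's TEvSyncVersionResponse. Returns true exactly when
    // this response completes the round (successes strictly exceed half).
    // The real round also distinguishes failures and partial results;
    // here failures are merely counted.
    bool HandleResponse(bool success) {
        if (Done) {
            ++Unexpected;  // "Unexpected sync response" in the trace
            return false;
        }
        if (success) {
            ++Successes;
        } else {
            ++Failures;
        }
        Done = Successes > Half;  // done at successes# 2 when size# 3, half# 1
        return Done;
    }

    std::size_t UnexpectedResponses() const { return Unexpected; }

private:
    std::size_t Half;
    std::size_t Successes = 0;
    std::size_t Failures = 0;
    std::size_t Unexpected = 0;
    bool Done = false;
};

int main() {
    TSyncRound round(3);                     // size# 3, half# 1
    std::cout << round.HandleResponse(true)  // successes# 1: in progress
              << round.HandleResponse(true)  // successes# 2: sync is done
              << round.HandleResponse(true)  // arrives after completion
              << "\n";                       // prints 010
    std::cout << "unexpected# " << round.UnexpectedResponses() << "\n";  // 1
    return 0;
}

Requiring a strict majority is why the round completes after two of three replies: one lagging replica never delays the answer, and its late response is logged and dropped rather than reprocessed.
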
TClient::Ls request: dc-1 2025-04-10T11:19:13.414329Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531948892367:2114] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.414382Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642536243860139:2439] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.414641Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531948892391:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.414686Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642531948892391:2127], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.414956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.417895Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892045:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536243860144:2440] 2025-04-10T11:19:13.417984Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531948892045:2049] Subscribe: subscriber# [1:7491642536243860144:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.418076Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892048:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536243860145:2440] 2025-04-10T11:19:13.418116Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531948892048:2052] Subscribe: subscriber# [1:7491642536243860145:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.418210Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892051:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536243860146:2440] 2025-04-10T11:19:13.418243Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531948892051:2055] Subscribe: subscriber# [1:7491642536243860146:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.418380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860144:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531948892045:2049] 2025-04-10T11:19:13.418467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860145:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531948892048:2052] 2025-04-10T11:19:13.418504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860146:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531948892051:2055] 2025-04-10T11:19:13.418552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536243860141:2440] 2025-04-10T11:19:13.418663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536243860142:2440] 2025-04-10T11:19:13.418736Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642536243860140:2440][/dc-1] Set up state: owner# [1:7491642531948892391:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.418891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536243860143:2440] 2025-04-10T11:19:13.418942Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642536243860140:2440][/dc-1] Path was already updated: owner# [1:7491642531948892391:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.418984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860144:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860141:2440], cookie# 1 2025-04-10T11:19:13.419001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860145:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860142:2440], cookie# 1 2025-04-10T11:19:13.419039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860146:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860143:2440], cookie# 1 2025-04-10T11:19:13.419085Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892045:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536243860144:2440] 2025-04-10T11:19:13.419111Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892045:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860144:2440], cookie# 1 2025-04-10T11:19:13.419130Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892048:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536243860145:2440] 2025-04-10T11:19:13.419141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892048:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860145:2440], cookie# 1 2025-04-10T11:19:13.419155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892051:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536243860146:2440] 2025-04-10T11:19:13.419167Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892051:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860146:2440], cookie# 1 2025-04-10T11:19:13.451206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860144:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531948892045:2049], cookie# 1 2025-04-10T11:19:13.451249Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860145:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531948892048:2052], cookie# 1 2025-04-10T11:19:13.451267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860146:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531948892051:2055], cookie# 1 2025-04-10T11:19:13.451304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536243860141:2440], cookie# 1 2025-04-10T11:19:13.451348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.451375Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536243860142:2440], cookie# 1 2025-04-10T11:19:13.451397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.451427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536243860143:2440], cookie# 1 2025-04-10T11:19:13.451474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Unexpected sync response: sender# [1:7491642536243860143:2440], cookie# 1 2025-04-10T11:19:13.491357Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531948892391:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.491832Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531948892391:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531948892391:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642536243860313:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953762 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:14.284512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642531948892391:2127], cacheItem# { Subscriber: { Subscriber: [1:7491642536243860313:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953762 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-10T11:19:14.284649Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642540538827772:2676], recipient# [1:7491642540538827771:2675], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:14.284680Z node 1 :TX_PROXY INFO: Actor# [1:7491642540538827771:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-10T11:19:14.287864Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531948892367:2114] Handle TEvNavigate describe path /dc-1 2025-04-10T11:19:14.287913Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642540538827776:2680] HANDLE EvNavigateScheme /dc-1 2025-04-10T11:19:14.288076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531948892391:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:14.288184Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642531948892391:2127], cookie# 4 2025-04-10T11:19:14.288234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860144:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860141:2440], cookie# 4 2025-04-10T11:19:14.288249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860145:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860142:2440], cookie# 4 2025-04-10T11:19:14.288262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860146:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860143:2440], cookie# 4 2025-04-10T11:19:14.288323Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892045:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860144:2440], cookie# 4 2025-04-10T11:19:14.288344Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892048:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860145:2440], cookie# 4 2025-04-10T11:19:14.288359Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531948892051:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536243860146:2440], cookie# 4 2025-04-10T11:19:14.288512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860144:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531948892045:2049], cookie# 4 2025-04-10T11:19:14.288543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860145:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531948892048:2052], cookie# 4 2025-04-10T11:19:14.288561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536243860146:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642531948892051:2055], cookie# 4 2025-04-10T11:19:14.288584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642536243860141:2440], cookie# 4 2025-04-10T11:19:14.288600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:14.288614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642536243860142:2440], cookie# 4 2025-04-10T11:19:14.288629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:14.288665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642536243860143:2440], cookie# 4 2025-04-10T11:19:14.288676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536243860140:2440][/dc-1] Unexpected sync response: sender# [1:7491642536243860143:2440], cookie# 4 2025-04-10T11:19:14.288760Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531948892391:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2025-04-10T11:19:14.288816Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531948892391:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642536243860140:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953727 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:14.288869Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642531948892391:2127], cacheItem# { Subscriber: { Subscriber: [1:7491642536243860140:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953727 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-10T11:19:14.289256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642540538827777:2681], recipient# [1:7491642540538827776:2680], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:14.289325Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642540538827776:2680] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:14.289398Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642540538827776:2680] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:14.289973Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642540538827776:2680] Handle TEvDescribeSchemeResult Forward to# [1:7491642540538827775:2679] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953727 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953727 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283953762 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-10T11:19:12.469063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642528864103842:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.469159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001310/r3tmp/tmpDY4McZ/pdisk_1.dat 2025-04-10T11:19:13.016005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.018483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.028501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.045583Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26456 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:13.282339Z node 1 :TX_PROXY DEBUG: actor# [1:7491642528864103906:2115] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.282399Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642533159071675:2436] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.283324Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642528864103929:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.283390Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642528864103929:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.283690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.286133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103582:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533159071680:2437] 2025-04-10T11:19:13.286239Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642528864103582:2049] Subscribe: subscriber# [1:7491642533159071680:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.286316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103588:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533159071682:2437] 2025-04-10T11:19:13.286323Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103585:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533159071681:2437] 2025-04-10T11:19:13.286345Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642528864103588:2055] Subscribe: subscriber# [1:7491642533159071682:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.286377Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642528864103585:2052] Subscribe: subscriber# [1:7491642533159071681:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.286390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071680:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642528864103582:2049] 2025-04-10T11:19:13.286415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071682:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642528864103588:2055] 2025-04-10T11:19:13.286469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071681:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642528864103585:2052] 2025-04-10T11:19:13.286470Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103582:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533159071680:2437] 2025-04-10T11:19:13.286492Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103588:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7491642533159071682:2437] 2025-04-10T11:19:13.286506Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103585:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533159071681:2437] 2025-04-10T11:19:13.286511Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533159071677:2437] 2025-04-10T11:19:13.286574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533159071679:2437] 2025-04-10T11:19:13.286643Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642533159071676:2437][/dc-1] Set up state: owner# [1:7491642528864103929:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.287786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533159071678:2437] 2025-04-10T11:19:13.287850Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642533159071676:2437][/dc-1] Path was already updated: owner# [1:7491642528864103929:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.287889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071680:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071677:2437], cookie# 1 2025-04-10T11:19:13.287941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071681:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071678:2437], cookie# 1 2025-04-10T11:19:13.287957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071682:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071679:2437], cookie# 1 2025-04-10T11:19:13.290230Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103582:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071680:2437], cookie# 1 2025-04-10T11:19:13.290278Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103585:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071681:2437], cookie# 1 2025-04-10T11:19:13.290319Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103588:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071682:2437], cookie# 1 2025-04-10T11:19:13.290357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071680:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642528864103582:2049], cookie# 1 2025-04-10T11:19:13.290375Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7491642533159071681:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642528864103585:2052], cookie# 1 2025-04-10T11:19:13.290390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071682:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642528864103588:2055], cookie# 1 2025-04-10T11:19:13.290437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533159071677:2437], cookie# 1 2025-04-10T11:19:13.290490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.290518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533159071678:2437], cookie# 1 2025-04-10T11:19:13.290544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.290599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533159071679:2437], cookie# 1 2025-04-10T11:19:13.290617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Unexpected sync response: sender# [1:7491642533159071679:2437], cookie# 1 2025-04-10T11:19:13.359432Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642528864103929:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.359843Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642528864103929:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642528864103929:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642533159071866:2577] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953636 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:14.168929Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642528864103929:2128], cacheItem# { Subscriber: { Subscriber: [1:7491642533159071866:2577] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1744283953636 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-10T11:19:14.169093Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642537454039305:2667], recipient# [1:7491642537454039304:2666], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:14.169136Z node 1 :TX_PROXY INFO: Actor# [1:7491642537454039304:2666] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-10T11:19:14.178429Z node 1 :TX_PROXY DEBUG: actor# [1:7491642528864103906:2115] Handle TEvNavigate describe path /dc-1 2025-04-10T11:19:14.178485Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642537454039307:2669] HANDLE EvNavigateScheme /dc-1 2025-04-10T11:19:14.178636Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642528864103929:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:14.178722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642528864103929:2128], cookie# 4 2025-04-10T11:19:14.178761Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071680:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071677:2437], cookie# 4 2025-04-10T11:19:14.178774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071681:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071678:2437], cookie# 4 2025-04-10T11:19:14.178786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071682:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071679:2437], cookie# 4 2025-04-10T11:19:14.178808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103582:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071680:2437], cookie# 4 2025-04-10T11:19:14.178829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103585:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071681:2437], cookie# 4 2025-04-10T11:19:14.178843Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642528864103588:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533159071682:2437], cookie# 4 2025-04-10T11:19:14.178865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071680:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642528864103582:2049], cookie# 4 2025-04-10T11:19:14.178875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071681:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642528864103585:2052], cookie# 4 2025-04-10T11:19:14.178897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533159071682:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642528864103588:2055], cookie# 4 2025-04-10T11:19:14.178934Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642533159071677:2437], cookie# 4 2025-04-10T11:19:14.178950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:14.178963Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642533159071678:2437], cookie# 4 2025-04-10T11:19:14.178978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:14.178999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7491642533159071679:2437], cookie# 4 2025-04-10T11:19:14.179013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533159071676:2437][/dc-1] Unexpected sync response: sender# [1:7491642533159071679:2437], cookie# 4 2025-04-10T11:19:14.179046Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642528864103929:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2025-04-10T11:19:14.179101Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642528864103929:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642533159071676:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953608 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:14.179179Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642528864103929:2128], cacheItem# { Subscriber: { Subscriber: [1:7491642533159071676:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283953608 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-10T11:19:14.179311Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642537454039308:2670], recipient# [1:7491642537454039307:2669], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:14.179343Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642537454039307:2669] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:14.179448Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642537454039307:2669] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:14.180039Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642537454039307:2669] Handle TEvDescribeSchemeResult Forward to# [1:7491642537454039306:2668] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953608 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744283953608 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283953636 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) >> TVPatchTests::PatchPartGetError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23370, MsgBus: 26285 2025-04-10T11:19:10.047738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642523657571972:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:10.047839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a91/r3tmp/tmprD8iXT/pdisk_1.dat 2025-04-10T11:19:10.361054Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23370, node 1 2025-04-10T11:19:10.446417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:10.446754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:10.448571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:10.454544Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:10.454566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:10.454571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:10.454659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26285 TClient is connected to server localhost:26285 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:10.947293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:10.972694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:11.095446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:11.254586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:11.337826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:12.939946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642532247508342:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:12.940144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:13.246004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.273380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.311372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.349362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.386920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.474090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:13.541263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642536542476155:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:13.541359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:13.541769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642536542476160:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:13.546500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:13.560964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642536542476162:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:13.643323Z node 1 :TX_PROXY ERROR: Actor# [1:7491642536542476217:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:14.944685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:14.998317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:19:15.050290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642523657571972:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:15.050378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:15.071075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> TVPatchTests::PatchPartGetError [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> TopicService::UnknownConsumer >> TExtSubDomainTest::GenericCases [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-04-10T11:19:17.948401Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:17.949467Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-10T11:19:17.949540Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-10T11:19:17.949731Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-10T11:19:17.949786Z node 1 :BS_VDISK_PATCH INFO: 
{BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-10T11:19:17.949937Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-10T11:19:17.949998Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |90.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-10T11:19:12.471317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642529817094877:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.471418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012f4/r3tmp/tmpyQBdL3/pdisk_1.dat 2025-04-10T11:19:13.137546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:13.147293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.147434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.151992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9359 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:13.403980Z node 1 :TX_PROXY DEBUG: actor# [1:7491642529817095141:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.404040Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642534112062922:2448] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.404170Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642529817095176:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.404194Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642529817095176:2130], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.404422Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.406355Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642534112062928:2449] 2025-04-10T11:19:13.406364Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094817:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642534112062927:2449] 2025-04-10T11:19:13.406412Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529817094820:2053] Subscribe: subscriber# [1:7491642534112062928:2449], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.406418Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529817094817:2050] Subscribe: subscriber# [1:7491642534112062927:2449], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.406483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094823:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642534112062929:2449] 2025-04-10T11:19:13.406506Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529817094823:2056] Subscribe: subscriber# [1:7491642534112062929:2449], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.406566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062928:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529817094820:2053] 2025-04-10T11:19:13.406605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062927:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529817094817:2050] 2025-04-10T11:19:13.406659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062929:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529817094823:2056] 2025-04-10T11:19:13.406713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642534112062925:2449] 2025-04-10T11:19:13.406740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
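The subscribe entries above show the fan-out behind these tests: one main scheme board subscriber creates a replica subscriber per storage replica (three here), and the sync round that follows completes on a majority, as the counters "size# 3, half# 1, successes# 2" in the entries below make explicit; a third reply arriving after the decision is logged as "Unexpected sync response" and ignored. A minimal sketch of that majority rule, assuming a single sync cookie; TSyncQuorum and OnReply are illustrative names, not YDB's actual classes.

// Quorum tracker implied by the "Sync is in progress/Sync is done" entries.
#include <cstdint>
#include <iostream>

struct TSyncQuorum {
    uint32_t Size;           // number of replica subscribers (size# in the log)
    uint32_t Successes = 0;
    uint32_t Failures = 0;   // "faulires#" in the log output

    // Returns true once the sync round is decided, one way or the other.
    bool OnReply(bool ok) {
        ok ? ++Successes : ++Failures;
        uint32_t half = Size / 2;                    // half# in the log
        return Successes > half || Failures > half;
    }
};

int main() {
    TSyncQuorum q{3};
    std::cout << q.OnReply(true) << '\n';  // 0: one success, sync in progress
    std::cout << q.OnReply(true) << '\n';  // 1: 2 > half(1), sync is done
}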
[main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642534112062924:2449] 2025-04-10T11:19:13.406792Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642534112062923:2449][/dc-1] Set up state: owner# [1:7491642529817095176:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.406889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642534112062926:2449] 2025-04-10T11:19:13.406940Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642534112062923:2449][/dc-1] Path was already updated: owner# [1:7491642529817095176:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.406979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062927:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062924:2449], cookie# 1 2025-04-10T11:19:13.407006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062928:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062925:2449], cookie# 1 2025-04-10T11:19:13.407030Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062929:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062926:2449], cookie# 1 2025-04-10T11:19:13.407060Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642534112062928:2449] 2025-04-10T11:19:13.407105Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062928:2449], cookie# 1 2025-04-10T11:19:13.407135Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094817:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642534112062927:2449] 2025-04-10T11:19:13.407149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094817:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062927:2449], cookie# 1 2025-04-10T11:19:13.407190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094823:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642534112062929:2449] 2025-04-10T11:19:13.407207Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094823:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642534112062929:2449], cookie# 1 2025-04-10T11:19:13.407396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062928:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529817094820:2053], cookie# 1 2025-04-10T11:19:13.407443Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062927:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529817094817:2050], cookie# 1 2025-04-10T11:19:13.407458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642534112062929:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529817094823:2056], cookie# 1 2025-04-10T11:19:13.407506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642534112062925:2449], cookie# 1 2025-04-10T11:19:13.407537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.407551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642534112062924:2449], cookie# 1 2025-04-10T11:19:13.407568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.407619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642534112062926:2449], cookie# 1 2025-04-10T11:19:13.407632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642534112062923:2449][/dc-1] Unexpected sync response: sender# [1:7491642534112062926:2449], cookie# 1 2025-04-10T11:19:13.469569Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.469887Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... ate# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:16.653241Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642546996965619:3031][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491642546996965634:3031] 2025-04-10T11:19:16.653249Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491642546996965629:3029] 2025-04-10T11:19:16.653258Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642546996965619:3031][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7491642529817095176:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:16.653263Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491642546996965628:3030] 2025-04-10T11:19:16.653274Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094820:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491642546996965636:3031] 2025-04-10T11:19:16.653289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094823:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491642546996965630:3030] 2025-04-10T11:19:16.653317Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529817094823:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491642546996965637:3031] 2025-04-10T11:19:16.653322Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:16.653432Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7491642546996965617:3029] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:16.653570Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642529817095176:2130], cacheItem# { Subscriber: { Subscriber: [1:7491642546996965617:3029] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:16.653670Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-10T11:19:16.653706Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642546996965638:3032], recipient# [1:7491642546996965613:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:16.653750Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7491642546996965618:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:16.653808Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642529817095176:2130], cacheItem# { Subscriber: { Subscriber: [1:7491642546996965618:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:16.653835Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:16.653903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529817095176:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7491642546996965619:3031] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:16.653968Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642529817095176:2130], cacheItem# { Subscriber: { Subscriber: [1:7491642546996965619:3031] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:16.654082Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642546996965639:3033], recipient# [1:7491642546996965616:2339], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:17.469812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642529817094877:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:17.469897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:17.483533Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642529817095176:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:17.483675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642529817095176:2130], cacheItem# { Subscriber: { Subscriber: [1:7491642534112062932:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:17.483774Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642551291932955:3037], recipient# [1:7491642551291932954:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:17.654201Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642529817095176:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-04-10T11:19:17.654346Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642529817095176:2130], cacheItem# { Subscriber: { Subscriber: [1:7491642546996965617:3029] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:17.654429Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642551291932959:3040], recipient# [1:7491642551291932958:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpRm::NodesMembershipByExchanger >> KqpRm::SingleTask >> KqpRm::Reduce >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::ManyTasks >> KqpRm::SnapshotSharingByExchanger >> KqpRm::DisonnectNodes >> KqpRm::NotEnoughExecutionUnits >> KqpRm::Reduce [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpRm::ManyTasks [GOOD] >> KqpRm::NotEnoughExecutionUnits [GOOD] >> DemoTx::Scenario_3 [GOOD] >> KqpRm::SingleTask [GOOD] >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::Init ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-04-10T11:19:21.119622Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.120198Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d35/r3tmp/tmpBkXJnb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.120948Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d35/r3tmp/tmpBkXJnb/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d35/r3tmp/tmpBkXJnb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15481571931045426661 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.172222Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.172566Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.200758Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.200905Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.201093Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.201166Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.201315Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.201355Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.201386Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.201409Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
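The "Send to publish resource usage" entries below carry the per-node snapshot that each KQP resource manager publishes at start-up and again on every change (the logged reasons: "data_center update", "tenant updated"). A hedged sketch of a struct mirroring only the fields visible in the log; the real payload is a larger protobuf message, and these type names are illustrative.

#include <cstdint>
#include <iostream>
#include <vector>

struct TPoolMemory {
    uint32_t Pool;       // "Memory { Pool: 1 ... }" in the log
    uint64_t Available;
};

struct TNodeResourceSnapshot {
    uint32_t NodeId;
    uint64_t Timestamp;               // unix seconds, e.g. 1744283961
    uint32_t AvailableComputeActors;  // 100 in these tests
    uint64_t UsedMemory;
    uint64_t TotalMemory;             // 1000 in this test's broker pool
    std::vector<TPoolMemory> Memory;  // per-pool availability
    uint32_t ExecutionUnits;
};

int main() {
    // Node 2's publication as logged below this sketch.
    TNodeResourceSnapshot s{2, 1744283961, 100, 0, 1000, {{1, 1000}}, 100};
    std::cout << "node " << s.NodeId
              << " free " << s.TotalMemory - s.UsedMemory << '\n';
}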
2025-04-10T11:19:21.201646Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.216743Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.217137Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.217236Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.217656Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.217776Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.217812Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.217931Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.218043Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.218073Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.218186Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.218306Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.219102Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.219413Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.219967Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.220105Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.220202Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.220429Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.220498Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.220721Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
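The Reduce scenario below allocates {0, 100} for task kqp-1-1-1 and then resubmits the same task at {0, 30}; the broker keeps the task in-fly and the resource manager logs the difference as "Released resources, Memory: 70". A minimal sketch of that shrink-in-place accounting, assuming a single memory dimension; TTaskAccount and its methods are illustrative names.

#include <cassert>
#include <cstdint>
#include <iostream>

class TTaskAccount {
public:
    void Allocate(uint64_t bytes) { Held += bytes; }

    // Shrinks the task to newSize and returns how much was given back,
    // mirroring the "Update task ... resources={0, 30} resubmit=0" entry.
    uint64_t Reduce(uint64_t newSize) {
        assert(newSize <= Held);
        uint64_t released = Held - newSize;
        Held = newSize;
        return released;
    }

private:
    uint64_t Held = 0;
};

int main() {
    TTaskAccount task;   // kqp-1-1-1 in the log below
    task.Allocate(100);
    std::cout << "released: " << task.Reduce(30) << '\n';  // released: 70
}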
2025-04-10T11:19:21.220865Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.220905Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.223748Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.223827Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.223889Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.223933Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.223979Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.224194Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.224418Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:458:2336]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2025-04-10T11:19:21.224481Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.224528Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.224567Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-04-10T11:19:21.114478Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115030Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d26/r3tmp/tmpfk1xUT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.115733Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d26/r3tmp/tmpfk1xUT/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d26/r3tmp/tmpfk1xUT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15554823242778979150 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.166740Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.167091Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188062Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188201Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.188381Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.188447Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.188598Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188636Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.188667Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188690Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.189672Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.201715Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202011Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202085Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202457Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.202570Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.202604Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202700Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202791Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.202814Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202901Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203001Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.204566Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.204947Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205492Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205610Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205694Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205921Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205984Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.206229Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
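Before any allocation starts, the exchanger entries above show each node subscribing to the shared board ("kqpexch+/dc-1") and pulling snapshots from every peer it discovers ("Get resources info from node: N"). A hedged sketch of the resulting consumer-side view, a map from node id to the latest published snapshot; the names here are illustrative, not the actor's real state.

#include <cstdint>
#include <iostream>
#include <map>

struct TSnapshot {
    uint64_t TotalMemory;
    uint64_t UsedMemory;
    uint32_t ExecutionUnits;
};

class TClusterView {
public:
    // Called whenever a peer's publication arrives through the board.
    void Update(uint32_t nodeId, TSnapshot s) { View[nodeId] = s; }

    uint64_t FreeMemory(uint32_t nodeId) const {
        auto it = View.find(nodeId);
        return it == View.end() ? 0
                                : it->second.TotalMemory - it->second.UsedMemory;
    }

private:
    std::map<uint32_t, TSnapshot> View;
};

int main() {
    TClusterView view;
    view.Update(1, {1000, 0, 100});  // node 1 as published above
    view.Update(2, {1000, 0, 100});  // node 2 as published above
    std::cout << view.FreeMemory(2) << '\n';  // 1000
}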
2025-04-10T11:19:21.206389Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.206426Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.209928Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.210001Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.210054Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.210096Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.210164Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.215120Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.215434Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.215477Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.215524Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.215569Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.215612Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:458:2336])) 2025-04-10T11:19:21.215653Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.215769Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.215806Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.215834Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.215869Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.215917Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:458:2336])) 2025-04-10T11:19:21.215943Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.216047Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.216070Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216098Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.216126Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216154Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:458:2336])) 2025-04-10T11:19:21.216176Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.216264Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.216286Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216312Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.216334Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216359Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:458:2336])) 2025-04-10T11:19:21.216398Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.216510Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.216539Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216578Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.216608Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216637Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:458:2336])) 2025-04-10T11:19:21.216661Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.216754Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.216789Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216818Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.216842Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.216870Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:458:2336])) 2025-04-10T11:19:21.216906Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.217015Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.217049Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.217089Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.217115Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.217142Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:458:2336])) 2025-04-10T11:19:21.217181Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.217341Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.217366Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.217395Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.217424Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.217449Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:458:2336])) 2025-04-10T11:19:21.217476Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.217595Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:19:21.217649Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.217697Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
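Every ManyTasks entry above walks the same broker lifecycle: submit, assign to queue_kqp_resource_manager, allocate, in-fly; the queue's planned usage grows by a fixed step per {0, 100} task (0.25 here) and shrinks by the same step when a task finishes. A minimal sketch of that bookkeeping; the 0.0025-per-unit constant is inferred from the logged steps, not read from the broker's configuration, so treat it as an assumption.

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

class TQueueUsage {
public:
    void Submit(const std::string& taskId, uint64_t memory) {
        Tasks[taskId] = memory;
        Planned += memory * kUsagePerUnit;
    }

    void Finish(const std::string& taskId) {
        auto it = Tasks.find(taskId);
        if (it == Tasks.end()) return;
        Planned -= it->second * kUsagePerUnit;
        Tasks.erase(it);
    }

    double Planned = 0.0;  // "planned resource usage" in the log

private:
    static constexpr double kUsagePerUnit = 0.0025;  // 100 units -> 0.25 step
    std::map<std::string, uint64_t> Tasks;
};

int main() {
    TQueueUsage q;
    for (int i = 1; i <= 9; ++i)  // kqp-1-1-1 .. kqp-1-9-9 as in the log
        q.Submit("kqp-1-" + std::to_string(i) + "-" + std::to_string(i), 100);
    q.Finish("kqp-1-1-1");
    std::cout << q.Planned << '\n';  // prints 2, matching the final 2.000000
}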
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-04-10T11:19:21.115176Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115767Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d42/r3tmp/tmp2une7U/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.116465Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d42/r3tmp/tmp2une7U/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d42/r3tmp/tmp2une7U/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3564317936210194528 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.166531Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.166905Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188083Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188236Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.188393Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.188457Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.188612Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188650Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.188683Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188705Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.189677Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.200898Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.201323Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.201433Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-10T11:19:21.201699Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.201827Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.201860Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.201970Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202092Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.202117Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202230Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-04-10T11:19:21.202354Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.204571Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.204923Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205535Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205664Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205760Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205995Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from 
subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.206083Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.206336Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.206500Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.206561Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.209521Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:458:2336]) priority=0 resources={0, 1000} 2025-04-10T11:19:21.209595Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.209664Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.209706Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.209753Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.215171Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2025-04-10T11:19:21.215347Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:458:2336]) priority=0 resources={0, 100000} 2025-04-10T11:19:21.215435Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.215500Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:458:2336]) 2025-04-10T11:19:21.215560Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:458:2336]) 2025-04-10T11:19:21.215664Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-04-10T11:19:21.209426Z } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-04-10T11:19:21.118405Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.118984Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d50/r3tmp/tmpS8SqXy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.119655Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d50/r3tmp/tmpS8SqXy/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d50/r3tmp/tmpS8SqXy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9593649638527682693 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.171855Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.172180Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188710Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188860Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.189038Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.189124Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.189255Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.189295Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.189344Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.189368Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.189620Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.204447Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.204755Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.204844Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.205096Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.205193Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.205226Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.205370Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.205471Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.205496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.205582Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.205707Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.206589Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.206889Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.207444Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.207571Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.207684Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.207928Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.207994Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.208218Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-10T11:19:21.208381Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.208432Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-04-10T11:19:21.114559Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115059Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d37/r3tmp/tmplkS3le/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.115730Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d37/r3tmp/tmplkS3le/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d37/r3tmp/tmplkS3le/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16529402368154577052 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.168802Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.169172Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188099Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188242Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.188427Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.188498Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.188626Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188667Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.188699Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188724Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.189686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.206979Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.207305Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.207390Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.207663Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.207785Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.207820Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.207916Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.208014Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.208040Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.208130Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.208226Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.208949Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.209244Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.209812Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.209938Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.210039Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.210316Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving 
tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.210394Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.210650Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.210823Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.210868Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.213712Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:21.213792Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.213852Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:21.213910Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:21.213964Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.215108Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:21.215426Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:19:21.215491Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:458:2336])) 2025-04-10T11:19:21.215545Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> KqpRm::DisonnectNodes [GOOD] >> TGRpcCmsTest::AuthTokenTest >> KqpRm::NodesMembershipByExchanger [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> DemoTx::Scenario_4 >> TSubDomainTest::CreateTablet >> TSubDomainTest::Boot >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TSubDomainTest::StartAndStopTenanNode >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-04-10T11:19:21.114472Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115034Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d59/r3tmp/tmp7F4Z0G/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-10T11:19:21.115780Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d59/r3tmp/tmp7F4Z0G/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d59/r3tmp/tmp7F4Z0G/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9004914434291491401 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.166723Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.167051Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188022Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188138Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.188265Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.188321Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.188460Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188496Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.188520Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188535Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.189649Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202414Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202689Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202745Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202987Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.203120Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.203152Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.203270Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203378Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.203397Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.203455Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203538Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.204814Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.205145Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205777Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205917Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.206018Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.206272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.206369Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.206590Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-10T11:19:21.206767Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.206815Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:22.267668Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.267972Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.270747Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:22.540955Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-04-10T11:19:21.115412Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115969Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d2e/r3tmp/tmpF9L8Rc/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.116621Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d2e/r3tmp/tmpF9L8Rc/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d2e/r3tmp/tmpF9L8Rc/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8541258187285615696 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.174132Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.174489Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.196897Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.197046Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.197243Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start 
KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.197333Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.197474Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.197517Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.197550Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.197574Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.197820Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.222501Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.222859Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.222946Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.223236Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.223360Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.223407Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.223516Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.223633Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.223659Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.223738Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.223844Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.224689Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.225033Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.225629Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with 
size: 1 2025-04-10T11:19:21.225759Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.225855Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.226092Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.226187Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.226418Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.226583Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.226638Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:22.289013Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.289183Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.290297Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-10T11:19:22.290516Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-10T11:19:22.290916Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-10T11:19:22.292109Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:351:2269] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-04-10T11:19:22.292624Z node 2 :TX_PROXY WARN: actor# [2:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-10T11:19:22.292880Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-10T11:19:22.293125Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:22.293190Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:463:2103], reason: tenant updated 2025-04-10T11:19:22.293398Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:22.297531Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:22.297742Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:22.627000Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSubDomainTest::UserAttributes >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget >> TTransferTests::Create >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-10T11:19:12.463102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642530826679050:2065];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:19:12.463168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001340/r3tmp/tmpf0lxAK/pdisk_1.dat 2025-04-10T11:19:13.130016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.130176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.137928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.175142Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4992 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:13.401348Z node 1 :TX_PROXY DEBUG: actor# [1:7491642530826679316:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.401407Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535121647084:2437] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.401622Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642530826679357:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.401656Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642530826679357:2130], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.401893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.403913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678992:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535121647089:2438] 2025-04-10T11:19:13.403993Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642530826678992:2050] Subscribe: subscriber# [1:7491642535121647089:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.404080Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678998:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535121647091:2438] 2025-04-10T11:19:13.404095Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642530826678998:2056] Subscribe: subscriber# [1:7491642535121647091:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.404165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647089:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642530826678992:2050] 2025-04-10T11:19:13.404195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647091:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642530826678998:2056] 2025-04-10T11:19:13.404250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 
2 }: sender# [1:7491642535121647086:2438] 2025-04-10T11:19:13.404282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535121647088:2438] 2025-04-10T11:19:13.404339Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642535121647085:2438][/dc-1] Set up state: owner# [1:7491642530826679357:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.404468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647089:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647086:2438], cookie# 1 2025-04-10T11:19:13.404483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647090:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647087:2438], cookie# 1 2025-04-10T11:19:13.404497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647091:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647088:2438], cookie# 1 2025-04-10T11:19:13.404522Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678992:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535121647089:2438] 2025-04-10T11:19:13.404549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678992:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647089:2438], cookie# 1 2025-04-10T11:19:13.404568Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678998:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535121647091:2438] 2025-04-10T11:19:13.404581Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678998:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647091:2438], cookie# 1 2025-04-10T11:19:13.406822Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678995:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535121647090:2438] 2025-04-10T11:19:13.406886Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642530826678995:2053] Subscribe: subscriber# [1:7491642535121647090:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.406935Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678995:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535121647090:2438], cookie# 1 2025-04-10T11:19:13.406987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647089:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642530826678992:2050], cookie# 1 2025-04-10T11:19:13.407003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647091:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642530826678998:2056], cookie# 1 2025-04-10T11:19:13.407046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647090:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642530826678995:2053] 2025-04-10T11:19:13.407084Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535121647090:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642530826678995:2053], cookie# 1 2025-04-10T11:19:13.407126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535121647086:2438], cookie# 1 2025-04-10T11:19:13.407172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.407196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535121647088:2438], cookie# 1 2025-04-10T11:19:13.407221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.407260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535121647087:2438] 2025-04-10T11:19:13.407343Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642535121647085:2438][/dc-1] Path was already updated: owner# [1:7491642530826679357:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.407366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535121647087:2438], cookie# 1 2025-04-10T11:19:13.407380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535121647085:2438][/dc-1] Unexpected sync response: sender# [1:7491642535121647087:2438], cookie# 1 2025-04-10T11:19:13.407438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642530826678995:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535121647090:2438] 2025-04-10T11:19:13.465961Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642530826679357:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 
PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.469820Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642530826679357:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... sion: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.766561Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435071:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338247:2785] 2025-04-10T11:19:21.766566Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435074:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338248:2785] 2025-04-10T11:19:21.766582Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435071:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7491642571171338259:2787] 2025-04-10T11:19:21.766587Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435074:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7491642571171338260:2787] 2025-04-10T11:19:21.766590Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491642558286435071:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-10T11:19:21.766596Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491642558286435074:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-10T11:19:21.766617Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491642558286435071:2053] Subscribe: subscriber# [3:7491642571171338259:2787], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:21.766624Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7491642558286435074:2056] Subscribe: subscriber# [3:7491642571171338260:2787], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:21.766640Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435071:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338253:2786] 2025-04-10T11:19:21.766643Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435074:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[3:7491642571171338254:2786] 2025-04-10T11:19:21.766677Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571171338258:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558286435068:2050] 2025-04-10T11:19:21.766698Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571171338259:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558286435071:2053] 2025-04-10T11:19:21.766716Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571171338260:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558286435074:2056] 2025-04-10T11:19:21.766728Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-10T11:19:21.766744Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571171338244:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571171338255:2787] 2025-04-10T11:19:21.766817Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571171338244:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571171338256:2787] 2025-04-10T11:19:21.766844Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491642571171338244:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7491642558286435407:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.766861Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642571171338240:2785] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.766867Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571171338244:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571171338257:2787] 2025-04-10T11:19:21.766900Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491642571171338244:2787][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7491642558286435407:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.766923Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435068:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338258:2787] 2025-04-10T11:19:21.766939Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435071:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338259:2787] 2025-04-10T11:19:21.766958Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558286435074:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571171338260:2787] 2025-04-10T11:19:21.766996Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558286435407:2127], cacheItem# { Subscriber: { Subscriber: [3:7491642571171338240:2785] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.767058Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:21.767148Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642571171338241:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.767200Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558286435407:2127], cacheItem# { Subscriber: { Subscriber: [3:7491642571171338241:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.767282Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:21.767334Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642558286435407:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642571171338244:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 
Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.767358Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642571171338261:2788], recipient# [3:7491642571171338238:2320], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:21.767394Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558286435407:2127], cacheItem# { Subscriber: { Subscriber: [3:7491642571171338244:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.767454Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642571171338262:2789], recipient# [3:7491642571171338239:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> KqpRm::SnapshotSharingByExchanger [GOOD]
>> TSubDomainTest::FailIfAffectedSetNotInterior
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
2025-04-10T11:19:12.471346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642531160088116:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.471424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001383/r3tmp/tmp0XswW0/pdisk_1.dat 2025-04-10T11:19:13.109222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.109356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.117990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.126524Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6175 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-10T11:19:13.390746Z node 1 :TX_PROXY DEBUG: actor# [1:7491642531160088374:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.390799Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642535455056145:2438] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.390907Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642531160088398:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.390931Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642531160088398:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.391095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.392562Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088053:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535455056151:2439] 2025-04-10T11:19:13.392563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088050:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535455056150:2439] 2025-04-10T11:19:13.392622Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531160088053:2053] Subscribe: subscriber# [1:7491642535455056151:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.392623Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531160088050:2050] Subscribe: subscriber# [1:7491642535455056150:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.392671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088056:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642535455056152:2439] 2025-04-10T11:19:13.392687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056150:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531160088050:2050] 2025-04-10T11:19:13.392689Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642531160088056:2056] Subscribe: subscriber# [1:7491642535455056152:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.392704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056151:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531160088053:2053] 2025-04-10T11:19:13.392721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056152:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642531160088056:2056] 2025-04-10T11:19:13.392725Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088050:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535455056150:2439] 2025-04-10T11:19:13.392744Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088053:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7491642535455056151:2439] 2025-04-10T11:19:13.392757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535455056147:2439] 2025-04-10T11:19:13.392761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088056:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642535455056152:2439] 2025-04-10T11:19:13.392781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535455056148:2439] 2025-04-10T11:19:13.392816Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642535455056146:2439][/dc-1] Set up state: owner# [1:7491642531160088398:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.392892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642535455056149:2439] 2025-04-10T11:19:13.392923Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642535455056146:2439][/dc-1] Path was already updated: owner# [1:7491642531160088398:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.392955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056150:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056147:2439], cookie# 1 2025-04-10T11:19:13.392979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056151:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056148:2439], cookie# 1 2025-04-10T11:19:13.392991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056152:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056149:2439], cookie# 1 2025-04-10T11:19:13.393028Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088050:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056150:2439], cookie# 1 2025-04-10T11:19:13.393045Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088053:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056151:2439], cookie# 1 2025-04-10T11:19:13.393054Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642531160088056:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642535455056152:2439], cookie# 1 2025-04-10T11:19:13.393076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056150:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531160088050:2050], cookie# 1 2025-04-10T11:19:13.393085Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7491642535455056151:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531160088053:2053], cookie# 1 2025-04-10T11:19:13.393095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642535455056152:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642531160088056:2056], cookie# 1 2025-04-10T11:19:13.393115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535455056147:2439], cookie# 1 2025-04-10T11:19:13.393136Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.393146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535455056148:2439], cookie# 1 2025-04-10T11:19:13.393160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.393179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642535455056149:2439], cookie# 1 2025-04-10T11:19:13.393189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642535455056146:2439][/dc-1] Unexpected sync response: sender# [1:7491642535455056149:2439], cookie# 1 2025-04-10T11:19:13.442889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642531160088398:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.443309Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642531160088398:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... adata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642568823327037:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.893721Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642568823327039:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642568823327053:2937] 2025-04-10T11:19:21.893739Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642568823327039:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642568823327052:2937] 2025-04-10T11:19:21.893752Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491642568823327039:2937][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7491642555938424094:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.893768Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642568823327039:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642568823327054:2937] 2025-04-10T11:19:21.893783Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491642568823327039:2937][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7491642555938424094:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.893790Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642555938424094:2149], cacheItem# { Subscriber: { Subscriber: [3:7491642568823327037:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.893798Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423695:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642568823327043:2935] 2025-04-10T11:19:21.893813Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423695:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642568823327049:2936] 2025-04-10T11:19:21.893825Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423695:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[3:7491642568823327055:2937] 2025-04-10T11:19:21.893839Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642555938424094:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-10T11:19:21.893880Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642568823327058:2938], recipient# [3:7491642568823327033:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:21.893881Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642555938424094:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642568823327038:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.893914Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423701:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642568823327051:2936] 2025-04-10T11:19:21.893925Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423701:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642568823327057:2937] 2025-04-10T11:19:21.893930Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642555938424094:2149], cacheItem# { Subscriber: { Subscriber: [3:7491642568823327038:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.893936Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642555938423698:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642568823327056:2937] 2025-04-10T11:19:21.893955Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642555938424094:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:21.893999Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642555938424094:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642568823327039:2937] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.894058Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642555938424094:2149], cacheItem# { Subscriber: { 
Subscriber: [3:7491642568823327039:2937] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.894167Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642568823327059:2939], recipient# [3:7491642568823327036:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:22.770815Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642555938424094:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:22.770983Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642555938424094:2149], cacheItem# { Subscriber: { Subscriber: [3:7491642560233392262:2824] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:22.771076Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642573118294373:2943], recipient# [3:7491642573118294372:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:22.894419Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642555938424094:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-04-10T11:19:22.894578Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642555938424094:2149], cacheItem# { Subscriber: { Subscriber: [3:7491642568823327037:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:22.894690Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642573118294378:2944], recipient# [3:7491642573118294377:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
2025-04-10T11:19:12.510389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642529635664613:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.510570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001339/r3tmp/tmpOkeb8n/pdisk_1.dat 2025-04-10T11:19:13.153330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:13.187835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.187956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.198021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16979 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-10T11:19:13.433779Z node 1 :TX_PROXY DEBUG: actor# [1:7491642529635664855:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.433839Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642533930632617:2427] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.433979Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642529635664878:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.434015Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642529635664878:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.434444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.442732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664531:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533930632622:2428] 2025-04-10T11:19:13.442802Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529635664531:2050] Subscribe: subscriber# [1:7491642533930632622:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.442898Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664534:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533930632623:2428] 2025-04-10T11:19:13.442921Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529635664534:2053] Subscribe: subscriber# [1:7491642533930632623:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.442946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664537:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533930632624:2428] 2025-04-10T11:19:13.442963Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529635664537:2056] Subscribe: subscriber# [1:7491642533930632624:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.443005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632622:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529635664531:2050] 2025-04-10T11:19:13.443052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632623:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529635664534:2053] 2025-04-10T11:19:13.443101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632624:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529635664537:2056] 2025-04-10T11:19:13.443162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533930632619:2428] 2025-04-10T11:19:13.443195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533930632620:2428] 2025-04-10T11:19:13.443246Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642533930632618:2428][/dc-1] Set up state: owner# [1:7491642529635664878:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.444776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533930632621:2428] 2025-04-10T11:19:13.444837Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642533930632618:2428][/dc-1] Path was already updated: owner# [1:7491642529635664878:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.444888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632622:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632619:2428], cookie# 1 2025-04-10T11:19:13.444906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632623:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632620:2428], cookie# 1 2025-04-10T11:19:13.444923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632624:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632621:2428], cookie# 1 2025-04-10T11:19:13.444953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664531:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533930632622:2428] 2025-04-10T11:19:13.444985Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664531:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632622:2428], cookie# 1 2025-04-10T11:19:13.445023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664534:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533930632623:2428] 2025-04-10T11:19:13.445042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664534:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632623:2428], cookie# 1 2025-04-10T11:19:13.445060Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664537:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533930632624:2428] 2025-04-10T11:19:13.445075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529635664537:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533930632624:2428], cookie# 1 2025-04-10T11:19:13.477706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632622:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529635664531:2050], cookie# 1 2025-04-10T11:19:13.477748Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632623:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529635664534:2053], cookie# 1 2025-04-10T11:19:13.477762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533930632624:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529635664537:2056], cookie# 1 2025-04-10T11:19:13.477802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533930632619:2428], cookie# 1 2025-04-10T11:19:13.477838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.477872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533930632620:2428], cookie# 1 2025-04-10T11:19:13.477902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.477927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533930632621:2428], cookie# 1 2025-04-10T11:19:13.477940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533930632618:2428][/dc-1] Unexpected sync response: sender# [1:7491642533930632621:2428], cookie# 1 2025-04-10T11:19:13.498214Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529635664878:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.498633Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529635664878:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.971018Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491642571270641680:3014][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7491642558385738588:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.971021Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738253:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641693:3014] 2025-04-10T11:19:21.971051Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571270641680:3014][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642571270641688:3014] 2025-04-10T11:19:21.971054Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642558385738588:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:21.971075Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491642571270641680:3014][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7491642558385738588:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.971112Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571270641697:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558385738247:2050] 2025-04-10T11:19:21.971139Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571270641698:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558385738250:2053] 2025-04-10T11:19:21.971143Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642558385738588:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642571270641680:3014] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.971162Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7491642571270641699:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642558385738253:2056] 2025-04-10T11:19:21.971196Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558385738588:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642571270641680:3014] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.971198Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571270641681:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571270641694:3015] 2025-04-10T11:19:21.971226Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571270641681:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571270641695:3015] 2025-04-10T11:19:21.971230Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738253:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641699:3015] 2025-04-10T11:19:21.971256Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738247:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641685:3013] 2025-04-10T11:19:21.971276Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738247:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641691:3014] 2025-04-10T11:19:21.971286Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491642571270641681:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7491642558385738588:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.971297Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738247:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641697:3015] 2025-04-10T11:19:21.971313Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738250:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641689:3013] 2025-04-10T11:19:21.971335Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738250:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641692:3014] 2025-04-10T11:19:21.971337Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642571270641681:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7491642571270641696:3015] 2025-04-10T11:19:21.971354Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642558385738250:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642571270641698:3015] 2025-04-10T11:19:21.971363Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: 
[main][3:7491642571270641681:3015][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7491642558385738588:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:21.971422Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642558385738588:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:21.971431Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642571270641700:3016], recipient# [3:7491642571270641677:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:21.971497Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642558385738588:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642571270641681:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:21.971595Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558385738588:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642571270641681:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:21.971698Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642571270641701:3017], recipient# [3:7491642571270641678:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:22.798588Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642558385738588:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:22.798734Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642558385738588:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642562680706825:2829] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:22.798835Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642575565609011:3018], recipient# [3:7491642575565609010:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD]
Test command err:
2025-04-10T11:19:21.114646Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:19:21.115195Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d3e/r3tmp/tmpJ4ipD5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:19:21.116002Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000d3e/r3tmp/tmpJ4ipD5/pdisk_1.dat": unknown reason, errno# 0.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000d3e/r3tmp/tmpJ4ipD5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9887055072216908635 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:19:21.169040Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.169398Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:19:21.188144Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:459:2100] with ResourceBroker at [2:430:2099] 2025-04-10T11:19:21.188317Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:460:2101] 2025-04-10T11:19:21.188510Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:458:2336] with ResourceBroker at [1:429:2317] 2025-04-10T11:19:21.188582Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:461:2337] 2025-04-10T11:19:21.188711Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188754Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-10T11:19:21.188788Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-10T11:19:21.188812Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-10T11:19:21.189688Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.202587Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.202927Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.203017Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203274Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.203398Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.203435Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.203561Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203678Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-10T11:19:21.203704Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:21.203791Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283961 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:21.203885Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-10T11:19:21.204660Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-10T11:19:21.204965Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205542Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205669Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205767Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.205991Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-10T11:19:21.206081Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:21.206314Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 
2025-04-10T11:19:21.206475Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:21.206538Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:22.256147Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.256272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.256433Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:22.256483Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.256535Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:22.256576Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.256633Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:22.256846Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:22.256923Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:458:2336]) priority=0 resources={0, 100} 2025-04-10T11:19:22.256977Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.257016Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:458:2336]) from queue queue_kqp_resource_manager 2025-04-10T11:19:22.257061Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:458:2336]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.257114Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:458:2336])) 2025-04-10T11:19:22.257184Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:22.257262Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:22.257407Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283962 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-10T11:19:22.257673Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:22.522384Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:22.522620Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:459:2100]) priority=0 resources={0, 100} 2025-04-10T11:19:22.522680Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:459:2100]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.522740Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:459:2100]) from queue queue_kqp_resource_manager 2025-04-10T11:19:22.522806Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:459:2100]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.522860Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:459:2100])) 2025-04-10T11:19:22.522989Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:22.523066Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:459:2100]) priority=0 resources={0, 100} 2025-04-10T11:19:22.523119Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:459:2100]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.523161Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:459:2100]) from queue queue_kqp_resource_manager 2025-04-10T11:19:22.523200Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:459:2100]) to queue queue_kqp_resource_manager 2025-04-10T11:19:22.523239Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:459:2100])) 2025-04-10T11:19:22.523313Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-10T11:19:22.523396Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:22.523576Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283963 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-10T11:19:22.523910Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:24.074588Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:24.074718Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:19:24.074811Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:458:2336])) 2025-04-10T11:19:24.074857Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-10T11:19:24.074908Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-10T11:19:24.074957Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:458:2336]) (release resources {0, 100}) 2025-04-10T11:19:24.074999Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:458:2336])) 2025-04-10T11:19:24.075042Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-10T11:19:24.075100Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:24.075254Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1744283964 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:24.075556Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-10T11:19:24.375633Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-10T11:19:24.375758Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:459:2100]) (release resources {0, 100}) 2025-04-10T11:19:24.375816Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:459:2100])) 2025-04-10T11:19:24.375855Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-10T11:19:24.375906Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-10T11:19:24.375957Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:459:2100]) (release resources {0, 100}) 2025-04-10T11:19:24.376016Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:459:2100])) 2025-04-10T11:19:24.376069Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-10T11:19:24.376138Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-10T11:19:24.376306Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1744283965 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-10T11:19:24.376633Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-10T11:19:24.635692Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request |90.3%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-04-10T11:18:19.751353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642302576310798:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:19.751450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001db2/r3tmp/tmpG3j1TW/pdisk_1.dat 2025-04-10T11:18:20.077300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:20.129070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:20.129170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:20.134451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25840, node 1 2025-04-10T11:18:20.257956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:20.257990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:20.257997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:20.258087Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:20.676618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Triggering split by load TClient is connected to server localhost:25296 2025-04-10T11:18:22.206436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461213736:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.206527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.530943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.682899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461213905:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.682995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.705060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-10T11:18:22.791602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214013:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.791741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214039:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214040:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214041:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214042:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214044:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.792200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214043:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.793385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214070:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.793412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214075:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.793448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.794777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214100:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.794819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214102:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.794829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.796374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214130:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.796440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.796458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642315461214138:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool defa ... 1474976733394. Ctx: { TraceId: 01jrfpsys74vnsawbx2enpwqap, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM4MmEwNjMtZTEwMGM0NDItMjUwZDhjNjUtYTAyNThlYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:19:18.068861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976733395. Ctx: { TraceId: 01jrfpsyskbckcehfkwx8z23xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM4MmEwNjMtZTEwMGM0NDItMjUwZDhjNjUtYTAyNThlYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:25296 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-04-10T11:19:20.105473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-10T11:19:20.105588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-10T11:19:20.105635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2025-04-10T11:19:22.863334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037889 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-04-10T11:19:22.863525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:22.864265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:19:22.867532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-10T11:19:22.874727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-10T11:19:22.874818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-10T11:19:22.876564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:22.882108Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7491642573159432400:6746] 2025-04-10T11:19:22.902611Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-10T11:19:22.902753Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-10T11:19:22.902987Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-10T11:19:22.907102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-04-10T11:19:22.907147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-04-10T11:19:22.908869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:19:22.923853Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-10T11:19:22.923991Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:19:22.924053Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:19:22.924081Z node 1 :TX_DATASHARD INFO: Change sender activated: at 
tablet: 72075186224037891 2025-04-10T11:19:22.924347Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-10T11:19:22.926501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-04-10T11:19:22.926790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-04-10T11:19:22.927087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-04-10T11:19:22.928561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:19:22.928832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:19:22.928897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:19:22.929732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-10T11:19:22.929776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-10T11:19:22.929795Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-10T11:19:22.935004Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-10T11:19:22.935004Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-10T11:19:22.935154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-10T11:19:22.935404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-10T11:19:22.935462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-10T11:19:22.935481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-10T11:19:22.935514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-10T11:19:22.939760Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:22.939805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-04-10T11:19:22.939881Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:22.940171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:22.940386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, 
datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:22.945229Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-10T11:19:22.945285Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:19:22.945855Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-10T11:19:22.945951Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-10T11:19:22.946248Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T11:19:22.946308Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T11:19:22.946792Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:19:22.946865Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283902655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> KqpIndexes::ExplainCollectFullDiagnostics >> KqpIndexes::UpdateIndexSubsetPk >> KqpUniqueIndex::UpsertExplicitNullInComplexFk >> KqpUniqueIndex::InsertFkAlreadyExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 20502, MsgBus: 20896 2025-04-10T11:19:05.479564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642499593392717:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:05.480509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000b25/r3tmp/tmpt7Twsm/pdisk_1.dat 2025-04-10T11:19:06.121575Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:06.122321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:06.122418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:06.128341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20502, node 1 2025-04-10T11:19:06.350853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:06.350877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:06.350895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:06.351046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20896 TClient is connected to server localhost:20896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:07.098508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
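Stepping back to the MergeByNoLoadAfterSplit output above: after the 10h fast-forward, SchemeShard proposes the merge because the combined load of the two shards stays under the threshold ("shardLoad: 0.02", "totalLoad: 0.04, loadThreshold: 0.07", "shardToMergeCount: 2"). A hedged reconstruction of that predicate from the log line alone — the real SchemeShard decision also weighs sizes, which this run shows as totalSize: 0 / sizeToMerge: 0:

// Reconstruction from: "merge by load (shardLoad: 0.02), shardToMergeCount: 2,
// totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07".
// Assumption: a merge of >= 2 adjacent low-load shards is proposed when the
// summed load stays below the threshold.
bool ShouldMergeByLoad(double totalLoad, double loadThreshold, int shardsToMerge) {
    return shardsToMerge >= 2 && totalLoad < loadThreshold;
}
// ShouldMergeByLoad(0.04, 0.07, 2) == true, so an
// ESchemeOpSplitMergeTablePartitions transaction is proposed, the two source
// datashards hand off to the new tablet 72075186224037891, and then go Offline.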
2025-04-10T11:19:07.146287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.329635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.537641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:07.630787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:09.381854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642516773263521:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.382036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:09.702863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.735428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.802752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.844333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.905838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:09.939099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:10.018315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642521068231339:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.018389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.018626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642521068231344:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:10.022639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:10.034457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642521068231346:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:10.118186Z node 1 :TX_PROXY ERROR: Actor# [1:7491642521068231401:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:10.475095Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642499593392717:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:10.475171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:11.025812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:11.168981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:11.169182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:11.169515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:19:11.169646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:19:11.169727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:19:11.169823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:19:11.169931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:19:11.170060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:19:11.170200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:19:11.170308Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:19:11.170421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:19:11.170510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491642525363199110:2509];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:19:11.172304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642525363199108:2508];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:19:11.172403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491642525363199108:2508];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:19:11.172649Z node 1 :TX_COL ... blet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:19:11.329976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:11.330072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:11.330209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:19:11.330245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:19:11.330278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:11.330315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:11.330426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:19:11.330447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:11.330452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:19:11.330479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:11.330583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:19:11.330617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:19:11.330667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:11.330694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:11.330791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:19:11.330816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:11.330828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:19:11.330838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:11.330948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:19:11.330993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:19:11.380551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.380937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.386639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.386639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.392729Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.392729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.398869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.398869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.405479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.405479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:19:11.564664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:11.564961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:11.565226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7491642525363199122:2512];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037923; 2025-04-10T11:19:11.565601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T11:19:11.650758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 8747 cpu_time_us: 2573 affected_shards: 1 } query_phases { duration_us: 5114 cpu_time_us: 392 affected_shards: 1 } compilation { duration_us: 55676 cpu_time_us: 53089 } process_cpu_time_us: 573 total_duration_us: 72461 total_cpu_time_us: 56627 AddressSanitizer:DEADLYSIGNAL ================================================================= ==1546043==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7fff703bc3e0 sp 0x7fff703bc240 T0) ==1546043==The signal is caused by a READ memory access. ==1546043==Hint: address points to the zero page. 
2025-04-10T11:19:21.117085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:19:21.117142Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18d5fe47 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18d5fe47 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #10 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f8649851d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f8649851e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x164bd028 in _start (/home/runner/.ya/build/build_root/2gt4/000b25/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b) AddressSanitizer can not provide additional info. 
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==1546043==ABORTING >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> KqpUniqueIndex::ReplaceFkAlreadyExist >> KqpUniqueIndex::InsertNullInComplexFk >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpMultishardIndex::SecondaryIndexSelectNull >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TopicService::UnknownConsumer [GOOD] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestMultiYardHarakiri >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation |90.3%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TGRpcCmsTest::AuthTokenTest [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-04-10T11:15:55.131040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:15:55.131479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:15:55.131553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00089b/r3tmp/tmpOqibdj/pdisk_1.dat 2025-04-10T11:15:55.505565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18558, node 1 2025-04-10T11:15:55.739437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:15:55.739505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:15:55.739546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:15:55.740060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:15:55.749172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:15:55.834652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:55.834836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:55.849199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26438 2025-04-10T11:15:56.378740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:15:59.283031Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:15:59.318322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.318492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.346283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:15:59.348321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.583104Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.583640Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584252Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584457Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584706Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584828Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584909Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.584972Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:15:59.747370Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:15:59.747466Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:15:59.760395Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:15:59.904214Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:15:59.945127Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:15:59.945264Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:15:59.990262Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:15:59.991493Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:15:59.991737Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:15:59.991797Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:15:59.991872Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:15:59.991941Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:15:59.992008Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:15:59.992068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:15:59.992618Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:16:00.016583Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:00.016701Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:16:00.023648Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:16:00.027335Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:16:00.027744Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:16:00.039383Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:16:00.057133Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:16:00.057203Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:16:00.057307Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:16:00.071865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:16:00.078364Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:16:00.078505Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:16:00.287479Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:16:00.424231Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:16:00.521768Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:16:01.528896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.529128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:16:01.546827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:16:01.642057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:16:01.642263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:16:01.642527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:16:01.642674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:16:01.642795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:16:01.642919Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:16:01.643016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:16:01.643109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:16:01.643250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:16:01.643341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:16:01.643463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:16:01.643584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2330:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:16:01.667036Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:16:01.667186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:19:18.435307Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODc0YmU4YWYtMzZhNzJmYmQtNTA2N2EzYmItZDEzOWQ1NQ==, TxId: 2025-04-10T11:19:18.435375Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODc0YmU4YWYtMzZhNzJmYmQtNTA2N2EzYmItZDEzOWQ1NQ==, TxId: 2025-04-10T11:19:18.435883Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:19:18.461381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:19:18.461449Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2805:3226] 2025-04-10T11:19:18.996508Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-10T11:19:18.996607Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:19:19.651262Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:19:19.651774Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:19:19.686841Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:19:19.686922Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-10T11:19:19.686956Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-10T11:19:19.686984Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:19:19.687127Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-04-10T11:19:19.687159Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-10T11:19:20.952354Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:19:22.133648Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:19:22.133726Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-10T11:19:22.133764Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-10T11:19:22.133789Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:19:23.285444Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:19:23.319691Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:19:23.319873Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:19:23.319923Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:19:23.320587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:19:23.334971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:19:23.335482Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:19:23.335563Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:19:23.336084Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:19:23.350642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:19:23.350869Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-04-10T11:19:23.351499Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9361:6411], server id = [2:9362:6412], tablet id = 72075186224037899, status = OK 2025-04-10T11:19:23.351602Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9361:6411], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:19:23.352914Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:19:23.353064Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:19:23.353242Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:19:23.353448Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:19:23.353823Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:19:23.356809Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9361:6411], server id = [2:9362:6412], tablet id = 72075186224037899 2025-04-10T11:19:23.356850Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:19:23.357228Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:19:23.390922Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzZiZTNhMmMtYjdiNDA2OTItZWIyNGQzMmItOWQ3N2U2ZjU=, TxId: 2025-04-10T11:19:23.390996Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzZiZTNhMmMtYjdiNDA2OTItZWIyNGQzMmItOWQ3N2U2ZjU=, TxId: 2025-04-10T11:19:23.391511Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:19:23.417645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:19:23.417724Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2805:3226] 2025-04-10T11:19:23.964678Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-04-10T11:19:23.964766Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:19:24.651663Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-10T11:19:24.651736Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-10T11:19:24.651833Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:19:24.651864Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-10T11:19:24.651891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:19:25.831023Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:19:25.831210Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:19:25.869102Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:19:27.069859Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:19:27.069936Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-10T11:19:27.069969Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:19:28.302803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:19:28.302955Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:19:28.303000Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:19:28.303594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:19:28.317808Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:19:28.318273Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:19:28.318354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:19:28.318766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:19:28.344268Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:19:28.344498Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-04-10T11:19:28.345011Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9530:6500], server id = [2:9531:6501], tablet id = 72075186224037899, status = OK 2025-04-10T11:19:28.345118Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9530:6500], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:19:28.346586Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:19:28.346687Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:19:28.346861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:19:28.347050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:19:28.347449Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:19:28.350226Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9530:6500], server id = [2:9531:6501], tablet id = 72075186224037899 2025-04-10T11:19:28.350271Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:19:28.351229Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:19:28.374290Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2E4M2M1MDktYjAxYmRmMDYtZmRkMjdhZDQtOGFmOTY5N2Y=, TxId: 2025-04-10T11:19:28.374365Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2E4M2M1MDktYjAxYmRmMDYtZmRkMjdhZDQtOGFmOTY5N2Y=, TxId: 2025-04-10T11:19:28.375486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:19:28.401593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:19:28.401666Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2805:3226] >> TopicService::UnknownTopic >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain |90.3%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2025-04-10T11:19:23.521197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642576441875793:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.521430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ed1/r3tmp/tmpbBYtkc/pdisk_1.dat 2025-04-10T11:19:24.050498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.121933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.122056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.128441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12966, node 1 2025-04-10T11:19:24.527852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:24.527893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:24.527903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:24.528072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:25.133999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
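The trace below dumps the console's CreateTenant payload in protobuf text format. As a minimal sketch of the same request built in C++ (the header path ydb/public/api/protos/ydb_cms.pb.h is an assumption, inferred from the ydb_query_stats.pb.h frame in the ASan report earlier in this log; field names are taken from the text-format dump):

// Sketch only: rebuilds the CreateTenant payload dumped in the trace
// below. Field names (path, resources.storage_units.unit_kind/count)
// come from the text-format dump; the header path is an assumption.
#include <ydb/public/api/protos/ydb_cms.pb.h>

Ydb::Cms::CreateDatabaseRequest MakeUser1CreateRequest() {
    Ydb::Cms::CreateDatabaseRequest req;
    req.set_path("/Root/users/user-1");
    auto* storage = req.mutable_resources()->add_storage_units();
    storage->set_unit_kind("hdd");  // one "hdd" storage unit, as in the log
    storage->set_count(1);
    return req;
}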
2025-04-10T11:19:25.322883Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7491642585031811196:2314], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:25.322939Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-10T11:19:25.322980Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.322999Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:25.323167Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-04-10T11:19:25.330369Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744283965329249) 2025-04-10T11:19:25.338913Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744283965329249 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-10T11:19:25.339211Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-10T11:19:25.342993Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-10T11:19:25.343981Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965329249&action=1" } } } 2025-04-10T11:19:25.344926Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.345005Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:19:25.345222Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:19:25.346762Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-10T11:19:25.346968Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:19:25.352442Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-10T11:19:25.352496Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.352557Z node 1 :CMS_TENANTS TRACE: StateWork, received 
event# 2146435079, Sender [1:7491642585031811201:2197], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.352577Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.352612Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.352625Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:25.352660Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:19:25.354610Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-10T11:19:25.354718Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-10T11:19:25.358427Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:19:25.358457Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.358466Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.358471Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:25.358524Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-10T11:19:25.358546Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744283965329249 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:19:25.364321Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:19:25.364520Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.364564Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-10T11:19:25.364571Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-10T11:19:25.370228Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-04-10T11:19:25.373061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-10T11:19:25.376175Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642585031811216:2315], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965329249&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:25.376235Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:25.376451Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965329249&action=1" } } 2025-04-10T11:19:25.394916Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-10T11:19:25.395018Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-10T11:19:25.406883Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-10T11:19:25.423234Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-10T11:19:25.423777Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283965452 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: ... 
SlotBroker::TEvTenantState 2025-04-10T11:19:26.759528Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.803033Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779035:2383], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.810214Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.810350Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.814669Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.814700Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.821516Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.845536Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779059:2384], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.845596Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.848944Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.852135Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.852171Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.852718Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.864852Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779068:2385], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { 
Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.864886Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.864924Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.865066Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.865092Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.865699Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.877036Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779076:2386], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.877070Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.877114Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.877278Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.877288Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.877833Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.892758Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779114:2387], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.892812Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.892859Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.892947Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 
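The AuthTokenTest trace keeps re-issuing TEvGetTenantStatusRequest while the reported state is PENDING_RESOURCES and stops once it flips to RUNNING (visible just below). A hedged sketch of the readiness predicate the poll loop is effectively evaluating — the PENDING_RESOURCES/RUNNING values appear verbatim in the dumps, while the nested State enum layout and header path are assumptions:

// Sketch only: the readiness check implied by the polling above. The
// GetDatabaseStatusResult states are taken from the text-format dumps;
// the nested enum scope and header path are assumptions.
#include <ydb/public/api/protos/ydb_cms.pb.h>

bool TenantIsReady(const Ydb::Cms::GetDatabaseStatusResult& status) {
    // The test re-sends TEvGetTenantStatusRequest while the console
    // still reports PENDING_RESOURCES, and stops at RUNNING.
    return status.state() == Ydb::Cms::GetDatabaseStatusResult::RUNNING;
}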
2025-04-10T11:19:26.892967Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.893545Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-10T11:19:26.895057Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-10T11:19:26.895080Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:19:26.895152Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-10T11:19:26.895276Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7491642585031811303:2197], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-10T11:19:26.895300Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-10T11:19:26.895314Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:26.895322Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:26.895358Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-04-10T11:19:26.895401Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1744283965329249 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:19:26.895450Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-04-10T11:19:26.901238Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-04-10T11:19:26.901263Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:26.903046Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7491642589326779120:2388], Recipient [1:7491642576441876275:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-10T11:19:26.903073Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-10T11:19:26.903116Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.903220Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576441876170:2200], Recipient [1:7491642576441876275:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:26.903237Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:26.903767Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected 
to server localhost:28615 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanReso... (TRUNCATED) 2025-04-10T11:19:27.319948Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:19:27.321540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |90.3%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-04-10T11:19:23.506827Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642576420253097:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.507058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ece/r3tmp/tmp1eme3G/pdisk_1.dat 2025-04-10T11:19:24.123012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.162474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.162574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.181422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4039, node 1 2025-04-10T11:19:24.506541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:24.506572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:24.506579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:24.506718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21707 WaitRootIsUp 'Root'... 
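The RemoveWithAnotherTokenTest trace below creates the tenant as user-1@builtin and later drops it (ESchemeOpForceDropExtSubDomain) on behalf of user-2@builtin, recorded as the action=2 console operation. A hedged sketch of the CMS-level removal request that starts this flow — the RemoveDatabaseRequest message name and header path are assumptions from the same generated API family; only the path value comes from the log:

// Sketch only: the database removal request that the action=2 console
// operation below corresponds to. Ydb::Cms::RemoveDatabaseRequest and
// the header path are assumptions; only the path value is from the log.
#include <ydb/public/api/protos/ydb_cms.pb.h>

Ydb::Cms::RemoveDatabaseRequest MakeUser1RemoveRequest() {
    Ydb::Cms::RemoveDatabaseRequest req;
    req.set_path("/Root/users/user-1");
    return req;
}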
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:25.132147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21707 2025-04-10T11:19:25.474501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:25.605704Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7491642585010188301:2315], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-10T11:19:25.614382Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-10T11:19:25.614466Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.614481Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:25.614681Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-04-10T11:19:25.614964Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1744283965614186) 2025-04-10T11:19:25.615652Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1744283965614186 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-10T11:19:25.615856Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-10T11:19:25.627817Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-10T11:19:25.628829Z node 1 :CMS_TENANTS TRACE: Send: 
NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965614186&action=1" } } } 2025-04-10T11:19:25.628992Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.629073Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:19:25.629261Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:19:25.629890Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-10T11:19:25.630070Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:19:25.643731Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-10T11:19:25.643797Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.643921Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7491642585010188306:2196], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.643940Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-10T11:19:25.643983Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.643993Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:25.644047Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-10T11:19:25.644089Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-10T11:19:25.644186Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-10T11:19:25.651788Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642585010188312:2316], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965614186&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-10T11:19:25.651830Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:25.652077Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965614186&action=1" } } 2025-04-10T11:19:25.670301Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:19:25.670334Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:25.670346Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.670356Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new 
tx 2025-04-10T11:19:25.670459Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-10T11:19:25.670487Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1744283965614186 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-10T11:19:25.682968Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:19:25.683156Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:25.683205Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-10T11:19:25.683216Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-10T11:19:25.691992Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-04-10T11:19:25.693723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T11:19:25.709380Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-10T11:19:25.709479Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715659 2025-04-10T11:19:25.713790Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715659 2025-04-10T11:19:25.715042Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642585010188374:2319], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965614186&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-10T11:19:25.715062Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:25.715236Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283965614186&action=1" } } 2025-04-10T11:19:25.727440Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-04-10T11:19:25.728100Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describ ... 
{ } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:19:27.347341Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-04-10T11:19:27.347450Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-2@builtin\022\030\022\026\n\024all-users@well-known\032\016user-2@builtin\"\007Builtin*\017**** (FA717EBF)" DatabaseName: "Root" 2025-04-10T11:19:27.348444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715663 2025-04-10T11:19:27.350956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:27.352604Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642593600123568:2393], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" } UserToken: "" } 2025-04-10T11:19:27.352623Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:27.352799Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" } } 2025-04-10T11:19:27.356128Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715663 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-10T11:19:27.356183Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715663 2025-04-10T11:19:27.356400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715663 2025-04-10T11:19:27.359511Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715663 2025-04-10T11:19:27.395257Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2025-04-10T11:19:27.395279Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-10T11:19:27.395324Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:19:27.395396Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7491642593600123561:2196], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:19:27.395423Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-10T11:19:27.395438Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:27.395447Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:27.395482Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-10T11:19:27.395506Z node 1 :CMS_TENANTS TRACE: Update tenant state in 
database for /Root/users/user-1 state=REMOVING_UNITS txid=1744283967338172 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.395553Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744283967338172 issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.395478Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-10T11:19:27.399275Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-10T11:19:27.399302Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-10T11:19:27.399319Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-10T11:19:27.401024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-10T11:19:27.403430Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-10T11:19:27.403519Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-10T11:19:27.403533Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:27.403703Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7491642576420253251:2194], Recipient [1:7491642576420253367:2196]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-10T11:19:27.403721Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-10T11:19:27.403737Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:27.403746Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:27.403777Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-10T11:19:27.403798Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1744283967338172 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.406507Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-10T11:19:27.406557Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:27.406584Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-10T11:19:27.406837Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-10T11:19:27.406690Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-10T11:19:27.406864Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-10T11:19:27.408356Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-10T11:19:27.408949Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" 
Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-04-10T11:19:27.406996Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-10T11:19:27.409023Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-04-10T11:19:27.407036Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-10T11:19:27.407052Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-10T11:19:27.412193Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642593600123647:2395], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" } UserToken: "" } 2025-04-10T11:19:27.412213Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:27.412327Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" } } 2025-04-10T11:19:27.415672Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-04-10T11:19:27.415775Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7491642593600123639:2196], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-10T11:19:27.416728Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-10T11:19:27.416759Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:27.416769Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:27.416826Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-10T11:19:27.416847Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-10T11:19:27.450958Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-10T11:19:27.451003Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-10T11:19:27.451012Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:27.451019Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-10T11:19:27.451100Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1744283967338172 2025-04-10T11:19:27.456792Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1744283967338172 issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.456862Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1744283967338172 issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.456877Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-10T11:19:27.456977Z 
node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1744283967338172 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-10T11:19:27.463410Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-10T11:19:27.463464Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-10T11:19:27.472113Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7491642593600123686:2401], Recipient [1:7491642576420253367:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" } UserToken: "" } 2025-04-10T11:19:27.472141Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-10T11:19:27.472304Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1744283967338172&action=2" ready: true status: SUCCESS } } 2025-04-10T11:19:27.487166Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:19:27.487994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-04-10T11:19:24.248614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642581831038290:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:24.249826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d60/r3tmp/tmp8nYLcx/pdisk_1.dat 2025-04-10T11:19:24.642756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.658719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.658833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.660644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16525 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:24.926736Z node 1 :TX_PROXY DEBUG: actor# [1:7491642581831038397:2102] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:24.926811Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642581831038682:2259] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:24.927004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642581831038428:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:24.927052Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642581831038428:2118], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:24.927330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:24.929450Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038100:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642581831038687:2260] 2025-04-10T11:19:24.929497Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642581831038100:2049] Subscribe: subscriber# [1:7491642581831038687:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.929567Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038103:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642581831038688:2260] 2025-04-10T11:19:24.929595Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642581831038103:2052] Subscribe: subscriber# [1:7491642581831038688:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.929641Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038106:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642581831038689:2260] 2025-04-10T11:19:24.929657Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642581831038106:2055] Subscribe: subscriber# [1:7491642581831038689:2260], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.929711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038687:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038100:2049] 2025-04-10T11:19:24.929737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038688:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038103:2052] 2025-04-10T11:19:24.929753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038689:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038106:2055] 2025-04-10T11:19:24.929810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038684:2260] 2025-04-10T11:19:24.929857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038685:2260] 2025-04-10T11:19:24.929914Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642581831038683:2260][/dc-1] Set up state: owner# [1:7491642581831038428:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.930053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642581831038686:2260] 2025-04-10T11:19:24.930096Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642581831038683:2260][/dc-1] Path was already updated: owner# [1:7491642581831038428:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.930132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038687:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038684:2260], cookie# 1 2025-04-10T11:19:24.930297Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038100:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642581831038687:2260] 2025-04-10T11:19:24.930362Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038100:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038687:2260], cookie# 1 2025-04-10T11:19:24.930394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038103:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642581831038688:2260] 2025-04-10T11:19:24.930410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038106:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642581831038689:2260] 2025-04-10T11:19:24.930952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038688:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038685:2260], cookie# 1 2025-04-10T11:19:24.930997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038689:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038686:2260], cookie# 1 2025-04-10T11:19:24.931032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038687:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038100:2049], cookie# 1 2025-04-10T11:19:24.931073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038684:2260], cookie# 1 2025-04-10T11:19:24.931109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:24.931144Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038103:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038688:2260], cookie# 1 2025-04-10T11:19:24.931207Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642581831038106:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642581831038689:2260], cookie# 1 2025-04-10T11:19:24.931283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038688:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038103:2052], cookie# 1 2025-04-10T11:19:24.931305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642581831038689:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038106:2055], cookie# 1 2025-04-10T11:19:24.931331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038685:2260], cookie# 1 2025-04-10T11:19:24.931373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:24.931406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642581831038686:2260], cookie# 1 2025-04-10T11:19:24.931428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642581831038683:2260][/dc-1] Unexpected sync response: sender# [1:7491642581831038686:2260], cookie# 1 2025-04-10T11:19:24.998831Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642581831038428:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:24.999301Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642581831038428:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true 
CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... .319089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 TClient::Ls request: /dc-1/USER_0 2025-04-10T11:19:28.320088Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7491642597433331130:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283968252 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [2:7491642601728298736:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744283968252 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7491642601728298736:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744283968252 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-10T11:19:28.320307Z node 2 :TX_PROXY DEBUG: actor# [2:7491642597433331107:2104] Handle TEvNavigate describe path /dc-1/USER_0 2025-04-10T11:19:28.320350Z node 2 :TX_PROXY DEBUG: Actor# [2:7491642601728298805:2353] HANDLE EvNavigateScheme /dc-1/USER_0 2025-04-10T11:19:28.320427Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491642597433331130:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:28.320502Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] 
Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7491642597433331130:2117], cookie# 10 2025-04-10T11:19:28.320546Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298740:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298737:2300], cookie# 10 2025-04-10T11:19:28.320580Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298741:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298738:2300], cookie# 10 2025-04-10T11:19:28.320615Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298742:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298739:2300], cookie# 10 2025-04-10T11:19:28.320643Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491642597433330803:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298740:2300], cookie# 10 2025-04-10T11:19:28.320666Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491642597433330806:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298741:2300], cookie# 10 2025-04-10T11:19:28.320686Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7491642597433330809:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7491642601728298742:2300], cookie# 10 2025-04-10T11:19:28.320719Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298740:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642597433330803:2049], cookie# 10 2025-04-10T11:19:28.320737Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298741:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642597433330806:2052], cookie# 10 2025-04-10T11:19:28.320749Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7491642601728298742:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642597433330809:2055], cookie# 10 2025-04-10T11:19:28.320771Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642601728298737:2300], cookie# 10 2025-04-10T11:19:28.320790Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:28.320806Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642601728298738:2300], cookie# 10 2025-04-10T11:19:28.320821Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:28.320842Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7491642601728298739:2300], cookie# 10 2025-04-10T11:19:28.320853Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7491642601728298736:2300][/dc-1/USER_0] Unexpected sync response: sender# [2:7491642601728298739:2300], cookie# 10 2025-04-10T11:19:28.320886Z node 2 
:TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7491642597433331130:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-10T11:19:28.320963Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7491642597433331130:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7491642601728298736:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744283968252 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:28.321030Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491642597433331130:2117], cacheItem# { Subscriber: { Subscriber: [2:7491642601728298736:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1744283968252 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-10T11:19:28.321166Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491642601728298806:2354], recipient# [2:7491642601728298805:2353], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:28.321193Z node 2 :TX_PROXY DEBUG: Actor# [2:7491642601728298805:2353] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:28.321246Z node 2 :TX_PROXY DEBUG: Actor# [2:7491642601728298805:2353] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-10T11:19:28.321695Z node 2 :TX_PROXY DEBUG: Actor# [2:7491642601728298805:2353] Handle TEvDescribeSchemeResult Forward to# [2:7491642601728298804:2352] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283968252 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744283968252 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... (TRUNCATED) >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition |90.4%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> KqpIndexes::UpdateDeletePlan+UseSink >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> KqpMultishardIndex::DataColumnWrite+UseSink >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen >> KqpUniqueIndex::UpdateFkSameValue >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead >> TTransferTests::Alter [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-10T11:19:12.475055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642532665001804:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.475157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012cf/r3tmp/tmpLrYSwQ/pdisk_1.dat 2025-04-10T11:19:13.039861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:19:13.039982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.045014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.084351Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:13.296115Z node 1 :TX_PROXY DEBUG: actor# [1:7491642532665002064:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.296155Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642536959969840:2444] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.296336Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642532665002113:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.296385Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642532665002113:2140], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.296623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.301992Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001740:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536959969845:2445] 2025-04-10T11:19:13.302005Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001743:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536959969846:2445] 2025-04-10T11:19:13.302061Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642532665001740:2050] Subscribe: subscriber# [1:7491642536959969845:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.302061Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642532665001743:2053] Subscribe: subscriber# [1:7491642536959969846:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.302125Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001746:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642536959969847:2445] 2025-04-10T11:19:13.302143Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642532665001746:2056] Subscribe: subscriber# [1:7491642536959969847:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.302195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969845:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642532665001740:2050] 2025-04-10T11:19:13.302227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969846:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642532665001743:2053] 2025-04-10T11:19:13.302234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001740:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536959969845:2445] 2025-04-10T11:19:13.302250Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001743:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536959969846:2445] 2025-04-10T11:19:13.302290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969847:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642532665001746:2056] 2025-04-10T11:19:13.302324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001746:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642536959969847:2445] 2025-04-10T11:19:13.302344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536959969842:2445] 2025-04-10T11:19:13.302377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536959969843:2445] 2025-04-10T11:19:13.302450Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642536959969841:2445][/dc-1] Set up state: owner# [1:7491642532665002113:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.302599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642536959969844:2445] 2025-04-10T11:19:13.302651Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642536959969841:2445][/dc-1] Path was already updated: owner# [1:7491642532665002113:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.302749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969845:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969842:2445], cookie# 1 2025-04-10T11:19:13.302771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969846:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969843:2445], cookie# 1 2025-04-10T11:19:13.302785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969847:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969844:2445], cookie# 1 2025-04-10T11:19:13.302827Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001740:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969845:2445], cookie# 1 2025-04-10T11:19:13.302859Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001743:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969846:2445], cookie# 1 2025-04-10T11:19:13.302876Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642532665001746:2056] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642536959969847:2445], cookie# 1 2025-04-10T11:19:13.302931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969845:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642532665001740:2050], cookie# 1 2025-04-10T11:19:13.302956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969846:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642532665001743:2053], cookie# 1 2025-04-10T11:19:13.302973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642536959969847:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642532665001746:2056], cookie# 1 2025-04-10T11:19:13.303003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536959969842:2445], cookie# 1 2025-04-10T11:19:13.303032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.303044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536959969843:2445], cookie# 1 2025-04-10T11:19:13.303062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.303083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642536959969844:2445], cookie# 1 2025-04-10T11:19:13.303097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642536959969841:2445][/dc-1] Unexpected sync response: sender# [1:7491642536959969844:2445], cookie# 1 2025-04-10T11:19:13.359077Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642532665002113:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.359504Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642532665002113:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:31.678757Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642579906119290:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:31.678846Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642614265858740:3531], recipient# [3:7491642614265858739:2345], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:31.682889Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:31.683049Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642579906119290:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:31.683133Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642614265858742:3532], recipient# [3:7491642614265858741:2346], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.659806Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.659956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642592791021487:2985] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.660050Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618560826063:3536], recipient# [3:7491642618560826062:2347], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.680615Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.680765Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642579906119290:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.680909Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618560826065:3537], recipient# [3:7491642618560826064:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.686505Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.686649Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642579906119290:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.686739Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618560826067:3538], recipient# [3:7491642618560826066:2349], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.665158Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.665339Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642592791021487:2985] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:33.665441Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642622855793390:3549], recipient# [3:7491642622855793389:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.690598Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642575611151101:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.690763Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642575611151101:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642579906119290:2786] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:33.690860Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642622855793392:3550], recipient# [3:7491642622855793391:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:25.230168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:25.230362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:25.230410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:25.230462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:25.231163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:25.231222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:25.231322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:25.231415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:25.232574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:25.338323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:25.338405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:25.365003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:25.365378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:25.365560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:25.396729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:25.397115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:25.402663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:25.404135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:25.411554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:25.418122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:25.418260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:25.418944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:25.419014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:25.419112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:25.419288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.426821Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:25.559959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:25.561724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.562638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:25.564020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:25.564105Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.571880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:25.572062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:25.572300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.572451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:25.572496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:25.572556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:25.578946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.579039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:25.579085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:25.589022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.589110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.589160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:25.589263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:25.596586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:25.599408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:25.600482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:25.601877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:25.602072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:25.602133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:25.603836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:25.603924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:25.604166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:25.604279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:25.607741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:25.607797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:25.608001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:25.608055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:25.608470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:25.608535Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:25.608645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:25.608700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:25.608751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:25.608785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:25.608834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:25.608881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:25.608921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:25.608970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:25.609053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:25.609111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:25.609165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:25.612030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:25.612175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:25.612217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id, TxId: 103, partId: 0, tablet: 72075186233409546 2025-04-10T11:19:35.737976Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186233409546] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-10T11:19:35.738366Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-04-10T11:19:35.738588Z node 6 :REPLICATION_CONTROLLER NOTICE: [controller 72075186233409546][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:19:35.741033Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Complete 2025-04-10T11:19:35.741405Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276758531, Sender [6:310:2297], Recipient [6:133:2156]: NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-10T11:19:35.741469Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NReplication::TEvController::TEvAlterReplicationResult 2025-04-10T11:19:35.741581Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvAlterReplicationResult, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-10T11:19:35.741781Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-10T11:19:35.741903Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TConfigureParts opId# 103:0 HandleReply NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-04-10T11:19:35.741984Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-10T11:19:35.742141Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:19:35.742246Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:2 2025-04-10T11:19:35.744690Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:19:35.744774Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:19:35.744827Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-10T11:19:35.745052Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:133:2156], Recipient [6:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:19:35.745100Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 
2025-04-10T11:19:35.745179Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:19:35.745238Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 ProgressState 2025-04-10T11:19:35.745290Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:19:35.745369Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-10T11:19:35.745561Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:35.747749Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:19:35.747819Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-10T11:19:35.747948Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-10T11:19:35.748315Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [6:130:2154], Recipient [6:260:2251] 2025-04-10T11:19:35.748377Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:19:35.748491Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:35.748663Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 25769805930 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:35.748745Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-10T11:19:35.748925Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-10T11:19:35.749173Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:19:35.749263Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:19:35.749384Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 2025-04-10T11:19:35.751583Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:19:35.751641Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#103 countTxs#1 2025-04-10T11:19:35.751718Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2025-04-10T11:19:35.751770Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-10T11:19:35.752048Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender 
[6:133:2156], Recipient [6:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:19:35.752096Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 103 2025-04-10T11:19:35.752230Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:35.752291Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:19:35.752617Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:35.752700Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:209:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:19:35.753368Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:19:35.753445Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:19:35.753593Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:19:35.753649Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:19:35.753701Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:19:35.753756Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:19:35.753803Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:19:35.753854Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T11:19:35.753907Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:19:35.753964Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:19:35.754011Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:19:35.754282Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:19:35.754350Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-10T11:19:35.754406Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-10T11:19:35.755662Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:209:2211], Recipient [6:133:2156]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 4 } 2025-04-10T11:19:35.755724Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T11:19:35.755855Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:19:35.755996Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 
2025-04-10T11:19:35.756049Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:19:35.756108Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:19:35.756163Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:19:35.756286Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:19:35.756334Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:19:35.760526Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:19:35.761832Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:19:35.761916Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-10T11:19:12.498679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642529190962729:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:12.498743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001308/r3tmp/tmpKdrICG/pdisk_1.dat 2025-04-10T11:19:13.056974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:13.057084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:13.062504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:13.088363Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28642 WaitRootIsUp 'dc-1'... 
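The scheme-cache excerpts above repeat one pattern: a Handle TEvNavigateKeySet request for a `.metadata` path, a FillEntry hit on a cached subscriber in StatusPathDoesNotExist, and a Send result reply with ErrorCount: 1 and Status: PathErrorUnknown. A minimal Python sketch for tallying these final resolutions out of a flattened dump like this one follows; the regex and the tally_resolutions helper are illustrative assumptions fitted to the visible log shape, not part of the test tooling:

import re
from collections import Counter

# Each "Send result" entry carries the resolved path and its final
# status (e.g. PathErrorUnknown). Pattern is fitted to the format
# visible in this dump, not an official log schema.
SEND_RESULT = re.compile(
    r"Send result: .*?ResultSet \[\{ Path: (?P<path>\S+).*?"
    r"Status: (?P<status>\w+)",
    re.S,
)

def tally_resolutions(log_text: str) -> Counter:
    """Count (path, final status) pairs across the whole dump."""
    counts = Counter()
    for m in SEND_RESULT.finditer(log_text):
        counts[(m.group("path"), m.group("status"))] += 1
    return counts

# Usage (hypothetical file name):
#   print(tally_resolutions(Path("dump.log").read_text()).most_common(5))

On this excerpt that would surface how often dc-1/.metadata/initialization/migrations and dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers resolve to PathErrorUnknown, which is the dominant loop in the TX_PROXY_SCHEME_CACHE output above.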
TClient::Ls request: dc-1 2025-04-10T11:19:13.300822Z node 1 :TX_PROXY DEBUG: actor# [1:7491642529190962916:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:13.300883Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642533485930680:2433] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:13.301024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642529190962941:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:13.301065Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642529190962941:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:13.301240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:13.302994Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962592:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533485930685:2434] 2025-04-10T11:19:13.303042Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529190962592:2050] Subscribe: subscriber# [1:7491642533485930685:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.303103Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962595:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533485930686:2434] 2025-04-10T11:19:13.303118Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529190962595:2053] Subscribe: subscriber# [1:7491642533485930686:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.303138Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962598:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642533485930687:2434] 2025-04-10T11:19:13.303158Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642529190962598:2056] Subscribe: subscriber# [1:7491642533485930687:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:13.303201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930685:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529190962592:2050] 2025-04-10T11:19:13.303221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930686:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529190962595:2053] 2025-04-10T11:19:13.303258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930687:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642529190962598:2056] 2025-04-10T11:19:13.303301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533485930682:2434] 2025-04-10T11:19:13.303325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533485930683:2434] 2025-04-10T11:19:13.303363Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642533485930681:2434][/dc-1] Set up state: owner# [1:7491642529190962941:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.303435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642533485930684:2434] 2025-04-10T11:19:13.303484Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642533485930681:2434][/dc-1] Path was already updated: owner# [1:7491642529190962941:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:13.303477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962592:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533485930685:2434] 2025-04-10T11:19:13.303522Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962595:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533485930686:2434] 2025-04-10T11:19:13.303523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930685:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930682:2434], cookie# 1 2025-04-10T11:19:13.303535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962598:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642533485930687:2434] 2025-04-10T11:19:13.303544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930686:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930683:2434], cookie# 1 2025-04-10T11:19:13.303557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930687:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930684:2434], cookie# 1 2025-04-10T11:19:13.303592Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962598:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930687:2434], cookie# 1 2025-04-10T11:19:13.303626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930687:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529190962598:2056], cookie# 1 2025-04-10T11:19:13.303649Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533485930684:2434], cookie# 1 2025-04-10T11:19:13.303695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:13.303722Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962592:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930685:2434], cookie# 1 2025-04-10T11:19:13.303739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642529190962595:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642533485930686:2434], cookie# 1 2025-04-10T11:19:13.303754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930685:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529190962592:2050], cookie# 1 2025-04-10T11:19:13.303788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642533485930686:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642529190962595:2053], cookie# 1 2025-04-10T11:19:13.303810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533485930682:2434], cookie# 1 2025-04-10T11:19:13.303828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:13.303847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642533485930683:2434], cookie# 1 2025-04-10T11:19:13.303873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642533485930681:2434][/dc-1] Unexpected sync response: sender# [1:7491642533485930683:2434], cookie# 1 2025-04-10T11:19:13.360641Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642529190962941:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:13.360974Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642529190962941:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true 
CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:31.342710Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642584121114807:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:31.342815Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642614185887475:4061], recipient# [3:7491642614185887474:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.259942Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.260111Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642597006017219:3186] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.260206Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618480854791:4065], recipient# [3:7491642618480854790:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.324405Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { 
ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.324563Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642584121114807:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.324664Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618480854793:4066], recipient# [3:7491642618480854792:2352], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.346329Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:32.346484Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642584121114807:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:32.346613Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642618480854795:4067], recipient# [3:7491642618480854794:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.261455Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.261619Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642597006017219:3186] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:33.261742Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642622775822111:4071], recipient# [3:7491642622775822110:2354], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.326556Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.326712Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642584121114807:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:33.326810Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642622775822114:4072], recipient# [3:7491642622775822113:2355], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.350673Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642579826146632:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:33.350829Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642579826146632:2132], cacheItem# { Subscriber: { Subscriber: [3:7491642584121114807:2787] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:33.350913Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642622775822118:4075], recipient# [3:7491642622775822115:2356], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> DemoTx::Scenario_4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-04-10T11:19:23.914817Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642576340257647:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.915301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d84/r3tmp/tmpfJ9NR1/pdisk_1.dat 2025-04-10T11:19:24.521442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.521547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.539749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:24.560109Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22355 WaitRootIsUp 'dc-1'... 
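Interleaved with the per-test logs, the runner prints verdict markers of the form `>> Suite::Test [GOOD]`, and opens each error dump with `------- [TM] {...} <path>/unittest >> Name [GOOD] Test command err:`. A short sketch that pulls the decided verdicts out of a dump like this; the pattern is an assumption based on what is visible here, and GOOD is the only status actually present in this excerpt (other bracketed statuses are assumed to follow the same shape):

import re

# Matches ">> TTransferTests::Alter [GOOD]". Test names mentioned
# without a bracketed status (still queued/running) are skipped,
# because the regex requires " [" right after the name.
VERDICT = re.compile(r">> (?P<test>[A-Za-z0-9_:]+) \[(?P<status>[A-Z]+)\]")

def extract_verdicts(log_text: str) -> dict[str, str]:
    """Map each decided test to its most recently reported verdict."""
    return {m.group("test"): m.group("status")
            for m in VERDICT.finditer(log_text)}

Applied to this section it would report GOOD for TTransferTests::Alter, TSubDomainTest::CreateTabletForUnknownDomain, KqpIndexes::ExplainCollectFullDiagnostics, DemoTx::Scenario_4, and the other bracketed entries, while leaving the not-yet-decided names (e.g. TSubDomainTest::CreateTableInsideSubDomain) out of the map.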
TClient::Ls request: dc-1 2025-04-10T11:19:24.821546Z node 1 :TX_PROXY DEBUG: actor# [1:7491642576340257810:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:24.821605Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642580635225579:2440] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:24.821729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642580635225166:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:24.821771Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642580635225166:2140], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:24.822063Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:24.824196Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257487:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642580635225584:2441] 2025-04-10T11:19:24.824279Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642576340257487:2050] Subscribe: subscriber# [1:7491642580635225584:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.824338Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257490:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642580635225585:2441] 2025-04-10T11:19:24.824358Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642576340257490:2053] Subscribe: subscriber# [1:7491642580635225585:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.824383Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257493:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642580635225586:2441] 2025-04-10T11:19:24.824402Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642576340257493:2056] Subscribe: subscriber# [1:7491642580635225586:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.824456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225584:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642576340257487:2050] 2025-04-10T11:19:24.824491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225585:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642576340257490:2053] 2025-04-10T11:19:24.824515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225586:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642576340257493:2056] 2025-04-10T11:19:24.824635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642580635225581:2441] 2025-04-10T11:19:24.824680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642580635225582:2441] 2025-04-10T11:19:24.824734Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642580635225580:2441][/dc-1] Set up state: owner# [1:7491642580635225166:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.824840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642580635225583:2441] 2025-04-10T11:19:24.824882Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491642580635225580:2441][/dc-1] Path was already updated: owner# [1:7491642580635225166:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.824926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225584:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225581:2441], cookie# 1 2025-04-10T11:19:24.824941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225585:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225582:2441], cookie# 1 2025-04-10T11:19:24.824955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225586:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225583:2441], cookie# 1 2025-04-10T11:19:24.824981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257487:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642580635225584:2441] 2025-04-10T11:19:24.825006Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257487:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225584:2441], cookie# 1 2025-04-10T11:19:24.825025Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257490:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642580635225585:2441] 2025-04-10T11:19:24.825039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257490:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225585:2441], cookie# 1 2025-04-10T11:19:24.825055Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257493:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642580635225586:2441] 2025-04-10T11:19:24.825067Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576340257493:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580635225586:2441], cookie# 1 2025-04-10T11:19:24.827342Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225584:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576340257487:2050], cookie# 1 2025-04-10T11:19:24.827375Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225585:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576340257490:2053], cookie# 1 2025-04-10T11:19:24.827390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580635225586:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576340257493:2056], cookie# 1 2025-04-10T11:19:24.827444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642580635225581:2441], cookie# 1 2025-04-10T11:19:24.827495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:24.827514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642580635225582:2441], cookie# 1 2025-04-10T11:19:24.827539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:24.827566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642580635225583:2441], cookie# 1 2025-04-10T11:19:24.827600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580635225580:2441][/dc-1] Unexpected sync response: sender# [1:7491642580635225583:2441], cookie# 1 2025-04-10T11:19:24.897212Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642580635225166:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:24.897644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642580635225166:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 10T11:19:34.066613Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642624427733558:2555][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642624427733573:2555] 2025-04-10T11:19:34.066619Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642607247863415:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642624427733571:2556] 2025-04-10T11:19:34.066646Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491642624427733558:2555][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7491642607247863749:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:34.066648Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7491642607247863415:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7491642624427733577:2555] 2025-04-10T11:19:34.066678Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491642624427733558:2555][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7491642624427733574:2555] 2025-04-10T11:19:34.066713Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491642624427733558:2555][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7491642607247863749:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:34.066739Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642607247863749:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:34.066822Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642607247863749:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642624427733559:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:34.066912Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642607247863749:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642624427733559:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false 
Partial: 0 } 2025-04-10T11:19:34.066967Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491642607247863749:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:34.067007Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491642607247863749:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7491642624427733558:2555] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:34.067061Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642607247863749:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642624427733558:2555] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:34.067148Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642624427733578:2557], recipient# [3:7491642624427733556:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:34.067265Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642624427733579:2558], recipient# [3:7491642624427733555:2313], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:34.470494Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642607247863749:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:34.470660Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642607247863749:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642611542831613:2537] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:34.470801Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642624427733581:2559], recipient# [3:7491642624427733580:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:35.067753Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642607247863749:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:35.067857Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642607247863749:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642624427733559:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:35.067918Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642628722700888:2563], recipient# [3:7491642628722700887:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:35.446492Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642607247863639:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:35.446582Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:35.478584Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491642607247863749:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:35.478734Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491642607247863749:2128], cacheItem# { Subscriber: { Subscriber: [3:7491642611542831613:2537] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:35.478853Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491642628722700892:2566], recipient# [3:7491642628722700891:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
|90.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD]
>> KqpIndexes::CreateTableWithExplicitSyncIndexSQL
>> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
>> KqpUniqueIndex::InsertFkAlreadyExist [GOOD]
>> KqpUniqueIndex::InsertFkDuplicate
|90.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
>> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD]
>> TPersQueueTest::StreamReadCommitAndStatusMsgs
>> TPersQueueTest::Init [GOOD]
>> TPersQueueTest::NoDecompressionMemoryLeaks
>> KqpIndexes::UpdateIndexSubsetPk [GOOD]
>> KqpIndexes::UpdateOnReadColumns
>> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink [GOOD]
>> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink
>> KqpUniqueIndex::InsertNullInComplexFk [GOOD]
>> KqpUniqueIndex::InsertNullInComplexFkDuplicate
>> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD]
>> KqpUniqueIndex::ReplaceFkDuplicate
>> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD]
>> KqpMultishardIndex::SecondaryIndexSelect
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
Test command err: 2025-04-10T11:19:23.811263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642576341584666:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.812975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000da7/r3tmp/tmpHoecJA/pdisk_1.dat 2025-04-10T11:19:24.578844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.595435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.595574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.606035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3361 WaitRootIsUp 'dc-1'... 
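Status markers like the block above are the only structure the runner prints between per-test logs: `>> Suite::Test [GOOD]` marks a finished test, a bare `>> Suite::Test` marks one that has just started, and `|NN.N%| [TA]/[TM] ...` lines report module-level results. A small, hypothetical helper for tallying verdicts out of such a stream; the regex and the `STARTED` bucket are assumptions of this sketch, not part of ya itself:

    import re
    from collections import Counter

    # '>> Suite::Test [GOOD]' -- the trailing verdict is optional: a bare
    # '>> Suite::Test' line is a test that has started but not yet finished.
    STATUS = re.compile(r">>\s+(?P<name>\S+)(?:\s+\[(?P<verdict>[A-Z]+)\])?\s*$")

    def tally(lines):
        counts = Counter()
        for line in lines:
            m = STATUS.match(line.strip())
            if m:
                counts[m.group("verdict") or "STARTED"] += 1
        return counts

    # Example with entries taken from this log:
    demo = [
        ">> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD]",
        ">> KqpUniqueIndex::InsertFkDuplicate",
    ]
    print(tally(demo))  # Counter({'GOOD': 1, 'STARTED': 1})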
TClient::Ls request: dc-1 2025-04-10T11:19:24.966728Z node 1 :TX_PROXY DEBUG: actor# [1:7491642576341584778:2112] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:24.966787Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642580636552585:2449] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:24.966897Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642580636552105:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:24.966990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642580636552105:2127], cookie# 1 2025-04-10T11:19:24.975586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552567:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552564:2439], cookie# 1 2025-04-10T11:19:24.975663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552568:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552565:2439], cookie# 1 2025-04-10T11:19:24.975681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552569:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552566:2439], cookie# 1 2025-04-10T11:19:24.975719Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576341584477:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552568:2439], cookie# 1 2025-04-10T11:19:24.975736Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576341584474:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552567:2439], cookie# 1 2025-04-10T11:19:24.975751Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642576341584480:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642580636552569:2439], cookie# 1 2025-04-10T11:19:24.975787Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552568:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576341584477:2053], cookie# 1 2025-04-10T11:19:24.975805Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552567:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576341584474:2050], cookie# 1 2025-04-10T11:19:24.975818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642580636552569:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642576341584480:2056], cookie# 1 2025-04-10T11:19:24.975855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642580636552565:2439], cookie# 1 2025-04-10T11:19:24.975881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:24.975896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7491642580636552564:2439], cookie# 1 2025-04-10T11:19:24.975915Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:24.975953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642580636552566:2439], cookie# 1 2025-04-10T11:19:24.975971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642580636552563:2439][/dc-1] Unexpected sync response: sender# [1:7491642580636552566:2439], cookie# 1 2025-04-10T11:19:24.976031Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642580636552105:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:24.991695Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642580636552105:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642580636552563:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:24.991810Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642580636552105:2127], cacheItem# { Subscriber: { Subscriber: [1:7491642580636552563:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:19:24.993924Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642580636552586:2450], recipient# [1:7491642580636552585:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:24.993975Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642580636552585:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:25.044226Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642580636552585:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:25.047594Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642580636552585:2449] Handle TEvDescribeSchemeResult Forward to# [1:7491642580636552584:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
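The scheme-board traces above make the subscriber's sync protocol visible: the main subscriber fans a versioned request out to three replicas, logs `Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1` after the first reply, declares `Sync is done` once a second reply gives it a majority, and tags the late third reply `Unexpected sync response`. A minimal sketch of that quorum accounting, reconstructed from these counters alone (the real subscriber is a C++ actor inside YDB's scheme board; the names here are invented):

    class SyncQuorum:
        # Majority accounting as suggested by the "Sync is in progress /
        # Sync is done" counters in this log (size# 3, half# 1, successes# 2).
        def __init__(self, replicas=3):
            self.size = replicas
            self.half = replicas // 2   # the half# value printed in the log
            self.successes = 0
            self.failures = 0           # the log spells this counter "faulires"
            self.done = False

        def on_response(self, ok):
            if self.done:
                return "unexpected"     # "Unexpected sync response"
            if ok:
                self.successes += 1
            else:
                self.failures += 1
            if self.successes > self.half:
                self.done = True
                return "done"           # "Sync is done: ... successes# 2"
            if self.failures > self.half:
                self.done = True
                return "failed"         # a majority can no longer be reached
            return "in progress"        # "Sync is in progress: ... successes# 1"

Feeding it three successful replies walks it through exactly the states logged above, in order: in progress, done, unexpected.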
2025-04-10T11:19:25.084992Z node 1 :TX_PROXY DEBUG: actor# [1:7491642576341584778:2112] Handle TEvProposeTransaction 2025-04-10T11:19:25.085031Z node 1 :TX_PROXY DEBUG: actor# [1:7491642576341584778:2112] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:25.085123Z node 1 :TX_PROXY DEBUG: actor# [1:7491642576341584778:2112] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642584931519888:2455] 2025-04-10T11:19:25.235713Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642584931519888:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:19:25.235758Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642584931519888:2455] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:25.235831Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642584931519888:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:25.235922Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642580636552105:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status ... 255:2357] 2025-04-10T11:19:36.961998Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7491642633274034245:2357][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7491642616094164613:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:36.962102Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7491642616094164613:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 } 2025-04-10T11:19:36.962240Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7491642616094164613:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7491642633274034245:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:36.962418Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642633274034245:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:36.962603Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642633274034259:2359], 
recipient# [4:7491642633274034244:2334], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:36.963483Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491642633274034244:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:37.058940Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642616094164613:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:37.059117Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642633274034245:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:37.059181Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642633274034246:2358] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:37.059294Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642637569001556:2360], recipient# [4:7491642633274034244:2334], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:37.059770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491642633274034244:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:37.210640Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642616094164613:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:37.210781Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642633274034245:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:37.210839Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642633274034246:2358] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:37.210955Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642637569001557:2361], recipient# [4:7491642633274034244:2334], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:37.211552Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491642633274034244:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:37.426729Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491642616094164513:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:37.426815Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:37.500671Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642616094164613:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:37.500829Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642616094164613:2105], cacheItem# { Subscriber: { Subscriber: [4:7491642620389132304:2344] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:37.500921Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642637569001559:2362], recipient# [4:7491642637569001558:2335], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> DemoTx::Scenario_5
>> KqpIndexes::SecondaryIndexUpdateOnUsingIndex
>> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD]
>> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate
>> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD]
>> KqpUniqueIndex::UpsertImplicitNullInComplexFk
>> TYardTest::TestMultiYardHarakiri [GOOD]
>> TYardTest::TestLogOwerwrite
>> TYardTest::TestLogOwerwrite [GOOD]
>> TPersQueueTest::TopicServiceReadBudget [GOOD]
>> TPersQueueTest::TopicServiceSimpleHappyWrites
>> TSubDomainTest::GenericCases [GOOD]
>> KqpIndexes::UpdateDeletePlan+UseSink [GOOD]
>> KqpIndexes::UpdateDeletePlan-UseSink
>> TSubDomainTest::ConsistentCopyTable [GOOD]
|90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD]
Test command err: 2025-04-10T11:19:25.961121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642587012757932:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:25.961168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d4b/r3tmp/tmpEkCuxn/pdisk_1.dat 2025-04-10T11:19:26.751861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:26.761616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:26.761717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:26.767007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27906 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:27.074380Z node 1 :TX_PROXY DEBUG: actor# [1:7491642591307725465:2141] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:27.074436Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693225:2452] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:27.074544Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642591307725489:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:27.074659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642591307725489:2154], cookie# 1 2025-04-10T11:19:27.076388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725916:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725913:2447], cookie# 1 2025-04-10T11:19:27.076424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725917:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725914:2447], cookie# 1 2025-04-10T11:19:27.076441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725918:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725915:2447], cookie# 1 2025-04-10T11:19:27.076482Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642587012757811:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725917:2447], cookie# 1 2025-04-10T11:19:27.076513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642587012757814:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725918:2447], cookie# 1 2025-04-10T11:19:27.076551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725917:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642587012757811:2054], cookie# 1 2025-04-10T11:19:27.076567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725918:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642587012757814:2057], cookie# 1 2025-04-10T11:19:27.076599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642591307725914:2447], cookie# 1 2025-04-10T11:19:27.076626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491642591307725912:2447][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:27.076657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642591307725915:2447], cookie# 1 2025-04-10T11:19:27.076682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:27.076749Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642591307725489:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:27.078227Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642587012757808:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642591307725916:2447], cookie# 1 2025-04-10T11:19:27.078269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642591307725916:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642587012757808:2051], cookie# 1 2025-04-10T11:19:27.078294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642591307725913:2447], cookie# 1 2025-04-10T11:19:27.078310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642591307725912:2447][/dc-1] Unexpected sync response: sender# [1:7491642591307725913:2447], cookie# 1 2025-04-10T11:19:27.090291Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642591307725489:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642591307725912:2447] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:27.090440Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642591307725489:2154], cacheItem# { Subscriber: { Subscriber: [1:7491642591307725912:2447] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:19:27.092809Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642595602693226:2453], recipient# [1:7491642595602693225:2452], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:27.092865Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693225:2452] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:27.162777Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693225:2452] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:27.166414Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693225:2452] Handle TEvDescribeSchemeResult Forward to# [1:7491642595602693224:2451] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
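The `WaitRootIsUp 'dc-1'...` line, the `TClient::Ls request`/`TClient::Ls response` pair, and the closing `WaitRootIsUp 'dc-1' success.` show the test harness polling the scheme root until it can be described successfully. A rough sketch of that wait loop under assumed names (`ls` stands in for whatever describe call the test client exposes; the retry interval and attempt cap are invented for illustration):

    import time

    def wait_root_is_up(ls, root, attempts=30, delay_s=0.5):
        # Poll ls(root) until it reports success, as the WaitRootIsUp trace does.
        # ls is assumed to return a dict with a 'Status' field mirroring the
        # 'Status: 1 StatusCode: SUCCESS' shape of TClient::Ls responses.
        for _ in range(attempts):
            response = ls(root)                          # TClient::Ls request: dc-1
            if response.get("StatusCode") == "SUCCESS":  # WaitRootIsUp 'dc-1' success.
                return response
            time.sleep(delay_s)                          # root not ready yet; retry
        raise TimeoutError(f"root {root!r} did not come up after {attempts} attempts")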
2025-04-10T11:19:27.201230Z node 1 :TX_PROXY DEBUG: actor# [1:7491642591307725465:2141] Handle TEvProposeTransaction 2025-04-10T11:19:27.201262Z node 1 :TX_PROXY DEBUG: actor# [1:7491642591307725465:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:27.201396Z node 1 :TX_PROXY DEBUG: actor# [1:7491642591307725465:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642595602693234:2460] 2025-04-10T11:19:27.373467Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693234:2460] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:19:27.373536Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693234:2460] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:27.373622Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642595602693234:2460] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:27.373719Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642591307725489:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... sender# [4:7491642647573549212:3024] 2025-04-10T11:19:39.080901Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7491642626098711497:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:39.080911Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7491642647573549198:3024][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7491642626098711497:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:39.080966Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7491642626098711497:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7491642647573549198:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:39.081019Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642647573549198:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 
2025-04-10T11:19:39.081073Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642647573549218:3026], recipient# [4:7491642647573549195:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:39.542388Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491642626098711253:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.542472Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:39.567098Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642626098711497:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:39.567247Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642630393679391:2544] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:39.567355Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642647573549222:3029], recipient# [4:7491642647573549221:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.090351Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642626098711497:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.090504Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642647573549198:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:40.090615Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642651868516531:3035], recipient# [4:7491642651868516530:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.549513Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642626098711497:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.549673Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642630393679391:2544] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:40.549788Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642651868516533:3036], recipient# [4:7491642651868516532:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.567655Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642626098711497:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:40.567805Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642630393679391:2544] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:40.567897Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642651868516535:3037], recipient# [4:7491642651868516534:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.097511Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491642626098711497:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.097656Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491642626098711497:2128], cacheItem# { Subscriber: { Subscriber: [4:7491642647573549198:3024] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.097762Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491642656163483842:3041], recipient# [4:7491642656163483841:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink >> TopicService::UnknownTopic [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-04-10T11:19:23.641328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642578524266973:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.641409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d86/r3tmp/tmpjvmn1U/pdisk_1.dat 2025-04-10T11:19:24.370842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.384431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.385029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.395600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3574 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:24.740126Z node 1 :TX_PROXY DEBUG: actor# [1:7491642578524267215:2102] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:24.740176Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234809:2263] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:24.741233Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642578524267241:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:24.741367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642578524267241:2117], cookie# 1 2025-04-10T11:19:24.743728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234795:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234792:2257], cookie# 1 2025-04-10T11:19:24.743772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234796:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234793:2257], cookie# 1 2025-04-10T11:19:24.743786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234797:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234794:2257], cookie# 1 2025-04-10T11:19:24.743829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642578524266913:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234795:2257], cookie# 1 2025-04-10T11:19:24.743886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642578524266916:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234796:2257], cookie# 1 2025-04-10T11:19:24.743906Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642578524266919:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642582819234797:2257], cookie# 1 2025-04-10T11:19:24.743939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234795:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642578524266913:2049], cookie# 1 2025-04-10T11:19:24.743953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234796:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642578524266916:2052], cookie# 1 2025-04-10T11:19:24.743964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642582819234797:2257][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642578524266919:2055], cookie# 1 2025-04-10T11:19:24.743997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642582819234792:2257], cookie# 1 2025-04-10T11:19:24.744023Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:24.744044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642582819234793:2257], cookie# 1 2025-04-10T11:19:24.744083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:24.744106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642582819234794:2257], cookie# 1 2025-04-10T11:19:24.744120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642582819234791:2257][/dc-1] Unexpected sync response: sender# [1:7491642582819234794:2257], cookie# 1 2025-04-10T11:19:24.744187Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642578524267241:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:24.750637Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642578524267241:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642582819234791:2257] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:24.750786Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642578524267241:2117], cacheItem# { Subscriber: { Subscriber: [1:7491642582819234791:2257] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:19:24.753135Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642582819234810:2264], recipient# [1:7491642582819234809:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:24.753216Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234809:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:24.795964Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234809:2263] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:24.800925Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234809:2263] Handle TEvDescribeSchemeResult Forward to# [1:7491642582819234808:2262] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:19:24.827060Z node 1 :TX_PROXY DEBUG: actor# [1:7491642578524267215:2102] Handle TEvProposeTransaction 2025-04-10T11:19:24.827085Z node 1 :TX_PROXY DEBUG: actor# [1:7491642578524267215:2102] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:24.828275Z node 1 :TX_PROXY DEBUG: actor# [1:7491642578524267215:2102] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642582819234815:2268] 2025-04-10T11:19:24.934740Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234815:2268] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:19:24.934790Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234815:2268] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:24.934931Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642582819234815:2268] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:24.935022Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642578524267241:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status ... nknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.242100Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7491642655012596455:2244], recipient# [6:7491642655012596432:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.242275Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7491642655012596456:2245], recipient# [6:7491642655012596433:2323], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.242420Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:19:41.242435Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491642655012596432:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:41.300137Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7491642633537759717:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.300299Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596434:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.300351Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596435:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.300485Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7491642655012596457:2246], recipient# [6:7491642655012596432:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.300755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491642655012596432:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:41.472352Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7491642633537759717:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.472475Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596434:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.472517Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596435:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.472637Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7491642655012596458:2247], recipient# [6:7491642655012596432:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.472876Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491642655012596432:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:19:41.709229Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7491642633537759717:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.709411Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596434:2241] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.709459Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7491642633537759717:2107], cacheItem# { Subscriber: { Subscriber: [6:7491642655012596435:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.709587Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7491642655012596459:2248], recipient# [6:7491642655012596432:2322], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.710055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7491642655012596432:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-10T11:19:23.926576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642579563558428:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:23.926624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d6a/r3tmp/tmpMEZ2Ov/pdisk_1.dat 2025-04-10T11:19:24.598655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:24.630897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:24.631015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:24.638463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9397 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:24.869091Z node 1 :TX_PROXY DEBUG: actor# [1:7491642579563558656:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:24.869134Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642583858526418:2432] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:24.869254Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642583858525976:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:24.869323Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491642583858525976:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:19:24.869518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:19:24.871481Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558336:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642583858526424:2433] 2025-04-10T11:19:24.871497Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558333:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642583858526423:2433] 2025-04-10T11:19:24.871552Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642579563558333:2050] Subscribe: subscriber# [1:7491642583858526423:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.871552Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642579563558336:2053] Subscribe: subscriber# [1:7491642583858526424:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 
2025-04-10T11:19:24.871616Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558339:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491642583858526425:2433] 2025-04-10T11:19:24.871643Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491642579563558339:2056] Subscribe: subscriber# [1:7491642583858526425:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:19:24.871728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526424:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642579563558336:2053] 2025-04-10T11:19:24.871779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526425:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642579563558339:2056] 2025-04-10T11:19:24.871843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642583858526421:2433] 2025-04-10T11:19:24.871873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642583858526422:2433] 2025-04-10T11:19:24.871940Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491642583858526419:2433][/dc-1] Set up state: owner# [1:7491642583858525976:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.871967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558336:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642583858526424:2433] 2025-04-10T11:19:24.871983Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558339:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642583858526425:2433] 2025-04-10T11:19:24.872097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526423:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642579563558333:2050] 2025-04-10T11:19:24.872128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526423:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526420:2433], cookie# 1 2025-04-10T11:19:24.872146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526424:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526421:2433], cookie# 1 2025-04-10T11:19:24.872162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526425:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526422:2433], cookie# 1 2025-04-10T11:19:24.872182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491642583858526420:2433] 2025-04-10T11:19:24.872225Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: 
[main][1:7491642583858526419:2433][/dc-1] Path was already updated: owner# [1:7491642583858525976:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:19:24.872271Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558333:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491642583858526423:2433] 2025-04-10T11:19:24.872299Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558333:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526423:2433], cookie# 1 2025-04-10T11:19:24.872320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558336:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526424:2433], cookie# 1 2025-04-10T11:19:24.872335Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642579563558339:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642583858526425:2433], cookie# 1 2025-04-10T11:19:24.872364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526423:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642579563558333:2050], cookie# 1 2025-04-10T11:19:24.872388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526424:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642579563558336:2053], cookie# 1 2025-04-10T11:19:24.872402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642583858526425:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642579563558339:2056], cookie# 1 2025-04-10T11:19:24.872447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642583858526420:2433], cookie# 1 2025-04-10T11:19:24.872479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:24.872496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642583858526421:2433], cookie# 1 2025-04-10T11:19:24.872514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:24.872542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642583858526422:2433], cookie# 1 2025-04-10T11:19:24.872564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642583858526419:2433][/dc-1] Unexpected sync response: sender# [1:7491642583858526422:2433], cookie# 1 2025-04-10T11:19:24.920638Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642583858525976:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:19:24.921014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642583858525976:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 
_requests PathId: Strong: 1 } 2025-04-10T11:19:41.251001Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7491642638713875442:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7491642655893745840:2759] 2025-04-10T11:19:41.251018Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7491642638713875442:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7491642655893745846:2760] 2025-04-10T11:19:41.251063Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7491642638713875776:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7491642655893745826:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:41.251173Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642655893745826:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.251208Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7491642638713875776:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:19:41.251251Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7491642638713875776:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7491642655893745827:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:41.251293Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642655893745827:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.251349Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7491642638713875776:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-10T11:19:41.251390Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7491642638713875776:2128], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7491642655893745828:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:41.251438Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642655893745828:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.251517Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7491642655893745847:2761], recipient# [5:7491642655893745824:2315], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.251572Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7491642655893745848:2762], recipient# [5:7491642655893745825:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.326540Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7491642638713875776:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:41.326701Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642643008843742:2616] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:41.326810Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7491642655893745850:2763], recipient# [5:7491642655893745849:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:42.253037Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7491642638713875776:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:42.253184Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642655893745828:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:42.253338Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7491642660188713157:2767], recipient# [5:7491642660188713156:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:42.307556Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491642638713875520:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:42.307658Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:42.330545Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7491642638713875776:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:42.330700Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7491642638713875776:2128], cacheItem# { Subscriber: { Subscriber: [5:7491642643008843742:2616] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:19:42.330808Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7491642660188713159:2768], recipient# [5:7491642660188713158:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpIndexes::DeleteByIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TopicService::UseDoubleSlashInTopicPath >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> KqpUniqueIndex::InsertFkDuplicate [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink >> ReadSessionImplTest::DataReceivedCallback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2025-04-10T11:16:57.758009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491641953076799954:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:16:57.758188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001819/r3tmp/tmp5ZXWEx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22939, node 1 2025-04-10T11:16:58.127117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:16:58.128401Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.128433Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:16:58.152779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:16:58.152900Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:16:58.176150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:16:58.255045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:16:58.255073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:16:58.255085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:16:58.255237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:16:58.632600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:17:02.758361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491641953076799954:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:17:02.758467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:17:13.126861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:17:13.126901Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting |90.4%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 1234, MsgBus: 12713 2025-04-10T11:19:29.377360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642605805253567:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.394336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e15/r3tmp/tmpE0ewjM/pdisk_1.dat 2025-04-10T11:19:29.964472Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:30.000662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:30.000777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:30.009625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1234, node 1 2025-04-10T11:19:30.214900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.214935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.214942Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.215056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12713 TClient is connected to server localhost:12713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.337162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.402509Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:31.412798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:31.633263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.926183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:32.044617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.596917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642622985124468:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.597023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.166570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.241379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.289813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.325337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.367586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.370660Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642605805253567:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.370706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.403603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.476080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642627280092280:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.476186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.476381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642627280092285:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.485638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.495968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642627280092287:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.598344Z node 1 :TX_PROXY ERROR: Actor# [1:7491642627280092345:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.785524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 11428, MsgBus: 27420 2025-04-10T11:19:39.622575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642648387166045:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.622660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e15/r3tmp/tmpzlKTCT/pdisk_1.dat 2025-04-10T11:19:39.823985Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:39.840832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:39.840923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:39.842767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11428, node 2 2025-04-10T11:19:39.970804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:39.970824Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:39.970832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:39.970974Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27420 TClient is connected to server localhost:27420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
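The NOT_FOUND warnings and the "path exist, request accepts it" error above are a benign create-on-first-use race: the workload service provisions /Root/.metadata/workload_manager/pools/default lazily, two actors propose the creation, and the loser finds the path already present. A minimal YQL sketch of the object being provisioned follows; the pool name matches the log, but the WITH settings are assumptions for illustration, not values taken from this run:

    -- Sketch only: roughly what TPoolCreatorActor provisions on first query.
    -- The setting values below are illustrative assumptions.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
    -- A concurrent creator of the same path fails benignly, as in the log:
    -- "Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist"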
2025-04-10T11:19:40.528258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:40.543301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.671009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:40.910383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:19:41.010023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.442037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642665567036994:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.442129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.503321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.546261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.581141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.619012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.701672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.743810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.814553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642665567037510:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.814682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.814946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642665567037515:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.819987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:43.833724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642665567037517:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:43.937204Z node 2 :TX_PROXY ERROR: Actor# [2:7491642665567037573:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:44.605976Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642648387166045:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:44.606045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:44.769496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 2606, MsgBus: 23400 2025-04-10T11:19:29.355438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642602112134748:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.355490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e06/r3tmp/tmpW3niO4/pdisk_1.dat 2025-04-10T11:19:29.959564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.959650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.963109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:30.032422Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2606, node 1 2025-04-10T11:19:30.089384Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:30.097233Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:30.214759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.214782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.214792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.214912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23400 TClient is connected to server localhost:23400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.277458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.313670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:31.548627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:19:31.819147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.938649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.355464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642619292005707:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.355584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.176541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.246280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.284902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.346508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.358296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642602112134748:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.358351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.380762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.424826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.524049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623586973526:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.524116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.524315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623586973531:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.527171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.535787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642623586973533:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.614883Z node 1 :TX_PROXY ERROR: Actor# [1:7491642623586973588:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.777578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:37.481081Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642636471876998:2605], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTExNjRlNWUtM2IwZWUzYmYtYWU2NTk1YTQtN2RkYTJlOWE=. TraceId : 01jrfpth9m3ymxvtgyhzg2ekmq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:19:37.481413Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642636471876999:2606], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTExNjRlNWUtM2IwZWUzYmYtYWU2NTk1YTQtN2RkYTJlOWE=. CustomerSuppliedId : . TraceId : 01jrfpth9m3ymxvtgyhzg2ekmq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491642636471876993:2559], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:19:37.481716Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTExNjRlNWUtM2IwZWUzYmYtYWU2NTk1YTQtN2RkYTJlOWE=, ActorId: [1:7491642632176909264:2559], ActorState: ExecuteState, TraceId: 01jrfpth9m3ymxvtgyhzg2ekmq, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8153, MsgBus: 21401 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e06/r3tmp/tmpJwNqdB/pdisk_1.dat 2025-04-10T11:19:38.476026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:19:38.525003Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:38.543360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:38.543438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:38.545083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8153, node 2 2025-04-10T11:19:38.710022Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:38.710045Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:38.710053Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:38.710216Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21401 TClient is connected to server localhost:21401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:39.247519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:39.259765Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:39.270593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:39.413504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
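The PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION failures recorded here ("Conflict with existing key., code: 2012" above, "Duplicated keys found., code: 2012" in the second run below) are the outcome the KqpUniqueIndex tests deliberately provoke. A minimal YQL sketch of that failure mode; TestTable, Fk, and FkIndex are hypothetical names for illustration, not the schema from the test sources:

    -- Hypothetical schema with a unique secondary index on Fk.
    CREATE TABLE TestTable (
        Key Uint64,
        Fk Uint64,
        Value String,
        PRIMARY KEY (Key),
        INDEX FkIndex GLOBAL UNIQUE SYNC ON (Fk)
    );

    UPSERT INTO TestTable (Key, Fk, Value) VALUES (1u, 10u, "a");

    -- Reusing Fk = 10u under a different primary key violates the unique
    -- index, so the statement fails with PRECONDITION_FAILED (code 2012).
    INSERT INTO TestTable (Key, Fk, Value) VALUES (2u, 10u, "b");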
2025-04-10T11:19:39.609979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:39.704390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:42.535404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642661247507646:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.535507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.604216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.680686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.727203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.764116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.804726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.878874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.962566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642661247508171:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.962682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.962969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642661247508176:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.968013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:42.978535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642661247508178:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:43.069701Z node 2 :TX_PROXY ERROR: Actor# [2:7491642665542475530:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:44.021357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:45.494634Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642674132411595:2597], TxId: 281474976715676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGU0MWM1ZDQtZDY1MDAwYjItNDA3YWU2ZGYtZjgwMDgwNGM=. CustomerSuppliedId : . TraceId : 01jrfpts71f7z5qxvgprq53022. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:19:45.494822Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642674132411597:2598], TxId: 281474976715676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGU0MWM1ZDQtZDY1MDAwYjItNDA3YWU2ZGYtZjgwMDgwNGM=. CustomerSuppliedId : . TraceId : 01jrfpts71f7z5qxvgprq53022. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491642674132411592:2555], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:19:45.494998Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU0MWM1ZDQtZDY1MDAwYjItNDA3YWU2ZGYtZjgwMDgwNGM=, ActorId: [2:7491642669837443884:2555], ActorState: ExecuteState, TraceId: 01jrfpts71f7z5qxvgprq53022, Create QueryResponse for error on request, msg: >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::WriteNonExistingTopic >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-04-10T11:19:14.111989Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.112015Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.112036Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.113922Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.131555Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.131706Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.132446Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.132983Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.138301Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.138463Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.138537Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T11:19:14.139460Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.139484Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.139510Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.139826Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.141354Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.141884Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.142082Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.142532Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.143754Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.143870Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.143923Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-04-10T11:19:14.144921Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.144952Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.144975Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.145264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.145814Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.145940Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.146183Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.146941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.149480Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.149575Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.149611Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T11:19:14.150932Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.150970Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.150991Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.151318Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.151970Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.152073Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.157655Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.159447Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.160093Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.160219Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.160269Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T11:19:14.161404Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.161428Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.161479Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.162394Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T11:19:14.163085Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.163217Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.163424Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.163799Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.163926Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.164070Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.164121Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T11:19:14.164783Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.164813Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.164841Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.165164Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.165788Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.165885Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.166130Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.166559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.167004Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.167230Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.167272Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T11:19:14.168351Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.168373Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.168398Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.168703Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.169367Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.169472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.169743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.170445Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.170639Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.170747Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-10T11:19:14.170786Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-10T11:19:14.171777Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.171800Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.171834Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:14.172118Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:14.172573Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:14.172649Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.172854Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:14.174472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:14.174892Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:14.174980Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:14.175022Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-10T11:19:14.180832Z :ReadSession INFO: Random seed for debugging is 1744283954180802 2025-04-10T11:19:14.640786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642539600403293:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.640877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:14.715859Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642539847400182:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.731029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... message-group-id" CreateTime: 2025-04-10T11:19:33.920000Z WriteTime: 2025-04-10T11:19:33.935000Z Ip: "ipv6:[::1]:35566" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:35566" } } } 2025-04-10T11:19:33.952978Z :DEBUG: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-04-10T11:19:33.953158Z :DEBUG: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-10T11:19:33.954108Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 grpc read done: success# 1, data# { read { } } 2025-04-10T11:19:33.954263Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 got read request: guid# b2bb3a8c-b3e63d97-28447758-f3f9dfb1 2025-04-10T11:19:33.954536Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-04-10T11:19:33.954637Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-10T11:19:33.954660Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-10T11:19:33.954697Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-04-10T11:19:33.956627Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T11:19:33.956666Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T11:19:33.956769Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_8652399531054395509_v1 2025-04-10T11:19:33.956864Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:19:33.956881Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:19:33.956896Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:19:33.956913Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:19:33.956927Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:19:33.956938Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:19:33.956953Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:19:33.956970Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:19:33.956995Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:19:33.959809Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:19:33.959854Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:19:33.959902Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-10T11:19:33.966394Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-10T11:19:33.966446Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-04-10T11:19:33.966500Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-04-10T11:19:33.967523Z :DEBUG: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-04-10T11:19:34.021859Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0] Write session will now close 2025-04-10T11:19:34.021937Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0] Write session: aborting 2025-04-10T11:19:34.022542Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:19:34.022586Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0] Write session: destroy 2025-04-10T11:19:34.042989Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0 grpc read done: success: 0 data: 2025-04-10T11:19:34.043022Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0 grpc read failed 2025-04-10T11:19:34.043048Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0 grpc closed 2025-04-10T11:19:34.043063Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a8522c76-1da7bed5-7c15501e-6cd20525_0 is DEAD 2025-04-10T11:19:34.043730Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:19:34.044251Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491642621204784967:2629] destroyed 2025-04-10T11:19:34.044306Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-10T11:19:36.386419Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-10T11:19:43.950281Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-10T11:19:44.024833Z :INFO: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] Closing read session. Close timeout: 0.000000s 2025-04-10T11:19:44.024918Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-10T11:19:44.024954Z :INFO: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16784 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:19:44.025042Z :NOTICE: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T11:19:44.025081Z :DEBUG: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] [dc1] Abort session to cluster 2025-04-10T11:19:44.025602Z :NOTICE: [/Root] [/Root] [5001a03d-1a151996-27946fc-f4569e97] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:19:44.026999Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 grpc read done: success# 0, data# { } 2025-04-10T11:19:44.027224Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 grpc read failed 2025-04-10T11:19:44.029151Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_8652399531054395509_v1 2025-04-10T11:19:44.029204Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491642595434980664:2546] destroyed 2025-04-10T11:19:44.029421Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_8652399531054395509_v1 2025-04-10T11:19:44.027265Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 grpc closed 2025-04-10T11:19:44.027303Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8652399531054395509_v1 is DEAD 2025-04-10T11:19:44.029317Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7491642595434980656:2543] disconnected; active server actors: 1 2025-04-10T11:19:44.029347Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7491642595434980656:2543] client user disconnected session shared/user_1_1_8652399531054395509_v1 2025-04-10T11:19:45.452402Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:45.452430Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:45.452456Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:19:45.452943Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:19:45.453549Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:19:45.453798Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:45.454000Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:19:45.454769Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:45.455264Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:19:45.462474Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-10T11:19:45.462581Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:19:45.462640Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:19:45.462683Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-10T11:19:45.462862Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T11:19:45.462924Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 496 nonWorkingDomain# 0 93024 diskMask# 496 nonWorkingDomain# 1 23040 diskMask# 497 nonWorkingDomain# 0 39096 diskMask# 497 nonWorkingDomain# 1 69264 diskMask# 498 nonWorkingDomain# 0 66300 diskMask# 498 nonWorkingDomain# 1 8640 diskMask# 499 nonWorkingDomain# 0 22680 diskMask# 499 nonWorkingDomain# 1 60432 diskMask# 500 nonWorkingDomain# 0 66300 diskMask# 500 nonWorkingDomain# 1 23040 diskMask# 501 nonWorkingDomain# 0 22680 diskMask# 501 nonWorkingDomain# 1 69264 diskMask# 502 nonWorkingDomain# 0 29970 diskMask# 502 nonWorkingDomain# 1 8640 diskMask# 503 nonWorkingDomain# 0 9972 diskMask# 503 nonWorkingDomain# 1 60432 diskMask# 504 nonWorkingDomain# 0 42456 diskMask# 504 nonWorkingDomain# 1 84264 diskMask# 505 nonWorkingDomain# 0 24480 diskMask# 505 nonWorkingDomain# 1 75432 diskMask# 506 nonWorkingDomain# 0 24480 diskMask# 506 nonWorkingDomain# 1 75432 diskMask# 507 nonWorkingDomain# 0 10212 diskMask# 507 nonWorkingDomain# 1 45816 diskMask# 508 nonWorkingDomain# 0 24480 diskMask# 508 nonWorkingDomain# 1 84264 diskMask# 509 nonWorkingDomain# 0 10212 diskMask# 509 nonWorkingDomain# 1 75432 diskMask# 510 nonWorkingDomain# 0 10212 diskMask# 510 nonWorkingDomain# 1 75432 diskMask# 511 nonWorkingDomain# 0 3942 diskMask# 511 nonWorkingDomain# 1 45816 diskMask# 1 nonWorkingDomain# 0 368640 diskMask# 1 nonWorkingDomain# 1 336960 diskMask# 2 nonWorkingDomain# 0 368640 |90.5%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 65370, MsgBus: 12696 2025-04-10T11:19:31.387343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642613187634237:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:31.387657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfd/r3tmp/tmpTVC97W/pdisk_1.dat 2025-04-10T11:19:31.983596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:32.020306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:32.020391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:32.022130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65370, node 1 2025-04-10T11:19:32.278670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:32.278688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:32.278694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:32.278787Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12696 TClient is connected to server localhost:12696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:33.145875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.182955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:33.204417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
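Aside on the DSProxyStrategyTest::Restore_mirror3dc output above: it sweeps diskMask# from 0 through 511, i.e. every failure pattern of the 9 VDisks in a mirror-3dc group (3 datacenters x 3 disks each), once per nonWorkingDomain value. A minimal sketch of that enumeration, assuming disk i belongs to datacenter i / 3 (the layout is an assumption, and the large number printed after each mask in the log is an internal permutation count the unit test computes, not reproduced here):

    #include <bitset>
    #include <cstdio>

    // Enumerate all 9-bit disk-failure masks of an assumed mirror-3dc
    // layout (3 datacenters x 3 disks, disk i in datacenter i / 3) and
    // report how many disks are down per datacenter for each mask.
    int main() {
        for (unsigned diskMask = 0; diskMask < 512; ++diskMask) {
            std::bitset<9> failed(diskMask);
            int perDc[3] = {0, 0, 0};
            for (int i = 0; i < 9; ++i) {
                if (failed[i]) ++perDc[i / 3];
            }
            std::printf("diskMask# %u failedPerDc# %d/%d/%d\n",
                        diskMask, perDc[0], perDc[1], perDc[2]);
        }
        return 0;
    }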
2025-04-10T11:19:33.421339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.606361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.695557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.612624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642630367505051:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:35.612781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:35.996810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.082856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.137567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.185654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.253529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.332848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.386334Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642613187634237:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:36.386446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:36.391891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642634662472866:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:36.392002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:36.392289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642634662472871:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:36.396979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:36.411454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642634662472873:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:36.469438Z node 1 :TX_PROXY ERROR: Actor# [1:7491642634662472930:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:37.733175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:38.824744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7820, MsgBus: 24407 2025-04-10T11:19:40.262769Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642651079848942:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:40.262831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfd/r3tmp/tmppYysoA/pdisk_1.dat 2025-04-10T11:19:40.535408Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:40.565495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:40.565764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:40.567864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7820, node 2 2025-04-10T11:19:40.706335Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:40.706360Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:40.706369Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:40.706537Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24407 TClient is connected to server localhost:24407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:19:41.420130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:41.439293Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:41.449695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.523179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.719840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.799043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.216685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642668259719859:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.216759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.268575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.301650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.334842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.406662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.445417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.529561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.621005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642668259720384:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.621087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.621122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642668259720389:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.624679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:44.636647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642668259720391:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:44.692766Z node 2 :TX_PROXY ERROR: Actor# [2:7491642668259720445:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:45.266571Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642651079848942:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.280349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:45.642717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13063, MsgBus: 63711 2025-04-10T11:19:34.431445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642624377925854:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.432096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfa/r3tmp/tmpT9AMFA/pdisk_1.dat 2025-04-10T11:19:34.978401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:34.978573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:34.983482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:35.016717Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13063, node 1 2025-04-10T11:19:35.197940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:35.197961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:35.197977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:35.198109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63711 TClient is connected to server localhost:63711 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:35.877526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.906963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:35.925292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.162827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.356908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.461689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:38.477747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642641557796816:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.477866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.810695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.858039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.907184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.949249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.036142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.127474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.226507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642645852764636:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.226589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.226950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642645852764641:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.231179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:39.253791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642645852764643:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:39.329448Z node 1 :TX_PROXY ERROR: Actor# [1:7491642645852764698:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:39.432897Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642624377925854:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.432965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:40.620768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10194, MsgBus: 17007 2025-04-10T11:19:42.390486Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642660642651081:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfa/r3tmp/tmpkKjQa8/pdisk_1.dat 2025-04-10T11:19:42.440475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:42.573000Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:42.599251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:42.599353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:42.600679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10194, node 2 2025-04-10T11:19:42.686834Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:42.686856Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:42.686867Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:42.686988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17007 TClient is connected to server localhost:17007 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:43.134904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.149710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.224575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.413973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.496827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.016764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642677822521878:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:46.016874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:46.080472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.135939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.195663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.242013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.283485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.373237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.447740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642677822522393:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:46.447843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:46.448187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642677822522398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:46.452140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:46.467769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642677822522400:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:46.529472Z node 2 :TX_PROXY ERROR: Actor# [2:7491642677822522455:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:47.365276Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642660642651081:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:47.365370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:47.644144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 8429, MsgBus: 10990 2025-04-10T11:19:29.387719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642605440219067:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.388019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0c/r3tmp/tmpSLLlSA/pdisk_1.dat 2025-04-10T11:19:29.954523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:29.956084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.956181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.980310Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8429, node 1 2025-04-10T11:19:30.207338Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.207360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.207366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.207479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10990 TClient is connected to server localhost:10990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.244457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.305032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.543202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.721168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.795692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.358223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642622620089865:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.358445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.171851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.249773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.323116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.365760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.386459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642605440219067:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.387818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.439807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.517923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.614307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642626915057691:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.614454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.622291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642626915057696:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.628993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.655355Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:19:34.655825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642626915057698:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.716482Z node 1 :TX_PROXY ERROR: Actor# [1:7491642626915057754:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.780903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:37.793366Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfpth9wb1hjqmykxp52reef, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBjMjhmOGMtNzJjNTEwZi1lNDEyOWI2MC00MWQ3MzE5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:19:37.806270Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBjMjhmOGMtNzJjNTEwZi1lNDEyOWI2MC00MWQ3MzE5Mw==, ActorId: [1:7491642635504993449:2559], ActorState: ExecuteState, TraceId: 01jrfpth9wb1hjqmykxp52reef, Create QueryResponse for error on request, msg: 2025-04-10T11:19:38.544448Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfptj2qeafaw76fvn3hs8f1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBjMjhmOGMtNzJjNTEwZi1lNDEyOWI2MC00MWQ3MzE5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:19:38.544596Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBjMjhmOGMtNzJjNTEwZi1lNDEyOWI2MC00MWQ3MzE5Mw==, ActorId: [1:7491642635504993449:2559], ActorState: ExecuteState, TraceId: 01jrfptj2qeafaw76fvn3hs8f1, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24445, MsgBus: 27244 2025-04-10T11:19:39.618139Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642646504640213:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.646489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e0c/r3tmp/tmpIEzbdf/pdisk_1.dat 2025-04-10T11:19:39.787969Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:39.791584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:39.791666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:39.793490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24445, node 2 2025-04-10T11:19:39.938651Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:39.938682Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:39.938691Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:39.938766Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27244 TClient is 
connected to server localhost:27244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:40.486513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.495264Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:40.510684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.628325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.828530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.917598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.424960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642663684510997:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.425085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.487300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.560517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.634984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.672570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.699740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.732484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.782939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642663684511513:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.783018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.783230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642663684511518:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.786526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:43.796496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642663684511520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:43.893825Z node 2 :TX_PROXY ERROR: Actor# [2:7491642663684511578:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:44.572455Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642646504640213:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:44.572541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:44.772470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.922649Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfptt3p7w97z5811z4kkhsr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzhhMzMxNTYtZGRhYjhjNDEtM2QyMjJlZjEtMjIyYmYyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:19:46.922927Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzhhMzMxNTYtZGRhYjhjNDEtM2QyMjJlZjEtMjIyYmYyYmU=, ActorId: [2:7491642672274447264:2555], ActorState: ExecuteState, TraceId: 01jrfptt3p7w97z5811z4kkhsr, Create QueryResponse for error on request, msg: 2025-04-10T11:19:47.847973Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfpttzr9drf3k10g6n7pa9k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzhhMzMxNTYtZGRhYjhjNDEtM2QyMjJlZjEtMjIyYmYyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
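For context on the TKqpEnsure failures above: KqpUniqueIndex::ReplaceFkDuplicate deliberately writes a second row whose fk value is already present in a unique secondary index, and the commit is rejected. A hypothetical reproduction against a running cluster using the C++ SDK; the endpoint, table, column, and index names are illustrative (not taken from the test), and the exact error status on a duplicate is an assumption:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <util/stream/output.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        // Endpoint and database are assumptions for a local cluster.
        TDriver driver(TDriverConfig()
                           .SetEndpoint("localhost:2136")
                           .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // Assumes `/Root/TestTable` already has a unique secondary index
        // on fk and an existing row with fk = 1000000000u.
        const char* query = R"(
            UPSERT INTO `/Root/TestTable` (pk, fk, value)
            VALUES (2u, 1000000000u, "duplicate-fk");
        )";
        auto result = session.ExecuteDataQuery(
            query, TTxControl::BeginTx().CommitTx()).GetValueSync();

        // On a duplicate fk the commit should fail (seen server-side as
        // "TKqpEnsure failed"); print the status and issues for inspection.
        Cerr << "status: " << static_cast<int>(result.GetStatus()) << Endl;
        Cerr << result.GetIssues().ToString() << Endl;
        driver.Stop(true);
        return result.IsSuccess() ? 1 : 0;
    }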
2025-04-10T11:19:47.848234Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzhhMzMxNTYtZGRhYjhjNDEtM2QyMjJlZjEtMjIyYmYyYmU=, ActorId: [2:7491642672274447264:2555], ActorState: ExecuteState, TraceId: 01jrfpttzr9drf3k10g6n7pa9k, Create QueryResponse for error on request, msg: >> YdbTableSplit::RenameTablesAndSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] Test command err: Trying to start YDB, gRPC: 1535, MsgBus: 13130 2025-04-10T11:19:29.375979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642605562624427:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.376118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e05/r3tmp/tmp5bdMU7/pdisk_1.dat 2025-04-10T11:19:30.115727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:30.187082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:30.187214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:30.188950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1535, node 1 2025-04-10T11:19:30.300642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.300663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.300677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.300807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13130 TClient is connected to server localhost:13130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.453669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.486327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
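The recurring triple seen throughout this log -- a "Resource pool default not found or you don't have access permissions" warning, a TPoolCreatorActor "Scheduled retry for error: ... completed, doublechecking", then a TX_PROXY issue "path exist, request accepts it" -- appears to be the first-query race to auto-create /Root/.metadata/workload_manager/pools/default: concurrent sessions all attempt the creation, one wins, and the losers accept the already-existing path. A minimal sketch of that create-if-absent retry shape (the enum and callback are placeholders, not the actor's real interface):

    #include <functional>

    enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

    // Simplified shape of the pool-creation logic visible in the log:
    // concurrent creators are expected, so "already exists" counts as
    // success, and transient "completed, doublechecking" outcomes are
    // retried a bounded number of times.
    EStatus EnsureDefaultPool(const std::function<EStatus()>& createPool,
                              int maxRetries = 5) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            switch (createPool()) {
                case EStatus::Ok:
                case EStatus::AlreadyExists:  // lost the race: pool exists, fine
                    return EStatus::Ok;
                case EStatus::Retryable:      // "completed, doublechecking"
                    continue;                 // scheduled retry
                case EStatus::NotFound:
                    return EStatus::NotFound; // unexpected; surface to caller
            }
        }
        return EStatus::Retryable;            // retries exhausted
    }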
2025-04-10T11:19:31.742326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.997057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:32.148322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.740022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642622742495403:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.740135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.171696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.216302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.252220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.285813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.344454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.376290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642605562624427:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.376345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.436480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.538265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642627037463222:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.538326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.538592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642627037463227:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.541704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.552448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642627037463229:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.642534Z node 1 :TX_PROXY ERROR: Actor# [1:7491642627037463285:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.789087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 63191, MsgBus: 24328 2025-04-10T11:19:39.894631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642646557298097:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.894688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e05/r3tmp/tmpLBslky/pdisk_1.dat 2025-04-10T11:19:40.060098Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:40.077093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:40.077184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:40.078594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63191, node 2 2025-04-10T11:19:40.245683Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:40.245706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:40.245716Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:40.245835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24328 TClient is connected to server localhost:24328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:41.003921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:41.011506Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:41.028441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.115104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.334412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.413891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:43.800995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642663737169031:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.801090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:43.861097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.936090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.972313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.006748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.040032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.114327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.169196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642668032136848:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.169283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.169541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642668032136853:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.173060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:44.182442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642668032136855:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:44.243256Z node 2 :TX_PROXY ERROR: Actor# [2:7491642668032136907:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:44.898252Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642646557298097:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:44.898333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:45.177244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10620, MsgBus: 15052 2025-04-10T11:19:29.381775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642602721908161:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.386803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e13/r3tmp/tmpqVOUL4/pdisk_1.dat 2025-04-10T11:19:29.957501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.957641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.980952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:29.982975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10620, node 1 2025-04-10T11:19:30.220158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.220194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.220203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.220398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15052 TClient is connected to server localhost:15052 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.242346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.278880Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:31.297411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.540124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.782489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.911357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.484330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642619901778904:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.484443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.167200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.209985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.286958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.370837Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642602721908161:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.370904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.373319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.416773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.486886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.546098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642624196746727:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.546262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.546494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642624196746732:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.551040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.562135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642624196746734:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.647415Z node 1 :TX_PROXY ERROR: Actor# [1:7491642624196746788:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.825439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4752, MsgBus: 28194 2025-04-10T11:19:37.703533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642636857663140:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:37.703618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e13/r3tmp/tmpZCRAM2/pdisk_1.dat 2025-04-10T11:19:37.816391Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:37.851000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:37.851081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:37.859278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4752, node 2 2025-04-10T11:19:37.914719Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:37.914741Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:37.914748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:37.914866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28194 TClient is connected to server localhost:28194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:38.383471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:38.392528Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:38.404994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:38.510880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-1 ... self is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:41.398749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:41.477335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642654037534616:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:41.477455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:41.477729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642654037534621:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:41.480891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:41.491004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642654037534623:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:41.575239Z node 2 :TX_PROXY ERROR: Actor# [2:7491642654037534679:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:42.665849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.706873Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642636857663140:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:42.709234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:42.728119Z node 2 :TX_PROXY ERROR: Actor# [2:7491642658332502331:3693] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:42.749542Z node 2 :TX_PROXY ERROR: Actor# [2:7491642658332502348:3701] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:42.763599Z node 2 :TX_PROXY ERROR: Actor# [2:7491642658332502374:3725] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6785, MsgBus: 27229 2025-04-10T11:19:43.617645Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642662251764094:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:43.617689Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e13/r3tmp/tmphgBTps/pdisk_1.dat 2025-04-10T11:19:43.724505Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:43.745028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:43.745112Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:43.746490Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6785, node 3 2025-04-10T11:19:43.786622Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:43.786644Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:43.786652Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:43.786761Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:27229 TClient is connected to server localhost:27229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:44.221049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.238017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.298931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.469470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.575988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:47.124051Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642679431635047:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.124144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.178245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.236153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.281101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.322303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.359594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.408805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.520892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642679431635560:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.520994Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.521436Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642679431635565:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.524922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:47.542127Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642679431635567:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:47.640977Z node 3 :TX_PROXY ERROR: Actor# [3:7491642679431635623:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:48.618278Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642662251764094:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:48.618328Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:48.740925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:50.364776Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-04-10T11:18:19.754209Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642302047719425:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:19.754296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d8c/r3tmp/tmpwpjeNP/pdisk_1.dat 2025-04-10T11:18:20.088351Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:20.141087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:20.141166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6875, node 1 2025-04-10T11:18:20.152860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:20.258221Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:20.258245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:20.258253Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:20.258359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24638 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:20.689287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:22.160875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642314932622350:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.161463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.528283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.531053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:22.531097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.534400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-10T11:18:22.611375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744283902655, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:22.658029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-10T11:18:22.670593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642314932622588:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.670684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:22.697343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.697908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:22.697940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:18:22.700285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-04-10T11:18:22.712282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744283902760, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:22.719167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-04-10T11:18:24.754399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642302047719425:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:24.754491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-04-10T11:18:32.686347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:32.688946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:32.752111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-10T11:18:32.760868Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:18:32.760912Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found partitions 1 2025-04-10T11:18:34.817740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:34.818079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:34.821846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-04-10T11:18:34.837201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284394881, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:34.843013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 2025-04-10T11:18:34.848629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-10T11:18:34.852588Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:18:35.087014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:35.087039Z node 1 :IMPORT WARN: Table profiles were not loaded Fast forward 1m 2025-04-10T11:18:37.818062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T11:18:37.818222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 0 row count 0 2025-04-10T11:18:37.818301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:18:37.818528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 partitions 1 Fast forward 1m 2025-04-10T11:18:42.717936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.405 2025-04-10T11:18:42.817726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T11:18:42.817857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-04-10T11:18:42.817926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0, RowCount 50000, DataSize 6841088 2025-04-10T11:18:42.818071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Requesting full stats from datashard 72075186224037890 2025-04-10T11:18:42.818346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T11:18:42.820910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037890 state: 'Ready' data size: 6841088 row count: 50000 2025-04-10T11:18:42.820977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPartitionHistogram::Execute partition histogram at tablet 72057594046644480 from datashard 72075186224037890 for pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 dataSizeHistogram buckets 0 2025-04-10T11:18:42.821046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Failed to find proper split key (initially) for 'Split by size' of datashard 72075186224037890 partitions 1 Fast forward 1m partitions 1 Fast forward 1m 2025-04-10T11:18:47.718591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.1014 2025-04-10T11:18:47.822591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T11:18:47.822700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSi ... 
75186224037891 parts [ [72075186224037890:1:119:1:12288:11448:0] ] return ack processed 2025-04-10T11:19:50.808110Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:19:50.808160Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-10T11:19:50.808948Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:19:50.809020Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:119:1:12288:11448:0] ] return ack processed 2025-04-10T11:19:50.809045Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:19:50.809114Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-10T11:19:50.810828Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7491642692889748703:2741], serverId# [1:7491642692889748712:4761], sessionId# [0:0:0] 2025-04-10T11:19:50.810868Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7491642692889748701:2739], serverId# [1:7491642692889748711:4760], sessionId# [0:0:0] 2025-04-10T11:19:50.811725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642357882295956 RawX2: 4503603922340181 } TabletId: 72075186224037890 State: 4 2025-04-10T11:19:50.811797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.812004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642357882295956 RawX2: 4503603922340181 } TabletId: 72075186224037890 State: 4 2025-04-10T11:19:50.812033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.814101Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:50.814180Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:50.814230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-04-10T11:19:50.814266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-04-10T11:19:50.814291Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:50.814318Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:19:50.814435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.814504Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.814761Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:19:50.814781Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:19:50.816588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642680004846328 RawX2: 4503603922340497 } TabletId: 72075186224037891 State: 4 2025-04-10T11:19:50.816652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.816852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642680004846328 RawX2: 4503603922340497 } TabletId: 72075186224037891 State: 4 2025-04-10T11:19:50.816890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.816979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642680004846330 RawX2: 4503603922340498 } TabletId: 72075186224037892 State: 4 2025-04-10T11:19:50.817006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.817079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491642680004846330 RawX2: 4503603922340498 } TabletId: 72075186224037892 State: 4 2025-04-10T11:19:50.817106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:19:50.820277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.820368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.820422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.820460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:19:50.820596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:19:50.820844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:19:50.821028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:19:50.821381Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:19:50.821421Z node 1 :TX_DATASHARD DEBUG: Handle 
TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:19:50.821443Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-10T11:19:50.821457Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-10T11:19:50.821538Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:19:50.822476Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:19:50.822604Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-10T11:19:50.823313Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T11:19:50.827656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:19:50.827702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:19:50.827771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:19:50.828624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:19:50.828925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:19:50.829195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:19:50.829365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T11:19:50.829568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:19:50.829693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T11:19:50.829841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:19:50.829875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:19:50.829953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T11:19:50.831140Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-10T11:19:50.831185Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [1:7491642692889748618:4681], serverId# [1:7491642692889748619:4682], sessionId# [0:0:0] 2025-04-10T11:19:50.831217Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-10T11:19:50.832789Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-10T11:19:50.832885Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:19:50.833089Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-10T11:19:50.833114Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-10T11:19:50.834829Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-10T11:19:50.834905Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-10T11:19:50.835583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:19:50.835602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:19:50.835669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:19:50.836101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T11:19:50.836126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-10T11:19:50.836161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T11:19:50.836198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> KqpMultishardIndex::DataColumnWrite+UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink >> DemoTx::Scenario_5 [GOOD] >> KqpIndexes::DeleteByIndex [GOOD] |90.5%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 18521, MsgBus: 12751 2025-04-10T11:19:29.367190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642602168332467:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.368323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e02/r3tmp/tmpHyTacn/pdisk_1.dat 2025-04-10T11:19:30.016186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:30.016297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:30.022001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:30.026669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18521, node 1 2025-04-10T11:19:30.204619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.204653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.205668Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-04-10T11:19:30.205874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12751 TClient is connected to server localhost:12751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.461958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.486104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:31.502359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.716230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.927342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:32.021063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.724109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642619348203413:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.724222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.169458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.210703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.266268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.301491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.343688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.370310Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642602168332467:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.370418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.414567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.480907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623643171223:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.480980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.481215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623643171228:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.484576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.494440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642623643171230:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.584467Z node 1 :TX_PROXY ERROR: Actor# [1:7491642623643171285:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.781507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 21651, MsgBus: 31533 2025-04-10T11:19:40.693769Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642649559754765:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:40.693802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e02/r3tmp/tmpDPK3C3/pdisk_1.dat 2025-04-10T11:19:40.876245Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21651, node 2 2025-04-10T11:19:40.975765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:40.975849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:40.977139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:40.994930Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:40.994954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:40.994966Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:40.995108Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31533 TClient is connected to server localhost:31533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:41.539222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:41.548393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.561946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:41.667568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.872609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:42.006943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.278477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642666739625694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.278583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.320707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.355006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.403937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.441677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.492018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.563155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.615632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642666739626210:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.615748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.616090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642666739626215:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.619847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:44.631602Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:19:44.638845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642666739626217:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:44.704222Z node 2 :TX_PROXY ERROR: Actor# [2:7491642666739626270:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:45.600233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:45.807493Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642649559754765:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.810858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:52.589194Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfptz8q3ep2zq1fm9tj34zv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2U3ZmQzMy00ZDZmMWZkOC1mZmNmMTY4Mi0zMmVhNjFkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-10T11:19:52.612383Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U3ZmQzMy00ZDZmMWZkOC1mZmNmMTY4Mi0zMmVhNjFkMA==, ActorId: [2:7491642671034594649:2555], ActorState: ExecuteState, TraceId: 01jrfptz8q3ep2zq1fm9tj34zv, Create QueryResponse for error on request, msg: |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 26352, MsgBus: 30232 2025-04-10T11:19:29.381289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642604408753298:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.381854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e03/r3tmp/tmpUU2vrJ/pdisk_1.dat 2025-04-10T11:19:30.040372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:30.040484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:30.046847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26352, node 1 2025-04-10T11:19:30.079515Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:30.079549Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:30.084840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:30.278693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-10T11:19:30.278718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.278734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.278855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30232 TClient is connected to server localhost:30232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.246990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.294110Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:31.314074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.538346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.839805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.999827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.634093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642621588624115:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.634332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.168502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.215128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.257643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.318831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.354108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.358940Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642604408753298:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.359006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.437244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.530903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642625883591938:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.530991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.531424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642625883591943:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.535763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.554355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642625883591945:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.648298Z node 1 :TX_PROXY ERROR: Actor# [1:7491642625883592002:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.884139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:36.051657Z node 1 :TX_PROXY ERROR: Actor# [1:7491642634473527176:3825] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18837, MsgBus: 4011 2025-04-10T11:19:38.163068Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642642341784729:2175];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e03/r3tmp/tmpo0NCIQ/pdisk_1.dat 2025-04-10T11:19:38.218429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:38.340878Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:38.359032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:38.359130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:38.362551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18837, node 2 2025-04-10T11:19:38.462980Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:38.463004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:38.463013Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:38.463152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4011 TClient is connected to server localhost:4011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:19:39.104636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:39.140825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.159247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.240028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.321329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642659521656079:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.321470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.321713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642659521656085:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:42.325626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:42.346412Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:19:42.346676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642659521656087:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:42.436421Z node 2 :TX_PROXY ERROR: Actor# [2:7491642659521656142:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:43.139381Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642642341784729:2175];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:43.139445Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:43.436337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:43.535308Z node 2 :TX_PROXY ERROR: Actor# [2:7491642663816624004:3810] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:44.422280Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:44.450323Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 18541, MsgBus: 24233 2025-04-10T11:19:45.301964Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642671105730811:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.302064Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e03/r3tmp/tmpqg7MV4/pdisk_1.dat 2025-04-10T11:19:45.432624Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:45.459013Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:45.459115Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:45.463905Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18541, node 3 2025-04-10T11:19:45.510902Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:45.510932Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:45.510940Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:45.511082Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24233 TClient is connected to server localhost:24233 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:46.019736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.028466Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:46.044823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.148279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.382322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.474183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:49.222346Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642688285601774:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.222462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.260311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.306694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.357153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.427850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.498103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.581017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.684211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642688285602301:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.684392Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.684765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642688285602306:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.688671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:49.703564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642688285602308:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:49.779716Z node 3 :TX_PROXY ERROR: Actor# [3:7491642688285602363:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:50.302164Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642671105730811:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:50.302272Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:51.016637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> TStorageTenantTest::Empty [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> TFstClassSrcIdPQTest::TestTableCreated |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] |90.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> TopicService::UseDoubleSlashInTopicPath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17590, MsgBus: 18726 2025-04-10T11:19:29.363956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642603516173398:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.364082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfe/r3tmp/tmpxScLzU/pdisk_1.dat 2025-04-10T11:19:29.988085Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:29.996993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.997126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.999696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17590, node 1 2025-04-10T11:19:30.216096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.216116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.216122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.216232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18726 TClient is connected to server localhost:18726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.244005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:31.278740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:31.300742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.589587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.901699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.989872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.467671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642620696044197:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.467768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.167445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.212965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.256362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.296896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.343543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.362372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642603516173398:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.362437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.426265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.504762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642624991012015:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.504835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.505053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642624991012020:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.509402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.527981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642624991012022:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.615370Z node 1 :TX_PROXY ERROR: Actor# [1:7491642624991012077:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.874233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 820 cpu_time_us: 820 } query_phases { duration_us: 2912 cpu_time_us: 2912 } query_phases { duration_us: 12274 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 11212 affected_shards: 1 } query_phases { duration_us: 1849 cpu_time_us: 1849 } query_phases { duration_us: 6367 cpu_time_us: 6367 } query_phases { duration_us: 5837 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 5162 } query_phases { duration_us: 11202 cpu_time_us: 11202 } query_phases { duration_us: 15398 cpu_time_us: 15398 } query_phases { duration_us: 15936 cpu_time_us: 6420 } query_phases { duration_us: 10293 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 6577 affected_shards: 2 } compilation { duration_us: 1089748 cpu_time_us: 1065655 } process_cpu_time_us: 32575 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744283977140,\"TaskId\":1,\"Host\":\"ghrun-x5vqckr7qy\",\"ComputeTimeUs\":84}],\"CpuTimeUs\":819}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1744283977139,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":819,\"Max\":819,\"Min\":819}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1744283977139,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1744283977139,\"TaskId\":2,\"Host\":\"ghrun-x5vqckr7qy\",\"ComputeTimeUs\":140}],\"CpuTimeUs\":595}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1744283977139,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":595,\"Max\":595,\"Min\":595}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":39,\"Subplan Name\":\"CTE precompute_8_0\",\"Plans\":[{\"PlanNodeId\":38,\"Plans\":[{\"PlanNodeId\":37,\"Plans\":[{\"Pl ... \"Key\" (Member $147 \'\"Key\")) \'(\'\"Value\" (Member $147 \'\"Value\")) \'(\'\"fk1\" (Member $147 \'\"fk1\")) \'(\'\"fk3\" (Member $147 \'\"fk3\"))))) \'0 $138))\n (let $140 (OrderedFilter $137 (lambda \'($148) (And (Exists (Member $148 \'\"fk1\")) (Exists (Member $148 \'\"fk2\")) (Exists (Member $148 \'\"fk3\"))))))\n (let $141 (lambda \'($150) (Void)))\n (let $142 (ToDict $140 (lambda \'($149) (AsStruct \'(\'\"fk1\" (Member $149 \'\"fk1\")) \'(\'\"fk2\" (Member $149 \'\"fk2\")) \'(\'\"fk3\" (Member $149 \'\"fk3\")))) $141 $35))\n (let $143 (Variant (DictKeys $142) \'1 $138))\n (let $144 (Variant (== (Length $140) (Length $142)) \'2 $138))\n (let $145 (ToDict $137 (lambda \'($151) (AsStruct \'(\'\"Key\" (Member $151 \'\"Key\")))) $141 $35))\n (let $146 (Variant $145 \'\"3\" $138))\n (return (Iterator (AsList $139 $143 $144 $146)))\n))) \'(\'(\'\"_logical_id\" \'4308) \'(\'\"_id\" \'\"5d9a6181-cbc41206-57e81dce-c9243a68\"))))\n(let $59 (DqCnValue (TDqOutput $58 \'\"3\")))\n(let $60 (DqCnValue (TDqOutput $58 \'0)))\n(let $61 (DqCnValue (TDqOutput $58 \'2)))\n(let $62 (DqCnValue (TDqOutput $58 \'1)))\n(let $63 \'($59 $60 $61 $62))\n(let $64 (KqpTxResultBinding $54 \'\"3\" \'0))\n(let $65 (KqpPhysicalTx \'($58) $63 \'(\'($53 $64)) $3))\n(let $66 \'\"%kqp%tx_result_binding_4_3\")\n(let $67 (DqPhyStage \'() (lambda \'() (Iterator %kqp%tx_result_binding_4_3)) \'(\'(\'\"_logical_id\" \'4703) \'(\'\"_id\" \'\"2f9e9675-87f97895-7fd76dbd-12ad3f7e\"))))\n(let $68 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $69 (KqpCnStreamLookup (TDqOutput $67 \'0) $68 \'(\'\"Key\") $55 $24))\n(let $70 \'\"%kqp%tx_result_binding_4_0\")\n(let $71 (Bool \'false))\n(let $72 (DqPhyStage \'($69) (lambda \'($152) (Map (Filter (Take $152 (Uint64 \'1)) (lambda \'($153) (Not (Contains %kqp%tx_result_binding_4_0 $153)))) (lambda \'($154) $71))) \'(\'(\'\"_logical_id\" \'4691) \'(\'\"_id\" \'\"3c83914b-d280002d-278690b1-4956b989\"))))\n(let $73 (DqCnUnionAll (TDqOutput $72 \'0)))\n(let $74 (Bool \'true))\n(let $75 (DqPhyStage \'($73) (lambda \'($155) (block \'(\n (let $156 (lambda \'($157 $158) $71))\n (return (FromFlow (Condense (ToFlow $155) $74 $156 $156)))\n))) \'(\'(\'\"_logical_id\" \'4671) \'(\'\"_id\" \'\"7d2b2653-fd8b1404-eabc2dd-307afdb4\"))))\n(let $76 \'($67 $72 $75))\n(let $77 (DqCnValue (TDqOutput $75 \'0)))\n(let $78 (KqpTxResultBinding $57 \'\"4\" \'0))\n(let $79 (KqpTxResultBinding $55 \'\"4\" \'\"3\"))\n(let $80 (KqpPhysicalTx $76 \'($77) \'(\'($70 $78) \'($66 $79)) $41))\n(let $81 \'\"%kqp%tx_result_binding_4_2\")\n(let $82 \'\"%kqp%tx_result_binding_5_0\")\n(let $83 \'\"%kqp%tx_result_binding_4_1\")\n(let $84 (DqPhyStage \'() (lambda \'() (block \'(\n (let $159 (KqpEnsure $74 %kqp%tx_result_binding_4_2 \'\"2012\" (Utf8 \'\"Duplicated keys found.\")))\n (let $160 
(KqpEnsure $74 %kqp%tx_result_binding_5_0 \'\"2012\" (Utf8 \'\"Conflict with existing key.\")))\n (let $161 (If (And $159 $160) %kqp%tx_result_binding_4_1 (List $11)))\n (return (ToStream (Just (PartitionByKey $161 (lambda \'($162) (Member $162 \'\"Key\")) (Void) (Void) (lambda \'($163) (FlatMap $163 (lambda \'($164) (Last (ForwardList (Nth $164 \'1))))))))))\n))) \'(\'(\'\"_logical_id\" \'5033) \'(\'\"_id\" \'\"fef3b174-bae8d183-7e917be7-f6140f08\"))))\n(let $85 (DqCnValue (TDqOutput $84 \'0)))\n(let $86 (KqpTxResultBinding $11 \'\"4\" \'1))\n(let $87 (KqpTxResultBinding $56 \'\"4\" \'2))\n(let $88 (KqpTxResultBinding $56 \'\"5\" \'0))\n(let $89 \'(\'($83 $86) \'($81 $87) \'($82 $88)))\n(let $90 (KqpPhysicalTx \'($84) \'($85) $89 $3))\n(let $91 \'\"%kqp%tx_result_binding_6_0\")\n(let $92 %kqp%tx_result_binding_6_0)\n(let $93 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $92 (lambda \'($165) (AsStruct \'(\'\"Key\" (Member $165 \'\"Key\")) \'(\'\"fk1\" (Member $165 \'\"fk1\")) \'(\'\"fk3\" (Member $165 \'\"fk3\"))))) (lambda \'($166) (block \'(\n (let $167 (AsStruct \'(\'\"Key\" (Member $166 \'\"Key\"))))\n (return (IfPresent (Lookup $46 $167) (lambda \'($168) (Just \'($167 $168 (Or (AggrNotEquals (Member $166 \'\"fk1\") (Member $168 \'\"fk1\")) (AggrNotEquals (Member $166 \'\"fk3\") (Member $168 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $44 $56)))))\n)))) (lambda \'($169) (Nth $169 \'0)) (lambda \'($170) \'((Nth $170 \'1) (Nth $170 \'2))) $35)))) \'(\'(\'\"_logical_id\" \'5184) \'(\'\"_id\" \'\"6f1b423a-3825ed53-7db9f9cc-e5ed46ca\"))))\n(let $94 (DqCnValue (TDqOutput $93 \'0)))\n(let $95 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $96 \'($91 $95))\n(let $97 (KqpPhysicalTx \'($93) \'($94) \'($51 $96) $3))\n(let $98 (DataSink \'\"KqpTableSink\" \'\"db\"))\n(let $99 (KqpTableSinkSettings $22 \'false \'\"upsert\" \'0 \'\"oltp\" \'false \'false \'()))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator $92)) \'(\'(\'\"_logical_id\" \'5732) \'(\'\"_id\" \'\"546065b8-fc9595be-2239eba6-e4fab133\")) \'((DqSink \'0 $98 $99))))\n(let $101 \'\"%kqp%tx_result_binding_7_0\")\n(let $102 (DictType $19 (TupleType $44 $56)))\n(let $103 %kqp%tx_result_binding_7_0)\n(let $104 \'(\'(\'\"_logical_id\" \'5760) \'(\'\"_id\" \'\"e32604da-fbddc736-35f9ff94-55e291ea\") $30))\n(let $105 (DqPhyStage \'() (lambda \'() (block \'(\n (let $171 (lambda \'($173) (block \'(\n (let $174 (Nth $173 \'1))\n (let $175 (Nth $174 \'0))\n (return (Member (Nth $173 \'0) \'\"Key\") (Member $175 \'\"fk1\") (Member $175 \'\"fk2\") (Member $175 \'\"fk3\") (Nth $174 \'1))\n ))))\n (let $172 (lambda \'($181 $182 $183 $184 $185) $181 $182 $183 $184))\n (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow (DictItems $103)) $171) (lambda \'($176 $177 $178 $179 $180) $180)) $172)))\n))) $104))\n(let $106 (DqCnUnionAll (TDqOutput $105 \'0)))\n(let $107 (lambda \'($186) (FromFlow (NarrowMap (ToFlow $186) $34))))\n(let $108 (KqpTableSinkSettings $68 \'false \'\"delete\" \'1 \'\"oltp\" \'false \'false \'()))\n(let $109 (DqPhyStage \'($106) $107 \'(\'(\'\"_logical_id\" \'5746) \'(\'\"_id\" \'\"434320f0-6b496d61-c0e7f218-509b85bb\")) \'((DqSink \'0 $98 $108))))\n(let $110 \'(\'(\'\"_logical_id\" \'5812) \'(\'\"_id\" \'\"e4722940-289ec04f-d7cf3b89-80b7a9e7\") $30))\n(let $111 (DqPhyStage \'() (lambda \'() (FromFlow (ExpandMap (FlatMap (Map (ToFlow $92) (lambda \'($187) (AsStruct \'(\'\"Key\" (Member $187 \'\"Key\")) \'(\'\"fk1\" (Member $187 \'\"fk1\")) \'(\'\"fk3\" (Member $187 \'\"fk3\"))))) (lambda \'($188) 
(block \'(\n (let $189 \'(\'\"Key\" (Member $188 \'\"Key\")))\n (let $190 \'(\'\"fk1\" (Member $188 \'\"fk1\")))\n (let $191 \'(\'\"fk3\" (Member $188 \'\"fk3\")))\n (return (IfPresent (Lookup $103 (AsStruct $189)) (lambda \'($192) (If (Nth $192 \'1) (Just (AsStruct $189 $190 \'(\'\"fk2\" (Member (Nth $192 \'0) \'\"fk2\")) $191)) (Nothing (OptionalType $29)))) (Just (AsStruct $189 $190 $47 $191))))\n)))) $26))) $110))\n(let $112 (DqCnUnionAll (TDqOutput $111 \'0)))\n(let $113 (KqpTableSinkSettings $68 \'false \'\"\" \'2 \'\"oltp\" \'false \'false \'()))\n(let $114 (DqPhyStage \'($112) $107 \'(\'(\'\"_logical_id\" \'5774) \'(\'\"_id\" \'\"9d68ce9f-df8e97f4-cedd8263-8bb4da8d\")) \'((DqSink \'0 $98 $113))))\n(let $115 \'($100 $105 $109 $111 $114))\n(let $116 (KqpTxResultBinding $102 \'\"7\" \'0))\n(let $117 (KqpPhysicalTx $115 \'() \'($96 \'($101 $116)) \'($40 \'(\'\"with_effects\"))))\n(let $118 \'($4 $17 $42 $52 $65 $80 $90 $97 $117))\n(return (KqpPhysicalQuery $118 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1347671 total_cpu_time_us: 1300717 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":5,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"Indexes\\\":[{\\\"Name\\\":\\\"Index\\\",\\\"Type\\\":2,\\\"State\\\":1,\\\"SchemaVersion\\\":1,\\\"LocalPathId\\\":17,\\\"PathOwnerId\\\":8716544,\\\"KeyColumns\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\"]}],\\\"SecondaryGlobalIndexMetadata\\\":[{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable/Index/indexImplTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\
"OwnerId\\\":72057594046644480,\\\"TableId\\\":18},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\",\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744283994\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"dd6127a7-e29fcca8-e58378eb-4986e8ef\",\"version\":\"1.0\"}" 2025-04-10T11:19:54.462767Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> BasicUsage::BrokenCredentialsProvider [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 24945, MsgBus: 64869 2025-04-10T11:19:34.865018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642623859149982:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.865362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df7/r3tmp/tmpid6Oaq/pdisk_1.dat 2025-04-10T11:19:35.350568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:35.350655Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:35.352316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24945, node 1 2025-04-10T11:19:35.378945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:35.562185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:35.562204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:35.562211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:35.562317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64869 TClient is connected to server localhost:64869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:36.333918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.374862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.533773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.771951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.867751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:38.916182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642641039020820:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.916313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.256908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.296467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.333209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.366100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.413821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.457010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.525554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642645333988629:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.525697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.525876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642645333988634:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.531215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:39.547816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642645333988636:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:39.634617Z node 1 :TX_PROXY ERROR: Actor# [1:7491642645333988692:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:39.871500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642623859149982:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.871651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:40.621882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 23343, MsgBus: 27815 2025-04-10T11:19:44.992924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642666398146225:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:44.993054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df7/r3tmp/tmpUK3XY3/pdisk_1.dat 2025-04-10T11:19:45.238654Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:45.264762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:45.264871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:45.266363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23343, node 2 2025-04-10T11:19:45.419032Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:45.419066Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:45.419075Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:45.419217Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27815 TClient is connected to server localhost:27815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:45.993016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.016577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:46.111473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:19:46.325906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.416549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:49.023398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642687872984453:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.023481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.080523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.135249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.177881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.234039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.271456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.316414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.432659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642687872984970:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.432719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.432960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642687872984975:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.436116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:49.446736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642687872984977:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:49.541466Z node 2 :TX_PROXY ERROR: Actor# [2:7491642687872985032:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:50.071260Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642666398146225:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:50.071464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:50.589902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TopicService::RelativePath >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-10T11:19:14.138774Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1744283954138743 2025-04-10T11:19:14.638638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642537591502814:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.639077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:14.714369Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642540733685659:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.718682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bdd/r3tmp/tmpII3uq1/pdisk_1.dat 2025-04-10T11:19:14.978774Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:14.981996Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:15.301950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:15.302656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.303090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2025-04-10T11:19:15.303140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.310550Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:19:15.310678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:15.318464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:15.344236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28998, node 1 2025-04-10T11:19:15.398008Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:15.542424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000bdd/r3tmp/yandexYTVfVn.tmp 2025-04-10T11:19:15.542459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000bdd/r3tmp/yandexYTVfVn.tmp 2025-04-10T11:19:15.546834Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000bdd/r3tmp/yandexYTVfVn.tmp 2025-04-10T11:19:15.547045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:19:15.972055Z INFO: TTestServer started on Port 63419 GrpcPort 28998 TClient is connected to server localhost:63419 PQClient connected to localhost:28998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:16.386130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:19:18.199544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642554771372864:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.199583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642554771372856:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.200587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.205824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:19:18.226087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642554771372871:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:19:18.327589Z node 1 :TX_PROXY ERROR: Actor# [1:7491642554771372957:2682] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:18.664161Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642557913555174:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:18.664890Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTFhYzI3YzUtZmZhZTBiMjctYThjNmI1NDgtZGNjNzFjNGE=, ActorId: [2:7491642557913555134:2308], ActorState: ExecuteState, TraceId: 01jrfpsyya75f8ssg7zm9c3acs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:18.668833Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642554771372967:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:18.669296Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:18.673035Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2YyZjVkNTAtMWZmM2YyMC1mMjdmNGZlYy04Mzk1YjMwZQ==, ActorId: [1:7491642554771372854:2336], ActorState: ExecuteState, TraceId: 01jrfpsyxmb1g1y8b48evjt8wz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:18.673473Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:18.733357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:18.893153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:19.055974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:28998", true, true, 1000); 2025-04-10T11:19:19.440177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpszw38atjtjn7jt1ey7b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYwNzMyOTktMjUxNWQ5NDgtMmUyNDUzYWEtNTEzODNiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491642559066340672:2980] 2025-04-10T11:19:19.631751Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642537591502814:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.631836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:19.714654Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642540733685659:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.714726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:19:25.735614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:28998 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T11:19:25.843516Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:28998 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { ... ic created, have version: 1 2025-04-10T11:19:56.670846Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-10T11:19:56.672504Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-10T11:19:56.672584Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:5139 2025-04-10T11:19:56.684081Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-10T11:19:56.684970Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T11:19:56.685004Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-10T11:19:56.685707Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-10T11:19:56.685839Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:39654 2025-04-10T11:19:56.685865Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:39654 proto=v1 topic=test-topic durationSec=0 2025-04-10T11:19:56.685876Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T11:19:56.687971Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-10T11:19:56.688111Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-10T11:19:56.688132Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:19:56.688145Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-10T11:19:56.688169Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T11:19:56.691671Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-10T11:19:56.876777Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-10T11:19:56.877159Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7491642720796273530:2508] connected; active server actors: 1 2025-04-10T11:19:56.877217Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-10T11:19:56.877234Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-10T11:19:56.877631Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7491642720796273530:2508] disconnected; active server actors: 1 
2025-04-10T11:19:56.877659Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7491642720796273530:2508] disconnected no session 2025-04-10T11:19:56.984381Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T11:19:56.984424Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-10T11:19:56.984442Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7491642720796273475:2508] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T11:19:56.984471Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T11:19:56.986708Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-10T11:19:56.991785Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 2025-04-10T11:19:56.990273Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7491642720796273554:2508], now have 1 active actors on pipe 2025-04-10T11:19:56.990368Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T11:19:56.990401Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T11:19:56.990512Z node 6 :PERSQUEUE INFO: new Cookie src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T11:19:56.990647Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T11:19:56.990705Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:19:56.991329Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T11:19:56.991367Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T11:19:56.991464Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:19:56.992855Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744283996992 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:19:56.992992Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T11:19:56.993206Z :INFO: [] MessageGroupId [src] SessionId [src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0] Write session: close. 
Timeout = 0 ms 2025-04-10T11:19:56.993257Z :INFO: [] MessageGroupId [src] SessionId [src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0] Write session will now close 2025-04-10T11:19:56.993310Z :DEBUG: [] MessageGroupId [src] SessionId [src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0] Write session: aborting 2025-04-10T11:19:56.993763Z :INFO: [] MessageGroupId [src] SessionId [src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:19:56.993808Z :DEBUG: [] MessageGroupId [src] SessionId [src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0] Write session: destroy 2025-04-10T11:19:56.995157Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7491642720796273554:2508] destroyed 2025-04-10T11:19:56.995218Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T11:19:56.994286Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 grpc read done: success: 0 data: 2025-04-10T11:19:56.994309Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 grpc read failed 2025-04-10T11:19:56.994348Z node 5 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 2025-04-10T11:19:56.994369Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6e5a3d3d-4ddb0656-fa55ee34-9c574ea9_0 is DEAD 2025-04-10T11:19:56.994662Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:19:57.019804Z :INFO: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Starting read session 2025-04-10T11:19:57.019867Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Starting session to cluster null (localhost:5139) 2025-04-10T11:19:57.022345Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:57.022398Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:57.022432Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] Reconnecting session to cluster null in 0.000000s 2025-04-10T11:19:57.023668Z :ERROR: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-10T11:19:57.023744Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:57.023778Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:19:57.023900Z :INFO: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-10T11:19:57.024081Z :NOTICE: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:19:57.024114Z :DEBUG: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-10T11:19:57.024194Z :INFO: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Closing read session. Close timeout: 0.000000s 2025-04-10T11:19:57.024238Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-10T11:19:57.024278Z :INFO: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:19:57.024368Z :NOTICE: [/Root] [/Root] [c9ee379b-a1d99073-707904a2-40514a9f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.231977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:59.233187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.315270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.315320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.331779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.332066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.332220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.340688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.341869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 2025-04-10T11:19:59.344983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.346314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.352264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.360939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.360987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.361169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.370406Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.495972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.497814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.499576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.500988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.501070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.507213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.507351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.507567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.507726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.507779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.507813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.510587Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.510647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.510687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.512526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.512622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.512682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.512739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.522119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.527095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.527289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.528414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.528533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.528590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.528885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.528952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.529154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.529283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.531346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.531389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.531540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-04-10T11:19:59.531589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.531834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.531935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.532023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.532056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.532094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.532122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.532153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.532187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.532230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.532266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.532321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.532392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.532423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.534653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.534749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.534789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
schemeshard: 72057594046678944 2025-04-10T11:20:00.001376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:20:00.001574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.001670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.001782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:00.001832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-04-10T11:20:00.001885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:20:00.001925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-04-10T11:20:00.001948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:20:00.002067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.002141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.002372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-04-10T11:20:00.002535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:20:00.003034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.003941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 
0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.004943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.005002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:00.005234Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:20:00.022863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:00.023064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:20:00.024062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:570:2500], Recipient [1:570:2500]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:00.024123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:00.027383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:20:00.027468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:20:00.027534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:20:00.027596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:20:00.027642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:20:00.027671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:00.027872Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:606:2500], Recipient [1:570:2500]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:00.027902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:00.027924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:570:2500] sender: [1:625:2058] recipient: [1:15:2062] 2025-04-10T11:20:00.083274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:624:2542], Recipient [1:570:2500]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:20:00.083330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:00.083434Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:00.083617Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 169us result status StatusSuccess 2025-04-10T11:20:00.084067Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { 
Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:00.084687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:626:2543], Recipient [1:570:2500]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-04-10T11:20:00.084738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-10T11:20:00.084794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-04-10T11:20:00.084834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:20:00.084892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-10T11:20:00.085090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:627:2544], Recipient [1:570:2500]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:20:00.085126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:00.085217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:00.085387Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 171us result status StatusSuccess 2025-04-10T11:20:00.085715Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.232005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-10T11:19:59.233200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.316099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.316157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.338302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.338571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.338705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.346073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.346339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.346942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.347296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.352194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.360927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.360994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.361167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.373482Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.505820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.506082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.506325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.506535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.506585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.508784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-10T11:19:59.508909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.509080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.509143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.509209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.509243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.511192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.511250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.511283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.512963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.513218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.517186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.519115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.519284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.521416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.521539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.521588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.522732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.522817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.522965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.523066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.527036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.527078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.527239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.527282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.527528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.527578Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.527667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.527723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.527761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.527795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.527837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.527878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.527930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.527962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.528018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.528055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.528084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.530276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.530390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.530433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
s in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:20:00.328826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:20:00.328884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:20:00.329496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:338:2314], Recipient [1:285:2272]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 3 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2025-04-10T11:20:00.329533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-10T11:20:00.329589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:20:00.329649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:20:00.329675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:20:00.329713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:20:00.329741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:20:00.329831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:20:00.329868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:20:00.334858Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-04-10T11:20:00.335007Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-10T11:20:00.335752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:00.340245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:20:00.340288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:00.340874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:20:00.341042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:20:00.341481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:1031:2893], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1031:2893] ServerId: [1:1032:2894] } 2025-04-10T11:20:00.341518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:20:00.341546Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset 2025-04-10T11:20:00.343303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:20:00.343402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:20:00.343428Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:20:00.343830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:20:00.343873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:20:00.344388Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1047:2909], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:20:00.344437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:20:00.344485Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-10T11:20:00.344638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:554:2488], Recipient [1:285:2272]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-04-10T11:20:00.344670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T11:20:00.344755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:20:00.344867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:20:00.344906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1045:2907] 2025-04-10T11:20:00.345105Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:1047:2909], Recipient [1:285:2272]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:20:00.345141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T11:20:00.345177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-10T11:20:00.345785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1048:2910], Recipient [1:285:2272]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:20:00.345841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:00.345923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:00.346089Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 161us result status StatusSuccess 2025-04-10T11:20:00.346547Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:00.347390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:1049:2911], Recipient [1:285:2272]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-04-10T11:20:00.347433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-10T11:20:00.347477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-04-10T11:20:00.347512Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T11:20:00.347980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1050:2912], Recipient [1:285:2272]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-10T11:20:00.348022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:00.348127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:00.350688Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 148us result status StatusSuccess 2025-04-10T11:20:00.351121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> ObjectStorageListingTest::ListingNoFilter >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 24071, MsgBus: 8799 2025-04-10T11:19:33.618224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642623260091100:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:33.619378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfc/r3tmp/tmpOiQS1m/pdisk_1.dat 2025-04-10T11:19:34.158464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:34.184056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:34.184167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24071, node 1 2025-04-10T11:19:34.211315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:34.294637Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:34.294661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:34.294671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:34.294773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8799 TClient is connected to server localhost:8799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:34.977557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.025467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.207786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.415192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:35.515251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:37.567192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642640439962069:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:37.567352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:37.963742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.006810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.088883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.122729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.159752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.221603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.316110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642644734929882:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.316191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.316576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642644734929887:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.321607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:38.341117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642644734929889:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:38.424984Z node 1 :TX_PROXY ERROR: Actor# [1:7491642644734929944:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:38.667330Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642623260091100:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:38.667379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:39.551630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:40.908358Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:40.988817Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642653324865299:2555], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:40.989024Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmMyZjk5ZTEtZWU3NzVkYTUtYzdjNmY2MWMtOTI5NDFlYjM=, ActorId: [1:7491642649029897539:2498], ActorState: ExecuteState, TraceId: 01jrfptn4e6b7w4ynttyq6n1kt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:41.036087Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642657619832602:2558], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-04-10T11:19:41.037632Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmMyZjk5ZTEtZWU3NzVkYTUtYzdjNmY2MWMtOTI5NDFlYjM=, ActorId: [1:7491642649029897539:2498], ActorState: ExecuteState, TraceId: 01jrfptn6ebkhdfrhyf9aqv1q3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:41.795140Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:42.458821Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:42.490325Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:42.834009Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:42.859084Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 25966, MsgBus: 2172 2025-04-10T11:19:43.751832Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642664429308347:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:43.758917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfc/r3tmp/tmpr4VUaQ/pdisk_1.dat 2025-04-10T11:19:43.902950Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:43.933501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:43.933601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:43.935634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25966, node 2 2025-04-10T11:19:44.006743Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:44.006774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:44.006784Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:44.006950Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2172 TClient is connected to server localhost:2172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Sel ... 
nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.769718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.808249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.883884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.940602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642681609179711:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.940701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.945947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642681609179716:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:47.953354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:47.979535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642681609179718:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:48.076949Z node 2 :TX_PROXY ERROR: Actor# [2:7491642685904147070:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:48.754256Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642664429308347:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:48.754334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:49.126515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:50.490053Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:50.526255Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:50.906982Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:50.928872Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 62256, MsgBus: 8598 2025-04-10T11:19:52.183629Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642701039524744:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:52.193157Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001dfc/r3tmp/tmpGFr21h/pdisk_1.dat 2025-04-10T11:19:52.328946Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:52.331738Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:52.331837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:52.335348Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62256, node 3 2025-04-10T11:19:52.400205Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:52.400233Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:52.400245Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:52.400384Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8598 TClient is connected to server localhost:8598 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:52.913150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:52.930443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:53.006974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:53.174266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:53.257131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:55.889324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642713924428249:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:55.889450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:55.940754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:55.980756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:56.028140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:56.065616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:56.140512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:56.215236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:56.294537Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642718219396064:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:56.294637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:56.294812Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642718219396069:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:56.303289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:56.321776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642718219396071:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:56.424541Z node 3 :TX_PROXY ERROR: Actor# [3:7491642718219396128:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:57.114837Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642701039524744:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:57.114929Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:57.437121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.519584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.231983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-04-10T11:19:59.233236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.317056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.317120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.342568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.342862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.343028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.363839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.364158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.365002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.365452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.367731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.369149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.369209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.369457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.369511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.369566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.369744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.376597Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.537619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.537894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.538130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.538401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.538467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.541950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-10T11:19:59.542116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.542327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.542398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.542455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.542497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.546226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.546312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.546358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.550197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.550569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.550643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.550697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.554812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.559079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.559238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.560078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.560190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.560239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.560515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.560595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.560738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.560808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.564862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.564902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.565057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.565112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.565438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.565493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.565579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.565611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.565653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.565677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.565713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.565747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.565787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.565817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.565872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.565906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.565934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.567856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.567963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.567992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
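This run uses METERING_MODE_REQUEST_UNITS, where no capacity is reserved for the topic, so the describe results that follow satisfy a simple identity (all values taken from the output below):

    ReserveSize     = 0
    AccountSize     = DataSize = 16975298 bytes
    UsedReserveSize = 0
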
} DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:01.656594Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-10T11:20:01.656674Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-10T11:20:01.657393Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-04-10T11:20:01.657495Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-10T11:20:01.657680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-10T11:20:01.682410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:20:01.694554Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:01.694761Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 215us result status StatusSuccess 2025-04-10T11:20:01.695213Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.324638Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 
2025-04-10T11:20:02.324707Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-10T11:20:02.325416Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-04-10T11:20:02.325544Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-10T11:20:02.325780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-10T11:20:02.343277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:20:02.353744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.353933Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 209us result status StatusSuccess 2025-04-10T11:20:02.354403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.396396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:02.396600Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 244us result status StatusSuccess 2025-04-10T11:20:02.397058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.397833Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:636:2559] connected; active server actors: 1 2025-04-10T11:20:02.413691Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-04-10T11:20:02.414136Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-10T11:20:02.415636Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.415789Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 169us result status StatusSuccess 2025-04-10T11:20:02.416131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: 
Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.416446Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-04-10T11:20:02.416919Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-04-10T11:20:02.451744Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:683:2594] connected; active server actors: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.232480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:59.233199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.314356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.314433Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.330626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.330892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.331066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.341077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.341878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.345056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.346277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.352258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.361033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.361088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.361288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.369686Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.509129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.509486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.509747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.510025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.510091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.513659Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.513795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.513834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.516672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.516762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.516805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.519154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.519284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.519356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.519422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.531963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.538510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.538755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.540091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.540269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.540338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.540777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.540876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.541121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.541288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.543886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.543943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.544157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.544221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.544490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.544549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.544685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.544727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.544778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.544811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.544857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.544896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.544942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.544977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.545045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.545084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.545114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.547333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.547440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.547471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
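In this reserved-capacity run (METERING_MODE_RESERVED_CAPACITY), the reserve follows directly from the partition config; a worked check, using only values from the output below:

    ReserveSize per partition = LifetimeSeconds * WriteSpeedInBytesPerSecond
                              = 2678400 * 17
                              = 45532800 bytes
    AccountSize (topic)       = 3 partitions * 45532800 = 136598400 bytes
    UsedReserveSize           = data actually held inside the reserve
                              = 16975298 bytes (all of it on partition 0;
                                partitions 1 and 2 report PartitionSize: 0)
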
94046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:01.653799Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-10T11:20:01.653913Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-10T11:20:01.654421Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:01.654579Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:01.654662Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:01.655109Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-10T11:20:01.655231Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-04-10T11:20:01.655509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-10T11:20:01.685394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:20:01.698591Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:01.698811Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 232us result status StatusSuccess 2025-04-10T11:20:01.699302Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.322923Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-10T11:20:02.323014Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-10T11:20:02.323516Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:02.323639Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:02.323768Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 
UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-10T11:20:02.324270Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-10T11:20:02.324393Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-10T11:20:02.324698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-10T11:20:02.343497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:20:02.354028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.354215Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 205us result status StatusSuccess 2025-04-10T11:20:02.354588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:20:02.397539Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:20:02.397735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 217us result status StatusSuccess 2025-04-10T11:20:02.398088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TPersQueueTest::WriteAfterAlter >> TPersQueueTest::CacheHead [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14218, MsgBus: 31015 2025-04-10T11:19:40.218658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642650446398064:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:40.219240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df1/r3tmp/tmp591fZv/pdisk_1.dat 2025-04-10T11:19:40.938185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:40.942622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:40.942885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:40.948193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14218, node 1 2025-04-10T11:19:41.047340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:41.047356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:41.047361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:41.047443Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31015 TClient is connected to server localhost:31015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:41.798594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.838766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:41.848093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:42.041272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:42.230536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:42.315109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:44.084175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642667626268867:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.084342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.389983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.419602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.449644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.523483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.569956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.631311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:44.688239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642667626269387:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.688329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.688776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642667626269392:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:44.693321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:44.706319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642667626269394:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:44.805209Z node 1 :TX_PROXY ERROR: Actor# [1:7491642667626269450:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:45.210245Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642650446398064:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.210313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:45.691638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:19:46.469071Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:19:47.457972Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29228, MsgBus: 5215 2025-04-10T11:19:48.283092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642683511007111:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:48.283176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df1/r3tmp/tmpo5J1GN/pdisk_1.dat 2025-04-10T11:19:48.491257Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:48.514014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:48.514547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:48.517255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29228, node 2 2025-04-10T11:19:48.631466Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:48.631493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:48.631500Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:48.631624Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5215 TClient is connected to server localhost:5215 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:49.215318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:49.225338Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:49.240312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:49.317115Z node 2 :FLAT_TX_SCHEM ... 6710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.229568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.286291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.331616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.407641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.469524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642700690878592:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:52.469616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:52.470006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642700690878597:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:52.474494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:52.486437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642700690878599:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:52.584609Z node 2 :TX_PROXY ERROR: Actor# [2:7491642700690878655:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:53.284690Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642683511007111:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:53.284769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:53.592100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6374, MsgBus: 26202 2025-04-10T11:19:54.893871Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642711463679436:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:54.894791Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df1/r3tmp/tmpV6PPGm/pdisk_1.dat 2025-04-10T11:19:55.029886Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:55.029988Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:55.033838Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:55.049034Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6374, node 3 2025-04-10T11:19:55.092118Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:55.092147Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:55.092158Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:55.092280Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26202 TClient is connected to server localhost:26202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:55.709407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:55.718579Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:55.740790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:55.841651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:56.010237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:56.083957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:58.627549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642728643550347:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:58.627644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:58.678240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.715206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.746515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.781592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.820421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.857361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:58.945434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642728643550859:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:58.945517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:58.945739Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491642728643550864:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:58.949423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:58.962554Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491642728643550866:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:59.035485Z node 3 :TX_PROXY ERROR: Actor# [3:7491642732938518218:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:59.894393Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491642711463679436:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:59.894490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:20:00.263794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:20:01.300542Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:20:02.200255Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:20:02.235391Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> 
KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> ObjectStorageListingTest::ListingNoFilter [GOOD] |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-04-10T11:20:04.821080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:04.821327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:04.821530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014f4/r3tmp/tmppGXvsT/pdisk_1.dat 2025-04-10T11:20:05.322169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:05.368484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:05.411875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:05.412371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:05.434053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:05.527162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:05.586680Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:20:05.586944Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:20:05.636805Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:20:05.636974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:20:05.640044Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:20:05.640150Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:20:05.640196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:20:05.641812Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:20:05.641989Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:20:05.642089Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:20:05.652943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:20:05.677859Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:20:05.680008Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:20:05.680193Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:20:05.680233Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:20:05.680264Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:20:05.680296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:20:05.681428Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:20:05.681558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:20:05.681626Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:20:05.681677Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:20:05.681813Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:20:05.681872Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:20:05.681966Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:20:05.682399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:20:05.682806Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:20:05.682908Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:20:05.684798Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:20:05.695402Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:20:05.695521Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:20:05.848053Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:20:05.852626Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:20:05.852712Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:20:05.853671Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:20:05.853736Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:20:05.853822Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:20:05.854082Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:20:05.854300Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:20:05.855040Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:20:05.855115Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:20:05.857731Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:20:05.862788Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:20:05.864448Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:20:05.864517Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:20:05.865069Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:20:05.865152Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:20:05.865945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:20:05.865985Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:20:05.866051Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:20:05.866121Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:20:05.866195Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:20:05.866391Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:20:05.870475Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:20:05.872788Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:20:05.872857Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:20:05.873545Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:20:05.901080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:05.901211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:05.901313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:05.909598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:20:05.916084Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:20:06.087826Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:20:06.091000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:20:06.179925Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:20:07.094201Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfpvdg6eb6myp3pr1s0622h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliZmIzYTUtN2VjMzEzZDUtMTJmODlmNDItNzE4MDE3YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:20:07.152019Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:20:07.154427Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:20:07.154721Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-04-10T11:20:07.167745Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:20:07.183185Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-04-10T11:20:07.183428Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T11:20:07.183792Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-04-10T11:20:07.183998Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |90.7%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.231960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:59.233229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.317278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.317352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.333101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.333406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.333574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.343630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.343900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.344973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.346290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.352513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.364205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.364323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.364416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.364661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.372989Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.511871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.512141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.512366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.512575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.512641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.515057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.515181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.515341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.515405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.515451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.515491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.517824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.517910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.517949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.519944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.520065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.520122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:19:59.520183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.531880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.534049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.534280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.535281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.535394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.535447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.535751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.535809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.535937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.536011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.539577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.539622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.539796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.539838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.540108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.540160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.540266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.540299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.540338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-10T11:19:59.540371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.540427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.540470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.540519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.540555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.540625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.540663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.540693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.543199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.543301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.543338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:20:08.262449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.262532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:20:08.263250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:20:08.263556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-10T11:20:08.263944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:20:08.264057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 
2025-04-10T11:20:08.264245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.264487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:20:08.264840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.264978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.265441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.265530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.265741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.265915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.266017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.266304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.266423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.266591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.266850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.267030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.267087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.267162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:08.267456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:20:08.288401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:08.288595Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:20:08.290593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1019:2963], Recipient [1:1019:2963]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:08.290658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:08.302817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:20:08.302940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:20:08.303411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1019:2963], Recipient [1:1019:2963]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:08.303460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:08.315313Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:20:08.315413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:20:08.315487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:20:08.315523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:08.317061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1055:2963], Recipient [1:1019:2963]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:08.317115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:08.317167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1019:2963] sender: [1:1075:2058] recipient: [1:15:2062] 2025-04-10T11:20:08.367375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1074:3007], Recipient [1:1019:2963]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-10T11:20:08.367438Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:08.367578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:08.367885Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 289us result status StatusSuccess 2025-04-10T11:20:08.368684Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> KqpDataIntegrityTrails::Select >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> TopicService::RelativePath [GOOD] |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.231946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:59.233186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.315374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.315437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.331547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.331857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.332006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.346455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.346814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.347587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.348126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.354900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.360986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.361050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.361265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.371160Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.516825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.517106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.517353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.517612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.517672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.520029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.520178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.520305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.520356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.520394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.520427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.522535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.522615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.522655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.524410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.524493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.524535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.524580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.527521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.529422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.529598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.530622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.530773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.530839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:19:59.531129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.531219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.531384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.531668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.533737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.533789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.533959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.534004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.534316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.534366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.534472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.534511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.534554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.534629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.534680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.534725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.534783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.534824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.534885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.534922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.534963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.537238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.537388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.537474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
t for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:10.951352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:20:10.951441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T11:20:10.951533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:20:10.951860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:20:10.952392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:20:10.952575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:10.952762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.953101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-04-10T11:20:10.953505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.953656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.954104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.958403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.958749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.958865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.958962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959489Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.959971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.960043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:10.960337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:20:10.975268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:10.975514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:20:10.977099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1141:3073], Recipient [1:1141:3073]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:10.977167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:10.978526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:20:10.978629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:20:10.983221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1141:3073], Recipient [1:1141:3073]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:10.983300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:10.984069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:20:10.984151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:20:10.984221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:20:10.984269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:10.985155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1177:3073], Recipient [1:1141:3073]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:10.985207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:10.985253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1141:3073] sender: [1:1197:2058] recipient: [1:15:2062] 2025-04-10T11:20:11.024038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1196:3117], Recipient [1:1141:3073]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-10T11:20:11.024112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:11.024250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:11.024576Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 317us result status StatusSuccess 2025-04-10T11:20:11.025449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 
IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 26323 Memory: 141368 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TopicService::AccessRights |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 14920, MsgBus: 11767 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ef/r3tmp/tmp1mmlmC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14920, node 1 TClient is connected to server localhost:11767 TClient is connected to server localhost:11767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8086, MsgBus: 3477 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013a7/r3tmp/tmp1stFcB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8086, node 1 TClient is connected to server localhost:3477 TClient is connected to server localhost:3477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 2967, MsgBus: 22850 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013b8/r3tmp/tmpVYrVql/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2967, node 1 TClient is connected to server localhost:22850 TClient is connected to server localhost:22850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 25327, MsgBus: 23633 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001336/r3tmp/tmpQwrBt8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25327, node 1 TClient is connected to server localhost:23633 TClient is connected to server localhost:23633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 11984, MsgBus: 11632 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001332/r3tmp/tmpZLqyiZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11984, node 1 TClient is connected to server localhost:11632 TClient is connected to server localhost:11632 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Select [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 29942, MsgBus: 16120 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001365/r3tmp/tmprD1ivu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29942, node 1 TClient is connected to server localhost:16120 TClient is connected to server localhost:16120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 20455, MsgBus: 19271 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001319/r3tmp/tmp3hJCKI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20455, node 1 TClient is connected to server localhost:19271 TClient is connected to server localhost:19271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31061, MsgBus: 28132 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001318/r3tmp/tmpTjRFHn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31061, node 1 TClient is connected to server localhost:28132 TClient is connected to server localhost:28132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-10T11:19:15.454868Z :ReadSession INFO: Random seed for debugging is 1744283955454837 2025-04-10T11:19:15.933832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642542427639250:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:15.933874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bce/r3tmp/tmpOpiubz/pdisk_1.dat 2025-04-10T11:19:16.388081Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:16.388956Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:16.462309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:19:16.682539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:16.682711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:16.683480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:16.683531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:16.688471Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:19:16.688700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:16.690185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22039, node 1 2025-04-10T11:19:16.724908Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:16.726240Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:16.801474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:16.890335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000bce/r3tmp/yandexBWkOcD.tmp 2025-04-10T11:19:16.890371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000bce/r3tmp/yandexBWkOcD.tmp 2025-04-10T11:19:16.890574Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000bce/r3tmp/yandexBWkOcD.tmp 2025-04-10T11:19:16.890791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:19:16.958874Z INFO: TTestServer started on Port 13843 GrpcPort 22039 TClient is connected to server localhost:13843 PQClient connected to localhost:22039 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:17.289492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:19:19.638473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642559607509263:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:19.638653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:19.646332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642559607509275:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:19.651398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:19:19.687763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642559607509309:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:19.687864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:19.696862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642559607509277:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:19:20.060554Z node 1 :TX_PROXY ERROR: Actor# [1:7491642559607509362:2691] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:20.091003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:20.099418Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642558871195414:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:20.100955Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmE2YmFlOS02Mzc1Y2FiYy1kNWFhZmQxYy0yZGU0Nzk3Yg==, ActorId: [2:7491642558871195379:2308], ActorState: ExecuteState, TraceId: 01jrfpt0d50sp1nf8a2npg20j9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:20.099633Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642563902476682:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:20.101203Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjhhY2NmYzYtNTQ0OTNjYzktNDA3YzZmODktNzlhYzkwN2I=, ActorId: [1:7491642559607509245:2336], ActorState: ExecuteState, TraceId: 01jrfpt09j6ctarfbjmpcwa05a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:20.103578Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:20.103675Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:20.326021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:20.455686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:22039", true, true, 1000); 2025-04-10T11:19:20.680229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jrfpt16y2fw9smxczy2gnry5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IyZWYyN2QtODNmMjVmMmQtZWVkMjlkMTItMzNhYWI4MjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491642563902477083:2993] 2025-04-10T11:19:20.934284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642542427639250:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:20.934377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:19:25.832476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 
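The NOT_FOUND warnings for the default resource pool and the SCHEME_ERROR on /Root/PQ/Config/V2/Cluster above are both first-touch bootstrap effects: the object does not exist yet, a create is proposed (note the later "path exist, request accepts it" issue when two actors race to create the same pool), and the reader simply retries until the read succeeds, which is why CheckClustersList reports Ok a few seconds later. A minimal poll-until-ready sketch of that pattern, assuming hypothetical Status/runQuery names that are not taken from the YDB codebase:

    #include <chrono>
    #include <functional>
    #include <thread>

    enum class Status { Ok, NotFound, SchemeError };

    // Hypothetical helper: retry a read that is expected to fail with
    // NotFound/SchemeError until the bootstrap transaction lands.
    bool PollUntilReady(const std::function<Status()>& runQuery,
                        std::chrono::milliseconds backoff = std::chrono::milliseconds(200),
                        int maxAttempts = 50) {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            switch (runQuery()) {
                case Status::Ok:
                    return true;                          // e.g. "=== CheckClustersList. Ok"
                case Status::NotFound:
                case Status::SchemeError:
                    std::this_thread::sleep_for(backoff); // object not created yet; retry
                    break;
            }
        }
        return false;                                     // bootstrap never completed
    }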
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:22039 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T11:19:25.984184Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22039 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 ... 8-b38b353c-4831bb9b-b1c14190 2025-04-10T11:20:15.621250Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 Process answer. Aval parts: 0 2025-04-10T11:20:15.622714Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 grpc read done: success# 1, data# { read { } } 2025-04-10T11:20:15.622831Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 got read request: guid# d45db628-fcac3604-877b76e7-b92bf890 2025-04-10T11:20:15.625718Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-10T11:20:15.625878Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-04-10T11:20:15.625945Z :DEBUG: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-10T11:20:14.474000Z WriteTime: 2025-04-10T11:20:14.478000Z Ip: "ipv6:[::1]:43808" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:43808" } } } } 2025-04-10T11:20:15.626167Z :INFO: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] Closing read session. Close timeout: 3.000000s 2025-04-10T11:20:15.626215Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-10T11:20:15.626263Z :INFO: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1571 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:20:15.626907Z :INFO: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] Closing read session. Close timeout: 0.000000s 2025-04-10T11:20:15.626959Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-10T11:20:15.627007Z :INFO: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1572 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:20:15.627125Z :NOTICE: [/Root] [/Root] [678bdd6-309d181d-8bb5ce17-e7551196] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:20:15.630434Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 grpc read done: success# 0, data# { } 2025-04-10T11:20:15.630466Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 grpc read failed 2025-04-10T11:20:15.630500Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 grpc closed 2025-04-10T11:20:15.630551Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_17581261982344121494_v1 is DEAD 2025-04-10T11:20:15.631951Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7491642795595329142:2540] disconnected; active server actors: 1 2025-04-10T11:20:15.631975Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7491642795595329142:2540] client user disconnected session shared/user_7_1_17581261982344121494_v1 2025-04-10T11:20:15.632193Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_17581261982344121494_v1 2025-04-10T11:20:15.632243Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7491642795595329145:2543] destroyed 2025-04-10T11:20:15.632292Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_17581261982344121494_v1 2025-04-10T11:20:16.189353Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-10T11:20:16.223754Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:16.285172Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:16.350162Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:16.454481Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:16.604858Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:16.854435Z node 7 :KQP_COMPUTE WARN: TxId: 281474976710695, task: 1, CA Id [7:7491642804185263942:2576]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-10T11:20:17.914847Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.914892Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.914932Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:20:17.915330Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-10T11:20:17.915880Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:20:17.916180Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.918579Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-10T11:20:17.923823Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.925512Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.925622Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:20:17.926695Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:20:17.927909Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:20:17.928114Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.928437Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:20:17.929874Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-10T11:20:17.930518Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-10T11:20:17.930627Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-10T11:20:17.930807Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:20:17.930887Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-10T11:20:17.930923Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-10T11:20:17.930979Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-10T11:20:17.934432Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.934482Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.934523Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:20:17.942441Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:20:17.955092Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:20:17.955307Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.955752Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-10T11:20:17.956672Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.956909Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:20:17.959229Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:20:17.959328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-10T11:20:17.959471Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-04-10T11:20:17.961570Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.961614Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.961664Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-10T11:20:17.962141Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-10T11:20:17.964233Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-10T11:20:17.964429Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.965325Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:20:17.965514Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-10T11:20:17.965616Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-10T11:20:17.965732Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27111, MsgBus: 11466 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001304/r3tmp/tmpL7dhXr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27111, node 1 TClient is connected to server localhost:11466 TClient is connected to server localhost:11466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 15476, MsgBus: 18067 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00132d/r3tmp/tmpDXV0oh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15476, node 1 TClient is connected to server localhost:18067 TClient is connected to server localhost:18067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4043, MsgBus: 26402 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012fb/r3tmp/tmpLCPdFd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4043, node 1 TClient is connected to server localhost:26402 TClient is connected to server localhost:26402 WaitRootIsUp 'Root'... 
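One detail worth noting in these Ls responses: the virtual .sys child reports PathId and ParentPathId 18446744073709551615, which is exactly 2^64 - 1, the maximum unsigned 64-bit value, evidently used as a sentinel for synthetic paths rather than a really allocated id:

    #include <cstdint>
    #include <limits>

    // The PathId printed for the .sys entry is uint64 max, i.e. a sentinel.
    static_assert(std::numeric_limits<std::uint64_t>::max() == 18446744073709551615ULL,
                  "18446744073709551615 == 2^64 - 1");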
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::ReadFinish >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::SimpleEcho >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8125, MsgBus: 20913 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012f3/r3tmp/tmpyJcr8D/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8125, node 1 TClient is connected to server localhost:20913 TClient is connected to server localhost:20913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> TPersQueueTest::WriteAfterAlter [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed >> HttpRequest::Probe >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> BasicStatistics::SimpleGlobalIndex >> HttpRequest::ProbeServerless >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> BasicStatistics::TwoServerlessDbs |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard >> HttpRequest::Status >> BasicStatistics::NotFullStatisticsColumnshard >> HttpRequest::Analyze >> ColumnStatistics::CountMinSketchStatistics >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15250, MsgBus: 7353 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012bb/r3tmp/tmpL7cIbS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15250, node 1 TClient is connected to server localhost:7353 TClient is connected to server localhost:7353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] |90.9%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-04-10T11:20:21.708783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642828380848248:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:21.709000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001579/r3tmp/tmp2UN4AV/pdisk_1.dat 2025-04-10T11:20:22.141087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:22.159034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:22.159113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:22.162208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:22.239238Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:37410 2025-04-10T11:20:22.239774Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7491642832675815919:2257] peer# ipv6:[::1]:37410 2025-04-10T11:20:22.239811Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:37410 2025-04-10T11:20:22.246323Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:37410 2025-04-10T11:20:22.246426Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-10T11:20:22.246475Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:37410 grpc status# (9) message# Everything is A-OK 2025-04-10T11:20:22.247152Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:37410 2025-04-10T11:20:22.247208Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-10T11:20:22.247240Z 
node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:37410 grpc status# (9) message# Everything is A-OK 2025-04-10T11:20:22.247334Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:37410 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-04-10T11:20:21.698746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642827830992383:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:21.698899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158d/r3tmp/tmpUJyUt2/pdisk_1.dat 2025-04-10T11:20:22.134079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:22.163040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:22.163147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:22.165910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:22.225160Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:42774 2025-04-10T11:20:22.225574Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7491642832125960207:2256] peer# ipv6:[::1]:42774 2025-04-10T11:20:22.225616Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:42774 2025-04-10T11:20:22.226656Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# true data# peer# ipv6:[::1]:42774 2025-04-10T11:20:22.226982Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-04-10T11:20:22.227013Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:42774 2025-04-10T11:20:22.227250Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:42774 grpc status# (0) message# 2025-04-10T11:20:22.227862Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:42774 2025-04-10T11:20:22.228113Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:42774 grpc status# (0) message# 2025-04-10T11:20:22.228139Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:42774 2025-04-10T11:20:22.228181Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:42774 (finish done) >> TopicService::AccessRights [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-04-10T11:20:21.701557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642826997002383:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:21.701710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001542/r3tmp/tmph3L6CY/pdisk_1.dat 2025-04-10T11:20:22.114745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:22.132288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:22.132745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:22.135995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:22.274582Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] stream accepted Name# Session ok# true peer# ipv6:[::1]:48098 2025-04-10T11:20:22.274910Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] facade attach Name# Session actor# [1:7491642831291970210:2258] peer# ipv6:[::1]:48098 2025-04-10T11:20:22.274935Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] facade read Name# Session peer# ipv6:[::1]:48098 2025-04-10T11:20:22.274994Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] facade finish Name# Session peer# ipv6:[::1]:48098 grpc status# (0) message# 2025-04-10T11:20:22.278386Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] read finished Name# Session ok# false data# peer# ipv6:[::1]:48098 2025-04-10T11:20:22.278438Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] stream done notification Name# Session ok# true peer# ipv6:[::1]:48098 2025-04-10T11:20:22.278471Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] stream finished Name# Session ok# true peer# ipv6:[::1]:48098 grpc status# (0) message# 2025-04-10T11:20:22.278518Z node 1 :GRPC_SERVER DEBUG: [0x51f00002d880] deregistering request Name# Session peer# ipv6:[::1]:48098 (finish done) 2025-04-10T11:20:22.278670Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-04-10T11:20:21.699139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642829304677423:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:21.699307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00158b/r3tmp/tmpJJTezi/pdisk_1.dat 2025-04-10T11:20:22.177395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:22.184715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:22.184843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:22.187621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:22.314463Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:36004 2025-04-10T11:20:22.315294Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7491642833599645250:2258] peer# ipv6:[::1]:36004 2025-04-10T11:20:22.315322Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:36004 2025-04-10T11:20:22.315619Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:36004 grpc status# (0) message# 
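The TGRpcStreamingTest traces in this run follow one bidirectional stream through the server's async facade: stream accepted, facade attach, facade read/write, facade finish with a grpc status, stream done notification, deregistering request. The same control flow in the plain synchronous gRPC C++ API looks roughly like the sketch below; TestService and Msg are hypothetical proto names, and the real tests drive an internal completion-queue facade rather than this sync surface:

    #include <grpcpp/grpcpp.h>
    // Assumes a hypothetical proto:
    //   service TestService { rpc Session(stream Msg) returns (stream Msg); }

    class SessionHandler final : public TestService::Service {
        grpc::Status Session(grpc::ServerContext* /*ctx*/,
                             grpc::ServerReaderWriter<Msg, Msg>* stream) override {
            Msg msg;
            // Read() returning false corresponds to "read finished ... ok# false":
            // the client called WritesDone() or dropped the stream.
            while (stream->Read(&msg)) {
                stream->Write(msg);          // echo, as in SimpleEcho
            }
            // Returning OK is the "facade finish ... grpc status# (0)" step.
            return grpc::Status::OK;
        }
    };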
2025-04-10T11:20:22.316557Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:36004 2025-04-10T11:20:22.316885Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:36004 2025-04-10T11:20:22.317063Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:36004 2025-04-10T11:20:22.317161Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:36004 grpc status# (0) message# 2025-04-10T11:20:22.317221Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-10T11:20:22.317236Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-10T11:20:22.320534Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:36004 (finish done) |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.9%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> KqpMultishardIndex::DataColumnWrite-UseSink [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:19:59.230871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:19:59.231006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.231057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:19:59.231110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:19:59.231976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:19:59.232059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:19:59.232161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:19:59.232256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:19:59.233440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:19:59.316138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:19:59.316189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:59.330689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:19:59.330953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:19:59.331098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:19:59.347106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:19:59.347329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:19:59.347960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.348299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:19:59.352096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.359964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.360851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:19:59.360919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.361037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:19:59.361239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.369710Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:19:59.507338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:19:59.507544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.507738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:19:59.507949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:19:59.508016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.513966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:19:59.514086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.514133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:19:59.514190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:19:59.514227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:19:59.515746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.515796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:19:59.515824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:19:59.516988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.517062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.517099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.517146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.519920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:19:59.521369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:19:59.521514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:19:59.522243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:19:59.522372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:19:59.522429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.522683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:19:59.522740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:19:59.522912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:19:59.522972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:19:59.525102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:19:59.525140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:19:59.525281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:19:59.525331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:19:59.525534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:19:59.525576Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:19:59.525655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.525687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.525738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:19:59.525775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.525808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:19:59.525849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:19:59.525894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:19:59.525922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:19:59.525967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:19:59.525996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:19:59.526019Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:19:59.528558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.528642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:19:59.528677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ly one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:20:26.212614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.212698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:20:26.213343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:20:26.213582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-10T11:20:26.213902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:20:26.214047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:20:26.214195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.214429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-10T11:20:26.214727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.214842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.215914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:20:26.216802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-10T11:20:26.227852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:26.228037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T11:20:26.229792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [1:1755:3678], Recipient [1:1755:3678]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:26.229843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-10T11:20:26.233092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:20:26.233167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:20:26.233533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1755:3678], Recipient [1:1755:3678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:26.233579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:26.234642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:20:26.234719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:20:26.234773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:20:26.234811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-10T11:20:26.237884Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1793:3678], Recipient [1:1755:3678]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:26.237934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-10T11:20:26.237987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1755:3678] sender: [1:1813:2058] recipient: [1:15:2062] 2025-04-10T11:20:26.289795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1812:3724], Recipient [1:1755:3678]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-10T11:20:26.289873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:26.289999Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:26.290354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 322us result status StatusSuccess 2025-04-10T11:20:26.291159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 23166 Memory: 156728 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TopicService::ThereAreGapsInTheOffsetRanges >> BasicStatistics::TwoTables >> HttpRequest::AnalyzeServerless >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 |91.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicStatistics::TwoNodes |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> KqpIndexes::UpdateOnReadColumns [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 27470, MsgBus: 5524 2025-04-10T11:19:29.395136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642604522853945:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.398796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e09/r3tmp/tmp3xICXF/pdisk_1.dat 2025-04-10T11:19:29.911081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.911227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.918767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:29.950903Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27470, node 1 2025-04-10T11:19:29.981723Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:29.981873Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:30.210579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.210600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.210607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.212493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5524 TClient is connected to server localhost:5524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.356637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.386062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.579608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.824836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.914997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:33.544236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642621702724713:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.544338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.166407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.214930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.256180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.317232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.351656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.381422Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642604522853945:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.381465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.427459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.501487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642625997692529:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.501609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.502066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642625997692535:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.506944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.518664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642625997692537:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.576982Z node 1 :TX_PROXY ERROR: Actor# [1:7491642625997692590:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.908522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28402, MsgBus: 29097 2025-04-10T11:19:39.561116Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642645111974060:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.561228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e09/r3tmp/tmpfbOy5Y/pdisk_1.dat 2025-04-10T11:19:39.756450Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:39.759939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:39.760027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:39.761665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28402, node 2 2025-04-10T11:19:39.966836Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:39.966873Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:39.966883Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:39.967054Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29097 TClient is connected to server localhost:29097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:40.745877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:40.758239Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:40.773581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:40.899640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting ... 4976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:20:12.679425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:20:12.704332Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491642766697518589:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:12.704405Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:20:12.733417Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:20:12.824382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491642788172357363:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:12.824496Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:12.824902Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491642788172357368:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:12.829646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:20:12.854709Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491642788172357370:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:20:12.917207Z node 5 :TX_PROXY ERROR: Actor# [5:7491642788172357425:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:20:14.179514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:20:14.238077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:20:14.310433Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25860, MsgBus: 21844 2025-04-10T11:20:18.794239Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491642814441822629:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:18.810939Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e09/r3tmp/tmpQm83I9/pdisk_1.dat 2025-04-10T11:20:18.977415Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:19.005321Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:19.005429Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:19.008094Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25860, node 6 2025-04-10T11:20:19.082914Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:19.082945Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:19.082955Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:19.083103Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21844 TClient is connected to server localhost:21844 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:20:19.727192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:19.743111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:19.822043Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:20.006664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:20.089956Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:22.999904Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491642831621693600:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:23.000024Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:23.061257Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.105753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.154697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.203045Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.249385Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.307811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:20:23.369128Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491642835916661407:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:23.369229Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:23.369309Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491642835916661412:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:23.373630Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:20:23.387874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491642835916661414:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:20:23.450510Z node 6 :TX_PROXY ERROR: Actor# [6:7491642835916661467:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:20:23.783029Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491642814441822629:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:23.783136Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:20:24.921759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:20:25.016201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:20:25.121715Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [FAIL] Test command err: Trying to start YDB, gRPC: 15218, MsgBus: 61764 2025-04-10T11:19:34.715965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642624872750851:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.716035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001df9/r3tmp/tmpGhlzEk/pdisk_1.dat 2025-04-10T11:19:35.286758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:35.286937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:35.290289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:35.304987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15218, node 1 2025-04-10T11:19:35.490749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:35.490789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:35.490798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:35.490910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61764 TClient is connected to server localhost:61764 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:36.152073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.173719Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:36.184330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.332989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.540732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:36.633231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:38.487263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642642052621599:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.487383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:38.820237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.858290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.893949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.935375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:38.965988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.037574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:39.088736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642646347589407:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.088809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.089055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642646347589412:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:39.092508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:39.103687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642646347589414:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:39.203476Z node 1 :TX_PROXY ERROR: Actor# [1:7491642646347589469:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:39.722288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642624872750851:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:39.722381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:40.186293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:48.200456Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642685002299273:2846], TxId: 281474976710719, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MmVkMThkMDktNWFiYTU4YjEtZTEzZmU5Y2EtOGJhODgxMWI=. TraceId : 01jrfptvvy5t50ew2f0sjc2g0y. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:19:48.200885Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642685002299274:2847], TxId: 281474976710719, task: 2. Ctx: { TraceId : 01jrfptvvy5t50ew2f0sjc2g0y. SessionId : ydb://session/3?node_id=1&id=MmVkMThkMDktNWFiYTU4YjEtZTEzZmU5Y2EtOGJhODgxMWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491642685002299270:2497], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:19:48.201260Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmVkMThkMDktNWFiYTU4YjEtZTEzZmU5Y2EtOGJhODgxMWI=, ActorId: [1:7491642650642557057:2497], ActorState: ExecuteState, TraceId: 01jrfptvvy5t50ew2f0sjc2g0y, Create QueryResponse for error on request, msg: 2025-04-10T11:19:50.294756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:19:50.294795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:50.654963Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710734 at tablet 72075186224037933 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037933) | 2025-04-10T11:19:50.659022Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710734 at tablet 72075186224037934 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037934) | 2025-04-10T11:19:50.671161Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710734 at tablet 72075186224037931 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037931) | 2025-04-10T11:19:50.672221Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710734 at tablet 72075186224037932 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037932) | 2025-04-10T11:19:50.744119Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-10T11:19:50.744252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-10T11:19:50.744274Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-10T11:19:50.760272Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-10T11:19:50.760387Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-10T11:19:50.760405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-10T11:19:50.760425Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not fou ... waiting... 2025-04-10T11:19:54.596098Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:19:54.604516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:54.686341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:54.897425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:54.991339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:57.341495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642724210473282:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:57.341602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:57.390120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.432527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.471685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.528749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.569766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.640490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:57.728814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642724210473801:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:57.728913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:57.729171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642724210473806:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:57.733586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:57.744382Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642724210473808:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:19:57.841346Z node 2 :TX_PROXY ERROR: Actor# [2:7491642724210473864:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:58.782362Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642707030602389:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:58.782436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:59.037622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:07.166489Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642767160150989:2839], TxId: 281474976715731, task: 1. Ctx: { TraceId : 01jrfpved15bz13d8kcnsxmass. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODg0M2NkNy1lOWUzZGY5ZC05MWQyZTY0ZC1hMmVmZWM2. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:20:07.166721Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491642767160150991:2840], TxId: 281474976715731, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODg0M2NkNy1lOWUzZGY5ZC05MWQyZTY0ZC1hMmVmZWM2. TraceId : 01jrfpved15bz13d8kcnsxmass. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7491642767160150986:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:20:07.167072Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODg0M2NkNy1lOWUzZGY5ZC05MWQyZTY0ZC1hMmVmZWM2, ActorId: [2:7491642732800408717:2489], ActorState: ExecuteState, TraceId: 01jrfpved15bz13d8kcnsxmass, Create QueryResponse for error on request, msg: 2025-04-10T11:20:08.967390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:20:08.967420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:09.521861Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-10T11:20:09.526742Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-10T11:20:09.543611Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491642775750087034:2489] TxId: 281474976715750. Ctx: { TraceId: 01jrfpvg944b8m9x5sck3w8k98, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODg0M2NkNy1lOWUzZGY5ZC05MWQyZTY0ZC1hMmVmZWM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715750 because datashard 72075186224037922: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-04-10T11:20:09.543875Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODg0M2NkNy1lOWUzZGY5ZC05MWQyZTY0ZC1hMmVmZWM2, ActorId: [2:7491642732800408717:2489], ActorState: ExecuteState, TraceId: 01jrfpvg944b8m9x5sck3w8k98, Create QueryResponse for error on request, msg: 2025-04-10T11:20:09.561648Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-10T11:20:09.562695Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-10T11:20:09.562761Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-10T11:20:09.562779Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-10T11:20:09.562795Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:20:09.562809Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-10T11:20:09.562837Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-10T11:20:19.514464Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-10T11:20:19.521611Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-10T11:20:19.521648Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2025-04-10T11:20:19.528176Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-10T11:20:24.547957Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:20:24.591968Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-10T11:20:24.592016Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found assertion failed at ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2079, virtual void NKikimr::NKqp::NTestSuiteKqpMultishardIndex::TTestCaseDataColumnWrite::Execute_(NUnitTest::TTestContext &) [UseSink = false]: (result.IsSuccess()) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198E1F3B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19DA749F 2. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2079: Execute_ @ 0x19529914 3. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: operator() @ 0x194E0327 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x194E0327 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x194E0327 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x194E0327 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x194E0327 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19DDE4C5 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19DDE4C5 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19DDE4C5 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19DAE018 12. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: Execute @ 0x194DF4F3 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19DAF8E5 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19DD8A3C 15. ??:0: ?? @ 0x7F0166E8AD8F 16. ??:0: ?? @ 0x7F0166E8AE3F 17. ??:0: ?? 
@ 0x16613028 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> TPersQueueTest::SetupWriteSession [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> BasicStatistics::Serverless >> ColumnStatistics::CountMinSketchServerlessStatistics >> BasicStatistics::Simple >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> 
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52
>> TFstClassSrcIdPQTest::NoMapping [GOOD]
>> TFstClassSrcIdPQTest::ProperPartitionSelected
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD]
>> TPersQueueTest::ReadRuleDisallowDefaultServiceType
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> BasicStatistics::TwoDatabases
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-04-10 11:20:15,648 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 11:20:16,223 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
1354915 722M 694M 642M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/2gt4/001869/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
1361010 2.7G 2.6G 2.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/001869/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_
1368235 459M 436M 425M └─ moto_server s3 --port 22581
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
"contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 71, in wait_for if condition_func(): File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in lambda: get_rows_in_portion(bucket1_path) == self.row_count File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 159, in get_rows_in_portion portions_stat = table.get_portion_stat_by_tier() File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier results = self.ydb_client.query( File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, 
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/001869/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/001869', '--source-root', '/home/runner/.ya/build/build_root/2gt4/001869/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/001869/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/001869/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/001869', '--source-root', '/home/runner/.ya/build/build_root/2gt4/001869/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/001869/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})
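The frames above show where the 600-second wrapper timeout landed: the test was still polling a row count through the YDB SDK, blocked in grpc _next() waiting for a query response. A minimal sketch of the polling helper implicated at base.py:71, reconstructed only from the frames shown here (names follow the traceback; the actual helper in ydb/tests/olap/ttl_tiering/base.py may differ in signature and timing defaults):

import time

def wait_for(condition_func, timeout_seconds=60, step_seconds=1.0):
    # Reconstructed from the traceback: base.py:71 evaluates condition_func()
    # on each pass; in this test every probe issues a YDB query via
    # execute_with_retries(), which blocks in grpc until the server responds.
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if condition_func():
            return True
        time.sleep(step_seconds)
    return False

# Call site mirroring data_migration_when_alter_ttl.py:171-172:
#   if not self.wait_for(lambda: get_rows_in_portion(bucket1_path) == self.row_count):
# If the condition never becomes true, the loop is still inside a blocking
# query when the ya wrapper's 600 s limit fires, producing the report above.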
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5
>> TopicService::ThereAreGapsInTheOffsetRanges [GOOD]
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn
>> TColumnShardTestSchema::ExportAfterFail
>> TColumnShardTestSchema::CreateTable
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD]
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TopicService::OnePartitionAndNoGapsInTheOffsets
>> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:236:2154] sender: [1:237:2060] recipient: [1:218:2142] 2025-04-10T11:18:43.413088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:18:43.413356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:43.413406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:18:43.413468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:18:43.414237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:18:43.414291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:18:43.414418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:18:43.414503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:18:43.416042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:18:43.524000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:18:43.524080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.533065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:18:43.533798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:18:43.534003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:18:43.546378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:18:43.546666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:18:43.550508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:43.551779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:18:43.557803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:43.565027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:43.565126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:43.566012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:18:43.566083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:43.566141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:18:43.566375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.579650Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:236:2154] sender: [1:349:2060] recipient: [1:17:2064] 2025-04-10T11:18:43.724865Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:18:43.726630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.728813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:18:43.730181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:18:43.730328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.735308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:43.735463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:18:43.735682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.735804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:18:43.735844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:18:43.735875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:18:43.738438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.738519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:18:43.738563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:18:43.740903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.740968Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.741035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:43.741113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:18:43.754930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:43.757795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:18:43.758905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:18:43.760170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:18:43.760342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 240 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:18:43.760413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:43.761839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:18:43.761915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:18:43.762181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:18:43.762303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:18:43.765738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:18:43.765796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:18:43.766002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:18:43.766045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:316:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:18:43.766169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:18:43.766222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:18:43.766367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:43.766405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:43.766447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:18:43.766489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:43.766534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:18:43.766608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:18:43.766714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:18:43.766768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:18:43.766863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-04-10T11:18:43.766911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:18:43.766963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:18:43.769843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:43.770002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:18:43.770052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.177779Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.177873Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:37.177902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:37.536191Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.536281Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.536379Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:37.536433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:37.887499Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.887595Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:37.887688Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:37.887719Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:38.253570Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:38.253667Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:38.253752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:38.253784Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 
2025-04-10T11:20:38.651965Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:38.652067Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:38.652164Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:38.652201Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.018946Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.019044Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.019133Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.019163Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.374169Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.374273Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.374383Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.374415Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.728421Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.728510Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:39.728593Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:39.728625Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:40.089372Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:40.089452Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:40.089528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:40.089557Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:40.485154Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:40.485241Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:20:40.485382Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:40.485420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:20:40.528289Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1076:2833], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-10T11:20:40.528376Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:40.528546Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:40.528783Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 238us result status StatusPathDoesNotExist 2025-04-10T11:20:40.528944Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:20:40.529487Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1077:2834], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-10T11:20:40.529543Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:40.529640Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:40.529823Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 184us result status StatusPathDoesNotExist 2025-04-10T11:20:40.529969Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, 
nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:20:40.530456Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1078:2835], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2025-04-10T11:20:40.530512Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-10T11:20:40.530610Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:20:40.530797Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 189us result status StatusPathDoesNotExist 2025-04-10T11:20:40.530936Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |91.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> HttpRequest::Status [GOOD]
>> TColumnShardTestSchema::CreateTable [GOOD]
>> HttpRequest::Analyze [GOOD]
>> TColumnShardTestSchema::ForgetAfterFail
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD]
Test command err: 2025-04-10T11:20:40.509269Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:40.654101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:40.684731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:40.685010Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:40.694179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:40.694445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:40.694781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:40.694892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:40.695022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:40.695134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:40.695238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:40.695351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:40.695468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:40.695607Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.695714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:40.695828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:40.722199Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:40.722378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:40.722443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:40.722725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:40.724648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:40.724794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:40.724847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:40.724951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:40.725054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:40.725132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:40.725173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:40.725354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:40.725440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:40.725480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:40.725517Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:40.725627Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:40.725693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:40.725738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:40.725768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:40.725877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:40.725941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:40.725986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:40.726064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:40.726104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:40.726133Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:40.726619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:20:40.726714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:20:40.727440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=677; 2025-04-10T11:20:40.727577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=60; 2025-04-10T11:20:40.727785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:40.727852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:40.727915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:40.728192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-10T11:20:40.728266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.728317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.728507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:40.728570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:40.728622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:40.728889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:20:40.728945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:20:40.728987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:20:40.803674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:20:40.803784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:40.803880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ame: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-10T11:20:42.374784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=118;this=88923004875264;method=TTxController::StartProposeOnExecute;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;fline=schema.h:36;event=sync_schema; 2025-04-10T11:20:42.388286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881216192;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-04-10T11:20:42.388394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881216192;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-10T11:20:42.419819Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881216192;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-04-10T11:20:42.420416Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-10T11:20:42.420596Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2025-04-10T11:20:42.420817Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-04-10T11:20:42.421192Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-04-10T11:20:42.421265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tables_manager.cpp:245;method=RegisterTable;path_id=19; 
2025-04-10T11:20:42.421338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine.h:144;event=RegisterTable;path_id=19; 2025-04-10T11:20:42.421766Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=19; 2025-04-10T11:20:42.421905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tx_controller.cpp:212;event=finished_tx;tx_id=118; 2025-04-10T11:20:42.434447Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-04-10T11:20:42.434597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-10T11:20:42.436212Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004878400;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=tx_controller.cpp:348;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-10T11:20:42.450866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004878400;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-10T11:20:42.450954Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004878400;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" 
TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-10T11:20:42.452550Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004879968;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=tx_controller.cpp:348;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-10T11:20:42.465377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004879968;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-10T11:20:42.465462Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004879968;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } 
TtlSettings { Version: 1 } } } } 2025-04-10T11:20:42.467058Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004881536;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=tx_controller.cpp:348;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-10T11:20:42.535285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004881536;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-04-10T11:20:42.535375Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004881536;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-04-10T11:20:27.772302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:27.772759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:27.772869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d73/r3tmp/tmpfLirfh/pdisk_1.dat 2025-04-10T11:20:28.231150Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4851, node 1 2025-04-10T11:20:28.512104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:28.512162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:28.512201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:28.512684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:28.515532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:28.607762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:28.607921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:28.623655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8867 2025-04-10T11:20:29.186577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:32.426688Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:32.470054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.470261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.499298Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:32.501911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.776561Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.777066Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.777767Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.777918Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.778022Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.778619Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.778717Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.778801Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.778883Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.959647Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.959768Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.973255Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.132910Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:33.201193Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:33.201306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:33.241353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:33.242452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:33.242648Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:33.242706Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:33.242761Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:33.242807Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:33.242876Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:33.242926Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:33.243380Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:33.268852Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.269001Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.278406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:20:33.283751Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:20:33.284044Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:20:33.294044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:33.312652Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:33.312714Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:33.312782Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:33.329481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:33.383316Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:33.383465Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:33.547423Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:33.723838Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:33.805841Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:34.987173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:34.987312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.017162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.376575Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.376846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:35.377197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:35.377400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:35.377546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:35.377692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:35.377830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:35.377975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:35.378170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:35.378343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:35.378566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:35.378708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:35.414854Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.414972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TT ... storeV1Chunks_V2;id=15; 2025-04-10T11:20:35.932862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:20:35.932956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:20:35.932985Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:36.489870Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.494620Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.499227Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.506596Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.511892Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.516415Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.521397Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.525981Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.533446Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:36.537308Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.839780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3189:3179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:37.839974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:37.862975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-10T11:20:38.487690Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.488224Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.489810Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.490374Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.490831Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.491260Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.492580Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.493022Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.493489Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:38.495108Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.351272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3817:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:39.351476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:39.368391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-10T11:20:39.425182Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.425783Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.428971Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.429476Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.429925Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.430432Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.430929Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.432411Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.432877Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.433359Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000013s 2025-04-10T11:20:41.405647Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4090:4236] 2025-04-10T11:20:41.408509Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-04-10T11:20:28.081465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:28.081870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:28.081947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d85/r3tmp/tmpv3bGzQ/pdisk_1.dat 2025-04-10T11:20:28.528558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20432, node 1 2025-04-10T11:20:28.882227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:28.882288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:28.882320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:28.882810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:28.890852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:28.981630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:28.981788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:28.995866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19680 2025-04-10T11:20:29.613817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:33.004153Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:33.052459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.052582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.081341Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:33.084643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.358440Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.358999Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.359679Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.359839Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.359944Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.360181Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.360273Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.360356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.360434Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.538312Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.538442Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.556515Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.718690Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:33.771660Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:33.771766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:33.815025Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:33.816205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:33.816408Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:33.816473Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:33.816533Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:33.816600Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:33.816659Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:33.816714Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:33.817169Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:33.845710Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.845857Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.858675Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:20:33.865872Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:20:33.866414Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:20:33.877487Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:33.895513Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:33.895581Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:33.895659Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:33.913945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:33.960164Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:33.960336Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:34.132219Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:34.287391Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:34.391117Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:35.577647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.577825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.606892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.983574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.983846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:35.984192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:35.984353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:35.984492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:35.984613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:35.984725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:35.984841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:35.984974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:35.985129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:35.985276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:35.985417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2392:2887];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:36.019226Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:36.019345Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2891];tablet_id=72075186224037900;process= ... tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:37.036395Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.042107Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.047438Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.056964Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.063771Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.069439Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.075657Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.081266Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.087880Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:37.093946Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715659; 2025-04-10T11:20:38.398251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3189:3179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.398450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.421587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-10T11:20:39.046223Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.046759Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.047117Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.047520Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.047927Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.048779Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.050221Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.051135Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.051531Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.052248Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715660; 2025-04-10T11:20:39.907876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3817:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:39.908053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:39.925390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-10T11:20:39.984119Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.984686Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.985146Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.985625Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.986054Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.987971Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.989816Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.990404Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.990914Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:39.992110Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000020s 2025-04-10T11:20:41.973518Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4088:4236] 2025-04-10T11:20:41.976457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4085:3322] , Record { OperationId: "\000\000\000\000\026dj\223e\323\267\353\325\364w\001" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-04-10T11:20:41.976525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=djeӷw 2025-04-10T11:20:41.976570Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=djeӷw , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. 
OperationId: 00000005k4da9pbmxqxfaz8xr1' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 9501, MsgBus: 18682 2025-04-10T11:19:29.353065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642602332818168:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:29.365099Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e12/r3tmp/tmpBdt0uC/pdisk_1.dat 2025-04-10T11:19:29.927111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:29.931199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:29.931299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:29.979082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9501, node 1 2025-04-10T11:19:30.211085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:30.211103Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:30.211110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:30.211219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18682 TClient is connected to server localhost:18682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:31.244493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.297400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.584650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:31.818132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:19:31.960739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:19:33.537464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642619512689142:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:33.537582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.166705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.241938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.287204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.330753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.350928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642602332818168:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:34.351086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:34.364516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.427841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:19:34.499192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623807656954:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.499286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.499622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642623807656959:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:34.503370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:19:34.517989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642623807656961:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:19:34.613711Z node 1 :TX_PROXY ERROR: Actor# [1:7491642623807657016:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:35.800571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:19:37.537593Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642636692560960:2742], TxId: 281474976710695, task: 2. Ctx: { TraceId : 01jrfpthr49863nt6wvjbfats4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTZjY2MwMzAtNzAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InputTransform[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } } 2025-04-10T11:19:37.540718Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642636692560960:2742], TxId: 281474976710695, task: 2. Ctx: { TraceId : 01jrfpthr49863nt6wvjbfats4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTZjY2MwMzAtNzAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } }. 2025-04-10T11:19:37.541533Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642636692560961:2743], TxId: 281474976710695, task: 3. Ctx: { TraceId : 01jrfpthr49863nt6wvjbfats4. SessionId : ydb://session/3?node_id=1&id=OTZjY2MwMzAtNzAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491642636692560925:2561], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:19:37.541988Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTZjY2MwMzAtNzAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=, ActorId: [1:7491642632397592684:2561], ActorState: ExecuteState, TraceId: 01jrfpthr49863nt6wvjbfats4, Create QueryResponse for error on request, msg: 2025-04-10T11:19:44.915995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:19:44.920158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:46.228092Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 281474976711107 because datashard 72075186224037919: is in process of split opId 281474976715657 state SplitSrcSendingSnapshot (wrong shard state);tx_id=281474976711107; 2025-04-10T11:19:46.231257Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 281474976711107 because datashard 72075186224037927: is in process of split opId 281474976715659 state SplitSrcMakeSnapshot (wrong shard state);tx_id=281474976711107; 2025-04-10T11:19:46.240069Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7491642675347276213:2561], Table: `/Root/MultiShardIndexed/index_new/indexImplTable` ([72057594046644480:20:2]), SessionActorId: [1:7491642632397592684:2561]Got OVERLOADED for table `/Root/MultiShardIndexed/index_new/indexImplTable`. ShardID=72075186224037919, Sink=[1:7491642675347276213:2561]. Ignored this error.{
: Error: Rejecting data TxId 281474976711107 because datashard 72075186224037919: is in process of split opId 281474976715657 state SplitSrcSendingSnapshot (wrong shard state), code: 2006 } 2025-04-10T11:19:46.240176Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491642675347276100:2561], SessionActorId: [1:7491642632397592684:2561], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037919 is overloaded. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2006
: Error: Rejecting data TxId 281474976711107 because datashard 72075186224037919: is in pr ... zAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2005 subissue: {
: Error: Rejecting data TxId 281474976712637 because datashard 72075186224037938: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-04-10T11:20:16.354670Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTZjY2MwMzAtNzAzZGRjNjAtZGExZWEwMzktYWY4ZDJlOTU=, ActorId: [1:7491642632397592684:2561], ActorState: ExecuteState, TraceId: 01jrfpvqn74twx9htg4vxn5b48, Create QueryResponse for error on request, msg: 2025-04-10T11:20:16.575768Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found Trying to start YDB, gRPC: 19797, MsgBus: 22102 2025-04-10T11:20:18.568845Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642816218043091:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:18.570549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e12/r3tmp/tmpS9g0f6/pdisk_1.dat 2025-04-10T11:20:18.743931Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:18.764643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:18.764742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:18.766457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19797, node 2 2025-04-10T11:20:18.886796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:18.886822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:18.886830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:18.886995Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22102 TClient is connected to server localhost:22102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:20:19.299034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
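
The node-1 session above hit the two transient failure classes this test provokes on purpose: ABORTED with "Wrong schemaversion 1 requested, table schemaversion 2" while the async index is being renamed, and OVERLOADED (code 2006) rejections while datashards are mid-split. Both are retryable statuses. Below is a minimal sketch of the client-side backoff loop such statuses call for; the EStatus/RetryWithBackoff names are illustrative assumptions, not the actual YDB SDK API (the real SDKs ship their own retry policies).

// Minimal sketch, assuming a hypothetical operation that returns one of the
// statuses seen in the log above. Not the actual YDB SDK API.
#include <algorithm>
#include <chrono>
#include <functional>
#include <iostream>
#include <random>
#include <thread>

enum class EStatus { Success, Overloaded, Aborted, Unavailable, Error };

static bool IsRetryable(EStatus s) {
    // OVERLOADED / ABORTED / UNAVAILABLE are the transient codes in the trace.
    return s == EStatus::Overloaded || s == EStatus::Aborted ||
           s == EStatus::Unavailable;
}

EStatus RetryWithBackoff(const std::function<EStatus()>& op, int maxRetries = 10) {
    using namespace std::chrono;
    const milliseconds base{50}, cap{5000};
    std::mt19937_64 rng{std::random_device{}()};
    for (int attempt = 0;; ++attempt) {
        EStatus st = op();
        if (!IsRetryable(st) || attempt >= maxRetries) {
            return st;  // success, permanent error, or retry budget exhausted
        }
        // Capped exponential backoff with full jitter before the next attempt.
        milliseconds backoff = std::min(cap, base * (1LL << std::min(attempt, 10)));
        std::uniform_int_distribution<long long> jitter(0, backoff.count());
        std::this_thread::sleep_for(milliseconds(jitter(rng)));
    }
}

int main() {
    int calls = 0;
    // Simulate a shard that rejects the first two writes while splitting.
    EStatus st = RetryWithBackoff([&] {
        return ++calls < 3 ? EStatus::Overloaded : EStatus::Success;
    });
    std::cout << "finished after " << calls << " calls, ok="
              << (st == EStatus::Success) << "\n";
}

With a loop of this shape the OVERLOADED rejections cost latency but not correctness, which is consistent with the test finishing [GOOD] despite the ERROR-level lines above.
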
2025-04-10T11:20:19.309477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:19.383221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:19.561073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:19.649914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:21.717052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642829102946740:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:21.717203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:21.765439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:20:21.792812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:20:21.823205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:20:21.866391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:20:21.902365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:20:21.976456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:20:22.065981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642833397914554:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:22.066052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:22.066412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642833397914559:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:22.071563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:20:22.091297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642833397914561:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:20:22.164588Z node 2 :TX_PROXY ERROR: Actor# [2:7491642833397914615:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:20:23.163129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:20:23.567462Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642816218043091:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:20:23.567610Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:20:33.546733Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7491642837692883664:2629] Apply status: status# 2, reason# 7 2025-04-10T11:20:33.573903Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-10T11:20:33.595803Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-04-10T11:20:33.597176Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-10T11:20:33.608842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvSplitPartitioningChangedAck for unknown txId 281474976710661 datashard 72075186224037923 2025-04-10T11:20:33.617196Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-10T11:20:33.617593Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-10T11:20:33.617621Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-10T11:20:33.617640Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-10T11:20:33.617871Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-10T11:20:33.627621Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-10T11:20:33.627669Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-10T11:20:33.627688Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-10T11:20:33.721563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:20:33.721589Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:38.516450Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-10T11:20:38.517896Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 
72075186224037931 not found 2025-04-10T11:20:40.072738Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037936:1][72075186224037919][2:7491642910707342626:2628] Handshake status: status# 2, reason# 7 2025-04-10T11:20:41.569780Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037935:1][72075186224037919][2:7491642915002311813:2630] Handshake status: status# 2, reason# 7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> TColumnShardTestSchema::RebootHotTiersAfterTtl |91.2%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> HttpRequest::AnalyzeServerless [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> TColumnShardTestSchema::RebootOneColdTier >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2025-04-10T11:20:31.159998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:31.160344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:31.160410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d36/r3tmp/tmp7qFmOg/pdisk_1.dat 2025-04-10T11:20:31.542623Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23047, node 1 2025-04-10T11:20:31.799441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:31.799503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:31.799542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:31.799981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:31.802777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:31.905564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:31.905702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:31.920560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32586 2025-04-10T11:20:32.463062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:35.787273Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:35.844162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:35.844342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:35.875977Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:35.879213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:36.148143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.148683Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.149245Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.149432Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.149561Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.149825Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.149919Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.150019Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.150263Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.341654Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:36.341790Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:36.359808Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:36.528445Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:36.589174Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:36.589281Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:36.634264Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:36.635392Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:36.635614Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:36.635679Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:36.635781Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:36.635843Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:36.635891Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:36.635938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:36.636387Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:36.659946Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:36.660075Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:36.668029Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:20:36.672478Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:20:36.672783Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:20:36.682527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:20:36.740025Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:36.740092Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:36.740155Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:20:36.758355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:36.767719Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:36.767872Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:36.940980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:37.093627Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:37.201888Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:38.001845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:38.795217Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:39.040274Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:20:39.040341Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:39.040439Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2599:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:39.043165Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2609:2953] 2025-04-10T11:20:39.043593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2609:2953], schemeshard id = 72075186224037899 2025-04-10T11:20:40.289944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2730:3252], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:40.290122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:40.309662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:20:40.632438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:40.632687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:40.632938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:40.633046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:40.633139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:40.633210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:40.633311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:40.633413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:40.633485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2878:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11 ... 
RN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:41.424314Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.431547Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.437417Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.442465Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.447722Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.453974Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.458946Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.465099Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.470047Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:41.475282Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; 2025-04-10T11:20:42.714552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3548:3349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.714713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.739560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2025-04-10T11:20:43.437174Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.437805Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.438904Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.439414Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.440355Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.440894Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.441434Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.442060Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.442423Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:43.442732Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:20:44.756062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4341:3423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:44.756672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:44.761649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-10T11:20:44.855649Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.856189Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.856930Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.858022Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.858730Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.859179Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.859581Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.861185Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.861595Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; 2025-04-10T11:20:44.861962Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715663; waiting actualization: 0/0.000020s 2025-04-10T11:20:46.871814Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4606:4447] 2025-04-10T11:20:46.875717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4602:3505] , Record { OperationId: "\000\000\000\000\034?u\310\322\373\325\244S7]\254" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-04-10T11:20:46.875800Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= ?uդS7] 2025-04-10T11:20:46.875853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId= ?uդS7] , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. 
OperationId: 000000071zeq4d5yynmh9keqdc' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; [this WARN pair repeats verbatim; duplicates elided] >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >>
TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> TColumnShardTestSchema::Drop >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::TestWriteStat >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> TColumnShardTestSchema::Drop [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2025-04-10T11:20:51.695652Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:51.787546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:51.810636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:51.810926Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:51.818681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:51.818898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:51.819125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:51.819253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:51.819379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:51.819487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:51.819589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:51.819712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:51.819826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:51.819970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.820073Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:51.820167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:51.849179Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:51.849558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:51.849708Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:51.849882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:51.850086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:51.850193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:51.850236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:51.850339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:51.850415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:51.850469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:51.850501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:51.850677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:51.850737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:51.850784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:51.850811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:51.850898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:51.850956Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:51.851009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:51.851037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:51.851106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:51.851143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:51.851184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:51.851239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:51.851274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:51.851302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:51.851710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-10T11:20:51.851822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:20:51.851907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-10T11:20:51.852004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-10T11:20:51.852159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:51.852211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:51.852242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:51.852453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:51.852522Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.852559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.852689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:51.852731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:51.852780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:51.853015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:20:51.853056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:20:51.853087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:20:51.853215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:20:51.853254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:51.853320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ne=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:122:2872:0]; 2025-04-10T11:21:02.275815Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:123:2872:0]; 2025-04-10T11:21:02.275867Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:124:2872:0]; 2025-04-10T11:21:02.275918Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:125:2872:0]; 2025-04-10T11:21:02.275972Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:126:2872:0]; 2025-04-10T11:21:02.276028Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:127:2872:0]; 2025-04-10T11:21:02.276084Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:128:2872:0]; 2025-04-10T11:21:02.276132Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:129:2872:0]; 2025-04-10T11:21:02.276182Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:130:2872:0]; 2025-04-10T11:21:02.276234Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:131:2864:0]; 2025-04-10T11:21:02.276288Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:132:2872:0]; 2025-04-10T11:21:02.276336Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:133:2872:0]; 2025-04-10T11:21:02.276386Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:134:2864:0]; 2025-04-10T11:21:02.276441Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:135:2864:0]; 2025-04-10T11:21:02.276495Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:136:2864:0]; 2025-04-10T11:21:02.276545Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:137:2856:0]; 2025-04-10T11:21:02.276599Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:138:2864:0]; 2025-04-10T11:21:02.276655Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:139:2864:0]; 2025-04-10T11:21:02.276704Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:140:2856:0]; 2025-04-10T11:21:02.276760Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:141:2856:0]; 2025-04-10T11:21:02.276815Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:142:2800:0]; 2025-04-10T11:21:02.276864Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:143:2752:0]; 2025-04-10T11:21:02.276916Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:144:2792:0]; 2025-04-10T11:21:02.276970Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:145:2792:0]; 2025-04-10T11:21:02.277024Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:146:2792:0]; 2025-04-10T11:21:02.277077Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:147:2784:0]; 2025-04-10T11:21:02.277127Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:148:2784:0]; 2025-04-10T11:21:02.277180Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:149:2784:0]; 2025-04-10T11:21:02.277229Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:150:2784:0]; 2025-04-10T11:21:02.277283Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:151:2784:0]; 2025-04-10T11:21:02.277351Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:152:2776:0]; 2025-04-10T11:21:02.277404Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:153:2768:0]; 2025-04-10T11:21:02.277458Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:154:9448:0]; 2025-04-10T11:21:02.282653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-10T11:21:02.282993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-10T11:21:02.407307Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:21:02.408110Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[22] (CS::GENERAL) apply at tablet 9437184 2025-04-10T11:21:02.441223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=e1ebac62-15fd11f0-a2ca65c5-659b516b;fline=with_appended.cpp:24;event=skip_inserted_data;reason=table_removed;path_id=1; 2025-04-10T11:21:02.444523Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 464 2025-04-10T11:21:02.449468Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=10308;raw_bytes=8378;count=1;records=100} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5601076;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=5605344;raw_bytes=7864506;count=2;records=80000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:21:02.464613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:21:02.464862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:21:02.465027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:21:02.465083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:656:2673] finished for tablet 9437184 2025-04-10T11:21:02.465677Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:646:2663];stats={"p":[{"events":["f_bootstrap"],"t":0.012},{"events":["l_bootstrap","f_ProduceResults"],"t":0.014},{"events":["f_ack","f_processing"],"t":0.195},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.196}],"full":{"a":1744284062268549,"name":"_full_task","f":1744284062268549,"d_finished":0,"c":0,"l":1744284062465162,"d":196613},"events":[{"name":"bootstrap","f":1744284062281140,"d_finished":2038,"c":1,"l":1744284062283178,"d":2038},{"a":1744284062464547,"name":"ack","f":1744284062464547,"d_finished":0,"c":0,"l":1744284062465162,"d":615},{"a":1744284062464496,"name":"processing","f":1744284062464496,"d_finished":0,"c":0,"l":1744284062465162,"d":666},{"name":"ProduceResults","f":1744284062283157,"d_finished":393,"c":2,"l":1744284062465062,"d":393},{"a":1744284062465066,"name":"Finish","f":1744284062465066,"d_finished":0,"c":0,"l":1744284062465162,"d":96}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:21:02.465777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:646:2663];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:21:02.466296Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:646:2663];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.012},{"events":["l_bootstrap","f_ProduceResults"],"t":0.014},{"events":["f_ack","f_processing"],"t":0.195},{"events":["l_ProduceResults","f_Finish"],"t":0.196},{"events":["l_ack","l_processing","l_Finish"],"t":0.197}],"full":{"a":1744284062268549,"name":"_full_task","f":1744284062268549,"d_finished":0,"c":0,"l":1744284062465838,"d":197289},"events":[{"name":"bootstrap","f":1744284062281140,"d_finished":2038,"c":1,"l":1744284062283178,"d":2038},{"a":1744284062464547,"name":"ack","f":1744284062464547,"d_finished":0,"c":0,"l":1744284062465838,"d":1291},{"a":1744284062464496,"name":"processing","f":1744284062464496,"d_finished":0,"c":0,"l":1744284062465838,"d":1342},{"name":"ProduceResults","f":1744284062283157,"d_finished":393,"c":2,"l":1744284062465062,"d":393},{"a":1744284062465066,"name":"Finish","f":1744284062465066,"d_finished":0,"c":0,"l":1744284062465838,"d":772}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:21:02.466398Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:21:02.267017Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:21:02.466456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:21:02.466594Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:656:2673];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 >> TColumnShardTestSchema::HotTiersWithStat >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> TTopicYqlTest::DropTopicYql >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-10T11:19:14.140304Z :WriteRAW INFO: Random seed for debugging is 1744283954140248 2025-04-10T11:19:14.627405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642539549932089:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.627514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:14.710904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642539289103332:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.711009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:14.945915Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:14.960133Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bd0/r3tmp/tmpBGcEPm/pdisk_1.dat 2025-04-10T11:19:15.324469Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:15.341428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:15.341557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.342689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:15.342744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.348444Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:19:15.348589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:15.349079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20529, node 1 2025-04-10T11:19:15.561394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000bd0/r3tmp/yandexHGA8VF.tmp 2025-04-10T11:19:15.561426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000bd0/r3tmp/yandexHGA8VF.tmp 2025-04-10T11:19:15.561700Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/2gt4/000bd0/r3tmp/yandexHGA8VF.tmp 2025-04-10T11:19:15.561841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:19:15.959542Z INFO: TTestServer started on Port 12370 GrpcPort 20529 TClient is connected to server localhost:12370 PQClient connected to localhost:20529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:16.338511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:19:18.280099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642556468972818:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.280193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642556468972843:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.280294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.285889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:19:18.302439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642556468972847:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:19:18.381141Z node 2 :TX_PROXY ERROR: Actor# [2:7491642556468972875:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:18.672105Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642556729802202:2343], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:18.672371Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642556468972890:2317], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:18.672689Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzM0ZTNkNTgtYzM2NmQ0YS00OGY0MmY2OC1hMjc0YTI4OA==, ActorId: [2:7491642556468972816:2308], ActorState: ExecuteState, TraceId: 01jrfpsz0467qy3hhjenwryjsv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:18.673927Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2NjMjM4MTktNTk5OGRkMTctNGIzZThjMzYtMzhlNmU3Yjk=, ActorId: [1:7491642556729802161:2336], ActorState: ExecuteState, TraceId: 01jrfpsz2x55r5rpc20ph4jhyp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:18.674805Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:18.674802Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:18.728904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:19:18.944124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:19:19.113324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:20529", true, true, 1000); 2025-04-10T11:19:19.440767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfpszy5b93awh078ph80bgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMzMGMyZGQtOTA1YjU2MzUtOGRlNGE1MTYtZmEyZTQyODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491642561024769918:2961] 2025-04-10T11:19:19.622347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642539549932089:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.622475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:19.709191Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642539289103332:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.709261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:19:25.599678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:20529 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T11:19:25.719354Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:20529 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 ... de 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-10T11:21:14.555986Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:37762 2025-04-10T11:21:14.556016Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37762 proto=v1 topic=test-topic durationSec=0 2025-04-10T11:21:14.556027Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T11:21:14.558075Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-10T11:21:14.558252Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-10T11:21:14.558274Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:21:14.558288Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET 
AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-10T11:21:14.558314Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T11:21:14.561959Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-10T11:21:14.812659Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-10T11:21:14.813010Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7491643057023812251:2504] connected; active server actors: 1 2025-04-10T11:21:14.813072Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-10T11:21:14.813097Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-10T11:21:14.813438Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7491643057023812251:2504] disconnected; active server actors: 1 2025-04-10T11:21:14.813463Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7491643057023812251:2504] disconnected no session 2025-04-10T11:21:14.957855Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7491643057023812283:2504], now have 1 active actors on pipe 2025-04-10T11:21:14.956444Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T11:21:14.956503Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-10T11:21:14.956526Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643057023812198:2504] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-10T11:21:14.956565Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T11:21:14.958010Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-04-10T11:21:14.958264Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T11:21:14.958306Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T11:21:14.958418Z node 16 :PERSQUEUE INFO: new Cookie src|44bfff66-dfd879d4-b644bffc-91fdf519_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-10T11:21:14.958539Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-10T11:21:14.958599Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:21:14.959276Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-10T11:21:14.959320Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-10T11:21:14.959421Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:21:14.959806Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|44bfff66-dfd879d4-b644bffc-91fdf519_0 2025-04-10T11:21:14.960656Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744284074960 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:21:14.960796Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|44bfff66-dfd879d4-b644bffc-91fdf519_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T11:21:14.962261Z :INFO: [] MessageGroupId [src] SessionId [src|44bfff66-dfd879d4-b644bffc-91fdf519_0] Write session: close. Timeout = 0 ms 2025-04-10T11:21:14.962331Z :INFO: [] MessageGroupId [src] SessionId [src|44bfff66-dfd879d4-b644bffc-91fdf519_0] Write session will now close 2025-04-10T11:21:14.962389Z :DEBUG: [] MessageGroupId [src] SessionId [src|44bfff66-dfd879d4-b644bffc-91fdf519_0] Write session: aborting 2025-04-10T11:21:14.962967Z :INFO: [] MessageGroupId [src] SessionId [src|44bfff66-dfd879d4-b644bffc-91fdf519_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:21:14.963026Z :DEBUG: [] MessageGroupId [src] SessionId [src|44bfff66-dfd879d4-b644bffc-91fdf519_0] Write session: destroy 2025-04-10T11:21:14.967244Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|44bfff66-dfd879d4-b644bffc-91fdf519_0 grpc read done: success: 0 data: 2025-04-10T11:21:14.967278Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44bfff66-dfd879d4-b644bffc-91fdf519_0 grpc read failed 2025-04-10T11:21:14.967311Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44bfff66-dfd879d4-b644bffc-91fdf519_0 grpc closed 2025-04-10T11:21:14.967332Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44bfff66-dfd879d4-b644bffc-91fdf519_0 is DEAD 2025-04-10T11:21:14.968221Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:21:14.968664Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7491643057023812283:2504] destroyed 2025-04-10T11:21:14.968721Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T11:21:15.024723Z :INFO: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Starting read session 2025-04-10T11:21:15.024787Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Starting cluster discovery 2025-04-10T11:21:15.025044Z :INFO: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13901: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:13901
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:13901. " 2025-04-10T11:21:15.025086Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Restart cluster discovery in 0.008569s 2025-04-10T11:21:15.036287Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Starting cluster discovery 2025-04-10T11:21:15.036697Z :INFO: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13901: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:13901
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:13901. " 2025-04-10T11:21:15.036760Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Restart cluster discovery in 0.012051s 2025-04-10T11:21:15.049368Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Starting cluster discovery 2025-04-10T11:21:15.049597Z :INFO: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13901: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:13901
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:13901. " 2025-04-10T11:21:15.049633Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Restart cluster discovery in 0.032817s 2025-04-10T11:21:15.086336Z :DEBUG: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Starting cluster discovery 2025-04-10T11:21:15.086694Z :NOTICE: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
<main>: Error: Failed to discover clusters
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13901: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:13901
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:13901. " } 2025-04-10T11:21:15.090362Z :NOTICE: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
<main>: Error: Failed to discover clusters
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13901: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:13901
<main>: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:13901. " } 2025-04-10T11:21:15.090568Z :INFO: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Closing read session. Close timeout: 0.000000s 2025-04-10T11:21:15.090736Z :NOTICE: [/Root] [/Root] [ac0ee9a0-1d7cbe2c-16095df5-1fea36e4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284640.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284640.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283440.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-10T11:20:41.928706Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:42.014315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:42.029046Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:42.029245Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:42.035210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:42.035419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:42.035641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:42.035719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:42.035796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:42.035875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:42.035936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:42.036026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:42.036109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:42.036177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:42.036243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:42.036322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:42.062072Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:42.062448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:42.062508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-10T11:20:42.062683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:42.062826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:42.062908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:42.062946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:42.063027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:42.063080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:42.063121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:42.063150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:42.063302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:42.063365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:42.063401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:42.063432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:42.063532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:42.063583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:42.063625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:42.063653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:42.063734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:42.063778Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:42.063808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:42.063854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:42.063888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:42.063924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:42.064282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-10T11:20:42.064368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-10T11:20:42.064446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-10T11:20:42.064510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-10T11:20:42.064650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:42.064715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:42.064747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:42.064936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:42.064977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:42.065007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:42.065133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:42.065169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:42.065195Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:42.065381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... umn_names=timestamp;);;;); 2025-04-10T11:21:25.050986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:21:25.051039Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:21:25.051121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:21:25.051203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:21:25.051301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-10T11:21:25.051337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-10T11:21:25.051433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1266:3275];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1267:3276]->[1:1266:3275] 2025-04-10T11:21:25.051509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051675Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:21:25.051837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.051925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1267:3276] finished for tablet 9437184 2025-04-10T11:21:25.052347Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1266:3275];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.06},{"events":["f_processing","f_task_result"],"t":0.062},{"events":["f_ack","l_task_result"],"t":0.981},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.987}],"full":{"a":1744284084064553,"name":"_full_task","f":1744284084064553,"d_finished":0,"c":0,"l":1744284085051967,"d":987414},"events":[{"name":"bootstrap","f":1744284084064780,"d_finished":60104,"c":1,"l":1744284084124884,"d":60104},{"a":1744284085051758,"name":"ack","f":1744284085046468,"d_finished":4783,"c":7,"l":1744284085051702,"d":4992},{"a":1744284085051748,"name":"processing","f":1744284084127386,"d_finished":425506,"c":56,"l":1744284085051704,"d":425725},{"name":"ProduceResults","f":1744284084069203,"d_finished":79765,"c":65,"l":1744284085051912,"d":79765},{"a":1744284085051914,"name":"Finish","f":1744284085051914,"d_finished":0,"c":0,"l":1744284085051967,"d":53},{"name":"task_result","f":1744284084127414,"d_finished":419515,"c":49,"l":1744284085046256,"d":419515}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:21:25.052404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1266:3275];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:21:25.052883Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1266:3275];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.06},{"events":["f_processing","f_task_result"],"t":0.062},{"events":["f_ack","l_task_result"],"t":0.981},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.987}],"full":{"a":1744284084064553,"name":"_full_task","f":1744284084064553,"d_finished":0,"c":0,"l":1744284085052437,"d":987884},"events":[{"name":"bootstrap","f":1744284084064780,"d_finished":60104,"c":1,"l":1744284084124884,"d":60104},{"a":1744284085051758,"name":"ack","f":1744284085046468,"d_finished":4783,"c":7,"l":1744284085051702,"d":5462},{"a":1744284085051748,"name":"processing","f":1744284084127386,"d_finished":425506,"c":56,"l":1744284085051704,"d":426195},{"name":"ProduceResults","f":1744284084069203,"d_finished":79765,"c":65,"l":1744284085051912,"d":79765},{"a":1744284085051914,"name":"Finish","f":1744284085051914,"d_finished":0,"c":0,"l":1744284085052437,"d":523},{"name":"task_result","f":1744284084127414,"d_finished":419515,"c":49,"l":1744284085046256,"d":419515}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1267:3276]->[1:1266:3275] 2025-04-10T11:21:25.052997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:21:24.063975Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-10T11:21:25.053044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:21:25.053347Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1267:3276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> 
TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> TPersQueueTest::SetMeteringMode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> TColumnShardTestSchema::HotTiers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> CompressExecutor::TestExecutorMemUsage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-10T11:19:14.144109Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1744283954144079 2025-04-10T11:19:14.698135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642537480043727:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.698320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:14.828866Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642538634981982:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:14.830237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bfc/r3tmp/tmpy1Dpo5/pdisk_1.dat 2025-04-10T11:19:15.073042Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:15.083730Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:19:15.573183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:15.573289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.573991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:15.577702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:15.588620Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:19:15.588768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:15.589997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:15.621779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32531, node 1 2025-04-10T11:19:15.663157Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:15.663194Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:19:15.836125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000bfc/r3tmp/yandexkN6hit.tmp 2025-04-10T11:19:15.836149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000bfc/r3tmp/yandexkN6hit.tmp 2025-04-10T11:19:15.836321Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000bfc/r3tmp/yandexkN6hit.tmp 2025-04-10T11:19:15.836470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:19:15.958575Z INFO: TTestServer started on Port 9789 GrpcPort 32531 TClient is connected to server localhost:9789 PQClient connected to localhost:32531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:19:16.276748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:19:18.700099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642554659913935:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.700160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642554659913906:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.700240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:18.704418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-04-10T11:19:18.734380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642554659913944:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-04-10T11:19:18.798005Z node 1 :TX_PROXY ERROR: Actor# [1:7491642554659914044:2694] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:19:19.041708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-04-10T11:19:19.047748Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642554659914061:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:19.048066Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDFiZmU4MGItYTMxZWMzZmEtMjFmNmUwZC1hNTk1ZDgxZQ==, ActorId: [1:7491642554659913903:2335], ActorState: ExecuteState, TraceId: 01jrfpszczbp6tqmzmrg0tfdze, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:19.050671Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:19.051766Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642555814851510:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:19:19.053608Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2I2YWMzODYtMzMyYWExN2UtNTIwZWQ2MjAtMTYxMzMwMzI=, ActorId: [2:7491642555814851485:2308], ActorState: ExecuteState, TraceId: 01jrfpszdse93q6r2v1q7k4t4y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:19:19.054029Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:19:19.279887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-04-10T11:19:19.486110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:32531", true, true, 1000); 2025-04-10T11:19:19.704840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642537480043727:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.704912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:19:19.812170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jrfpt0bh21gs38ga304ch81e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRkMTBhNjItNTJjZmQzMTctZTljODg2NzktNTk5ZDI4ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:19:19.820562Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642538634981982:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:19.820659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7491642558954881763:2991] === CheckClustersList. Ok 2025-04-10T11:19:25.759113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:32531 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T11:19:25.938453Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:32531 MetaRequest { CmdC ... nit_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-10T11:21:27.937388Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:55688 2025-04-10T11:21:27.937419Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:55688 proto=v1 topic=test-topic durationSec=0 2025-04-10T11:21:27.937434Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T11:21:27.939119Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-10T11:21:27.939264Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-10T11:21:27.939283Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:21:27.939299Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-10T11:21:27.939321Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643111462711903:2586] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T11:21:27.943317Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7491643111462711903:2586] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-10T11:21:28.114644Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710698. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T11:21:28.114818Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7491643111462711916:2588] TxId: 281474976710698. Ctx: { TraceId: 01jrfpxxm76drhgmbtceymmq20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MmM3MzNlMjEtMjllN2ZhMmItM2RiODI1Y2UtZTcwNjkwMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T11:21:28.115057Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MmM3MzNlMjEtMjllN2ZhMmItM2RiODI1Y2UtZTcwNjkwMzQ=, ActorId: [15:7491643111462711904:2588], ActorState: ExecuteState, TraceId: 01jrfpxxm76drhgmbtceymmq20, Create QueryResponse for error on request, msg: 2025-04-10T11:21:28.116644Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7491643111462711903:2586] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MmM3MzNlMjEtMjllN2ZhMmItM2RiODI1Y2UtZTcwNjkwMzQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfpxxm8cs9d2vybxcnrsz2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-10T11:21:28.116792Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MmM3MzNlMjEtMjllN2ZhMmItM2RiODI1Y2UtZTcwNjkwMzQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfpxxm8cs9d2vybxcnrsz2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-10T11:21:28.117233Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-10T11:21:28.117787Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MmM3MzNlMjEtMjllN2ZhMmItM2RiODI1Y2UtZTcwNjkwMzQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfpxxm8cs9d2vybxcnrsz2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-10T11:21:28.117834Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session will restart in 2.000000s 2025-04-10T11:21:28.117961Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session: Do CDS request 2025-04-10T11:21:28.118023Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Do schedule cds request after 2000 ms 2025-04-10T11:21:28.376620Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720680. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.376757Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7491643116175021351:2492] TxId: 281474976720680. Ctx: { TraceId: 01jrfpxxwf6ncsyez9behsmkj5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Nzc1MWYxNzMtNGFlNjE2MGItOTA5YjdiNTktNjRlZTEzZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.376995Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=Nzc1MWYxNzMtNGFlNjE2MGItOTA5YjdiNTktNjRlZTEzZDE=, ActorId: [16:7491643116175021348:2492], ActorState: ExecuteState, TraceId: 01jrfpxxwf6ncsyez9behsmkj5, Create QueryResponse for error on request, msg: 2025-04-10T11:21:28.377952Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfpxxwga2s4qpjfnfwj0c3j" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T11:21:28.467751Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710700. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.467874Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7491643115757679277:2596] TxId: 281474976710700. Ctx: { TraceId: 01jrfpxxz5e3fnmjwhednahfrj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=M2Q5ZjYyZjUtZjExODQ0NjQtZDc2YTBlNDItNmFjMjk2N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.468090Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=M2Q5ZjYyZjUtZjExODQ0NjQtZDc2YTBlNDItNmFjMjk2N2I=, ActorId: [15:7491643115757679274:2596], ActorState: ExecuteState, TraceId: 01jrfpxxz5e3fnmjwhednahfrj, Create QueryResponse for error on request, msg: 2025-04-10T11:21:28.469037Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfpxxz5e3fnmjwhef2pphem" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T11:21:28.862720Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710702. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.862898Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7491643115757679324:2590] TxId: 281474976710702. Ctx: { TraceId: 01jrfpxxwhdanke95c6g1z3re4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ODEwYzQ5NzgtZmYwMzlmMjktOWQ5YzkwNWYtN2U3Mjk1ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.863253Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ODEwYzQ5NzgtZmYwMzlmMjktOWQ5YzkwNWYtN2U3Mjk1ODc=, ActorId: [15:7491643115757679259:2590], ActorState: ExecuteState, TraceId: 01jrfpxxwhdanke95c6g1z3re4, Create QueryResponse for error on request, msg: 2025-04-10T11:21:28.864954Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfpxybmc2pj20d7eygcpapb" } } YdbStatus: UNAVAILABLE ConsumedRu: 318 } 2025-04-10T11:21:28.896026Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.896201Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7491643116175021407:2493] TxId: 281474976720682. Ctx: { TraceId: 01jrfpxxx48e2dmr5afbqqjv00, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MmIzMTRjYmQtMzlhMmE3M2YtYjQ1MzI3Mi0xYjg4YTY1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:21:28.896510Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=MmIzMTRjYmQtMzlhMmE3M2YtYjQ1MzI3Mi0xYjg4YTY1OA==, ActorId: [16:7491643116175021360:2493], ActorState: ExecuteState, TraceId: 01jrfpxxx48e2dmr5afbqqjv00, Create QueryResponse for error on request, msg: 2025-04-10T11:21:28.897890Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfpxycm1kc0tenvj5wpt7rb" } } YdbStatus: UNAVAILABLE ConsumedRu: 326 } 2025-04-10T11:21:28.922568Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session: close. Timeout = 0 ms 2025-04-10T11:21:28.922667Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session will now close 2025-04-10T11:21:28.922766Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session: aborting 2025-04-10T11:21:28.923737Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-10T11:21:28.923804Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c80b6f82-acad3b1c-62e2d35-77f5e5c1_0] Write session: destroy >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck |91.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> TColumnShardTestSchema::DropWriteRace >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> TColumnShardTestSchema::DropWriteRace [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-04-10T11:21:34.135713Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:34.227187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:34.249725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:34.250036Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:34.258085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:34.258316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:34.258582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:34.258738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:34.258865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:34.258979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:34.259096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:34.259227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:34.259357Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:34.259490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:34.259594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:34.259697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:34.289380Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:34.289762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:34.289816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:34.289967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:34.290184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:34.290270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:34.290311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:34.290412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:34.290517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:34.290562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:34.290589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:34.290739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:34.290801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:34.290838Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:34.290873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:34.290967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:34.291037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:34.291090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:34.291116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:34.291185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:34.291220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:34.291267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:34.291324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:34.291359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:34.291384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:34.291776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-04-10T11:21:34.291873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-10T11:21:34.291954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-10T11:21:34.292069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:21:34.292228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:34.292283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:34.292313Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:34.292600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:34.292663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:34.292701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:34.292824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:21:34.292866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:21:34.292893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:21:34.293085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:21:34.293128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:21:34.293152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:21:34.293311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:21:34.293350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:21:34.293402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=58; 2025-04-10T11:21:34.565085Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-04-10T11:21:34.565154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2025-04-10T11:21:34.565215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-04-10T11:21:34.565284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-10T11:21:34.565369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=27; 2025-04-10T11:21:34.565440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=28; 2025-04-10T11:21:34.565470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=4043; 2025-04-10T11:21:34.565630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:21:34.565684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:21:34.565756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:21:34.566041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:34.566085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-10T11:21:34.566345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:34.566523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:21:34.566647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:21:34.566703Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:21:34.566740Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:21:34.566796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:34.566849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-10T11:21:34.840814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004797312;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-10T11:21:34.853995Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881139392;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-04-10T11:21:34.854166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881139392;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-10T11:21:34.854244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881139392;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-04-10T11:21:34.854625Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-10T11:21:34.854736Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2025-04-10T11:21:34.854777Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-10T11:21:34.855102Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-10T11:21:34.861884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:21:34.862060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-10T11:21:34.862110Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-10T11:21:34.869498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-10T11:21:34.869718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:212;event=finished_tx;tx_id=101; 2025-04-10T11:21:34.893880Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-10T11:21:34.894101Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-04-10T11:21:34.911064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-04-10T11:21:34.914077Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-10T11:21:34.914466Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:21:34.926980Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:21:34.927147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:34.954544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124128;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881216960;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-04-10T11:21:34.954638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124128;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881216960;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-10T11:21:34.954689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124128;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881216960;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-04-10T11:21:34.955048Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-10T11:21:34.955216Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2025-04-10T11:21:34.955279Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 
9437184 2025-04-10T11:21:34.955562Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2025-04-10T11:21:34.955658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=tx_controller.cpp:212;event=finished_tx;tx_id=103; 2025-04-10T11:21:34.967902Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-10T11:21:34.968059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:34.968322Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2025-04-10T11:21:34.968389Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] execute at tablet 9437184 2025-04-10T11:21:34.968706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:83;progress_tx_id=102;lock_id=1;broken=0; 2025-04-10T11:21:34.968865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2025-04-10T11:21:34.969030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:212;event=finished_tx;tx_id=102; 2025-04-10T11:21:34.981315Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] complete at tablet 9437184 2025-04-10T11:21:34.981443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=102;lock_id=1;broken=0; 2025-04-10T11:21:34.981573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change 2025-04-10 11:21:19,879 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:21:20,427 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
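Editor's note: both warnings are the ya test wrapper hitting its hard 600-second limit while the test body was still blocked inside the YDB Python SDK; the traceback below bottoms out in session_pool.execute_with_retries waiting on a gRPC response. A hedged sketch of one way a test could fail fast on its own terms instead of riding into the wrapper kill, assuming the ydb_client.query helper seen in the traceback; the 550-second budget is an arbitrary value chosen to stay under the wrapper limit, not anything the test suite defines:

    import concurrent.futures

    def query_with_budget(ydb_client, statement, budget_s=550):
        # Run the blocking query helper on a worker thread so the test
        # can raise TimeoutError itself rather than being killed from
        # outside. Caveat: the worker thread (and its gRPC call) keeps
        # running after the timeout; this only unblocks the caller.
        pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        future = pool.submit(ydb_client.query, statement)
        try:
            return future.result(timeout=budget_s)
        finally:
            pool.shutdown(wait=False, cancel_futures=True)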
Process tree before termination:
    pid      rss   ref   pdirt
    1387563  639M  625M  557M   ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/2gt4/0017fc/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
    1390856  6.1G  6.0G  5.5G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/0017fc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_d
    1396626  390M  373M  357M   └─ moto_server s3 --port 17332
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 138, in test_data_unchanged_after_ttl_change
    logger.info(f"Rows older than {self.days_to_cool} days: {self.get_row_count_by_date(table_path, self.days_to_cool)}")
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 31, in get_row_count_by_date
    return self.ydb_client.query(f"SELECT count(*) as Rows from `{table_path}` WHERE ts < CurrentUtcTimestamp() - DateTime::IntervalFromDays({past_days})")[0].rows[0]["Rows"]
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...nner/.ya/build/build_root/2gt4/0017fc/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/0017fc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/0017fc', '--source-root', '/home/runner/.ya/build/build_root/2gt4/0017fc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/0017fc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...nner/.ya/build/build_root/2gt4/0017fc/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/0017fc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/0017fc', '--source-root', '/home/runner/.ya/build/build_root/2gt4/0017fc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/0017fc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD]
Test command err:
2025-04-10T11:18:42.582715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642402052114071:2073];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:18:42.590342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.639023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642400941427224:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.639686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.805832Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.828812Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a7a/r3tmp/tmpyEZRBW/pdisk_1.dat 2025-04-10T11:18:43.081802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.081930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.083819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.083880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.087907Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.088064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.090534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.119212Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31617, node 1 2025-04-10T11:18:43.145661Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.145746Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.323554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a7a/r3tmp/yandexomQVGm.tmp 2025-04-10T11:18:43.323590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a7a/r3tmp/yandexomQVGm.tmp 2025-04-10T11:18:43.325444Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a7a/r3tmp/yandexomQVGm.tmp 2025-04-10T11:18:43.325588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.591568Z INFO: TTestServer started on Port 27618 GrpcPort 31617 TClient is connected to server localhost:27618 PQClient connected to localhost:31617 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:43.955130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:44.026620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:46.102554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642419231984349:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.102673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.103599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642419231984377:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.107040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:18:46.137561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642419231984379:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:18:46.237768Z node 1 :TX_PROXY ERROR: Actor# [1:7491642419231984471:2775] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:46.558922Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642419231984481:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.572026Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTExYTc2MzUtNGUxMGFiMzctMjlkOWUzOTktYjc1NTI4MWQ=, ActorId: [1:7491642419231984347:2337], ActorState: ExecuteState, TraceId: 01jrfprzj798epfqv84k7ky8aw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.574375Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.619496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.705079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:47.008122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:47.336507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfps0j8bkx586h738rextmy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGExMDViMzQtMzE4ZjJhNTQtMmFiZWE0MDctYzE5YjZiMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491642423526952238:3114] 2025-04-10T11:18:47.586450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642402052114071:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.596682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:47.637195Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642400941427224:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.637268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T11:18:53.097951Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=M2QwNjEyYjItZTNhN2M5ZDctY2RkNjliYS1mYWI5MzgwZQ==, workerId: [1:7491642445001789109:2454], local sessions count: 0 2025-04-10T11:18:53.103288Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642402052114360:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:18:53.103585Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642402052114360:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-10T11:18:53.103725Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642402052114360:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642406347082122:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283924033 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:18:53.103844Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642402052114360:2148], cacheItem# { Subscriber: { Subscriber: [1:7491642406347082122:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1744283924033 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] Is ... 
86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-10T11:21:33.671345Z node 25 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:21:33.671466Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete partitions for TxId 281474976715676 2025-04-10T11:21:33.671495Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676, NewState EXECUTED 2025-04-10T11:21:33.671524Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676 moved from EXECUTING to EXECUTED 2025-04-10T11:21:33.672061Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] save tx TxId: 281474976715676 State: EXECUTED MinStep: 1744284093517 MaxStep: 18446744073709551615 Step: 1744284093692 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: 
"/Root" Partitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491643091932274602 RawX2: 107374184595 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:21:33.672402Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:21:33.676590Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:21:33.676625Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-10T11:21:33.676676Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State EXECUTED 2025-04-10T11:21:33.676704Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 State EXECUTED FrontTxId 281474976715676 2025-04-10T11:21:33.676725Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:21:33.676743Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState WAIT_RS_ACKS 2025-04-10T11:21:33.676785Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:21:33.676841Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-10T11:21:33.676854Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:21:33.676899Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-10T11:21:33.676919Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715676 to the list for deletion 2025-04-10T11:21:33.676937Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState DELETING 2025-04-10T11:21:33.676959Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715676 2025-04-10T11:21:33.677011Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:21:33.676684Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:21:33.676718Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state EXECUTED 2025-04-10T11:21:33.676734Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676, State EXECUTED 2025-04-10T11:21:33.676753Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676 State EXECUTED FrontTxId 281474976715676 2025-04-10T11:21:33.681983Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:21:33.676775Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:21:33.682021Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-10T11:21:33.676797Z node 25 
:PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676, NewState WAIT_RS_ACKS 2025-04-10T11:21:33.682036Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State DELETING 2025-04-10T11:21:33.676825Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:21:33.682054Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715676 2025-04-10T11:21:33.676854Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-10T11:21:33.676863Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:21:33.676877Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-10T11:21:33.676904Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976715676 to the list for deletion 2025-04-10T11:21:33.676929Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676, NewState DELETING 2025-04-10T11:21:33.676959Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976715676 2025-04-10T11:21:33.677011Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:21:33.681689Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:21:33.681718Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-04-10T11:21:33.681738Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715676, State DELETING 2025-04-10T11:21:33.681762Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715676 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1744284093692 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 10... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> TColumnShardTestSchema::RebootExportAfterFail >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> TColumnShardTestSchema::ColdTiersWithStat >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> TColumnShardTestSchema::RebootHotTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; 
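Editor's note: the WaitEmptyAfter=...;Tiers=...;TTL=... records immediately above and below are the ut_schema test's eviction specs; each tier fragment carries a Column, an EvictAfter horizon in seconds, a tier Name, and a Codec. A small parser sketch for exactly that textual form, assuming nothing beyond the fields visible in these lines (the format is taken from this log, not from a published grammar):

    import re

    # Matches fragments like
    #   {Column=timestamp;EvictAfter=144284643.000000s;Name=cold;Codec=}
    # as printed in this log; empty Name/Codec are allowed.
    TIER_RE = re.compile(
        r"\{Column=(?P<column>\w+);EvictAfter=(?P<evict>[\d.]+)s;"
        r"Name=(?P<name>\w*);Codec=(?P<codec>\w*)\}"
    )

    def parse_tiers(line):
        return [
            {
                "column": m["column"],
                "evict_after_s": float(m["evict"]),
                "name": m["name"],
                "codec": m["codec"],
            }
            for m in TIER_RE.finditer(line)
        ]

    spec = ("Tiers={{Column=timestamp;EvictAfter=144284643.000000s;"
            "Name=cold;Codec=};};TTL={Column=timestamp;"
            "EvictAfter=144284643.000000s;Name=;Codec=}")
    print(parse_tiers(spec))  # two entries: the 'cold' tier and the TTL rule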
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284643.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284643.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284643.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284643.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283443.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283443.000000s;Name=;Codec=}; 2025-04-10T11:20:45.603529Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:45.709275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:45.734521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:45.734863Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:45.743633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:45.743879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:45.744154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:45.744295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:45.744428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:45.744567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:45.744692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:45.744822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:45.744938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:45.745061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.745168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:45.745281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:45.776932Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:45.777166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:45.777227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:45.777433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:45.777620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:45.777709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:45.777756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:45.777853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:45.777916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:45.777961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:45.777992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:45.778179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:45.778245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:45.778287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:45.778317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:45.778413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:45.778473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:45.778517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:45.778548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:45.778628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:45.778683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:45.778725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:45.778788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:45.778835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:45.778873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:45.779317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-10T11:20:45.779419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-10T11:20:45.779538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-10T11:20:45.779623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-10T11:20:45.779799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:45.779859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:45.779898Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:45.780113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:45.780166Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.780199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.780346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:45.780391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:45.780432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:45.780650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... cpp:29;PRECHARGE:finishLoadingTime=10; 2025-04-10T11:21:38.802960Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=167; 2025-04-10T11:21:38.802990Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=26237; 2025-04-10T11:21:38.806979Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3928; 2025-04-10T11:21:38.811487Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3671; 2025-04-10T11:21:38.811576Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4518; 2025-04-10T11:21:38.811700Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2025-04-10T11:21:38.811787Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=50; 2025-04-10T11:21:38.811887Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=67; 2025-04-10T11:21:38.811973Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=52; 2025-04-10T11:21:38.817387Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5350; 2025-04-10T11:21:38.824961Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=7464; 2025-04-10T11:21:38.825091Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-04-10T11:21:38.825156Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25; 2025-04-10T11:21:38.825210Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-10T11:21:38.825258Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-10T11:21:38.825314Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=21; 2025-04-10T11:21:38.825390Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-10T11:21:38.825434Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-10T11:21:38.825514Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-04-10T11:21:38.825558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:21:38.825621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-04-10T11:21:38.825692Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=39; 2025-04-10T11:21:38.825914Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=188; 2025-04-10T11:21:38.825949Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=55070; 2025-04-10T11:21:38.826084Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:21:38.826201Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:21:38.826259Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:21:38.826317Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:21:38.836092Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:21:38.836243Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:38.836298Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:38.836386Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:21:38.836450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:21:38.836499Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:21:38.836549Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:38.836593Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:38.836688Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:21:38.837542Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:38.837621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1987:3887];tablet_id=9437184;parent=[1:1949:3856];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-10T11:21:38.838721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:21:38.839572Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:21:38.839611Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:21:38.839639Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:21:38.839683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:38.839747Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:38.839822Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:21:38.839883Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:21:38.839931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:21:38.839983Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:38.840026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:38.840118Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:21:38.840698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-04-10T11:21:38.842039Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 80000/5203504 0/0
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD]
>> TTopicYqlTest::CreateAndAlterTopicYql
>> TColumnShardTestSchema::HotTiersAfterTtl
>> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD]
>> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64
>> TColumnShardTestSchema::RebootOneColdTier [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284649.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284649.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284649.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284649.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283449.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283449.000000s;Name=;Codec=};
2025-04-10T11:20:51.280776Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:51.369010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:51.391344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:51.391618Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:51.399096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:51.399328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:51.399551Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:51.399661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:51.399779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:51.399896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:51.399990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:51.400111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:51.400220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:51.400325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.400429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:51.400526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:51.428811Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:51.429163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:51.429228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:51.429418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:51.429612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:51.429699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:51.429740Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:51.429822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:51.429877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:51.429915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:51.429942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:51.430082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:51.430132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:51.430189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:51.430214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:51.430322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:51.430376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:51.430414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:51.430441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:51.430536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:51.430578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:51.430606Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:51.430657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:51.430695Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:51.430732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:51.431107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T11:20:51.431180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-10T11:20:51.431268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-10T11:20:51.431342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-10T11:20:51.431495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:51.431542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:51.431573Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:51.431767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:51.431812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.431840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:51.431969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:51.432008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:51.432033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:51.432215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
pp:29;PRECHARGE:finishLoadingTime=14; 2025-04-10T11:21:44.003820Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=282; 2025-04-10T11:21:44.003868Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=28525; 2025-04-10T11:21:44.010995Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7034; 2025-04-10T11:21:44.018684Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6550; 2025-04-10T11:21:44.018802Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7699; 2025-04-10T11:21:44.018984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=106; 2025-04-10T11:21:44.019120Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=77; 2025-04-10T11:21:44.019253Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=87; 2025-04-10T11:21:44.019373Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-04-10T11:21:44.028164Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8709; 2025-04-10T11:21:44.039363Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11059; 2025-04-10T11:21:44.039519Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=46; 2025-04-10T11:21:44.039606Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=32; 2025-04-10T11:21:44.039656Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-10T11:21:44.039706Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-10T11:21:44.039757Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-10T11:21:44.039849Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-04-10T11:21:44.039901Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-10T11:21:44.040005Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2025-04-10T11:21:44.040056Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:21:44.040127Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-10T11:21:44.040226Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2025-04-10T11:21:44.040506Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=237; 2025-04-10T11:21:44.040553Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73121; 2025-04-10T11:21:44.040727Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:21:44.040853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:21:44.040914Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:21:44.040986Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:21:44.050665Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:21:44.050833Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:44.050907Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:44.050998Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:21:44.051070Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:21:44.051121Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:21:44.051178Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:44.051223Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:44.051332Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:21:44.051939Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:44.052584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1987:3887];tablet_id=9437184;parent=[1:1949:3856];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-10T11:21:44.053097Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:21:44.053379Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:21:44.053418Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:21:44.053448Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:21:44.053522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:21:44.053592Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:21:44.053660Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:21:44.053730Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:21:44.053780Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:21:44.053838Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:44.053881Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:21:44.053979Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:21:44.055458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-04-10T11:21:44.056906Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 80000/5203504 0/0
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53
>> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD]
>> TPersQueueTest::PartitionsMapping
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17
>> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17
>> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD]
Test command err:
2025-04-10T11:19:24.318174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642582075057854:2186];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:24.318455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:24.374163Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491642583227335260:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:24.374235Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:19:24.374995Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642584385074369:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:24.419926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d75/r3tmp/tmpeDZ1RJ/pdisk_1.dat 2025-04-10T11:19:25.400073Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:19:25.441160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:19:25.438295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:19:25.623846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:25.659384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:25.659515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:25.661737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:25.661791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:25.661950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:25.662027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:25.681142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:25.682390Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:19:25.682414Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:19:25.686840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:25.687157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14611 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:26.542368Z node 1 :TX_PROXY DEBUG: actor# [1:7491642582075057964:2143] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:26.542426Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642590664993047:2474] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:26.542559Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491642582075057989:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:19:26.542677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7491642582075057989:2157], cookie# 1 2025-04-10T11:19:26.544348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025564:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025561:2306], cookie# 1 2025-04-10T11:19:26.544377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025565:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025562:2306], cookie# 1 2025-04-10T11:19:26.544393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025566:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025563:2306], cookie# 1 2025-04-10T11:19:26.544428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642577780090304:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025564:2306], cookie# 1 2025-04-10T11:19:26.544456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7491642577780090307:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025565:2306], cookie# 1 2025-04-10T11:19:26.544478Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491642577780090310:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491642586370025566:2306], cookie# 1 2025-04-10T11:19:26.544513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025564:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642577780090304:2053], cookie# 1 2025-04-10T11:19:26.544527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025565:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642577780090307:2056], cookie# 1 2025-04-10T11:19:26.544558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491642586370025566:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642577780090310:2059], cookie# 1 2025-04-10T11:19:26.544594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642586370025561:2306], cookie# 1 2025-04-10T11:19:26.544614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:19:26.544629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642586370025562:2306], cookie# 1 2025-04-10T11:19:26.544648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:19:26.544670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491642586370025563:2306], cookie# 1 2025-04-10T11:19:26.544687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491642586370025524:2306][/dc-1] Unexpected sync response: sender# [1:7491642586370025563:2306], cookie# 1 2025-04-10T11:19:26.544739Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491642582075057989:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-10T11:19:26.557335Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491642582075057989:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491642586370025524:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:19:26.557720Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491642582075057989:2157], cacheItem# { Subscriber: { Subscriber: [1:7491642586370025524:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, 
LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-10T11:19:26.560449Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491642590664993048:2475], recipient# [1:7491642590664993047:2474], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:19:26.560521Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642590664993047:2474] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:26.613500Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642590664993047:2474] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:19:26.624351Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642590664993047:2474] Handle TEvDescribeSchemeResult Forward to# [1:7491642590664993046:2473] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response ... 
42642311984641:2109], cacheItem# { Subscriber: { Subscriber: [8:7491642646606952327:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:47.817225Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7491643196362766877:2661], recipient# [8:7491643196362766876:2638], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:47.978710Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491642637312700705:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:47.978908Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491642637312700705:2127], cacheItem# { Subscriber: { Subscriber: [7:7491642654492570772:2750] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:47.979026Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491643195658452136:3620], recipient# [7:7491643195658452135:2694], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.286464Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7491642642311984641:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.286677Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: 
self# [8:7491642642311984641:2109], cacheItem# { Subscriber: { Subscriber: [8:7491642663786821530:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:48.286849Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7491643200657734175:2662], recipient# [8:7491643200657734174:2639], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.659565Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491642637312700705:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.659756Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491642637312700705:2127], cacheItem# { Subscriber: { Subscriber: [7:7491642641607668614:2553] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:48.659864Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491643199953419434:3621], recipient# [7:7491643199953419433:2695], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.673859Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491642637312700705:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.673982Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491642637312700705:2127], cacheItem# { 
Subscriber: { Subscriber: [7:7491642641607668614:2553] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:48.674065Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491643199953419436:3622], recipient# [7:7491643199953419435:2696], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.817895Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7491642642311984641:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.818031Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7491642642311984641:2109], cacheItem# { Subscriber: { Subscriber: [8:7491642646606952327:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:48.818129Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7491643200657734177:2663], recipient# [8:7491643200657734176:2640], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.979706Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7491642637312700705:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:21:48.979890Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7491642637312700705:2127], cacheItem# { Subscriber: { Subscriber: [7:7491642654492570772:2750] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:21:48.980010Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7491643199953419447:3626], recipient# [7:7491643199953419446:2697], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
|91.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
|91.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log}
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66
|91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42
|91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TPQCompatTest::CommitOffsets [GOOD]
>> TPQCompatTest::LongProducerAndLongMessageGroupId
>> TTopicYqlTest::CreateAndAlterTopicYql [GOOD]
>> TTopicYqlTest::AlterAutopartitioning
>> TColumnShardTestSchema::RebootHotTiersTtlWithStat
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant
>> TPersQueueTest::SetMeteringMode [GOOD]
>> TPersQueueTest::ReadWithoutConsumerFederation
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67
>> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD]
>> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged
|91.3%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD]
>> TPersQueueTest::PartitionsMapping [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67
>> TPersQueueTest::MessageMetadata
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67
>> TPDiskTest::DeviceHaltTooLong [GOOD]
>> TPDiskTest::ChangePDiskKey
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD]
>> TPDiskTest::ChangePDiskKey [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56
>> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck
>> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD]
>> TPersQueueTest::SetupReadSession [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD]
>> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart
>> TPersQueueTest::TestBigMessage
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD]
>> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57
>> TPDiskTest::ChunkWriteDifferentOffsetAndSize
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21
>> TPDiskTest::ChunkWriteDifferentOffsetAndSize [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69
>> TPDiskTest::ChunkWriteBadOffset
>> TPDiskTest::ChunkWriteBadOffset [GOOD]
>> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD]
>> TPQCompatTest::ReadWriteSessions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::ChunkWriteBadOffset [GOOD]
Test command err:
restart# 0 start with noop scheduler# 0 end with noop scheduler# 0 all chunk reads are received all chunk writes are received all log writes are received
restart# 0 start with noop scheduler# 1 end with noop scheduler# 0 all chunk reads are received all chunk writes are received all log writes are received
restart# 0 start with noop scheduler# 0 end with noop scheduler# 1 all chunk reads are received all chunk writes are received all log writes are received
restart# 0 start with noop scheduler# 1 end with noop scheduler# 1 all chunk reads are received all chunk writes are received all log writes are received
restart# 1 start with noop scheduler# 0 end with noop scheduler# 0 restart all chunk reads are received all chunk writes are received all log writes are received
restart# 1 start with noop scheduler# 1 end with noop scheduler# 0 restart all chunk reads are received all chunk writes are received all log writes are received
restart# 1 start with noop scheduler# 0 end with noop scheduler# 1 restart all chunk reads are received all chunk writes are received all log writes are received
restart# 1 start with noop scheduler# 1 end with noop scheduler# 1 restart all chunk reads are received all chunk writes are received all log writes are received
seed# 1744284128363706
seed# 1744284128849958
|91.3%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
>> TTopicYqlTest::AlterAutopartitioning [GOOD]
>> TTopicYqlTest::BadRequests
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69
|91.3%| [TA] $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45
>> TColumnShardTestSchema::ForgetWithLostAnswer
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69
>> TPersQueueTest::ReadWithoutConsumerFederation [GOOD]
>> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen
>> ColumnStatistics::CountMinSketchStatistics [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD]
Test command err:
2025-04-10T11:20:28.126961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:28.127457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:28.127549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d6d/r3tmp/tmpZ7CIHa/pdisk_1.dat 2025-04-10T11:20:28.511386Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29853, node 1 2025-04-10T11:20:28.789176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:28.789241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:28.789282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:28.789883Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:28.793009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:28.918809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:28.918980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:28.933084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20750 2025-04-10T11:20:29.503233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:32.780333Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:32.818384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.818567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.851845Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:32.855343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.121316Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.121991Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.122633Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.122802Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.123089Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.123238Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.123319Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.123390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.123464Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.302742Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.302860Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.315988Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.482896Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:33.545678Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:33.545799Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:33.580146Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:33.580730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:33.580952Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:33.581010Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:33.581064Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:33.581138Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:33.581214Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:33.581278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:33.581817Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:33.604393Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:33.606696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:33.623968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:33.624028Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:33.624127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:33.627294Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.627421Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1912:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.644456Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1964:2640] 2025-04-10T11:20:33.645818Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1964:2640], schemeshard id = 72075186224037897 2025-04-10T11:20:33.675243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:33.728633Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:33.728815Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:33.908716Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:34.105231Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:34.182882Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:35.316601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.316798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.338904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.513775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.514036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:35.514390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:35.514525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:35.514646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:35.514773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:35.514886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:35.515093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:35.515244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:35.515375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:35.515495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:35.515675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:35.548226Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:35.548345Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-10T11:20:38.126474Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000019s 2025-04-10T11:20:49.236982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:20:49.237073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:50.423748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:20:50.423825Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:22:09.670988Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:22:09.671085Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:09.671127Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:09.671168Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:22:10.622911Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:22:10.622996Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 192.000000s, at schemeshard: 72075186224037897 2025-04-10T11:22:10.623307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-10T11:22:10.661385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:22:11.299202Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:11.299266Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:11.299306Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:22:11.299344Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:11.299683Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:22:11.305662Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:11.314574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6973:5159], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:11.314710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6984:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:11.314808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:11.329623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:22:11.388685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6987:5167], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:22:11.548964Z node 2 :TX_PROXY ERROR: Actor# [2:7080:5213] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:11.657457Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7109:5228]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:11.657638Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:22:11.657721Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7111:5230] 2025-04-10T11:22:11.657790Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7111:5230] 2025-04-10T11:22:11.658045Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7112:5231] 2025-04-10T11:22:11.658242Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7111:5230], server id = [2:7112:5231], tablet id = 72075186224037894, status = OK 2025-04-10T11:22:11.658295Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7112:5231], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:22:11.658346Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:22:11.658441Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:11.658505Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7109:5228], StatRequests.size() = 1 2025-04-10T11:22:11.821079Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzA1MGJlNDUtY2RjOTI0MzEtYzA3ZmRmM2YtZmY4NmFkZWE=, TxId: 2025-04-10T11:22:11.821158Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzA1MGJlNDUtY2RjOTI0MzEtYzA3ZmRmM2YtZmY4NmFkZWE=, TxId: 2025-04-10T11:22:11.822235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:11.865335Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:11.865415Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:11.876192Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:22:11.876260Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:22:11.919408Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7111:5230], schemeshard count = 1 2025-04-10T11:22:13.304835Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:13.304909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:13.304953Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-10T11:22:13.305015Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:22:13.308059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:22:13.325015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:22:13.325523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:22:13.325588Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:22:13.326289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:22:13.339307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:22:13.339569Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-10T11:22:13.340106Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7231:5297], server id = [2:7232:5298], tablet id = 72075186224037899, status = OK 2025-04-10T11:22:13.340495Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7231:5297], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:22:13.346357Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:22:13.346468Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:22:13.346682Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:22:13.346835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:22:13.347030Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7231:5297], server id = [2:7232:5298], tablet id = 72075186224037899 2025-04-10T11:22:13.347065Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:22:13.347236Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:22:13.349728Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:22:13.380974Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7252:5317]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:13.381167Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:22:13.381200Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7252:5317], StatRequests.size() = 1 2025-04-10T11:22:13.494082Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODZjMmJmYjgtY2JiM2EwZWMtMTA3YjQzN2ItMzlkZTEzMzE=, TxId: 2025-04-10T11:22:13.494166Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODZjMmJmYjgtY2JiM2EwZWMtMTA3YjQzN2ItMzlkZTEzMzE=, TxId: 2025-04-10T11:22:13.495021Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:13.496027Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7265:5453]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:13.496289Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:22:13.496334Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:22:13.498282Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:22:13.498353Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-10T11:22:13.498420Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:22:13.510116Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::HotTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144284648.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284648.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284648.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284648.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284648.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284648.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284648.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283448.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284648.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284648.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283448.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283448.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283448.000000s;Name=;Codec=}; 2025-04-10T11:20:49.079049Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:49.161437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:49.185452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:49.185785Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:49.194136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:49.194394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:49.194643Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:49.194788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:49.194902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:49.195020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:49.195130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:49.195258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:49.195380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:49.195492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:49.195598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:49.195703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:49.224861Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:49.225235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:49.225332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:49.225518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:49.225746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:49.225839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:49.225886Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:49.225985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:49.226051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:49.226100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:49.226137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:49.226325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:49.226417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:49.226474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:49.226507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:49.226601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:49.226660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:49.226706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:49.226736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:49.226836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:49.226894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:49.226927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:49.226993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:49.227041Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:49.227081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:49.227554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73; 2025-04-10T11:20:49.227648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-10T11:20:49.227736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T11:20:49.227830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:20:49.228013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:49.228083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:49.228123Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:49.228376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:49.228428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:49.228459Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... 
p:29;EXECUTE:finishLoadingTime=479; 2025-04-10T11:22:16.595000Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=64433; 2025-04-10T11:22:16.610556Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=15453; 2025-04-10T11:22:16.624494Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12628; 2025-04-10T11:22:16.624629Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13950; 2025-04-10T11:22:16.624825Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=111; 2025-04-10T11:22:16.624957Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2025-04-10T11:22:16.625113Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=102; 2025-04-10T11:22:16.625245Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=81; 2025-04-10T11:22:16.637560Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12223; 2025-04-10T11:22:16.649120Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11425; 2025-04-10T11:22:16.649305Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-10T11:22:16.649393Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 2025-04-10T11:22:16.649451Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-10T11:22:16.649502Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T11:22:16.649554Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-10T11:22:16.649635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-10T11:22:16.649687Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-10T11:22:16.649788Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2025-04-10T11:22:16.649837Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-10T11:22:16.649910Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-10T11:22:16.650016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-04-10T11:22:16.650441Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=381; 2025-04-10T11:22:16.650490Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=127503; 2025-04-10T11:22:16.650660Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:22:16.650780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:22:16.650839Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:22:16.650908Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:22:16.670548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:22:16.670693Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:16.670743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:16.670803Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:22:16.670860Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:16.670897Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:16.670938Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:16.670968Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:16.671046Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:16.671484Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:16.671548Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2617:4491];tablet_id=9437184;parent=[1:2577:4458];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-04-10T11:22:16.672328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:22:16.672605Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:22:16.672632Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:22:16.672659Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:22:16.672706Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:16.672771Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:16.672834Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:22:16.672898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:16.672947Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:16.672998Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:16.673039Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:16.673131Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:16.674501Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:22:16.676101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284668.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284668.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284668.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284668.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284668.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284668.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283468.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284668.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284668.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283468.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283468.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283468.000000s;Name=;Codec=}; 2025-04-10T11:21:08.382034Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:08.461894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:08.478869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:08.479091Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:08.484852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:08.485029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:08.485182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:08.485274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:08.485362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:08.485430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:08.485517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:08.485614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:08.485714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:08.485780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:08.485841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:08.485941Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:08.508824Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:08.509170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:08.509224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:08.509394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:08.509552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:08.509628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:08.509671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:08.509757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:08.509815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:08.509856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:08.509885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:08.510027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:08.510118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:08.510183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:08.510215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:08.510299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:08.510346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:08.510385Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:08.510415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:08.510495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:08.510541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:08.510570Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:08.510615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:08.510655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:08.510688Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:08.511062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:21:08.511144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-10T11:21:08.511212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-10T11:21:08.511289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-10T11:21:08.511442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:08.511519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:08.511559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:08.511758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:08.511805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:08.511835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:22:17.319614Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:17.319652Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:17.319700Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:17.319737Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:17.319825Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:17.320040Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:22:17.320146Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:22:17.320271Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:17.320325Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:17.320729Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:22:17.320803Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:22:17.321186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1976:3981];trace_detailed=; 2025-04-10T11:22:17.321528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:22:17.321714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:22:17.321845Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:17.321957Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:17.322276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:22:17.322365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:17.322471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:17.322504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1976:3981] finished for tablet 9437184 2025-04-10T11:22:17.322825Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1975:3980];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284137321132,"name":"_full_task","f":1744284137321132,"d_finished":0,"c":0,"l":1744284137322552,"d":1420},"events":[{"name":"bootstrap","f":1744284137321277,"d_finished":705,"c":1,"l":1744284137321982,"d":705},{"a":1744284137322256,"name":"ack","f":1744284137322256,"d_finished":0,"c":0,"l":1744284137322552,"d":296},{"a":1744284137322240,"name":"processing","f":1744284137322240,"d_finished":0,"c":0,"l":1744284137322552,"d":312},{"name":"ProduceResults","f":1744284137321785,"d_finished":386,"c":2,"l":1744284137322493,"d":386},{"a":1744284137322495,"name":"Finish","f":1744284137322495,"d_finished":0,"c":0,"l":1744284137322552,"d":57}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:17.322882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1975:3980];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:22:17.323210Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1975:3980];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284137321132,"name":"_full_task","f":1744284137321132,"d_finished":0,"c":0,"l":1744284137322917,"d":1785},"events":[{"name":"bootstrap","f":1744284137321277,"d_finished":705,"c":1,"l":1744284137321982,"d":705},{"a":1744284137322256,"name":"ack","f":1744284137322256,"d_finished":0,"c":0,"l":1744284137322917,"d":661},{"a":1744284137322240,"name":"processing","f":1744284137322240,"d_finished":0,"c":0,"l":1744284137322917,"d":677},{"name":"ProduceResults","f":1744284137321785,"d_finished":386,"c":2,"l":1744284137322493,"d":386},{"a":1744284137322495,"name":"Finish","f":1744284137322495,"d_finished":0,"c":0,"l":1744284137322917,"d":422}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1976:3981]->[1:1975:3980] 2025-04-10T11:22:17.323281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:22:17.320778Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:22:17.323317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:22:17.323419Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> TColumnShardTestSchema::HotTiersRevCompression >> TColumnShardTestSchema::RebootDrop >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::TClusterTrackerTest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> TTopicYqlTest::BadRequests [GOOD] >> TPQCompatTest::ReadWriteSessions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-04-10T11:18:42.538507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642402830055372:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.538589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.635505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642402184610709:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.635537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.888638Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.891318Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a3a/r3tmp/tmpAgWU24/pdisk_1.dat 2025-04-10T11:18:43.157838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.157942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.159054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.159120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.167652Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.167819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.168520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.193569Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3733, node 1 2025-04-10T11:18:43.238744Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.238771Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.323582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a3a/r3tmp/yandexgsAL5k.tmp 2025-04-10T11:18:43.323611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/2gt4/000a3a/r3tmp/yandexgsAL5k.tmp 2025-04-10T11:18:43.325382Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a3a/r3tmp/yandexgsAL5k.tmp 2025-04-10T11:18:43.325588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.586752Z INFO: TTestServer started on Port 19266 GrpcPort 3733 TClient is connected to server localhost:19266 PQClient connected to localhost:3733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:43.983197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:44.035118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:46.261697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642419364480275:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.261702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642419364480261:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.261943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.271923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:46.299252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642419364480298:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:18:46.371614Z node 2 :TX_PROXY ERROR: Actor# [2:7491642419364480325:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:46.708614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.720795Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642420009925728:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.721079Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGI5ZWEyNDItY2IwOWZiMzMtY2NmZjk4LWYyNjU2NDYw, ActorId: [1:7491642420009925685:2335], ActorState: ExecuteState, TraceId: 01jrfprzs3dkcevb125bhhfm4t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.738894Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642419364480332:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.740602Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWE5Njk4MS01NWZjMmItZjYyMTZlMzUtNmY2ZDFmYzI=, ActorId: [2:7491642419364480259:2312], ActorState: ExecuteState, TraceId: 01jrfprzqjb0c6wa40ws676dm8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.781010Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.782353Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.815883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.974900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:47.337738Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfps0jbeh8wnyhgkvrcqbp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNjMzVmYjctMTU4N2I1OTgtODVkNmQyNDMtY2M1Mzc2NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491642424304893506:3112] 2025-04-10T11:18:47.535549Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642402830055372:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.535643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:47.635821Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642402184610709:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.635945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:3733 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-10T11:18:53.488909Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3733 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { ... eration: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491643299976911748 RawX2: 107374184600 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:22:24.259089Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:22:24.264258Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:22:24.264310Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-10T11:22:24.264342Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State CALCULATED 2025-04-10T11:22:24.264375Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State CALCULATED FrontTxId 281474976710676 2025-04-10T11:22:24.264409Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS 2025-04-10T11:22:24.264442Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from CALCULATED to WAIT_RS 2025-04-10T11:22:24.264514Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
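The entries that follow trace a single PersQueue configuration transaction (TxId 281474976710676) through the states CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING. Below is a minimal Python sketch that checks a log for that progression; the state list and the "moved from X to Y" line shape are inferred from this run only, not from any authoritative PersQueue state-machine definition, and the helper name is illustrative.

    import re

    # State order as observed in this test run; inferred from the log,
    # not an authoritative PersQueue transaction state machine.
    OBSERVED_ORDER = ["CALCULATED", "WAIT_RS", "EXECUTING",
                      "EXECUTED", "WAIT_RS_ACKS", "DELETING"]

    # Line shape observed here: "TxId 281474976710676 moved from CALCULATED to WAIT_RS"
    MOVED = re.compile(r"TxId (\d+) moved from (\w+) to (\w+)")

    def check_tx_transitions(lines):
        """Collect per-TxId state paths and flag any transition that goes
        backwards relative to the order seen in this run."""
        paths, issues = {}, []
        for line in lines:
            m = MOVED.search(line)
            if not m:
                continue
            txid, src, dst = m.groups()
            paths.setdefault(txid, []).append((src, dst))
            if (src in OBSERVED_ORDER and dst in OBSERVED_ORDER
                    and OBSERVED_ORDER.index(dst) <= OBSERVED_ORDER.index(src)):
                issues.append((txid, src, dst))
        return paths, issues

Run over this section, check_tx_transitions reports a single strictly forward path for TxId 281474976710676 and no backward steps.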
2025-04-10T11:22:24.264575Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-10T11:22:24.264689Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTING 2025-04-10T11:22:24.264723Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744284144288, TxId 281474976710676 2025-04-10T11:22:24.264727Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from WAIT_RS to EXECUTING 2025-04-10T11:22:24.264750Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-10T11:22:24.265501Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:22:24.265546Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:22:24.265577Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:22:24.265610Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:22:24.265626Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:22:24.265642Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc1 2025-04-10T11:22:24.265657Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc1 2025-04-10T11:22:24.265672Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc2 2025-04-10T11:22:24.265687Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc2 2025-04-10T11:22:24.265702Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:22:24.265735Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:22:24.265770Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:22:24.265816Z node 26 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:22:24.268606Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1744284144288, TxId 281474976710676, Partition 0 2025-04-10T11:22:24.268660Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-10T11:22:24.268687Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTING 2025-04-10T11:22:24.268717Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTING FrontTxId 281474976710676 2025-04-10T11:22:24.268738Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-10T11:22:24.268768Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:22:24.268770Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710676 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-10T11:22:24.268811Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710676 2025-04-10T11:22:24.269371Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-10T11:22:24.269561Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:22:24.269720Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710676 2025-04-10T11:22:24.269760Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTED 2025-04-10T11:22:24.269793Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTING to EXECUTED 2025-04-10T11:22:24.270410Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] save tx TxId: 281474976710676 State: EXECUTED MinStep: 1744284144190 MaxStep: 18446744073709551615 Step: 1744284144288 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { 
MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491643299976911748 RawX2: 107374184600 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:22:24.270769Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:22:24.275643Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:22:24.275695Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-10T11:22:24.275729Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-04-10T11:22:24.275785Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-04-10T11:22:24.275827Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:22:24.275855Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-04-10T11:22:24.275884Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:22:24.275935Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-10T11:22:24.275949Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:22:24.275972Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-10T11:22:24.276002Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-04-10T11:22:24.276041Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-04-10T11:22:24.276083Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-04-10T11:22:24.276169Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:22:24.280551Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:22:24.280603Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-10T11:22:24.280638Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING 2025-04-10T11:22:24.280682Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676 2025-04-10T11:22:24.962729Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7491643355811488583:2510] TxId: 281474976710678. Ctx: { TraceId: 01jrfpzn9zfdyvbrn1grgr9spr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=YWFiM2E3NzEtN2Y0OTY2MGEtM2E1NzVmMmYtZTQ1ODA5ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-10T11:22:24.962924Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7491643355811488587:2510], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=25&id=YWFiM2E3NzEtN2Y0OTY2MGEtM2E1NzVmMmYtZTQ1ODA5ZjA=. TraceId : 01jrfpzn9zfdyvbrn1grgr9spr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [25:7491643355811488583:2510], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-04-10T11:18:42.574420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642400696050638:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.574501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.642835Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642402849282497:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.642880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.835557Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a5c/r3tmp/tmp8DFPPJ/pdisk_1.dat 2025-04-10T11:18:42.835894Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:43.051477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.052085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.054407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.054469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.058050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.089757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.094524Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.095235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8620, node 1 2025-04-10T11:18:43.152212Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.152278Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.323612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a5c/r3tmp/yandex1FSWMt.tmp 2025-04-10T11:18:43.323648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a5c/r3tmp/yandex1FSWMt.tmp 2025-04-10T11:18:43.325956Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a5c/r3tmp/yandex1FSWMt.tmp 2025-04-10T11:18:43.326129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.590330Z INFO: TTestServer started on Port 24074 GrpcPort 8620 TClient is connected to server localhost:24074 PQClient connected to localhost:8620 WaitRootIsUp 'Root'... 
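Nearly every entry in these dumps shares one prefix shape: an ISO-8601 timestamp, a node id, a :COMPONENT tag, and a severity. A small sketch, assuming only that shape (the regex and function name are illustrative, not part of any YDB tooling), for building a (component, severity) histogram — useful for spotting which subsystem produced the ERROR noise in a run like this:

    import re
    from collections import Counter

    # Prefix shape observed throughout this log:
    #   2025-04-10T11:18:42.538507Z node 1 :METADATA_PROVIDER WARN: <message>
    ENTRY = re.compile(
        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z "
        r"node (?P<node>\d+) "
        r":(?P<component>\w+) (?P<sev>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR): "
    )

    def severity_histogram(text):
        """Count log entries per (component, severity) pair."""
        return Counter(
            (m.group("component"), m.group("sev"))
            for m in ENTRY.finditer(text)
        )

The FALLBACK_ACTOR_LOGGING lines use a different "priority=..." shape and are deliberately not matched by this regex.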
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:43.937365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:44.042986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:44.222359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:45.951971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642415734184784:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:45.952084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642415734184759:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:45.952185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:45.964092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:45.995827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642415734184788:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:18:46.108828Z node 2 :TX_PROXY ERROR: Actor# [2:7491642420029152111:2179] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:46.559284Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642420029152125:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.559869Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU2MmM0MWMtNDNkNGVjMjAtNmE0MzhlNmItYjFmOGY5OTI=, ActorId: [2:7491642415734184757:2312], ActorState: ExecuteState, TraceId: 01jrfprzdt3dy7hmn6mcjhd86t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.558916Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642417875920994:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.565868Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTlkZjRmZGItMTY5YzE3ZTMtMzkzNDYwZTAtZjQ5MDJkNDk=, ActorId: [1:7491642417875920964:2337], ActorState: ExecuteState, TraceId: 01jrfprzj797frad1cyxrt8cg7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.578412Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.578604Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.614014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.732152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.960724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:47.337562Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfps0g92f9q3nqj4kffend6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1ZDYxZjUtOGMyYmQwYzEtMTIzYzk0OTQtNGE1M2JhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491642422170888761:3095] 2025-04-10T11:18:47.567102Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642400696050638:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.567186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:47.642896Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642402849282497:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.642973Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:18:53.227783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $pa ... rect read cache: server session deregistered: shared/user_27_5_13158558589914309005_v1 2025-04-10T11:22:25.333744Z node 27 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-10T11:22:25.333759Z node 27 :PQ_READ_PROXY DEBUG: new session created cookie 6 2025-04-10T11:22:25.334200Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-10T11:22:25.334406Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 read init: from# ipv6:[::1]:46552, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-10T11:22:25.334755Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 auth for : user 2025-04-10T11:22:25.334806Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2025-04-10T11:22:25.334824Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-04-10T11:22:25.334902Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "Root/LbCommunal" 2025-04-10T11:22:25.335516Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: Root/LbCommunal DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:22:25.335658Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-04-10T11:22:25.335725Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 Handle describe topics response 2025-04-10T11:22:25.335958Z node 27 
:PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 auth is DEAD 2025-04-10T11:22:25.335970Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 auth ok: topics# 1, initDone# 0 2025-04-10T11:22:25.337051Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_13744175987578068269_v1" } 2025-04-10T11:22:25.337386Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7491643357898740575:2669] connected; active server actors: 1 2025-04-10T11:22:25.337436Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7491643357898740575:2669] session shared/user_27_6_13744175987578068269_v1 2025-04-10T11:22:25.337490Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-04-10T11:22:25.337582Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-04-10T11:22:25.337653Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_13744175987578068269_v1" (Sender=[27:7491643357898740572:2669], Pipe=[27:7491643357898740575:2669], Partitions=[], ActiveFamilyCount=0) 2025-04-10T11:22:25.337708Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2025-04-10T11:22:25.337795Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-10T11:22:25.337892Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_13744175987578068269_v1" (Sender=[27:7491643357898740572:2669], Pipe=[27:7491643357898740575:2669], Partitions=[], ActiveFamilyCount=0) 2025-04-10T11:22:25.337998Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_13744175987578068269_v1" sender [27:7491643357898740572:2669] lock partition 0 for ReadingSession "shared/user_27_6_13744175987578068269_v1" (Sender=[27:7491643357898740572:2669], Pipe=[27:7491643357898740575:2669], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-04-10T11:22:25.338092Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-10T11:22:25.338166Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000307s 2025-04-10T11:22:25.338868Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_13744175987578068269_v1" ClientId: "user" PipeClient { RawX1: 7491643357898740575 RawX2: 4503715591490157 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-04-10T11:22:25.338983Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-04-10T11:22:25.339502Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7491643357898740578:2672], now have 1 active actors on pipe 2025-04-10T11:22:25.339696Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1, pipe: [27:7491643357898740578:2672] 2025-04-10T11:22:25.339874Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-04-10T11:22:25.339928Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-04-10T11:22:25.339984Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_13744175987578068269_v1 on pipe: [27:7491643357898740578:2672] 2025-04-10T11:22:25.340065Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_13744175987578068269_v1:1 with generation 1 2025-04-10T11:22:25.340213Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_13744175987578068269_v1 2025-04-10T11:22:25.340429Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:22:25.340480Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:22:25.340515Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:22:25.340554Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:22:25.340571Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:22:25.340587Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:22:25.340620Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:22:25.340656Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:22:25.340691Z node 28 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:22:25.343871Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:22:25.343958Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-10T11:22:25.344316Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 SizeLag: 0 WriteTimestampEstimateMS: 0 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T11:22:25.344387Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-10T11:22:25.344483Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-04-10T11:22:25.345987Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 grpc read done: success# 0, data# { } 2025-04-10T11:22:25.346016Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 grpc read failed 2025-04-10T11:22:25.346044Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 closed 2025-04-10T11:22:25.346100Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_13744175987578068269_v1 is DEAD 2025-04-10T11:22:25.346491Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7491643357898740575:2669] disconnected; active server actors: 1 2025-04-10T11:22:25.346518Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_13744175987578068269_v1 2025-04-10T11:22:25.346571Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7491643357898740575:2669] client user disconnected session shared/user_27_6_13744175987578068269_v1 2025-04-10T11:22:25.346578Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7491643357898740578:2672] destroyed 2025-04-10T11:22:25.346640Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_13744175987578068269_v1 2025-04-10T11:22:26.182732Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-04-10T11:22:26.192326Z node 27 :PQ_METACACHE DEBUG: Metacache: reset 2025-04-10T11:22:26.290424Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:22:26.290476Z node 27 :IMPORT WARN: Table profiles were not loaded >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode >> TColumnShardTestSchema::HotTiersTtlWithStat >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-04-10T11:18:44.571987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642409156221703:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:44.572078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:44.831245Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a1a/r3tmp/tmpivpuHo/pdisk_1.dat 2025-04-10T11:18:45.064161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:45.064276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:45.065996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:45.090240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61722, node 1 2025-04-10T11:18:45.148763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a1a/r3tmp/yandexGeT2LK.tmp 2025-04-10T11:18:45.148805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a1a/r3tmp/yandexGeT2LK.tmp 2025-04-10T11:18:45.148989Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a1a/r3tmp/yandexGeT2LK.tmp 2025-04-10T11:18:45.149130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:45.204083Z INFO: TTestServer started on Port 18677 GrpcPort 61722 TClient is connected to server localhost:18677 PQClient connected to localhost:61722 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:45.489503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:45.513866Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:18:45.520026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:47.668885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642422041124418:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:47.669004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642422041124431:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:47.669081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:47.674470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:18:47.687169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642422041124433:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:18:47.916668Z node 1 :TX_PROXY ERROR: Actor# [1:7491642422041124497:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:47.945878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.015291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:48.032610Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642422041124512:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:48.032814Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM3NzQ5NDktYTgwMzUyZTEtZThmNDU1MjgtN2VlMjcxNzc=, ActorId: [1:7491642422041124401:2336], ActorState: ExecuteState, TraceId: 01jrfps138e2y3k8zfwvr37gzc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:48.034748Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:48.120327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:48.297257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfps1kc76sjraev0wc3w43w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg4YjQ3YTQtOWI3ODYzMjEtYzg4MGU2MjEtMTI1YWI5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491642426336092106:2632] 2025-04-10T11:18:49.572318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642409156221703:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:49.572418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
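The same bootstrap race repeats here for the Topics/Versions pair before the fixture creates them. The registration query printed later in this log ("===Run query" with topic = topic1, dc = dc1) is parameterized; a sketch of it together with hypothetical DDL for the two tables, column types again inferred only from the UPSERT column lists:

``CREATE TABLE `/Root/PQ/Config/V2/Topics` (path Utf8, dc Utf8, PRIMARY KEY (path));
CREATE TABLE `/Root/PQ/Config/V2/Versions` (name Utf8, version Int64, PRIMARY KEY (name));

DECLARE $version AS Int64;   -- bumped by the harness on every topic change
DECLARE $path AS Utf8;       -- e.g. "topic1"
DECLARE $cluster AS Utf8;    -- e.g. "dc1"
UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster);
UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);``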
Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:61722 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:61722 2025-04-10T11:18:54.562394Z node 1 :PERSQUEUE INFO: proxy answer MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710681 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-04-10T11:18:54.627491Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7491642452105896344:2879] connected; active server actors: 1 2025-04-10T11:18:54.628548Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] updating configuration. Deleted partitions []. Added partitions [1, 0] 2025-04-10T11:18:54.629327Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-10T11:18:54.629456Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] BALANCER INIT DONE for rt3.dc1--topic1: (0, 72075186224037892) (1, 72075186224037892) 2025-04-10T11:18:54.630121Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-10T11:18:54.639403Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:18:54.640107Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-10T11:18:54.640591Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:18:54.640817Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-10T11:18:54.640846Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:18:54.640865Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-10T11:18:54.640884Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:18:54.640911Z node 1 :PERSQUEUE NOTICE: [PQ: 7207518622403789 ... ssedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 36 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 38 WriteTimestampMS: 1744284149235 CreateTimestampMS: 1744284149232 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 37 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1744284149245 CreateTimestampMS: 1744284149243 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 372 RealReadOffset: 37 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 35 } 2025-04-10T11:22:29.385273Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 38 endOffset 40 2025-04-10T11:22:29.385325Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 38 ReadGuid 30ffe4-35f93914-5f191f01-d5cc9f1f has messages 1 2025-04-10T11:22:29.385475Z node 26 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _26_1_7434005126911648719_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 39 } } } } 2025-04-10T11:22:29.385535Z node 26 :PQ_READ_PROXY INFO: session cookie 1 consumer session _26_1_7434005126911648719_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-10T11:22:29.385841Z node 26 :PQ_READ_PROXY INFO: session cookie 1 consumer session _26_1_7434005126911648719_v1 is DEAD 2025-04-10T11:22:29.386262Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 38, endOffset# 40, WTime# 1744284149245, sizeLag# 372 2025-04-10T11:22:29.386290Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1TEvPartitionReady. Aval parts: 0 2025-04-10T11:22:29.386341Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 read done: guid# 30ffe4-35f93914-5f191f01-d5cc9f1f, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 516 2025-04-10T11:22:29.386369Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 response to read: guid# 30ffe4-35f93914-5f191f01-d5cc9f1f 2025-04-10T11:22:29.386535Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 Process answer. 
Aval parts: 1 Bytes readed: 516 Offset: 35 from session 1 Offset: 36 from session 1 Offset: 37 from session 1 2025-04-10T11:22:29.387048Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387137Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115717:2585] destroyed 2025-04-10T11:22:29.387173Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387207Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115715:2583] destroyed 2025-04-10T11:22:29.387228Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387267Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115716:2584] destroyed 2025-04-10T11:22:29.387289Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387309Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115714:2582] destroyed 2025-04-10T11:22:29.387329Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387314Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-04-10T11:22:29.387351Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115720:2581] destroyed 2025-04-10T11:22:29.387412Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387452Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387490Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387517Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387543Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_1_7434005126911648719_v1 2025-04-10T11:22:29.387401Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 got read request: guid# feaee9fc-22a7d976-a41f9cc6-81f4d3b3 2025-04-10T11:22:29.387447Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 performing read request: guid# 428cb0dc-9193b03d-34e8bc1f-b77f7afe, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 2, size# 174, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-10T11:22:29.387485Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 2 maxSize 174 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid 428cb0dc-9193b03d-34e8bc1f-b77f7afe 2025-04-10T11:22:29.387952Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:22:29.387999Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-10T11:22:29.388146Z node 27 
:PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 38 count 2 size 174 endOffset 40 max time lag 0ms effective offset 38 2025-04-10T11:22:29.388200Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-04-10T11:22:29.388294Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-04-10T11:22:29.388327Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:22:29.388456Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 38 2025-04-10T11:22:29.388979Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1744284149259 CreateTimestampMS: 1744284149257 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1744284149269 CreateTimestampMS: 1744284149267 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 38 } 2025-04-10T11:22:29.400704Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset40 2025-04-10T11:22:29.400782Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 40 ReadGuid 428cb0dc-9193b03d-34e8bc1f-b77f7afe has messages 1 2025-04-10T11:22:29.401095Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 read done: guid# 428cb0dc-9193b03d-34e8bc1f-b77f7afe, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 349 2025-04-10T11:22:29.401140Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 response to read: guid# 428cb0dc-9193b03d-34e8bc1f-b77f7afe 2025-04-10T11:22:29.401514Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 Process answer. 
Aval parts: 0 Bytes readed: 349 Offset: 38 from session 1 Offset: 39 from session 1 2025-04-10T11:22:29.402926Z node 26 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _26_2_5200202045922303127_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 39 } } } } 2025-04-10T11:22:29.403014Z node 26 :PQ_READ_PROXY INFO: session cookie 2 consumer session _26_2_5200202045922303127_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-10T11:22:29.403453Z node 26 :PQ_READ_PROXY INFO: session cookie 2 consumer session _26_2_5200202045922303127_v1 is DEAD 2025-04-10T11:22:29.404415Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_2_5200202045922303127_v1 2025-04-10T11:22:29.404519Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115731:2591] destroyed 2025-04-10T11:22:29.404592Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_2_5200202045922303127_v1 2025-04-10T11:22:29.404662Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115730:2590] destroyed 2025-04-10T11:22:29.404694Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_2_5200202045922303127_v1 2025-04-10T11:22:29.404732Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115728:2588] destroyed 2025-04-10T11:22:29.404765Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_2_5200202045922303127_v1 2025-04-10T11:22:29.404806Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115729:2589] destroyed 2025-04-10T11:22:29.404846Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _26_2_5200202045922303127_v1 2025-04-10T11:22:29.404888Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [26:7491643378065115727:2587] destroyed 2025-04-10T11:22:29.404984Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_2_5200202045922303127_v1 2025-04-10T11:22:29.405044Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_2_5200202045922303127_v1 2025-04-10T11:22:29.405079Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_2_5200202045922303127_v1 2025-04-10T11:22:29.405110Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_2_5200202045922303127_v1 2025-04-10T11:22:29.405148Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _26_2_5200202045922303127_v1 2025-04-10T11:22:30.064055Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:22:30.064102Z node 26 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
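The closing lines of TPersQueueTest::ReadWithoutConsumerFirstClassCitizen show the behavior under test: both read sessions run as user "$without_consumer", and each is closed with "can't commit when reading without a consumer" as soon as it sends a commit_offset_request — offset commits require a registered consumer, so consumer-less readers must track offsets on the client side. For contrast, a consumer that may commit would be registered with ordinary YQL (sketch only; the topic path matches this test's, but the test deliberately skips this step):

``ALTER TOPIC `/Root/PQ/rt3.dc1--topic1` ADD CONSUMER user;``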
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284698.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284698.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283498.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-10T11:21:40.259863Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:40.345079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:40.364306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:40.364609Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:40.370736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:40.370885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:40.371087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:40.371163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:40.371235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:40.371323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:40.371386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:40.371467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:40.371548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:40.371635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.371701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:40.371759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:40.392311Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:40.392656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:40.392699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:40.392813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:40.392940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:40.393009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:40.393048Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:40.393109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:40.393147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:40.393174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:40.393195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:40.393327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:40.393373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:40.393401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:40.393419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:40.393515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:40.393556Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:40.393592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:40.393614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:40.393686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:40.393726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:40.393760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:40.393791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:40.393815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:40.393837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:40.394175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-10T11:21:40.394232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-04-10T11:21:40.394284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-10T11:21:40.394334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=23; 2025-04-10T11:21:40.394450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:40.394507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:40.394539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:40.394672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:40.394700Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.394720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.394845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:21:40.394875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:21:40.394895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:21:40.395030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... me=9; 2025-04-10T11:22:32.199530Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=208; 2025-04-10T11:22:32.199562Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=27916; 2025-04-10T11:22:32.204385Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4764; 2025-04-10T11:22:32.210329Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5145; 2025-04-10T11:22:32.210418Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=5959; 2025-04-10T11:22:32.210561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=81; 2025-04-10T11:22:32.210666Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=60; 2025-04-10T11:22:32.210784Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=77; 2025-04-10T11:22:32.210893Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=67; 2025-04-10T11:22:32.217747Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6786; 2025-04-10T11:22:32.226027Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8160; 2025-04-10T11:22:32.226177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=53; 2025-04-10T11:22:32.226253Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-04-10T11:22:32.226301Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-10T11:22:32.226353Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T11:22:32.226401Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-10T11:22:32.226476Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-04-10T11:22:32.226526Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:22:32.226621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-04-10T11:22:32.226671Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-10T11:22:32.226739Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-10T11:22:32.226831Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2025-04-10T11:22:32.227062Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=186; 2025-04-10T11:22:32.227101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=62587; 2025-04-10T11:22:32.227260Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=10402524;raw_bytes=16084646;count=7;records=160000} at tablet 9437184 2025-04-10T11:22:32.227378Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:22:32.227432Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:22:32.227499Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:22:32.235570Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:22:32.235843Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:32.235909Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:32.235989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:22:32.236057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:32.236105Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:32.236153Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:32.236192Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:32.236287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:32.237067Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:32.237157Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1968:3869];tablet_id=9437184;parent=[1:1930:3838];fline=manager.cpp:82;event=ask_data;request=request_id=108;1={portions_count=18};; 2025-04-10T11:22:32.238275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:22:32.239163Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:22:32.239201Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
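The TColumnShardTestSchema suites above exercise eviction via per-column tier/TTL descriptors ("Tiers={{Column=timestamp;EvictAfter=...;Name=cold;...}}") injected directly into the shard. The user-facing equivalent of the TTL part is a column-table TTL in YQL; a minimal sketch, with `my_table` and the interval as placeholders — tier targets such as "cold" are wired through the tiering manager seen in the log, not through this statement:

``ALTER TABLE `my_table` SET (TTL = Interval("P30D") ON timestamp);``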
2025-04-10T11:22:32.239225Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:22:32.239270Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:32.239329Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:32.239415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:22:32.239477Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:32.239521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:32.239569Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:32.239608Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:32.239700Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:32.240288Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-04-10T11:22:32.241839Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1930:3838];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 2408, msgbus: 9830 2025-04-10T11:19:45.802640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642674782956351:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.802699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001aa0/r3tmp/tmpeiYzOh/pdisk_1.dat 2025-04-10T11:19:46.473033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:46.519686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:46.519770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.536286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2408, node 1 2025-04-10T11:19:46.799352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:46.799377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.799384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.799499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9830 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:47.190418Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.190512Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891700:2447] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.190851Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891700:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.245821Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891700:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.258806Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891700:2447] Handle TEvDescribeSchemeResult Forward to# [1:7491642683372891697:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
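The "WaitRootIsUp 'dc-1'..." / "WaitRootIsUp 'dc-1' success." lines above show the test client polling the root path with Ls until the describe result comes back with StatusCode: SUCCESS. A hedged sketch of such a readiness probe (all names hypothetical; the real client sends a TDescribePath request):

#include <chrono>
#include <iostream>
#include <string>
#include <thread>

// Placeholder for the describe call; a real client would issue
// TEvNavigate/TDescribePath and parse the TEvDescribeSchemeResult.
std::string LsStatusCode(const std::string& path, int attempt) {
    (void)path;
    return attempt >= 1 ? "SUCCESS" : "PENDING";
}

// Poll Ls on the root path until the scheme reports SUCCESS.
bool WaitRootIsUp(const std::string& path, int maxAttempts = 20) {
    for (int i = 0; i < maxAttempts; ++i) {
        std::cout << "TClient::Ls request: " << path << "\n";
        if (LsStatusCode(path, i) == "SUCCESS") {
            std::cout << "WaitRootIsUp '" << path << "' success.\n";
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() {
    return WaitRootIsUp("dc-1") ? 0 : 1;
}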
2025-04-10T11:19:47.310451Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Handle TEvProposeTransaction 2025-04-10T11:19:47.310488Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.310623Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642683372891709:2454] 2025-04-10T11:19:47.438403Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.438505Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.438545Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.438638Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.439095Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.439269Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.439384Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.439586Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.440646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.448532Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.448592Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891709:2454] txid# 281474976710657 SEND to# [1:7491642683372891708:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
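The txid# 281474976710657 entries above walk one scheme transaction through the proxy: Handle TEvProposeTransaction, Bootstrap (record the UserSID and admin flags), TEvNavigateKeySet against the SchemeCache to resolve the owning schemeshard, SEND TEvModifySchemeTransaction to that tablet, then relay the resulting status back to the caller. A simplified stand-in for that control flow (the event names come from the log; the code itself is illustrative, not the actual TX_PROXY implementation):

#include <iostream>
#include <string>

enum class EStep { Bootstrap, ResolvePath, SendToSchemeShard, ReportStatus };

void RunProposeFlow(const std::string& userSid, bool isClusterAdmin) {
    EStep step = EStep::Bootstrap;
    bool done = false;
    while (!done) {
        switch (step) {
            case EStep::Bootstrap:
                // Parse the ModifyScheme record, note UserSID and admin flags.
                std::cout << "Bootstrap, UserSID: " << userSid
                          << " IsClusterAdministrator: " << isClusterAdmin << "\n";
                step = EStep::ResolvePath;
                break;
            case EStep::ResolvePath:
                // Ask the SchemeCache which schemeshard owns the working dir.
                std::cout << "TEvNavigateKeySet requested from SchemeCache\n";
                step = EStep::SendToSchemeShard;
                break;
            case EStep::SendToSchemeShard:
                // Forward the transaction to the resolved tablet.
                std::cout << "SEND TEvModifySchemeTransaction to shardToRequest\n";
                step = EStep::ReportStatus;
                break;
            case EStep::ReportStatus:
                // Relay the schemeshard's answer back to the requester.
                std::cout << "SEND TEvProposeTransactionStatus to source\n";
                done = true;
                break;
        }
    }
}

int main() {
    RunProposeFlow("root@builtin", /*isClusterAdmin=*/true);
}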
2025-04-10T11:19:47.468666Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Handle TEvProposeTransaction 2025-04-10T11:19:47.468690Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.468728Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642683372891751:2492] 2025-04-10T11:19:47.470572Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.470606Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.470617Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.470657Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.470975Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.471096Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.471141Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.471256Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.471669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.473679Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.473743Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891751:2492] txid# 281474976710658 SEND to# [1:7491642683372891750:2491] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:47.566460Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Handle TEvProposeTransaction 2025-04-10T11:19:47.566498Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:19:47.566556Z node 1 :TX_PROXY DEBUG: actor# [1:7491642674782956579:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491642683372891769:2502] 2025-04-10T11:19:47.569172Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683372891769:2502] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53592" 2025-04-10T11:19:47.569247Z node 1 :TX_PROXY DEBUG: Act ... 40072Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708370:2525] txid# 281474976715660 HANDLE EvClientConnected 2025-04-10T11:22:32.541555Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:22:32.544336Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708370:2525] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-04-10T11:22:32.544391Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708370:2525] txid# 281474976715660 SEND to# [59:7491643388194708369:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-04-10T11:22:32.560048Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7491643388194708369:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:22:32.637043Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Handle TEvProposeTransaction 2025-04-10T11:22:32.637086Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T11:22:32.637148Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7491643388194708447:2582] 2025-04-10T11:22:32.640247Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T11:22:32.640318Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.640343Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T11:22:32.640549Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:32.640589Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:32.641071Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:22:32.641169Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.641433Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.641579Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.641639Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:32.641797Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:32.644975Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:22:32.645115Z node 59 :TX_PROXY ERROR: Actor# [59:7491643388194708447:2582] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:32.645157Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708447:2582] txid# 281474976715661 SEND to# [59:7491643388194708369:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:32.660483Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Handle TEvProposeTransaction 2025-04-10T11:22:32.660520Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:32.660573Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643388194708474:2594] 2025-04-10T11:22:32.662638Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:41386" 2025-04-10T11:22:32.662704Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.662727Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:32.662769Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.663158Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.663287Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.663368Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-04-10T11:22:32.663570Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:32.671305Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:32.671362Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708474:2594] txid# 281474976715662 SEND to# [59:7491643388194708473:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:32.711480Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Handle TEvProposeTransaction 2025-04-10T11:22:32.711523Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:32.711571Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643388194708507:2608] 2025-04-10T11:22:32.714419Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:41408" 2025-04-10T11:22:32.714500Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.714526Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T11:22:32.714688Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:32.714735Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:32.714787Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.715123Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.715155Z node 59 :TX_PROXY ERROR: Actor# [59:7491643388194708507:2608] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-10T11:22:32.715269Z node 59 :TX_PROXY ERROR: Actor# [59:7491643388194708507:2608] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-10T11:22:32.715305Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643388194708507:2608] txid# 281474976715663 SEND to# [59:7491643388194708506:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:32.715557Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZDY5OTYyNy05ZWY1NDdhMi05ZGY4ZGMxYS1kNzc4ZWExZg==, ActorId: [59:7491643388194708492:2352], ActorState: ExecuteState, 
TraceId: 01jrfpzwvr215d5xb7vxe5n0ps, Create QueryResponse for error on request, msg: 2025-04-10T11:22:32.715823Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:32.715855Z node 59 :TX_PROXY DEBUG: actor# [59:7491643371014838423:2112] TxId# 281474976715664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 9159, msgbus: 26164 2025-04-10T11:19:46.388660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642677773272944:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:46.388930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a85/r3tmp/tmp4Y7EBJ/pdisk_1.dat 2025-04-10T11:19:47.081922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:47.087334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:47.087476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:47.093810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9159, node 1 2025-04-10T11:19:47.406783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:47.406805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:47.406812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:47.406936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26164 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:47.730532Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.730587Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240855:2448] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.730945Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240855:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.781951Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240855:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.795696Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240855:2448] Handle TEvDescribeSchemeResult Forward to# [1:7491642682068240854:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
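The AlterLogin requests traced in this output make the access decision visible: with CheckAdministrator and CheckDatabaseAdministrator enabled, root@builtin (IsClusterAdministrator: 1) and db_admin@builtin (IsDatabaseAdministrator: 1 as database owner) may manage users, while ordinaryuser@builtin is refused with "Access denied". A hedged reduction of that rule to its two flags (hypothetical types; the real check lives in the TX_PROXY schemereq code):

#include <iostream>
#include <string>

// Hypothetical mirror of the auth fields printed in the log entries.
struct TRequestAuth {
    std::string UserSID;
    bool IsClusterAdministrator = false;
    bool IsDatabaseAdministrator = false; // e.g. the UserSID owns the database
};

// User management is allowed for cluster admins and the database owner.
bool MayManageUsers(const TRequestAuth& auth) {
    return auth.IsClusterAdministrator || auth.IsDatabaseAdministrator;
}

int main() {
    for (const TRequestAuth& a : {
             TRequestAuth{"root@builtin", true, false},
             TRequestAuth{"db_admin@builtin", false, true},
             TRequestAuth{"ordinaryuser@builtin", false, false}}) {
        std::cout << a.UserSID << ": "
                  << (MayManageUsers(a) ? "allowed" : "Access denied") << "\n";
    }
}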
2025-04-10T11:19:47.826359Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.826399Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:19:47.826538Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7491642682068240863:2455] 2025-04-10T11:19:47.920734Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.920830Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.920883Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.920972Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.921292Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.921512Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.921576Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:19:47.921759Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:19:47.922852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.925848Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:19:47.925891Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240863:2455] txid# 281474976715657 SEND to# [1:7491642682068240862:2454] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-04-10T11:19:47.943506Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.943546Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] TxId# 281474976715658 ProcessProposeTransaction 2025-04-10T11:19:47.943576Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7491642682068240906:2491] 2025-04-10T11:19:47.945893Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.945942Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.945958Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.946014Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.946353Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.946463Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.946501Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-10T11:19:47.946640Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 HANDLE EvClientConnected 2025-04-10T11:19:47.947153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.949926Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-04-10T11:19:47.949985Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682068240906:2491] txid# 281474976715658 SEND to# [1:7491642682068240905:2490] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-04-10T11:19:48.032440Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Handle TEvProposeTransaction 2025-04-10T11:19:48.032479Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] TxId# 281474976715659 ProcessProposeTransaction 2025-04-10T11:19:48.032549Z node 1 :TX_PROXY DEBUG: actor# [1:7491642677773273032:2116] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7491642686363208220:2501] 2025-04-10T11:19:48.036242Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642686363208220:2501] txid# 281474976715659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37092" 2025-04-10T11:19:48.036312Z node 1 :TX_PROXY DEBUG: A ... 7491643390862574184:2597] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:32.793509Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:32.794093Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:22:32.794214Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.794432Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.794584Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.794651Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:32.794855Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:32.797943Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:22:32.798087Z node 59 :TX_PROXY ERROR: Actor# [59:7491643390862574184:2597] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:32.798135Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574184:2597] txid# 281474976715661 SEND to# [59:7491643390862574102:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:32.813783Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Handle TEvProposeTransaction 2025-04-10T11:22:32.813819Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] TxId# 
281474976715662 ProcessProposeTransaction 2025-04-10T11:22:32.813874Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643390862574208:2609] 2025-04-10T11:22:32.816534Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40056" 2025-04-10T11:22:32.816615Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.816640Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:32.816691Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.817026Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.817143Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.817229Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:32.817519Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:32.824787Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:32.824857Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574208:2609] txid# 281474976715662 SEND to# [59:7491643390862574207:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:32.832241Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Handle TEvProposeTransaction 2025-04-10T11:22:32.832278Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:32.832329Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643390862574221:2618] 2025-04-10T11:22:32.835095Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: 
"" PeerName: "ipv6:[::1]:40064" 2025-04-10T11:22:32.835184Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.835214Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:32.835281Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.835725Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.835870Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.835965Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-10T11:22:32.836178Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 HANDLE EvClientConnected 2025-04-10T11:22:32.836595Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:22:32.839127Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-10T11:22:32.839192Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574221:2618] txid# 281474976715663 SEND to# [59:7491643390862574220:2347] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-10T11:22:32.874113Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Handle TEvProposeTransaction 2025-04-10T11:22:32.874162Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-04-10T11:22:32.874240Z node 59 :TX_PROXY DEBUG: actor# [59:7491643373682704210:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7491643390862574253:2632] 2025-04-10T11:22:32.876913Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40098" 2025-04-10T11:22:32.876998Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.877029Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-04-10T11:22:32.877208Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7491643390862574253:2632] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:32.877262Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-04-10T11:22:32.877310Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.877554Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.877641Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.877690Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-04-10T11:22:32.877813Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 HANDLE EvClientConnected 2025-04-10T11:22:32.880402Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-04-10T11:22:32.880466Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643390862574253:2632] txid# 281474976715664 SEND to# [59:7491643390862574252:2353] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} >> TColumnShardTestSchema::RebootColdTiersWithStat >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 6628, msgbus: 11677 2025-04-10T11:19:45.626990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642671312707758:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.627120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ac9/r3tmp/tmpv6a2pT/pdisk_1.dat 2025-04-10T11:19:46.366344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:46.371480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:46.371580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.377942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6628, node 1 2025-04-10T11:19:46.763900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:46.763939Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.763948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.764073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11677 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:47.178018Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.178117Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643139:2451] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.179537Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643139:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.230254Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643139:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.243150Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643139:2451] Handle TEvDescribeSchemeResult Forward to# [1:7491642679902643138:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:19:47.294499Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.294533Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.294695Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642679902643147:2458] 2025-04-10T11:19:47.474750Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.474842Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.474866Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.474944Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.475329Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.475561Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.475660Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.475890Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.476682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.479165Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.479273Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643147:2458] txid# 281474976710657 SEND to# [1:7491642679902643146:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
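A pattern worth noting across these propose flows: StatusAccepted replies carry TEvProposeTransactionStatus Status# 53 and are followed by "waiting..." until the operation completes, while StatusSuccess replies carry Status# 48 and need no wait. The numeric mapping below is inferred from the pairings in this log, not taken from the YDB sources:

#include <iostream>

// Inferred from the log: 53 = accepted/in-flight, 48 = already complete.
enum class EProposeStatus { Accepted = 53, Success = 48 };

bool NeedsCompletionWait(EProposeStatus s) {
    return s == EProposeStatus::Accepted;
}

int main() {
    std::cout << std::boolalpha
              << "Status# 53 waits: " << NeedsCompletionWait(EProposeStatus::Accepted) << "\n"
              << "Status# 48 waits: " << NeedsCompletionWait(EProposeStatus::Success) << "\n";
}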
2025-04-10T11:19:47.542806Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.542836Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.542870Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642679902643191:2495] 2025-04-10T11:19:47.545388Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.545473Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.545494Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.545545Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.545790Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.545873Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.545914Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.550130Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.550751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.571013Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.571067Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643191:2495] txid# 281474976710658 SEND to# [1:7491642679902643190:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:47.677522Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.677554Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:19:47.677612Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671312708016:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491642679902643209:2505] 2025-04-10T11:19:47.680027Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643209:2505] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50336" 2025-04-10T11:19:47.680073Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642679902643209:2505] txid# 281474976710659 Bootstrap, UserSID: root ... e 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.894979Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.895084Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.895127Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-04-10T11:22:32.895260Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 HANDLE EvClientConnected 2025-04-10T11:22:32.897915Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:22:32.898048Z node 59 :TX_PROXY ERROR: Actor# [59:7491643389392125185:2573] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:32.898094Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125185:2573] txid# 281474976715660 SEND to# [59:7491643389392125111:2341] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-04-10T11:22:32.911339Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Handle TEvProposeTransaction 2025-04-10T11:22:32.911374Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T11:22:32.911426Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7491643389392125209:2585] 2025-04-10T11:22:32.914307Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45102" 2025-04-10T11:22:32.914388Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.914413Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:32.914469Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.914835Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.914966Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.915034Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:32.915177Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:32.921824Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-10T11:22:32.921876Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125209:2585] txid# 281474976715661 SEND to# [59:7491643389392125208:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:32.979641Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Handle TEvProposeTransaction 2025-04-10T11:22:32.979680Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:32.979730Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643389392125232:2602] 2025-04-10T11:22:32.981828Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45116" 2025-04-10T11:22:32.981884Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:32.981901Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:32.981955Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7491643389392125232:2602] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:32.982300Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:32.982427Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:32.982500Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:32.982681Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:32.983071Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:22:32.985115Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:32.985173Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643389392125232:2602] txid# 281474976715662 SEND to# [59:7491643389392125231:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:33.024589Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Handle TEvProposeTransaction 2025-04-10T11:22:33.024632Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:33.024691Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643393687092564:2620] 2025-04-10T11:22:33.026840Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1MiwiaWF0IjoxNzQ0Mjg0MTUyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.xscTrPRjSUKpDZFrXXZnHyMSGaWSx6z1hfPxMzRufpxdzQClaWKqIiu61NDk8f--ue2bBiR3PU8H0g_GdQuj6UUl0Q6Njl0fOndDkuKKvJcW83mK7y4P7AzALxtry3nfqg_O4w7MaxixqxFilXBOaX1vgCp9GNI0lG8uDmi-EGecos58KhCbt9K0mtCJapZ1JsbfyECRB07bhCEuLR2zBYt03GvVYKyzukYCkHvu0MfvKgn6sNOGh9dW7PZDOTXWO68n6XnVttYBNnsfRYeuUcLG0CvdshSXbe07scJhixbOLo_M3I776qDQA-jLuQf1sOnPZUed9WhXxgSgRhK43Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1MiwiaWF0IjoxNzQ0Mjg0MTUyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45132" 2025-04-10T11:22:33.026900Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:33.026917Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] 
txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-10T11:22:33.027066Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:33.027126Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:33.027186Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:33.027492Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:33.027530Z node 59 :TX_PROXY ERROR: Actor# [59:7491643393687092564:2620] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-04-10T11:22:33.027654Z node 59 :TX_PROXY ERROR: Actor# [59:7491643393687092564:2620] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-10T11:22:33.027696Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393687092564:2620] txid# 281474976715663 SEND to# [59:7491643393687092563:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:33.027942Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZDk2YTU1MjEtODcwMmE5NjQtNmJlYTVmOWMtMzIzNDc5Yzc=, ActorId: [59:7491643389392125253:2353], ActorState: ExecuteState, TraceId: 01jrfpzx5g2e37gham3vrcepr2, Create QueryResponse for error on request, msg: 2025-04-10T11:22:33.028177Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:33.028205Z node 59 :TX_PROXY DEBUG: actor# [59:7491643372212255240:2113] TxId# 281474976715664 ProcessProposeKqpTransaction >> TColumnShardTestSchema::RebootDrop [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TColumnShardTestSchema::ColdTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] Test command err: 2025-04-10T11:22:20.505630Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:20.598755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:20.623136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:20.623456Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:20.631790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:20.631996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:20.632263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-04-10T11:22:20.632395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:20.632524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:20.632645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:20.632757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:20.632890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:20.633028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:20.633151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.633253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:20.633375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:20.662300Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:20.662686Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:20.662756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:20.662943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:20.663128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:20.663217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:20.663260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:20.663353Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:20.663434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:20.663486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:20.663517Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:20.663701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:20.663773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:20.663815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:20.663847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:20.663934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:20.664000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:20.664052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:20.664081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:20.664192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:20.664235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:20.664281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:20.664339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:20.664378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:20.664407Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:20.664840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-10T11:22:20.664942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-10T11:22:20.665024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-10T11:22:20.665125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-10T11:22:20.665315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:20.665379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:20.665416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:20.665627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:20.665702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.665747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.665904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:20.665958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:20.665992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:20.666235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:22:20.666281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:22:20.666313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:22:20.666515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:22:20.666565Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:22:20.666614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:22:35.209887Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:22:35.209918Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:22:35.209968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:35.210037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:35.210105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-10T11:22:35.210201Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:35.210255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:35.210310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:35.210353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:35.210435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-10T11:22:35.210491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:35.210984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=7;path_id=1; 2025-04-10T11:22:35.211682Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T11:22:35.387389Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-04-10T11:22:35.387578Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:22:35.390121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-10T11:22:35.390253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-10T11:22:35.390905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"saved_at","id":9}]},"o":"9","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"9","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:22:35.391036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:22:35.392357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1009:3002];trace_detailed=; 2025-04-10T11:22:35.393686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-10T11:22:35.393998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-10T11:22:35.394556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:22:35.394719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:22:35.394850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:22:35.394897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1009:3002] finished for tablet 9437184 2025-04-10T11:22:35.395359Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1002:2996];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1744284155392286,"name":"_full_task","f":1744284155392286,"d_finished":0,"c":0,"l":1744284155394965,"d":2679},"events":[{"name":"bootstrap","f":1744284155392540,"d_finished":1639,"c":1,"l":1744284155394179,"d":1639},{"a":1744284155394526,"name":"ack","f":1744284155394526,"d_finished":0,"c":0,"l":1744284155394965,"d":439},{"a":1744284155394499,"name":"processing","f":1744284155394499,"d_finished":0,"c":0,"l":1744284155394965,"d":466},{"name":"ProduceResults","f":1744284155394140,"d_finished":325,"c":2,"l":1744284155394880,"d":325},{"a":1744284155394883,"name":"Finish","f":1744284155394883,"d_finished":0,"c":0,"l":1744284155394965,"d":82}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:22:35.395437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1002:2996];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:22:35.395871Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1002:2996];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1744284155392286,"name":"_full_task","f":1744284155392286,"d_finished":0,"c":0,"l":1744284155395482,"d":3196},"events":[{"name":"bootstrap","f":1744284155392540,"d_finished":1639,"c":1,"l":1744284155394179,"d":1639},{"a":1744284155394526,"name":"ack","f":1744284155394526,"d_finished":0,"c":0,"l":1744284155395482,"d":956},{"a":1744284155394499,"name":"processing","f":1744284155394499,"d_finished":0,"c":0,"l":1744284155395482,"d":983},{"name":"ProduceResults","f":1744284155394140,"d_finished":325,"c":2,"l":1744284155394880,"d":325},{"a":1744284155394883,"name":"Finish","f":1744284155394883,"d_finished":0,"c":0,"l":1744284155395482,"d":599}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:22:35.395957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:22:35.390993Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:22:35.396006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:22:35.396126Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 7349, msgbus: 2150 2025-04-10T11:19:45.684389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642672384362120:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.684619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001aae/r3tmp/tmpaPcU9Y/pdisk_1.dat 2025-04-10T11:19:46.431741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:19:46.431825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.450347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:46.461254Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7349, node 1 2025-04-10T11:19:46.748773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:46.748791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.748798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.748933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2150 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:47.199964Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.200021Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297352:2451] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.200415Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297352:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.249645Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297352:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.263138Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297352:2451] Handle TEvDescribeSchemeResult Forward to# [1:7491642680974297351:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } 
Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:19:47.283343Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.283377Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.290279Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642680974297360:2458] 2025-04-10T11:19:47.440223Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.440328Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.440351Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.440448Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.440746Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.440892Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.440967Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.441123Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.442676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.447612Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.447685Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297360:2458] txid# 281474976710657 SEND to# [1:7491642680974297357:2455] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T11:19:47.471790Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.471814Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.471862Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642680974297400:2494] 2025-04-10T11:19:47.474468Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.474552Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.474569Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.474623Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.474871Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.474950Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.474997Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.475129Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.475585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.478617Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.478663Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297400:2494] txid# 281474976710658 SEND to# [1:7491642680974297399:2493] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:47.581061Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.581090Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:19:47.581129Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672384362228:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491642680974297418:2504] 2025-04-10T11:19:47.583486Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680974297418:2504] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46636" 2025-04-10T11:19:47.583538Z node 1 :TX_PROXY DEBUG: Act ... dPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51186" 2025-04-10T11:22:33.916246Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:33.916270Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:33.916325Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:33.917225Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:33.917355Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:33.917455Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:33.917735Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:33.925500Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-10T11:22:33.925546Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119473993:2585] txid# 281474976715661 SEND to# [59:7491643393119473992:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:33.985341Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Handle TEvProposeTransaction 2025-04-10T11:22:33.985378Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:33.985427Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643393119474021:2603] 2025-04-10T11:22:33.988097Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } 
UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51200" 2025-04-10T11:22:33.988177Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:33.988200Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:33.988256Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:33.988579Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:33.988690Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:33.988752Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:33.988908Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:33.989417Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:22:33.991684Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:33.991733Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643393119474021:2603] txid# 281474976715662 SEND to# [59:7491643393119474020:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:34.026540Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Handle TEvProposeTransaction 2025-04-10T11:22:34.026588Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:34.026660Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643397414441350:2622] 2025-04-10T11:22:34.029491Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51222" 2025-04-10T11:22:34.029565Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:34.029585Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7491643397414441350:2622] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:34.029626Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.029931Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.030031Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:34.030081Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-10T11:22:34.030260Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 HANDLE EvClientConnected 2025-04-10T11:22:34.036313Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-10T11:22:34.036359Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441350:2622] txid# 281474976715663 SEND to# [59:7491643397414441349:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-10T11:22:34.078933Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Handle TEvProposeTransaction 2025-04-10T11:22:34.078977Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-04-10T11:22:34.079032Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7491643397414441378:2634] 2025-04-10T11:22:34.081930Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1MywiaWF0IjoxNzQ0Mjg0MTUzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.JRWaDKr4h3qR9gtzd26Ua6rcVxAlRT9XAI9kr2NRg9ddpLGpgOUGDgjgFCvqDr3f-gIzMzQFIIT_8QFs6luP36zl--8ZKAJcvIcHRvO1WHW7aKDA8bALHWLCl4yCBNmncAUk246wDir3dYdg-BwbqR5Q9ACrujIk8QNYMWpxwwXjuzagDgvFMnUmGJrTdDDuRAL9UgpS3Cp4JtREiG8naL00-frM2fJGuoyI7kKBzmvyKcJWhcF2RpNcASJhMrZNJQLtGzNsbbnjJnj_HjFQo-wkVBrysRvSEGuQo8Gt0mTiBuEMioP0qSZiWS8LCrmgJstTeKZuNANtYKSmZuYDnA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1MywiaWF0IjoxNzQ0Mjg0MTUzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51250" 2025-04-10T11:22:34.082026Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:34.082057Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-04-10T11:22:34.082299Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:34.082365Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:34.082420Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.082748Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.082786Z node 59 :TX_PROXY ERROR: Actor# [59:7491643397414441378:2634] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-10T11:22:34.082888Z node 59 :TX_PROXY ERROR: Actor# [59:7491643397414441378:2634] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-10T11:22:34.082927Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643397414441378:2634] txid# 281474976715664 SEND to# [59:7491643397414441377:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:34.083216Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=Y2JlZDAyMmItNzk1NDExMDQtNGZjZmIzMi02YWJmZWEzMQ==, ActorId: [59:7491643397414441368:2361], ActorState: ExecuteState, TraceId: 01jrfpzy6g8d32t4zm4mfs7rf3, Create QueryResponse for error on request, msg: 2025-04-10T11:22:34.083482Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:34.083513Z node 59 :TX_PROXY DEBUG: actor# [59:7491643375939604032:2113] TxId# 281474976715665 ProcessProposeKqpTransaction >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 27036, msgbus: 19369 2025-04-10T11:19:45.667011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642671945399894:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.667129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001af7/r3tmp/tmpcxsiXX/pdisk_1.dat 2025-04-10T11:19:46.267878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:46.358440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:46.358518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.370206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27036, node 1 2025-04-10T11:19:46.738280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-10T11:19:46.738309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.738319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.738457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19369 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:47.177985Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.178050Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335239:2451] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.180241Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335239:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.233225Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335239:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.244326Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335239:2451] Handle TEvDescribeSchemeResult Forward to# [1:7491642680535335238:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:19:47.284214Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.284241Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.284365Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642680535335245:2456] 2025-04-10T11:19:47.410674Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.410780Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.410811Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.410882Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.411152Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.411292Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.411345Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.411493Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.412236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.416517Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.416574Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335245:2456] txid# 281474976710657 SEND to# [1:7491642680535335244:2455] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T11:19:47.431172Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:19:47.448255Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.448279Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.448308Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642680535335287:2494] 2025-04-10T11:19:47.450559Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.450598Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.450613Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.450685Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.450951Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.451032Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.451065Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.451157Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.451621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.456687Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.456733Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680535335287:2494] txid# 281474976710658 SEND to# [1:7491642680535335286:2493] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:49.204082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642689125269972:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.204489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642689125269961:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.204570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.204965Z node 1 :TX_PROXY DEBUG: actor# [1:7491642671945400117:2116] Handle TEvPropo ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42756" 2025-04-10T11:22:34.762057Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:34.762079Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:34.762118Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.762477Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.762624Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:34.762717Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:34.762910Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:34.770338Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-10T11:22:34.770401Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438167:2583] txid# 281474976715661 SEND to# [59:7491643400206438166:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:34.832770Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Handle TEvProposeTransaction 2025-04-10T11:22:34.832799Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:34.832834Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643400206438187:2597] 2025-04-10T11:22:34.835095Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42766" 2025-04-10T11:22:34.835156Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2025-04-10T11:22:34.835173Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:34.835215Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.835468Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.835562Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:34.835617Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:34.835781Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:34.836184Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:22:34.838078Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:34.838127Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438187:2597] txid# 281474976715662 SEND to# [59:7491643400206438186:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:34.872136Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Handle TEvProposeTransaction 2025-04-10T11:22:34.872169Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:34.872223Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643400206438223:2616] 2025-04-10T11:22:34.874135Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42772" 2025-04-10T11:22:34.874213Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:34.874231Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:34.874268Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.874523Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.874635Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:34.874696Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-10T11:22:34.874845Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 HANDLE EvClientConnected 2025-04-10T11:22:34.882203Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-10T11:22:34.882267Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438223:2616] txid# 281474976715663 SEND to# [59:7491643400206438222:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-10T11:22:34.920686Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Handle TEvProposeTransaction 2025-04-10T11:22:34.920724Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-04-10T11:22:34.920772Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7491643400206438251:2628] 2025-04-10T11:22:34.923474Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NCwiaWF0IjoxNzQ0Mjg0MTU0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.VoZ6rHLsa9-N8YH6dXkKS4bPVV2Ivv2cdX8dJsnuq2z_CnAC5q-CjlE8ggmZ3kqArQ4eJ9PbXzxddbjC4SFJrCiXdByAyATKF1C0t2EEcbnYQjQ8y0aoUp7XUrL20ZQTXCM4QvereWFsx4ri66lzNlHLomdk1id39bk-i73GU3rv17l-iDn0Gp7FJP6NbIj5bXrMPWFMBUNSb3qEwGVIdrJIhxhNycjMv_pDGa_Vq-NPjRuYE8qNVXCaWkONbJkkMaAE3eFSNeCnJXeElE4ezInT8Pwuz-3pX0VvP3iZegmQc0oxOHKMNG4MwEkzoJFgnABKUMMJCyTOZWxUDuofkQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NCwiaWF0IjoxNzQ0Mjg0MTU0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42794" 2025-04-10T11:22:34.923555Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:34.923577Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-10T11:22:34.923729Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:34.923778Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:34.923826Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:34.924061Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:34.924092Z node 59 :TX_PROXY ERROR: Actor# [59:7491643400206438251:2628] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-10T11:22:34.924192Z node 59 :TX_PROXY ERROR: Actor# [59:7491643400206438251:2628] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-10T11:22:34.924226Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643400206438251:2628] txid# 281474976715664 SEND to# [59:7491643400206438250:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:34.924474Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MTAxZjI1NDUtOWEzNjY2MDItYjllZGJlM2ItNTRiZmE4YTY=, ActorId: [59:7491643400206438241:2361], ActorState: ExecuteState, TraceId: 01jrfpzz0t4e8nvyje284nye4e, Create QueryResponse for error on request, msg: 2025-04-10T11:22:34.924674Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:34.924694Z node 59 :TX_PROXY DEBUG: actor# [59:7491643383026568199:2113] TxId# 281474976715665 ProcessProposeKqpTransaction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 2237, msgbus: 27754 2025-04-10T11:19:47.244560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642679418202840:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:47.266292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a7d/r3tmp/tmpoV3XrW/pdisk_1.dat 2025-04-10T11:19:48.080623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:48.080736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:48.090621Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:48.091448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2237, node 1 2025-04-10T11:19:48.472400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:48.472423Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:48.472430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:48.472545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27754 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:48.728746Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:48.728848Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170762:2457] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:48.729574Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170762:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:48.769423Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170762:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:48.780443Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170762:2457] Handle TEvDescribeSchemeResult Forward to# [1:7491642683713170761:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:19:48.810975Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Handle TEvProposeTransaction 2025-04-10T11:19:48.811004Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:48.811119Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642683713170768:2462] 2025-04-10T11:19:48.876778Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:48.877034Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T11:19:48.877068Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:48.877169Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:48.877486Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:48.877631Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:48.877681Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:48.877839Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:48.878637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:19:48.881951Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:48.881999Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170768:2462] txid# 281474976710657 SEND to# [1:7491642683713170767:2461] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-04-10T11:19:48.926380Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Handle TEvProposeTransaction 2025-04-10T11:19:48.926419Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:48.926476Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642683713170810:2500] 2025-04-10T11:19:48.928863Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:48.928924Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T11:19:48.928943Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:48.928997Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:48.929277Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:48.929380Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:48.929423Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:48.929625Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:48.930136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:48.939739Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:48.939787Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170810:2500] txid# 281474976710658 SEND to# [1:7491642683713170809:2499] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:48.981285Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Handle 
TEvProposeTransaction 2025-04-10T11:19:48.981359Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:19:48.981427Z node 1 :TX_PROXY DEBUG: actor# [1:7491642679418202958:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491642683713170828:2510] 2025-04-10T11:19:48.983856Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642683713170828:2510] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56606" 2025-04-10T11:19:48.983920Z node 1 :TX_PROXY DEBUG: Acto ... e 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.460476Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.460641Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:35.460707Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-04-10T11:22:35.460894Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 HANDLE EvClientConnected 2025-04-10T11:22:35.463816Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:22:35.463964Z node 59 :TX_PROXY ERROR: Actor# [59:7491643403317317304:2585] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:35.464009Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317304:2585] txid# 281474976715660 SEND to# [59:7491643403317317222:2341] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-04-10T11:22:35.478322Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.478370Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T11:22:35.478427Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Cookie# 0 userReqId# "" txid# 
281474976715661 SEND to# [59:7491643403317317328:2597] 2025-04-10T11:22:35.480694Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58472" 2025-04-10T11:22:35.480762Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.480780Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:35.480824Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.481277Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.481440Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:35.481510Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:35.481656Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:35.489182Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-10T11:22:35.489244Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317328:2597] txid# 281474976715661 SEND to# [59:7491643403317317327:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:35.739929Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.739968Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:35.740016Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643403317317348:2611] 2025-04-10T11:22:35.743035Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58488" 2025-04-10T11:22:35.743121Z node 59 
:TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.743147Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:35.743210Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.743609Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.743701Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:35.743768Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:35.743911Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:35.744343Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:22:35.746636Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:35.746697Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317348:2611] txid# 281474976715662 SEND to# [59:7491643403317317347:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:35.784977Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.785013Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:35.785064Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643403317317390:2635] 2025-04-10T11:22:35.787889Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: 
"\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NSwiaWF0IjoxNzQ0Mjg0MTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.HvomxDJh2qkQQ2HFuK-yxJijzlOuW9hHHZ27JkfNiOyzO52SHAU9XRaGpWzmL1vFR7-fCZ5UYl6T8UKEs1LVwd_5LMgJQ9pQ754KamO_4FXw4LNLWYBkruUJZAenPjLo-eQJt4SYaVgSbgxYSBbsyDPV8t9YyW0VHIfpERZHHffZnBA-7_YKa8Ua7cf74V6InTIHi5HexIcf_tJndLSFtpuqM4P-jQ6YBOnrNKIP2sRQEdrDvG_7Bld8k9HxHmQDXaCliBoWr3KC_nDpDfzj416jztP7BwJqYe-3znDuVP2y3S2yLloIJnT1gfV81CSBbIki6NVUoENLn3RVDxCzlg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NSwiaWF0IjoxNzQ0Mjg0MTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58520" 2025-04-10T11:22:35.787972Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.787997Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-10T11:22:35.788166Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:35.788248Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:35.788304Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.788636Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.788672Z node 59 :TX_PROXY ERROR: Actor# [59:7491643403317317390:2635] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-04-10T11:22:35.788780Z node 59 :TX_PROXY ERROR: Actor# [59:7491643403317317390:2635] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-10T11:22:35.788818Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643403317317390:2635] txid# 281474976715663 SEND to# [59:7491643403317317389:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:35.789095Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ODdlOThlMGUtZDk2NmM5MDctOTc4ZTliZWQtZjMyMDhjZjg=, ActorId: [59:7491643403317317375:2353], ActorState: ExecuteState, TraceId: 01jrfpzzvvd829sgcws40jkgwh, Create QueryResponse for error on request, msg: 2025-04-10T11:22:35.789325Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:35.789351Z node 59 :TX_PROXY DEBUG: actor# [59:7491643386137447348:2113] TxId# 281474976715664 ProcessProposeKqpTransaction >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::RebootForgetAfterFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 13810, msgbus: 18513 2025-04-10T11:19:51.139298Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642699962885183:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:51.151363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001a62/r3tmp/tmp3Ihij1/pdisk_1.dat 2025-04-10T11:19:51.682829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:51.682939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:51.686388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13810, node 1 2025-04-10T11:19:51.946296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:52.024104Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T11:19:52.024125Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T11:19:52.026812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:52.026832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:52.026839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:52.026960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18513 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:19:52.347121Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:52.347174Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853252:2457] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:52.347513Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853252:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:52.397420Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853252:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:52.410885Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853252:2457] Handle TEvDescribeSchemeResult Forward to# [1:7491642704257853251:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:19:52.436282Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Handle TEvProposeTransaction 2025-04-10T11:19:52.436311Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:19:52.436418Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7491642704257853261:2462] 2025-04-10T11:19:52.525342Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:52.525441Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:52.525462Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:52.525554Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:52.525844Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:52.525958Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:52.526005Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:19:52.526135Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:19:52.527156Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.531140Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:19:52.531201Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853261:2462] txid# 281474976715657 SEND to# [1:7491642704257853260:2461] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-04-10T11:19:52.555210Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Handle TEvProposeTransaction 2025-04-10T11:19:52.555234Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] TxId# 281474976715658 ProcessProposeTransaction 2025-04-10T11:19:52.555273Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7491642704257853303:2500] 2025-04-10T11:19:52.557501Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:52.557534Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:19:52.557546Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:52.557577Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:52.557820Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:52.557888Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:52.557920Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-10T11:19:52.558011Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 HANDLE EvClientConnected 2025-04-10T11:19:52.558421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:52.567372Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-04-10T11:19:52.567420Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853303:2500] txid# 281474976715658 
SEND to# [1:7491642704257853302:2499] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-04-10T11:19:52.599836Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Handle TEvProposeTransaction 2025-04-10T11:19:52.599862Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] TxId# 281474976715659 ProcessProposeTransaction 2025-04-10T11:19:52.599893Z node 1 :TX_PROXY DEBUG: actor# [1:7491642699962885417:2115] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7491642704257853321:2510] 2025-04-10T11:19:52.601341Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642704257853321:2510] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known ... 04795Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243361:2528] txid# 281474976715660 HANDLE EvClientConnected 2025-04-10T11:22:35.806003Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:22:35.808523Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243361:2528] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-04-10T11:22:35.808580Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243361:2528] txid# 281474976715660 SEND to# [59:7491643402526243360:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-04-10T11:22:35.825042Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7491643402526243360:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:22:35.906497Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.906564Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T11:22:35.906629Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7491643402526243442:2589] 2025-04-10T11:22:35.910232Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T11:22:35.910301Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.910328Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T11:22:35.910503Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:35.910539Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:35.911046Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:22:35.911156Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.911365Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.911509Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:35.911557Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:35.911723Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:35.914369Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:22:35.914487Z node 59 :TX_PROXY ERROR: Actor# [59:7491643402526243442:2589] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:35.914520Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243442:2589] txid# 281474976715661 SEND to# [59:7491643402526243360:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:35.931724Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.931764Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:35.931816Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643402526243469:2601] 2025-04-10T11:22:35.933951Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49170" 2025-04-10T11:22:35.934011Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.934028Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:35.934065Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.934363Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.934449Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:35.934493Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-04-10T11:22:35.934629Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:35.940920Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:35.940968Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243469:2601] txid# 281474976715662 SEND to# [59:7491643402526243468:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:35.979418Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Handle TEvProposeTransaction 2025-04-10T11:22:35.979448Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:35.979487Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643402526243505:2618] 2025-04-10T11:22:35.981571Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49196" 2025-04-10T11:22:35.981629Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:35.981647Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T11:22:35.981831Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:35.981879Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:35.981927Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:35.982189Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:35.982237Z node 59 :TX_PROXY ERROR: Actor# [59:7491643402526243505:2618] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-10T11:22:35.982340Z node 59 :TX_PROXY ERROR: Actor# [59:7491643402526243505:2618] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-10T11:22:35.982375Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643402526243505:2618] txid# 281474976715663 SEND to# [59:7491643402526243504:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:35.982574Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MTUwYmVlMWUtZWQ0M2NiZTItNGRiOWY1ZDAtYWRiMWE2MWI=, ActorId: [59:7491643402526243490:2352], ActorState: ExecuteState, 
TraceId: 01jrfq001y0b4yzcy2x77k7538, Create QueryResponse for error on request, msg: 2025-04-10T11:22:35.982758Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:35.982783Z node 59 :TX_PROXY DEBUG: actor# [59:7491643385346373480:2113] TxId# 281474976715664 ProcessProposeKqpTransaction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 9223, msgbus: 6252 2025-04-10T11:19:45.647753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642673560906077:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.649131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ad6/r3tmp/tmptwDAmk/pdisk_1.dat 2025-04-10T11:19:46.390802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:46.390885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.397571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:19:46.417646Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9223, node 1 2025-04-10T11:19:46.738889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:46.738910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.738916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.739012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6252 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:47.184338Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.184380Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841442:2461] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.184633Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841442:2461] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.236585Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841442:2461] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.259447Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841442:2461] Handle TEvDescribeSchemeResult Forward to# [1:7491642682150841441:2460] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
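The Bootstrap / HandleResolveDatabase traces throughout this section all evaluate the same user-management gate: the proxy logs UserSID, CheckAdministrator, CheckDatabaseAdministrator, IsClusterAdministrator, IsDatabaseAdministrator and DatabaseOwner, then either forwards the AlterLogin request to the schemeshard or fails it with "Access denied for <user>, attempt to manage user" (Status# 5). Below is a minimal C++17 sketch of that decision, assuming it is just the conjunction of the flags the log prints; the log only demonstrates the deny path and the cluster-admin path, so the database-admin/owner branch is an assumption, and the names mirror the log fields rather than the actual NKikimr classes.

#include <iostream>
#include <string>

// Field names copied from the log records; an illustrative model only.
struct TLoginRequestCtx {
    std::string UserSID;                  // e.g. "ordinaryuser"
    std::string DatabaseOwner;            // e.g. "root@builtin"
    bool CheckAdministrator = true;       // both flags are 1 in these runs
    bool CheckDatabaseAdministrator = true;
    bool IsClusterAdministrator = false;  // resolved at Bootstrap
    bool IsDatabaseAdministrator = false; // resolved at HandleResolveDatabase
};

// True when an AlterLogin (create/remove user) request may proceed.
bool MayManageUser(const TLoginRequestCtx& ctx) {
    if (!ctx.CheckAdministrator)
        return true;  // protection disabled: no check at all
    if (ctx.IsClusterAdministrator)
        return true;  // matches the root@builtin traces in this log
    // Assumed branch: the log prints these flags but only shows denials,
    // so treat database admins and the database owner as authorized.
    if (ctx.CheckDatabaseAdministrator &&
        (ctx.IsDatabaseAdministrator || ctx.UserSID == ctx.DatabaseOwner))
        return true;
    return false;     // -> "Access denied ..., attempt to manage user"
}

int main() {
    TLoginRequestCtx ordinary{"ordinaryuser", "root@builtin"};
    TLoginRequestCtx root{"root@builtin", "root@builtin"};
    root.IsClusterAdministrator = true;
    std::cout << MayManageUser(ordinary) << ' ' << MayManageUser(root) << '\n'; // 0 1
}

Note that this gate is specific to user management: the metadata@system request later in the log (txid 281474976715661, ESchemeOpCreateResourcePool with Internal: true) also carries IsClusterAdministrator: 0 yet proceeds, because it is not an AlterLogin operation.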
2025-04-10T11:19:47.302416Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.302450Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.302551Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642682150841450:2468] 2025-04-10T11:19:47.397483Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.399279Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:19:47.399312Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.399421Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.399819Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.399979Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.400045Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.400201Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.402240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.411502Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.411556Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841450:2468] txid# 281474976710657 SEND to# [1:7491642682150841449:2467] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
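The UserToken and DiffACL fields in these EvSchemeRequest records are binary protobuf payloads that the logger renders with C-style escapes (octal plus \n, \t and the like), which is why they appear as strings such as "\n\014root@builtin...". The following standalone helper, plain C++17 with no YDB or protobuf dependency, undoes that escaping so the raw bytes can be fed to a protobuf decoder offline; the field-number comment is an assumption read off the visible byte layout, not taken from the actual schema.

#include <iostream>
#include <string>

// Undo C-style escaping as used in the log above. Handles octal escapes
// (\014) and the common single-character ones (\n, \r, \t, \\, \").
std::string UnescapeCStyle(const std::string& in) {
    std::string out;
    for (size_t i = 0; i < in.size(); ++i) {
        if (in[i] != '\\' || i + 1 == in.size()) {
            out.push_back(in[i]);
            continue;
        }
        char c = in[++i];
        if (c >= '0' && c <= '7') {           // up to three octal digits
            int val = c - '0';
            for (int d = 1; d < 3 && i + 1 < in.size() &&
                            in[i + 1] >= '0' && in[i + 1] <= '7'; ++d)
                val = val * 8 + (in[++i] - '0');
            out.push_back(static_cast<char>(val));
        } else if (c == 'n') out.push_back('\n');
        else if (c == 'r')  out.push_back('\r');
        else if (c == 't')  out.push_back('\t');
        else                out.push_back(c);  // \\, \" and anything else
    }
    return out;
}

int main() {
    // Prefix of a UserToken from the log: presumably tag 0x0a (field 1,
    // length-delimited), length 12, then the SID "root@builtin".
    std::string bytes = UnescapeCStyle("\\n\\014root@builtin");
    std::cout << bytes.size() << " bytes, SID: " << bytes.substr(2) << '\n';
}

Running this prints "14 bytes, SID: root@builtin", consistent with the tokens visible in the surrounding records.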
2025-04-10T11:19:47.439250Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.439282Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.439315Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642682150841490:2504] 2025-04-10T11:19:47.441525Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.441561Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:19:47.441575Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.441652Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.441844Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.441918Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.441952Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.442059Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.442508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.445611Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.445676Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642682150841490:2504] txid# 281474976710658 SEND to# [1:7491642682150841489:2503] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:49.326439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642690740776172:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.326454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642690740776164:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.326551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.326797Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] Handle TEvProposeTransaction 2025-04-10T11:19:49.326815Z node 1 :TX_PROXY DEBUG: actor# [1:7491642673560906311:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11: ... strator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:36.702139Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:36.702212Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:36.702580Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:36.702735Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:36.702817Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T11:22:36.703014Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T11:22:36.709805Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-10T11:22:36.709877Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124503:2586] txid# 281474976715661 SEND to# [59:7491643406040124502:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T11:22:36.903520Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Handle TEvProposeTransaction 2025-04-10T11:22:36.903556Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T11:22:36.903599Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7491643406040124523:2600] 2025-04-10T11:22:36.905940Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51342" 2025-04-10T11:22:36.906011Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:36.906032Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:36.906091Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7491643406040124523:2600] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:36.906512Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:36.906659Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:36.906740Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-10T11:22:36.906938Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 HANDLE EvClientConnected 2025-04-10T11:22:36.907585Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:22:36.910366Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-10T11:22:36.910436Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124523:2600] txid# 281474976715662 SEND to# [59:7491643406040124522:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-10T11:22:36.945909Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Handle TEvProposeTransaction 2025-04-10T11:22:36.945945Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-04-10T11:22:36.945985Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7491643406040124560:2623] 2025-04-10T11:22:36.949256Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51358" 2025-04-10T11:22:36.949369Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:36.949401Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:22:36.949468Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:22:36.949842Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:36.950000Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 
72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:22:36.950066Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-10T11:22:36.950274Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 HANDLE EvClientConnected 2025-04-10T11:22:36.958171Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-10T11:22:36.958241Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643406040124560:2623] txid# 281474976715663 SEND to# [59:7491643406040124559:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-10T11:22:37.004203Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Handle TEvProposeTransaction 2025-04-10T11:22:37.004233Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-04-10T11:22:37.004292Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7491643410335091887:2638] 2025-04-10T11:22:37.007616Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NiwiaWF0IjoxNzQ0Mjg0MTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.EG5LWzsc6FtiEZEKnZ9X-frkzyeHSlQR2ut2TmlkeqmfaoIAqJ27eGmDo8Pt7XSBM2P8Srfyc0Tj6m9N4rwOBTsgPe1im9mz5ZUNNeOSGoDyfY2uKXVU89XGCPuk-G4Ds5D8RMndKmnAzk_eol-rIOGCYav6BBeKc2zHOYZ6dkrl3lFxbwBtSkotFtF2a_rSZyxkFWfNG7_0TVJUKC2IA2YXN9XeCDDzuLm4qx444pfP_oKHatA4Z8PAHlXc2WrVyPgQPO7Itne0bIb2ferhPBdnu0LiBL50vC3CjgHszCPUelYohgVljDxd8XK3ukrm03Pql6YxNOUVUmhIpKCqoQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0NDMyNzM1NiwiaWF0IjoxNzQ0Mjg0MTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51388" 2025-04-10T11:22:37.007717Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T11:22:37.007746Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-10T11:22:37.007934Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T11:22:37.007988Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T11:22:37.008030Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 TEvNavigateKeySet requested from 
SchemeCache 2025-04-10T11:22:37.008354Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:22:37.008393Z node 59 :TX_PROXY ERROR: Actor# [59:7491643410335091887:2638] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-10T11:22:37.008502Z node 59 :TX_PROXY ERROR: Actor# [59:7491643410335091887:2638] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-10T11:22:37.008543Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643410335091887:2638] txid# 281474976715664 SEND to# [59:7491643410335091886:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T11:22:37.008830Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZTk1MmVlYmMtZmNmN2I4N2ItYmE2MjNlZTgtZGMyMDBlNw==, ActorId: [59:7491643406040124581:2361], ActorState: ExecuteState, TraceId: 01jrfq011vakww4yx5t86sd0rg, Create QueryResponse for error on request, msg: 2025-04-10T11:22:37.009057Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] Handle TEvExecuteKqpTransaction 2025-04-10T11:22:37.009087Z node 59 :TX_PROXY DEBUG: actor# [59:7491643388860254533:2113] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-10T11:22:37.253755Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7491643388860254310:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:22:37.253852Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284700.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284700.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284700.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284700.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284700.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284700.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283500.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284700.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284700.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283500.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283500.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283500.000000s;Name=;Codec=}; 2025-04-10T11:21:40.682028Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:40.772508Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:40.794772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:40.795032Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:40.802464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:40.802674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:40.802894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:40.803020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:40.803130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:40.803231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:40.803326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:40.803441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:40.803544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:40.803649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.803748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:40.803841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:40.832259Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:40.832556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:40.832600Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:40.832720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:40.832914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:40.832984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:40.833024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:40.833105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:40.833146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:40.833174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:40.833194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:40.833331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:40.833406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:40.833446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:40.833483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:40.833561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:40.833609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:40.833647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:40.833673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:40.833755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:40.833797Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:40.833824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:40.833862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:40.833886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:40.833913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:40.834243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-04-10T11:21:40.834331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-10T11:21:40.834463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=61; 2025-04-10T11:21:40.834577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-10T11:21:40.834767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:40.834843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:40.834883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:40.835136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:40.835199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.835233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:40.835398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:21:40.835442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
t_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:22:38.241232Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:38.241315Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:38.241387Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:38.241450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:38.241572Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:38.241855Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: {1000000007:max} at tablet 9437184 2025-04-10T11:22:38.242018Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:22:38.242235Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:38.242312Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:38.242839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:22:38.242951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:22:38.243530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1388:3393];trace_detailed=; 2025-04-10T11:22:38.244062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:22:38.244318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:22:38.244521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:38.244669Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:38.245038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:22:38.245172Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:38.245350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:38.245408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1388:3393] finished for tablet 9437184 2025-04-10T11:22:38.245928Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1387:3392];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284158243453,"name":"_full_task","f":1744284158243453,"d_finished":0,"c":0,"l":1744284158245488,"d":2035},"events":[{"name":"bootstrap","f":1744284158243724,"d_finished":982,"c":1,"l":1744284158244706,"d":982},{"a":1744284158245007,"name":"ack","f":1744284158245007,"d_finished":0,"c":0,"l":1744284158245488,"d":481},{"a":1744284158244984,"name":"processing","f":1744284158244984,"d_finished":0,"c":0,"l":1744284158245488,"d":504},{"name":"ProduceResults","f":1744284158244429,"d_finished":586,"c":2,"l":1744284158245385,"d":586},{"a":1744284158245388,"name":"Finish","f":1744284158245388,"d_finished":0,"c":0,"l":1744284158245488,"d":100}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:38.246034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1387:3392];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:22:38.246550Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1387:3392];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284158243453,"name":"_full_task","f":1744284158243453,"d_finished":0,"c":0,"l":1744284158246093,"d":2640},"events":[{"name":"bootstrap","f":1744284158243724,"d_finished":982,"c":1,"l":1744284158244706,"d":982},{"a":1744284158245007,"name":"ack","f":1744284158245007,"d_finished":0,"c":0,"l":1744284158246093,"d":1086},{"a":1744284158244984,"name":"processing","f":1744284158244984,"d_finished":0,"c":0,"l":1744284158246093,"d":1109},{"name":"ProduceResults","f":1744284158244429,"d_finished":586,"c":2,"l":1744284158245385,"d":586},{"a":1744284158245388,"name":"Finish","f":1744284158245388,"d_finished":0,"c":0,"l":1744284158246093,"d":705}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1388:3393]->[1:1387:3392] 2025-04-10T11:22:38.246672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:22:38.242917Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:22:38.246727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:22:38.246858Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl
>> TColumnShardTestSchema::HotTiers [GOOD]
>> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284690.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284690.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284690.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284690.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284690.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284690.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283490.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284690.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284690.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283490.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283490.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283490.000000s;Name=;Codec=}; 2025-04-10T11:21:30.744300Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:30.830640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:30.851736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:30.851946Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:30.857908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:30.858056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:30.858223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:30.858308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:30.858380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:30.858443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:30.858500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:30.858572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:30.858642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:30.858701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:30.858765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:30.858819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:30.881334Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:30.881612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:30.881654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:30.881797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:30.881945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:30.882016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:30.882056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:30.882183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:30.882241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:30.882276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:30.882302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:30.882435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:30.882519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:30.882559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:30.882587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:30.882668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:30.882722Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:30.882756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:30.882773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:30.882830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:30.882878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:30.882911Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:30.882943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:30.882966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:30.882999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:30.883350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-10T11:21:30.883432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-10T11:21:30.883497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-10T11:21:30.883559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-04-10T11:21:30.883658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:30.883697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:30.883726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:30.883846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:30.883883Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:30.883905Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:22:39.604495Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:39.604540Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:39.604588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:39.604629Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:39.604726Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:39.604925Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:22:39.605037Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:22:39.605160Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:39.605210Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:39.605590Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:22:39.605662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:22:39.606132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1976:3981];trace_detailed=; 2025-04-10T11:22:39.606514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:22:39.606723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:22:39.606870Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:39.606980Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:39.607258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:22:39.607371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-10T11:22:39.607474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:39.607513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1976:3981] finished for tablet 9437184 2025-04-10T11:22:39.607873Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1975:3980];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284159606075,"name":"_full_task","f":1744284159606075,"d_finished":0,"c":0,"l":1744284159607567,"d":1492},"events":[{"name":"bootstrap","f":1744284159606261,"d_finished":743,"c":1,"l":1744284159607004,"d":743},{"a":1744284159607233,"name":"ack","f":1744284159607233,"d_finished":0,"c":0,"l":1744284159607567,"d":334},{"a":1744284159607214,"name":"processing","f":1744284159607214,"d_finished":0,"c":0,"l":1744284159607567,"d":353},{"name":"ProduceResults","f":1744284159606799,"d_finished":407,"c":2,"l":1744284159607497,"d":407},{"a":1744284159607499,"name":"Finish","f":1744284159607499,"d_finished":0,"c":0,"l":1744284159607567,"d":68}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:39.607935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1975:3980];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:22:39.608272Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1975:3980];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284159606075,"name":"_full_task","f":1744284159606075,"d_finished":0,"c":0,"l":1744284159607973,"d":1898},"events":[{"name":"bootstrap","f":1744284159606261,"d_finished":743,"c":1,"l":1744284159607004,"d":743},{"a":1744284159607233,"name":"ack","f":1744284159607233,"d_finished":0,"c":0,"l":1744284159607973,"d":740},{"a":1744284159607214,"name":"processing","f":1744284159607214,"d_finished":0,"c":0,"l":1744284159607973,"d":759},{"name":"ProduceResults","f":1744284159606799,"d_finished":407,"c":2,"l":1744284159607497,"d":407},{"a":1744284159607499,"name":"Finish","f":1744284159607499,"d_finished":0,"c":0,"l":1744284159607973,"d":474}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1976:3981]->[1:1975:3980] 2025-04-10T11:22:39.608357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:22:39.605636Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:22:39.608401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:22:39.608495Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer >> TPersQueueTest::LOGBROKER_7820 [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn
>> BasicStatistics::SimpleGlobalIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::LOGBROKER_7820 [GOOD]
Test command err:
2025-04-10T11:18:42.517928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642402527172320:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.518006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.594361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642401411902650:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.594409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.779379Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.786384Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a61/r3tmp/tmpicLKH5/pdisk_1.dat 2025-04-10T11:18:43.108747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.122939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.123056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.123367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.123412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.132312Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.132463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.133074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63178, node 1 2025-04-10T11:18:43.330812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a61/r3tmp/yandex3Tcr3C.tmp 2025-04-10T11:18:43.330839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a61/r3tmp/yandex3Tcr3C.tmp 2025-04-10T11:18:43.331089Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a61/r3tmp/yandex3Tcr3C.tmp 2025-04-10T11:18:43.331277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.590325Z INFO: TTestServer started on Port 11045 GrpcPort 63178 TClient is connected to server localhost:11045 PQClient connected to localhost:63178 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:43.879161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:43.985586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:46.138735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642419707042580:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.138869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642419707042607:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.138897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.142507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:18:46.167427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642419707042609:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:18:46.247815Z node 1 :TX_PROXY ERROR: Actor# [1:7491642419707042707:2764] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:46.597770Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642419707042724:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.599702Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTA2ZGI5ZTEtNDdkYzk3NzgtMmNjZTMyNzAtYjdmMTRlOWQ=, ActorId: [1:7491642419707042577:2337], ActorState: ExecuteState, TraceId: 01jrfprzkn9qqhywp8ywzf7ns9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.601832Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.608293Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642418591772234:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.610395Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDIxYTNmZjQtZWVhYjQ1NzgtNmEzNGVlNmMtYTE4MDIwNzA=, ActorId: [2:7491642418591772217:2312], ActorState: ExecuteState, TraceId: 01jrfprzsb7vm91k9zwhxwg4p7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.610809Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.613929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.710421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.956472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:47.337336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfps0g28r97g5mv1matxk0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA2OGViYmItOTRlYzEzZmEtZDRlZDE3NWYtYmE3ZmVmOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491642424002010484:3109] 2025-04-10T11:18:47.518286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642402527172320:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.518376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:47.594641Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642401411902650:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.594717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:63178 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-10T11:18:53.264842Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-04-10T11:18:53.264869Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-04-10T11:18:53.264878Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2025-04-10T11:18:53.264895Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response with 0 topics 2025-04-10T11:18:53.264993Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, ... Take Data. Partition 0. Read: {20, 2} (663-663) 2025-04-10T11:22:37.430912Z :DEBUG: [] Take Data. Partition 0. Read: {20, 3} (664-664) 2025-04-10T11:22:37.430939Z :DEBUG: [] Take Data. Partition 0. Read: {20, 4} (665-665) 2025-04-10T11:22:37.431016Z :DEBUG: [] Take Data. Partition 0. Read: {20, 5} (666-666) 2025-04-10T11:22:37.431044Z :DEBUG: [] Take Data. Partition 0. Read: {20, 6} (667-667) 2025-04-10T11:22:37.431072Z :DEBUG: [] Take Data. Partition 0. Read: {20, 7} (668-668) 2025-04-10T11:22:37.431099Z :DEBUG: [] Take Data. Partition 0. Read: {20, 8} (669-669) 2025-04-10T11:22:37.431129Z :DEBUG: [] Take Data. Partition 0. Read: {20, 9} (670-670) 2025-04-10T11:22:37.431160Z :DEBUG: [] Take Data. Partition 0. Read: {20, 10} (671-671) 2025-04-10T11:22:37.431187Z :DEBUG: [] Take Data. Partition 0. Read: {20, 11} (672-672) 2025-04-10T11:22:37.431215Z :DEBUG: [] Take Data. Partition 0. Read: {20, 12} (673-673) 2025-04-10T11:22:37.431247Z :DEBUG: [] Take Data. Partition 0. Read: {20, 13} (674-674) 2025-04-10T11:22:37.431277Z :DEBUG: [] Take Data. Partition 0. Read: {20, 14} (675-675) 2025-04-10T11:22:37.431304Z :DEBUG: [] Take Data. Partition 0. Read: {20, 15} (676-676) 2025-04-10T11:22:37.431335Z :DEBUG: [] Take Data. Partition 0. Read: {20, 16} (677-677) 2025-04-10T11:22:37.431368Z :DEBUG: [] Take Data. Partition 0. Read: {21, 0} (678-678) 2025-04-10T11:22:37.431404Z :DEBUG: [] Take Data. Partition 0. Read: {21, 1} (679-679) 2025-04-10T11:22:37.431433Z :DEBUG: [] Take Data. Partition 0. Read: {21, 2} (680-680) 2025-04-10T11:22:37.431462Z :DEBUG: [] Take Data. Partition 0. Read: {22, 0} (681-681) 2025-04-10T11:22:37.431578Z :DEBUG: [] Take Data. Partition 0. Read: {22, 1} (682-682) 2025-04-10T11:22:37.431609Z :DEBUG: [] Take Data. Partition 0. Read: {22, 2} (683-683) 2025-04-10T11:22:37.431636Z :DEBUG: [] Take Data. Partition 0. Read: {22, 3} (684-684) 2025-04-10T11:22:37.431665Z :DEBUG: [] Take Data. Partition 0. Read: {22, 4} (685-685) 2025-04-10T11:22:37.431696Z :DEBUG: [] Take Data. Partition 0. Read: {22, 5} (686-686) 2025-04-10T11:22:37.431728Z :DEBUG: [] Take Data. Partition 0. Read: {23, 0} (687-687) 2025-04-10T11:22:37.431756Z :DEBUG: [] Take Data. Partition 0. Read: {23, 1} (688-688) 2025-04-10T11:22:37.431780Z :DEBUG: [] Take Data. Partition 0. Read: {23, 2} (689-689) 2025-04-10T11:22:37.431812Z :DEBUG: [] Take Data. Partition 0. Read: {23, 3} (690-690) 2025-04-10T11:22:37.431840Z :DEBUG: [] Take Data. Partition 0. Read: {23, 4} (691-691) 2025-04-10T11:22:37.431867Z :DEBUG: [] Take Data. Partition 0. Read: {23, 5} (692-692) 2025-04-10T11:22:37.431896Z :DEBUG: [] Take Data. Partition 0. Read: {23, 6} (693-693) 2025-04-10T11:22:37.431927Z :DEBUG: [] Take Data. Partition 0. Read: {23, 7} (694-694) 2025-04-10T11:22:37.431956Z :DEBUG: [] Take Data. Partition 0. 
Read: {23, 8} (695-695) 2025-04-10T11:22:37.431982Z :DEBUG: [] Take Data. Partition 0. Read: {23, 9} (696-696) 2025-04-10T11:22:37.432009Z :DEBUG: [] Take Data. Partition 0. Read: {23, 10} (697-697) 2025-04-10T11:22:37.432041Z :DEBUG: [] Take Data. Partition 0. Read: {23, 11} (698-698) 2025-04-10T11:22:37.432071Z :DEBUG: [] Take Data. Partition 0. Read: {23, 12} (699-699) 2025-04-10T11:22:38.424661Z :DEBUG: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] [null] Commit offsets [496, 650). Partition stream id: 1 2025-04-10T11:22:38.424880Z :DEBUG: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] [null] The application data is transferred to the client. Number of messages 154, size 308000 bytes 2025-04-10T11:22:38.425170Z :DEBUG: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] [null] Commit offsets [650, 700). Partition stream id: 1 2025-04-10T11:22:38.425550Z :DEBUG: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] [null] The application data is transferred to the client. Number of messages 50, size 100000 bytes 2025-04-10T11:22:38.426051Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 2 } } } 2025-04-10T11:22:38.426288Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 commit request from client for 2 in TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-10T11:22:38.426332Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 commit request from 2 to 2 in TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-10T11:22:38.426395Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) committing to position 700 prev 496 end 700 by cookie 2 2025-04-10T11:22:38.426590Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T11:22:38.426677Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-10T11:22:38.426925Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user offset is set to 700 (startOffset 0) session shared/user_25_1_8048409263783513134_v1 2025-04-10T11:22:38.427230Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:22:38.427287Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:22:38.427331Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:22:38.427389Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:22:38.427414Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:22:38.427434Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:22:38.427468Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:22:38.427518Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, 
Partition: 0, State: StateIdle] =========================== 2025-04-10T11:22:38.427586Z node 25 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:22:38.430972Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user readTimeStamp for offset 700 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:22:38.431132Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:22:38.431260Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-10T11:22:38.431497Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-10T11:22:38.431566Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 700 endOffset 700 with cookie 2 2025-04-10T11:22:38.431619Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 700 2025-04-10T11:22:38.432324Z :DEBUG: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] [null] Committed response: { cookies { assign_id: 1 partition_cookie: 2 } } 2025-04-10T11:22:39.424772Z :INFO: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] Closing read session. Close timeout: 10.000000s 2025-04-10T11:22:39.424864Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:topic1:0:1:699:700 2025-04-10T11:22:39.424925Z :INFO: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 8150 BytesRead: 1400000 MessagesRead: 700 BytesReadCompressed: 1400000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:22:39.426236Z :INFO: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] Closing read session. Close timeout: 0.000000s 2025-04-10T11:22:39.426295Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:topic1:0:1:699:700 2025-04-10T11:22:39.426350Z :INFO: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 8152 BytesRead: 1400000 MessagesRead: 700 BytesReadCompressed: 1400000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:22:39.426489Z :NOTICE: [] [] [3190e92d-d245b5f-492f9c0c-e2720fe5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:22:39.426657Z node 25 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 grpc read done: success# 0, data# { } 2025-04-10T11:22:39.426712Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 grpc read failed 2025-04-10T11:22:39.426746Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 grpc closed 2025-04-10T11:22:39.426790Z node 25 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_25_1_8048409263783513134_v1 is DEAD 2025-04-10T11:22:39.427188Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_25_1_8048409263783513134_v1 2025-04-10T11:22:39.427276Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [25:7491643387456105289:2515] destroyed 2025-04-10T11:22:39.427364Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_25_1_8048409263783513134_v1 2025-04-10T11:22:39.428264Z node 26 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [25:7491643387456105282:2510] disconnected; active server actors: 1 2025-04-10T11:22:39.428311Z node 26 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [25:7491643387456105282:2510] client user disconnected session shared/user_25_1_8048409263783513134_v1 2025-04-10T11:22:39.864292Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-10T11:22:39.895530Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:22:39.941234Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:22:40.008655Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:22:40.081358Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:22:40.210889Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710697, task: 1, CA Id [25:7491643421815844436:2616]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck
>> TColumnShardTestSchema::ExportWithLostAnswer
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD]
Test command err:
2025-04-10T11:20:26.491781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:26.492222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:26.492303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dfd/r3tmp/tmpHW6nvh/pdisk_1.dat 2025-04-10T11:20:27.009062Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62791, node 1 2025-04-10T11:20:27.605417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:27.605474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:27.605534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:27.606088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:27.612354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:27.709072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:27.709818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:27.726867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14400 2025-04-10T11:20:28.318364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:31.802715Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:31.849552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:31.849745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:31.880210Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:31.882834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.132363Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.133047Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.133749Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.133900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.134612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.134748Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.134834Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.134918Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.135001Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.311740Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.311857Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.327569Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.502102Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:32.571004Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:32.571107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:32.622312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:32.623483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:32.623689Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:32.623750Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:32.623826Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:32.623911Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:32.623965Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:32.624017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:32.624405Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:32.718137Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.718307Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1880:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.726001Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1894:2611] 2025-04-10T11:20:32.733643Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1925:2626] 2025-04-10T11:20:32.734975Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1925:2626], schemeshard id = 72075186224037897 2025-04-10T11:20:32.741942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:32.762465Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:32.762532Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:32.762605Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:32.775032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:32.782785Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:32.782945Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:32.995491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:33.199379Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:33.271501Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:34.577225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2239:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:34.578251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:34.700769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.149001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2452:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.149184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.151174Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2457:3120]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:35.151459Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:35.151589Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2459:3122] 2025-04-10T11:20:35.152722Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2459:3122] 2025-04-10T11:20:35.153447Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2460:2930] 2025-04-10T11:20:35.153741Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2459:3122], server id = [2:2460:2930], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:35.154176Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2460:2930], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:35.154277Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:35.154561Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:35.154647Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2457:3120], StatRequests.size() = 1 2025-04-10T11:20:35.163605Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2493:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:35.163789Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:20:35.163837Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2493:3131], StatRequests.size() = 1 2025-04-10T11:20:35.164082Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2495:3133]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:35.164215Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:20:35.164244Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2495:3133], StatRequests.size() = 1 2025-04-10T11:20:35.227658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2500:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.227789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.228244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2505:3143], DatabaseI ... 0T11:22:35.249693Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:35.249739Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:22:35.802919Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6785:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:35.803228Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:22:35.803278Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6785:4782], StatRequests.size() = 1 2025-04-10T11:22:36.346220Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:22:36.346615Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:22:36.346897Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:36.401522Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-10T11:22:36.401593Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 235.000000s, at schemeshard: 72075186224037897 2025-04-10T11:22:36.401860Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-10T11:22:36.415998Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:22:36.779883Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6820:4800]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:36.780170Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:22:36.780210Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6820:4800], StatRequests.size() = 1 2025-04-10T11:22:37.288850Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:37.288938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:37.288997Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:22:37.289046Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:37.289436Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:22:37.335464Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:37.339926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6843:4819], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:37.340035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6853:4824], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:37.340161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:37.355293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:22:37.418598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6857:4827], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:22:37.560058Z node 2 :TX_PROXY ERROR: Actor# [2:6955:4875] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:37.602501Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6984:4890]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:37.602719Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:22:37.602762Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6984:4890], StatRequests.size() = 1 2025-04-10T11:22:37.704783Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjhjN2YwN2ItYmM3ZmI3YzMtN2QxYzI5YjAtNjM2NmY2YTE=, TxId: 2025-04-10T11:22:37.704851Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjhjN2YwN2ItYmM3ZmI3YzMtN2QxYzI5YjAtNjM2NmY2YTE=, TxId: 2025-04-10T11:22:37.705360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:37.718626Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:37.718683Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:38.009253Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7016:4909]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:38.009631Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:22:38.009682Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7016:4909], StatRequests.size() = 1 2025-04-10T11:22:38.830274Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7055:4931]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:38.830546Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:22:38.830588Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7055:4931], StatRequests.size() = 1 2025-04-10T11:22:39.293638Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:22:39.304648Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:39.304707Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:39.304745Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 6] is data table. 2025-04-10T11:22:39.304778Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-10T11:22:39.305078Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:22:39.307641Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:39.320644Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTUzNGVmZDctZjcwZWI0YzMtNjkwMTEwODMtNDk3ODcyZmQ=, TxId: 2025-04-10T11:22:39.320705Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTUzNGVmZDctZjcwZWI0YzMtNjkwMTEwODMtNDk3ODcyZmQ=, TxId: 2025-04-10T11:22:39.321163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:39.335446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-10T11:22:39.335526Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:39.694710Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7123:4970]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:39.695053Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:22:39.695099Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7123:4970], StatRequests.size() = 1 2025-04-10T11:22:40.614493Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7166:4994]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:40.614766Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-10T11:22:40.614804Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7166:4994], StatRequests.size() = 1 2025-04-10T11:22:41.101536Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:22:41.101725Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:41.102112Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:22:41.112935Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:41.112997Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:41.113026Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:22:41.113049Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:22:41.113306Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:22:41.115400Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:41.124759Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTYxNzY4ZWYtZTgyMjlmNTgtZDJmMjNiNjgtODA0NDRjMGI=, TxId: 2025-04-10T11:22:41.124812Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTYxNzY4ZWYtZTgyMjlmNTgtZDJmMjNiNjgtODA0NDRjMGI=, TxId: 2025-04-10T11:22:41.125199Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:41.139145Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:22:41.139197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:41.511867Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7231:5030]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:41.512179Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-10T11:22:41.512222Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7231:5030], StatRequests.size() = 1 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> TColumnShardTestSchema::RebootHotTiersRevCompression >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144284704.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284704.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284704.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284704.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284704.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284704.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284704.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283504.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284704.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284704.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283504.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283504.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283504.000000s;Name=;Codec=}; 2025-04-10T11:21:45.111152Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:45.195598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:45.219918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:45.220220Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:45.228191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:45.228393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:45.228631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:45.228762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:45.228873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:45.228973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:45.229090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:45.229203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:45.229324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:45.229438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:45.229546Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:45.229645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:45.258036Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:45.258421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:45.258482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:45.258655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:45.258807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:45.258886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:45.258926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:45.259013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:45.259076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:45.259118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:45.259150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:45.259300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:45.259390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:45.259435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:45.259465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:45.259551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:45.259601Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:45.259644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:45.259673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:45.259753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:45.259798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:45.259825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:45.259878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:45.259915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:45.259954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:45.260398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-10T11:21:45.260483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:21:45.260559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-10T11:21:45.260645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-10T11:21:45.260811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:45.260887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:45.260929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:45.261144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:45.261191Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:45.261220Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:22:49.959180Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:22:49.959242Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:49.959293Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:49.959332Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:49.959421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:49.959616Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:22:49.959743Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:22:49.959878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:49.959931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:22:49.960344Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:22:49.960425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:22:49.960821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1962:3967];trace_detailed=; 2025-04-10T11:22:49.961158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:22:49.961406Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:22:49.961549Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:49.961656Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:49.961945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:22:49.962033Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-10T11:22:49.962165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:49.962207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1962:3967] finished for tablet 9437184 2025-04-10T11:22:49.962565Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1961:3966];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284169960769,"name":"_full_task","f":1744284169960769,"d_finished":0,"c":0,"l":1744284169962264,"d":1495},"events":[{"name":"bootstrap","f":1744284169960919,"d_finished":762,"c":1,"l":1744284169961681,"d":762},{"a":1744284169961925,"name":"ack","f":1744284169961925,"d_finished":0,"c":0,"l":1744284169962264,"d":339},{"a":1744284169961913,"name":"processing","f":1744284169961913,"d_finished":0,"c":0,"l":1744284169962264,"d":351},{"name":"ProduceResults","f":1744284169961481,"d_finished":424,"c":2,"l":1744284169962193,"d":424},{"a":1744284169962196,"name":"Finish","f":1744284169962196,"d_finished":0,"c":0,"l":1744284169962264,"d":68}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:22:49.962623Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1961:3966];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:22:49.962907Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1961:3966];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284169960769,"name":"_full_task","f":1744284169960769,"d_finished":0,"c":0,"l":1744284169962661,"d":1892},"events":[{"name":"bootstrap","f":1744284169960919,"d_finished":762,"c":1,"l":1744284169961681,"d":762},{"a":1744284169961925,"name":"ack","f":1744284169961925,"d_finished":0,"c":0,"l":1744284169962661,"d":736},{"a":1744284169961913,"name":"processing","f":1744284169961913,"d_finished":0,"c":0,"l":1744284169962661,"d":748},{"name":"ProduceResults","f":1744284169961481,"d_finished":424,"c":2,"l":1744284169962193,"d":424},{"a":1744284169962196,"name":"Finish","f":1744284169962196,"d_finished":0,"c":0,"l":1744284169962661,"d":465}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1962:3967]->[1:1961:3966] 2025-04-10T11:22:49.962973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:22:49.960398Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:22:49.963009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:22:49.963095Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] >> TColumnShardTestSchema::OneColdTier >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284718.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284718.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284718.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284718.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284718.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284718.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144284718.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284718.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283518.000000s;Name=;Codec=}; 2025-04-10T11:21:58.779748Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:58.873116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:58.890820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:58.891063Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:58.897800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:58.897990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:58.898228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:58.898350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:58.898446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:58.898585Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:58.898703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:58.898813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:58.898917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:58.899025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:58.899120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:58.899218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:58.926680Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:58.927066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:58.927144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:58.927310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:58.927481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:58.927567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:58.927611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:58.927700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:58.927758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:58.927798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:58.927826Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:58.927981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:58.928044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:58.928086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:58.928113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:58.928194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:58.928248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:58.928296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:58.928350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:58.928437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:58.928483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:58.928513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:58.928561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:58.928603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:58.928633Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:58.929035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-10T11:21:58.929142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:21:58.929216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-10T11:21:58.929327Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=66; 2025-04-10T11:21:58.929521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:58.929576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:58.929609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:58.929792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:58.929835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:58.929883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:58.930048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... fline=common_data.cpp:29;EXECUTE:finishLoadingTime=627; 2025-04-10T11:22:52.886412Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51008; 2025-04-10T11:22:52.899668Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13174; 2025-04-10T11:22:52.912481Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11737; 2025-04-10T11:22:52.912594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12827; 2025-04-10T11:22:52.912765Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=99; 2025-04-10T11:22:52.912891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2025-04-10T11:22:52.913031Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=90; 2025-04-10T11:22:52.913153Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 2025-04-10T11:22:52.928221Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14976; 
2025-04-10T11:22:52.946180Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17822; 2025-04-10T11:22:52.946325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2025-04-10T11:22:52.946404Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=32; 2025-04-10T11:22:52.946454Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-10T11:22:52.946499Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-10T11:22:52.946540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-10T11:22:52.946619Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-04-10T11:22:52.946669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-10T11:22:52.946759Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2025-04-10T11:22:52.946811Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-10T11:22:52.946878Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-10T11:22:52.946971Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2025-04-10T11:22:52.947294Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=262; 2025-04-10T11:22:52.947346Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=120053; 2025-04-10T11:22:52.947525Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:22:52.947634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:22:52.947686Z node 1 :TX_COLUMNSHARD 
INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:22:52.947750Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:22:52.966624Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:22:52.966781Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:52.966849Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:52.966924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:22:52.966988Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:22:52.967074Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:52.967130Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:52.967172Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:52.967280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:52.968018Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:52.968116Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2593:4495];tablet_id=9437184;parent=[1:2543:4452];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-04-10T11:22:52.968929Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:22:52.969358Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:22:52.969397Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:22:52.969422Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:22:52.969468Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:22:52.969529Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:22:52.969577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:22:52.969637Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:22:52.969683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:22:52.969732Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:52.969770Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:22:52.969856Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:22:52.971532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1; 2025-04-10T11:22:52.972648Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2543:4452];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> KqpScanLogs::WideCombine-EnabledLogs [GOOD] >> KqpScanLogs::GraceJoin-EnabledLogs [GOOD] >> TKeyValueTracingTest::WriteSmall ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine-EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001800/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 23145, MsgBus: 20542 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001800/r3tmp/tmp9v5tpb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23145, node 1 TClient is connected to server localhost:20542 TClient is connected to server localhost:20542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '505) '('"_id" '"c062a890-6d52576b-a406fac-f3f9e110") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (WideCombiner (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (WideCombiner (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1265) '('"_id" '"36144f05-54bb76b6-96a384c7-27dbc398")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1533) '('"_id" '"dafb0055-375c3fa6-2eb662db-ea3b8e7c")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin-EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/00181c/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 10152, MsgBus: 30950 test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/2gt4/00181c/r3tmp/tmppQBlgJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10152, node 1 TClient is connected to server localhost:30950 TClient is connected to server localhost:30950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '776) '('"_id" '"d82cab4b-fe31bc7b-acc06a02-85ba442c") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '674) '('"_id" '"22e79a86-a9f0661e-c51f11f0-f3156189") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '686) '('"_id" '"c424855b-605c844-f5c07b6d-6b036a6f")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/0017ea/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk8 Trying to start YDB, gRPC: 18856, MsgBus: 13384 2025-04-10T11:18:25.509881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642330032258242:2061];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:18:25.509956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017ea/r3tmp/tmpLkHyr2/pdisk_1.dat 2025-04-10T11:18:25.896234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:25.948527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:25.948632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:25.949979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18856, node 1 2025-04-10T11:18:26.060407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:26.060432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.060437Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.060557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13384 TClient is connected to server localhost:13384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:26.865916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:26.910110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.089269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.250655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.321358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:28.913321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642342917161908:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:28.913433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.274911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.345522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.372128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.402448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.427349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.458959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.505551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347212129714:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.505659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.505894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347212129719:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.510809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.522402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642347212129721:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:29.620629Z node 1 :TX_PROXY ERROR: Actor# [1:7491642347212129778:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:30.512062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642330032258242:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:30.512278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:40.886335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:40.886376Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"f7ec0857-6e6e55cc-fe6ccd5f-eba9a747") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"d3be69c2-eda0a2f8-69ea5e67-2155c749") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"dc281081-78357e22-8232b4dd-a95d03fb")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TKeyValueTracingTest::WriteHuge |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001811/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 3818, MsgBus: 5613 2025-04-10T11:18:25.501875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642330208506308:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:25.502889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001811/r3tmp/tmps97SbM/pdisk_1.dat 2025-04-10T11:18:25.885838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:25.932613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:25.932710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3818, node 1 2025-04-10T11:18:25.934340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:26.064402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:18:26.064424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.064434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.064550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5613 TClient is connected to server localhost:5613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:26.841790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:26.883072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.022636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:27.201734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:18:27.290482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:18:28.934846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642343093409960:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:28.934986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.214685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.241212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.265397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.295112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.324959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.358740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.400985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347388377766:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.401073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.401325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642347388377771:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.404978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.413873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642347388377773:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:29.516165Z node 1 :TX_PROXY ERROR: Actor# [1:7491642347388377828:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:30.502099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642330208506308:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:30.502199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:40.870697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:40.870743Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"3e4da16-9b2c8c87-b115dee1-c16bf103") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"ad55aaf6-4dcf6e3b-518a93e8-46b9b9f8") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"3931b380-5fffbbb8-72ec99f6-1059b15a")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-04-10T11:22:57.932715Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7491643498439621862:7418], blobId: 0, bytes: 1401088 2025-04-10T11:22:57.932882Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. Error: [TEvError] File size limit exceeded: 1/0Mb 2025-04-10T11:22:57.933326Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7491643498439621862:7418], blobId: 1, bytes: 84 2025-04-10T11:22:57.933380Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7491643498439621862:7418], blobId: 2, bytes: 2402376 2025-04-10T11:22:57.944761Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. 
From: [1:7491643498439621862:7418], blobId: 3, bytes: 144 2025-04-10T11:22:57.947383Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491643498439621853:4606], TxId: 281474976710971, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=M2E2MWU2OTAtZWQ1MTVkOWItOThiMGI0NjctMTBiM2MyYmI=. TraceId : 01jrfq0msjbyz8nh92g7zhnepg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
<main>: Error: [Compute spilling][TEvError] File size limit exceeded: 1/0Mb }. 2025-04-10T11:22:57.947668Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491643498439621854:4607], TxId: 281474976710971, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E2MWU2OTAtZWQ1MTVkOWItOThiMGI0NjctMTBiM2MyYmI=. TraceId : 01jrfq0msjbyz8nh92g7zhnepg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
<main>: Error: Terminate execution }. 2025-04-10T11:22:57.961472Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2E2MWU2OTAtZWQ1MTVkOWItOThiMGI0NjctMTBiM2MyYmI=, ActorId: [1:7491643498439621834:4600], ActorState: ExecuteState, TraceId: 01jrfq0msjbyz8nh92g7zhnepg, Create QueryResponse for error on request, msg: >> TKeyValueTracingTest::WriteSmall [FAIL] >> BasicStatistics::TwoTables [GOOD] >> KqpScanLogs::WideCombine+EnabledLogs [GOOD] >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::WriteHuge [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-04-10T11:20:31.315722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:31.316155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:31.316296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d30/r3tmp/tmpZttb7B/pdisk_1.dat 2025-04-10T11:20:31.706646Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29272, node 1 2025-04-10T11:20:31.991295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:31.991360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:31.991398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:31.991853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:31.994434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:32.084673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.084805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.098771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17203 2025-04-10T11:20:32.670885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:36.222462Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:36.268399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:36.268491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:36.298738Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:36.300819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:36.537705Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.538243Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.538736Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.538856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.539103Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.539192Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.539297Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.539362Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.539436Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:36.718520Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:36.718637Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:36.732872Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:36.876358Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:36.937810Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:36.937930Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:36.973344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:36.973886Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:36.974109Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:36.974194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:36.974249Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:36.974306Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:36.974365Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:36.974414Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:36.974935Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:37.002835Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:37.004874Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:37.017964Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:37.018042Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:37.018114Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:37.021339Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:37.021456Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:37.038847Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:37.039483Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:37.066944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:37.074535Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:37.074708Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:37.281739Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:37.447564Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:37.524509Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:38.630121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2245:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.630297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.646971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:38.965673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2383:3112], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.965847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.967267Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2388:3116]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:38.967464Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:38.967557Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2390:3118] 2025-04-10T11:20:38.967623Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2390:3118] 2025-04-10T11:20:38.968257Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2391:2877] 2025-04-10T11:20:38.968531Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2390:3118], server id = [2:2391:2877], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:38.968716Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2391:2877], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:38.968778Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:38.969020Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:38.969103Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2388:3116], StatRequests.size() = 1 2025-04-10T11:20:38.993820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2395:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.993951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.994389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2400:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:39.001370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:20:39.226586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:20:39.226679Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:20:39.362688Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2390:3118], schemeshard count = 1 2025-04-10T11:20:39.832320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... rId = [2:6702:4773], StatRequests.size() = 1 2025-04-10T11:22:51.886973Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:22:51.887355Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:22:51.887540Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:51.941470Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-10T11:22:51.941541Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 225.000000s, at schemeshard: 72075186224037897 2025-04-10T11:22:51.941878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-10T11:22:51.966498Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:22:52.534120Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6735:4789]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:52.534382Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:22:52.534413Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6735:4789], StatRequests.size() = 1 2025-04-10T11:22:53.229833Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:53.229918Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:53.229976Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-10T11:22:53.230032Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:22:53.230426Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:22:53.244812Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:53.248932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6760:4810], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:53.249075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6770:4815], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:53.249648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:22:53.264100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:22:53.325238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6774:4818], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:22:53.509705Z node 2 :TX_PROXY ERROR: Actor# [2:6870:4864] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:22:53.554798Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6899:4879]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:53.555122Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:22:53.555181Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6899:4879], StatRequests.size() = 1 2025-04-10T11:22:53.679125Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWZlYmE0NWQtMTRmMTk3OWYtYzRhOGJkZTQtZmJhMTI5YTM=, TxId: 2025-04-10T11:22:53.679224Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWZlYmE0NWQtMTRmMTk3OWYtYzRhOGJkZTQtZmJhMTI5YTM=, TxId: 2025-04-10T11:22:53.679668Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:53.693516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-10T11:22:53.693605Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:54.200018Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6934:4898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:54.200358Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:22:54.200409Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6934:4898], StatRequests.size() = 1 2025-04-10T11:22:55.487253Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6975:4920]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:55.487520Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:22:55.487553Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6975:4920], StatRequests.size() = 1 2025-04-10T11:22:56.138510Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:22:56.149429Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:56.149502Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:56.149545Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:22:56.149581Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:56.149890Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:22:56.152921Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:56.164634Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-10T11:22:54.000000Z 2025-04-10T11:22:56.165065Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTk0Njk0OTEtOTRiNGEwN2YtZDg3ODBjNmYtNDViZThhODM=, TxId: 2025-04-10T11:22:56.165115Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTk0Njk0OTEtOTRiNGEwN2YtZDg3ODBjNmYtNDViZThhODM=, TxId: 2025-04-10T11:22:56.165572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:56.179376Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:56.179431Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:56.740092Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7041:4959]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:56.740368Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:22:56.740401Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7041:4959], StatRequests.size() = 1 2025-04-10T11:22:58.048222Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7084:4983]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:58.048533Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-10T11:22:58.048595Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7084:4983], StatRequests.size() = 1 2025-04-10T11:22:58.713020Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:22:58.713333Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:22:58.713588Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:58.724609Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:58.724678Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:58.724724Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:22:58.724762Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:22:58.725124Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:22:58.727849Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:22:58.737914Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTdkM2NhMmMtNmM2YmYzZjMtN2IwN2M0NDAtNWNlZTRkMTQ=, TxId: 2025-04-10T11:22:58.737967Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTdkM2NhMmMtNmM2YmYzZjMtN2IwN2M0NDAtNWNlZTRkMTQ=, TxId: 2025-04-10T11:22:58.738348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:22:58.753119Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:22:58.753182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:22:59.332726Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7145:5017]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:59.333019Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-10T11:22:59.333066Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7145:5017], StatRequests.size() = 1 2025-04-10T11:22:59.333857Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7147:5019]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:59.337391Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-04-10T11:22:59.337458Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7147:5019], StatRequests.size() = 1 >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine+EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001818/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 18840, MsgBus: 25123 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001818/r3tmp/tmpcQTKWv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18840, node 1 TClient is connected to server localhost:25123 TClient is connected to server localhost:25123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
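Each statistics traversal above runs the same parameterized cleanup statement through TQueryBase before TTxFinishTraversal completes. The statement from the RunDataQuery records, reformatted for readability (the query text is verbatim from the log; the parameter values are bound by the statistics service per traversal, with the log showing traversals over LocalPathId 3, 4 and 5 under owner 72075186224037897):

    -- Traversal cleanup statement as logged by RunDataQuery above (verbatim text, reformatted).
    -- $owner_id and $local_path_id are bound by the statistics service per traversal;
    -- they are not hard-coded in the query.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;

    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id
      AND local_path_id = $local_path_id;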
( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '505) '('"_id" '"e573862f-316e5d1c-2b27f227-f8ad65bd") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (WideCombiner (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (WideCombiner (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1265) '('"_id" '"b01b07bd-6a8bea20-a5a24620-542772a3")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1533) '('"_id" '"76e5244-f41fc8b0-cc3d566d-8fdb65a6")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) >> KqpScanLogs::GraceJoin+EnabledLogs [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284732.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284732.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284732.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284732.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283532.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283532.000000s;Name=;Codec=}; 2025-04-10T11:22:13.714671Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:13.797251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:13.820921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:13.821186Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:13.828852Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:13.829056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:13.829278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:13.829416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:13.829529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:13.829647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:13.829761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:13.829872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:13.829976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:13.830087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:13.830210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:13.830316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:13.858885Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:13.859200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:13.859251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:13.859437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:13.859601Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:13.859683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:13.859721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:13.859805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:13.859858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:13.859895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:13.859922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:13.860076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:13.860136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:13.860172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:13.860200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:13.860297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:13.860349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:13.860391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:13.860421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:13.860492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:13.860535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:13.860567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
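The WaitEmptyAfter/Tiers/TTL descriptors at the head of this test's output (Tiers={{Column=timestamp;EvictAfter=...;Name=cold;...}}) drive eviction of rows by the timestamp column to a tier named cold. A hedged YQL sketch of a comparable user-facing TTL setting follows; the table path and the 30-day interval are illustrative assumptions, and ForgetWithLostAnswer itself installs its tiers through internal test helpers rather than this statement:

    -- Illustrative sketch only: TTL-based eviction keyed on a timestamp column.
    -- Table path and interval are assumptions; the test configures tiers
    -- programmatically, not via ALTER TABLE.
    ALTER TABLE `/Root/olapStore/olapTable`
    SET (TTL = Interval("P30D") ON timestamp);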
2025-04-10T11:22:13.860612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:13.860647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:13.860682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:13.861075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:22:13.861154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-10T11:22:13.861222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-10T11:22:13.861308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-10T11:22:13.861444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:13.861496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:13.861527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:13.861724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:13.861765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:13.861806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:13.861955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:13.861997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:13.862024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:13.862208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
:"Finish","f":1744284181109967,"d_finished":0,"c":0,"l":1744284181110604,"d":637},{"name":"task_result","f":1744284180703435,"d_finished":153058,"c":28,"l":1744284181106588,"d":153058}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1280:3287]->[1:1279:3286] 2025-04-10T11:23:01.111099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:00.695935Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-10T11:23:01.111174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:01.111439Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:01.112837Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-10T11:23:01.113046Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-04-10T11:23:01.113157Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:01.113311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:01.113383Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:01.113803Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:01.113904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:01.114346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1296:3303];trace_detailed=; 2025-04-10T11:23:01.114689Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:01.114866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:01.115001Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:01.115113Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:01.115339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:01.115427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
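The program parsed above is a bare single-column projection: FetchOriginalData for column id 1 (timestamp), AssembleOriginalData, then Projection. In user terms the scan reads just that one column; a hedged YQL rendering follows, with the table name assumed because this excerpt identifies the table only by tablet id 9437184:

    -- Hedged equivalent of the logged scan program (Projection over column id 1).
    -- The table name is an assumption not present in the log.
    SELECT timestamp
    FROM `/Root/olapStore/olapTable`;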
2025-04-10T11:23:01.115557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:01.115612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1296:3303] finished for tablet 9437184 2025-04-10T11:23:01.115955Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1295:3302];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284181114293,"name":"_full_task","f":1744284181114293,"d_finished":0,"c":0,"l":1744284181115669,"d":1376},"events":[{"name":"bootstrap","f":1744284181114455,"d_finished":689,"c":1,"l":1744284181115144,"d":689},{"a":1744284181115322,"name":"ack","f":1744284181115322,"d_finished":0,"c":0,"l":1744284181115669,"d":347},{"a":1744284181115309,"name":"processing","f":1744284181115309,"d_finished":0,"c":0,"l":1744284181115669,"d":360},{"name":"ProduceResults","f":1744284181114937,"d_finished":441,"c":2,"l":1744284181115597,"d":441},{"a":1744284181115600,"name":"Finish","f":1744284181115600,"d_finished":0,"c":0,"l":1744284181115669,"d":69}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:01.116019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1295:3302];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:01.116336Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1295:3302];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284181114293,"name":"_full_task","f":1744284181114293,"d_finished":0,"c":0,"l":1744284181116054,"d":1761},"events":[{"name":"bootstrap","f":1744284181114455,"d_finished":689,"c":1,"l":1744284181115144,"d":689},{"a":1744284181115322,"name":"ack","f":1744284181115322,"d_finished":0,"c":0,"l":1744284181116054,"d":732},{"a":1744284181115309,"name":"processing","f":1744284181115309,"d_finished":0,"c":0,"l":1744284181116054,"d":745},{"name":"ProduceResults","f":1744284181114937,"d_finished":441,"c":2,"l":1744284181115597,"d":441},{"a":1744284181115600,"name":"Finish","f":1744284181115600,"d_finished":0,"c":0,"l":1744284181116054,"d":454}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1296:3303]->[1:1295:3302] 2025-04-10T11:23:01.116405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:01.113874Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:01.116459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:01.116551Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin+EnabledLogs [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/001822/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 20811, MsgBus: 27017 test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001822/r3tmp/tmpRbBohw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20811, node 1 TClient is connected to server localhost:27017 TClient is connected to server localhost:27017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '776) '('"_id" '"bbd4a7ee-776a632d-3e706d49-f4e37842") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '674) '('"_id" '"66f0de49-c6cfdad4-7dea207f-caed518a") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '686) '('"_id" '"941a0ac7-9669b2b6-1cc7a166-a315c444")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TKeyValueTracingTest::ReadSmall [FAIL] >> TReplicaTest::UpdateWithoutHandshake >> TReplicaTest::Handshake >> TReplicaTest::Unsubscribe >> TReplicaTest::Subscribe >> TReplicaTest::Update >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::Handshake [GOOD] >> 
TReplicaTest::DoubleUnsubscribe >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::DoubleDelete [GOOD] >> TReplicaTest::SyncVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-04-10T11:23:03.737804Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:03.737867Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:03.742456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:03.742526Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:03.748862Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:03.749039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-10T11:23:03.749136Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.749266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-10T11:23:03.749363Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-10T11:23:03.749428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-10T11:23:03.749488Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.749598Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-10T11:23:03.749648Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:03.991883Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:03.991962Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:03.992071Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:03.992099Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:03.992144Z node 2 :SCHEME_BOARD_REPLICA 
INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:03.992198Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-04-10T11:23:03.740458Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:03.740530Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:03.740640Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-10T11:23:03.741672Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-10T11:23:03.743819Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.743957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-10T11:23:03.743995Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.744118Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:03.744159Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:03.749075Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:03.750124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-04-10T11:23:03.750171Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-04-10T11:23:03.750228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:03.750253Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:03.750283Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:04.007615Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-04-10T11:23:03.744040Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:03.744125Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-04-10T11:23:03.744226Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-10T11:23:03.744260Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-10T11:23:03.744374Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.745264Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-10T11:23:03.746365Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-10T11:23:03.746447Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-10T11:23:03.746532Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:03.746618Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.746703Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-10T11:23:03.746742Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:04.004360Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:04.004502Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.004647Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:04.004685Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-10T11:23:04.004779Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.004821Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-10T11:23:04.004882Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.004984Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-10T11:23:04.005032Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-10T11:23:04.005096Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.005146Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:04.005194Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: 
subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.005268Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-04-10T11:23:04.005336Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::SrcIdCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2025-04-10T11:23:03.740548Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:03.740619Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:03.742468Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:03.742530Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:03.749275Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:03.749459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-10T11:23:03.749543Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:03.749667Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:03.749716Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:03.749750Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:04.004616Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.004684Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-10T11:23:04.004766Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.286473Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:04.286541Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.286700Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76 2025-04-10T11:23:04.286744Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], 
deletion# false 2025-04-10T11:23:04.286812Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-04-10T11:23:04.286899Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2025-04-10T11:23:04.286974Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.287091Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2025-04-10T11:23:03.737754Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:03.737822Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.009632Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:04.009698Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.009888Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:04.009934Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:04.016775Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:04.017010Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.017139Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.017306Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-10T11:23:04.017386Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-10T11:23:04.017447Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-10T11:23:04.293724Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:04.293831Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.293936Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-10T11:23:04.293967Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: 
path# path 2025-04-10T11:23:04.294091Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.294252Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:04.294298Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:04.294357Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:04.294508Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:04.294537Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:04.294589Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:04.294745Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-10T11:23:04.294796Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.294870Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:04.294917Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true >> TReplicaTest::Delete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2gt4/0017e2/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk7 Trying to start YDB, gRPC: 13439, MsgBus: 10390 2025-04-10T11:18:26.090734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642333007578830:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:26.090803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0017e2/r3tmp/tmpdfGiMN/pdisk_1.dat 2025-04-10T11:18:26.433361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13439, node 1 2025-04-10T11:18:26.490122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:26.494249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:26.503177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:26.546554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-10T11:18:26.546582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:18:26.546590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:18:26.546740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10390 TClient is connected to server localhost:10390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:27.156728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.176442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:27.194569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:27.358989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.541425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:18:27.625788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.253924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642345892482502:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.254113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.537604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.566116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.595735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.663787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.707181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.784652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:18:29.840834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642345892483017:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.840901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.841318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642345892483022:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:29.845341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:18:29.856553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491642345892483024:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:18:29.913681Z node 1 :TX_PROXY ERROR: Actor# [1:7491642345892483077:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:31.090846Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642333007578830:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:31.090969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:41.419930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:18:41.419983Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"7c7c3de6-edd6903f-d483af16-9776446b") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"a8e933fc-1bd3ae6a-7b6fef8e-8673dc5") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"2023b5cd-b2a02017-3972cd00-dbd39f2e")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-04-10T11:23:04.328908Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:04.328993Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-04-10T11:23:04.329073Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:04.329108Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.585951Z node 2 :SCHEME_BOARD_REPLICA DEBUG: 
[2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.586017Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-04-10T11:23:04.586073Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-10T11:23:04.586101Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.586170Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-10T11:23:04.586192Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-04-10T11:23:04.586236Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-10T11:23:04.586258Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-10T11:23:04.586299Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-04-10T11:23:04.586320Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-10T11:23:04.840626Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:04.840681Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:04.840814Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:04.840848Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-04-10T11:23:04.846777Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:04.846986Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-10T11:23:04.847070Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.847194Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-10T11:23:04.847235Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.847341Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:04.847372Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-04-10T11:23:04.847395Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# 
[OwnerId: 42, LocalPathId: 1] 2025-04-10T11:23:04.847508Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-04-10T11:23:04.847571Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.847688Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-04-10T11:23:04.847724Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:04.847813Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-04-10T11:23:04.847838Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities# >> TReplicaTest::Merge >> TReplicaTest::Commit >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-10T11:23:05.933115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-10T11:23:05.933186Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-10T11:23:05.933338Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-10T11:23:05.933457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-10T11:23:05.933506Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:05.933550Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:05.933626Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:05.933658Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:05.933844Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:05.933879Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, 
LocalPathId: 1], deletion# false 2025-04-10T11:23:05.939795Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:05.940079Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:05.940119Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:05.940152Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.203015Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:06.203078Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.203182Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-10T11:23:06.203212Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.203278Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:06.203400Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.203440Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.203511Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.203719Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:06.203755Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:06.203794Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.203940Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-04-10T11:23:06.204016Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.204097Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.204164Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.204207Z node 2 
:SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.204321Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.204361Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-10T11:23:06.204429Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.204539Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-10T11:23:06.204602Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-10T11:23:06.469449Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-04-10T11:23:06.469504Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-10T11:23:06.469563Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-04-10T11:23:06.469685Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:06.469715Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.469776Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:06.469803Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-04-10T11:23:06.469905Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } |91.5%| [TA] $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> TColumnShardTestSchema::HotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-04-10T11:23:05.910397Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:05.910445Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:05.910512Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:05.910534Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-04-10T11:23:05.910565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-10T11:23:05.910584Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-10T11:23:06.166827Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-10T11:23:06.166875Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-10T11:23:06.166995Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-10T11:23:06.167090Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:06.167120Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.167237Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.167263Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.172017Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.172212Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 2025-04-10T11:23:06.172319Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:06.172348Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:06.172391Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.172460Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-04-10T11:23:06.419539Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:06.419590Z 
node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.419696Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.419735Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.419779Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.419856Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-10T11:23:06.419914Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-10T11:23:06.419992Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.420019Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.420055Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.420257Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.420293Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-10T11:23:06.420318Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.420380Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-10T11:23:06.420438Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-10T11:23:06.420498Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.420592Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2025-04-10T11:23:05.993010Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-10T11:23:05.993090Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 
2025-04-10T11:23:05.993178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:05.993211Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2 2025-04-10T11:23:06.248922Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-10T11:23:06.248970Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.249078Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-10T11:23:06.249112Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.249219Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-10T11:23:06.249387Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.249413Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.254256Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.254415Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:06.254444Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-10T11:23:06.254468Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.254551Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.254581Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.254619Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.254676Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.254706Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-10T11:23:06.254744Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.254826Z node 2 
:SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-10T11:23:06.254864Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-10T11:23:06.527035Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:06.527101Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-10T11:23:06.527218Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.527253Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.527309Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.527423Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.527459Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-10T11:23:06.527509Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.527568Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-10T11:23:06.527650Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-10T11:23:06.527698Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-04-10T11:23:06.527743Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-04-10T11:23:06.527810Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.527840Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-10T11:23:06.527869Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-10T11:23:06.527924Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-10T11:23:06.527957Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-10T11:23:06.527986Z node 3 :SCHEME_BOARD_REPLICA 
NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> BasicStatistics::TwoNodes [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |91.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284740.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284740.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284740.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144284740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284740.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283540.000000s;Name=;Codec=}; 2025-04-10T11:22:20.421669Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:20.507892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:20.530134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:20.530428Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:20.537619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:20.537818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:20.538028Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:20.538135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:20.538266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:20.538382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:20.538476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:20.538591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:20.538696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:20.538801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.538906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:20.538996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:20.565140Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:20.565459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:20.565516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:20.565672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:20.565841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:20.565920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:20.565959Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:20.566044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:20.566099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:20.566137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:20.566187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:20.566328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:20.566384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:20.566421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:20.566447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:20.566525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:20.566574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:20.566610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:20.566637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:20.566713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:20.566763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:20.566796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:20.566841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:20.566876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:20.566910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:20.567274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-10T11:22:20.567374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-10T11:22:20.567439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-10T11:22:20.567508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-10T11:22:20.567653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:20.567704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:20.567736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:20.567898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:20.567935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.567960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:20.568091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
10T11:23:06.863852Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:06.863925Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:23:06.863973Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:06.864020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:06.864060Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:06.864157Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:06.864372Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-04-10T11:23:06.864482Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:06.864631Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:06.864683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:06.865098Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:06.865199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:06.865630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:2007:4016];trace_detailed=; 2025-04-10T11:23:06.865988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:06.866201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:06.866336Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:06.866444Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:06.866771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:06.866861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:06.866957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:06.866998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2007:4016] finished for tablet 9437184 2025-04-10T11:23:06.867356Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:2006:4015];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284186865563,"name":"_full_task","f":1744284186865563,"d_finished":0,"c":0,"l":1744284186867054,"d":1491},"events":[{"name":"bootstrap","f":1744284186865742,"d_finished":724,"c":1,"l":1744284186866466,"d":724},{"a":1744284186866747,"name":"ack","f":1744284186866747,"d_finished":0,"c":0,"l":1744284186867054,"d":307},{"a":1744284186866732,"name":"processing","f":1744284186866732,"d_finished":0,"c":0,"l":1744284186867054,"d":322},{"name":"ProduceResults","f":1744284186866277,"d_finished":373,"c":2,"l":1744284186866981,"d":373},{"a":1744284186866984,"name":"Finish","f":1744284186866984,"d_finished":0,"c":0,"l":1744284186867054,"d":70}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:06.867418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:2006:4015];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:06.867727Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:2006:4015];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284186865563,"name":"_full_task","f":1744284186865563,"d_finished":0,"c":0,"l":1744284186867454,"d":1891},"events":[{"name":"bootstrap","f":1744284186865742,"d_finished":724,"c":1,"l":1744284186866466,"d":724},{"a":1744284186866747,"name":"ack","f":1744284186866747,"d_finished":0,"c":0,"l":1744284186867454,"d":707},{"a":1744284186866732,"name":"processing","f":1744284186866732,"d_finished":0,"c":0,"l":1744284186867454,"d":722},{"name":"ProduceResults","f":1744284186866277,"d_finished":373,"c":2,"l":1744284186866981,"d":373},{"a":1744284186866984,"name":"Finish","f":1744284186866984,"d_finished":0,"c":0,"l":1744284186867454,"d":470}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2007:4016]->[1:2006:4015] 2025-04-10T11:23:06.867796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:06.865168Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:06.867830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:06.867910Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2007:4016];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 |91.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |91.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |91.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |91.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2025-04-10T11:23:07.235782Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:07.235842Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.235919Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-10T11:23:07.235940Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.235987Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-10T11:23:07.236021Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.236061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-10T11:23:07.236082Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.236235Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2025-04-10T11:23:07.236284Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T11:23:07.240371Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.240549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-04-10T11:23:07.240610Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T11:23:07.240646Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.240753Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-10T11:23:07.240830Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-10T11:23:07.279334Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-10T11:23:07.279373Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.279433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-10T11:23:07.279456Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.279491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-10T11:23:07.279507Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 900, generation# 1 2025-04-10T11:23:07.279555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-10T11:23:07.279573Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 900, generation# 1 2025-04-10T11:23:07.279633Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2025-04-10T11:23:07.279652Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: 
path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T11:23:07.279685Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.279734Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-04-10T11:23:07.279760Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2025-04-10T11:23:07.279805Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-04-10T11:23:07.279850Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.279914Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060] 2025-04-10T11:23:07.279945Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-10T11:23:07.280136Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-04-10T11:23:07.280153Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.280177Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-04-10T11:23:07.280202Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.280238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-10T11:23:07.280250Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.280275Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-10T11:23:07.280299Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.280331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event 
size# 103 2025-04-10T11:23:07.280360Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T11:23:07.280391Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.280433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2025-04-10T11:23:07.280450Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-10T11:23:07.280473Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-10T11:23:07.280530Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064] 2025-04-10T11:23:07.280552Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-10T11:23:07.280724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-10T11:23:07.280748Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.280780Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-10T11:23:07.280795Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.280830Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-10T11:23:07.280845Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 900, generation# 1 2025-04-10T11:23:07.280865Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-10T11:23:07.280877Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 900, generation# 1 2025-04-10T11:23:07.280904Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:19:2066], cookie# 0, event size# 103 2025-04-10T11:23:07.280916Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 
2025-04-10T11:23:07.280940Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:18:2065] Upsert description: path# /Root/Te ... ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-10T11:23:07.641029Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:394:2441] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:397:2444] 2025-04-10T11:23:07.641054Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T11:23:07.641113Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Subscribe: subscriber# [2:397:2444], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-10T11:23:07.643871Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-10T11:23:07.643918Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-10T11:23:07.643972Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-10T11:23:07.643998Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-10T11:23:07.644061Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-04-10T11:23:07.644085Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-10T11:23:07.644129Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-04-10T11:23:07.644151Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-10T11:23:07.644203Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64 2025-04-10T11:23:07.644225Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-10T11:23:07.644244Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-10T11:23:07.644319Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130 2025-04-10T11:23:07.644344Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-04-10T11:23:07.644366Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Path was explicitly deleted, ignoring: path# 
/Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-10T11:23:07.644416Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448] 2025-04-10T11:23:07.644435Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T11:23:07.644489Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-10T11:23:07.646945Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-10T11:23:07.646987Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-10T11:23:07.647039Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-10T11:23:07.647063Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-10T11:23:07.647162Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-10T11:23:07.647202Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-10T11:23:07.647252Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-10T11:23:07.647273Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-10T11:23:07.647333Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2025-04-10T11:23:07.647356Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-10T11:23:07.647379Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-10T11:23:07.647440Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-04-10T11:23:07.647468Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-10T11:23:07.647553Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452] 2025-04-10T11:23:07.647582Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside 2025-04-10T11:23:07.647642Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-10T11:23:07.785421Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:07.785495Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-10T11:23:07.785581Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-10T11:23:07.785616Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 800, generation# 1 2025-04-10T11:23:07.785714Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-10T11:23:07.785747Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-10T11:23:07.785806Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-10T11:23:07.785840Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-10T11:23:07.785954Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 118 2025-04-10T11:23:07.786013Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-10T11:23:07.786077Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-10T11:23:07.786207Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117 2025-04-10T11:23:07.786259Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-10T11:23:07.786309Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# 
[OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-04-10T11:23:07.786351Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-04-10T11:23:07.786412Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-10T11:23:07.786547Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-10T11:23:07.786616Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |91.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-04-10T11:20:31.866830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:31.867121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:31.867228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d25/r3tmp/tmpGTekbe/pdisk_1.dat 2025-04-10T11:20:32.288016Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28692, node 1 2025-04-10T11:20:32.624851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:32.624903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:32.624934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:32.625830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:32.633406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:32.721866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.722011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.738986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5739 2025-04-10T11:20:33.339464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:39.745709Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:39.758501Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-10T11:20:39.821359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:39.821484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:39.822201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:39.822284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:39.856314Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:39.856491Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:39.859979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:39.860342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:40.119456Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.120007Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-04-10T11:20:40.120618Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.120793Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.121014Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.121107Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.121193Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.121262Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.121373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.299614Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:40.299741Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:40.312816Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:40.326124Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:40.326265Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:40.339924Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:40.341947Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:40.490574Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:40.553028Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:40.553125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:40.583935Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:40.584076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:40.584272Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:40.584332Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:40.584385Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:40.584440Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:40.584495Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:40.584556Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:40.584992Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:40.632092Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:40.632198Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2287:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:40.637719Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2300:2610] 2025-04-10T11:20:40.640622Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2316:2619] 2025-04-10T11:20:40.640950Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2316:2619], schemeshard id = 72075186224037897 2025-04-10T11:20:40.650404Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:40.672263Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:40.672334Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:40.672417Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:40.687753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:40.694996Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:40.695137Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:40.785890Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:41.099643Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:41.219231Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:42.152689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2689:3085], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.152880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.169882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:42.419451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2838:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.419648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.421147Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2843:3127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:42.421306Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:42.421389Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2845:3129] 2025-04-10T11:20:42.421445Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2845:3129] 2025-04-10T11:20:42.422038Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2846:2814] 2025-04-10T11:20:42.422326Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2845:3129], server id = [2:2846:2814], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:42.422525Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2846:2814], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:42.422593Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:42.422814Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:42.422888Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2843:3127], StatRequests.size() = 1 2025-04-10T11:20:42.445894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2850:3133], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:42.446017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool ... solveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:22:57.133718Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:57.133768Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:57.133817Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:22:57.899335Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7484:4583]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:57.899618Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:22:57.899673Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7484:4583], StatRequests.size() = 1 2025-04-10T11:22:58.635351Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:22:58.635642Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:22:58.635971Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:22:58.679419Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:22:58.679509Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 237.000000s, at schemeshard: 72075186224037897 2025-04-10T11:22:58.679887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-10T11:22:58.693523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:22:59.304736Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7521:4599]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:59.305026Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:22:59.305060Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7521:4599], StatRequests.size() = 1 2025-04-10T11:22:59.986392Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:22:59.986454Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:22:59.986505Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:22:59.986540Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:22:59.986928Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:22:59.998666Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:00.002167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7548:4618], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:00.002258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7558:4623], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:00.002325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:00.013505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:23:00.079691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7562:4626], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:23:00.268579Z node 2 :TX_PROXY ERROR: Actor# [2:7660:4674] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:00.314488Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7690:4690]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:00.314801Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:23:00.314873Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7690:4690], StatRequests.size() = 1 2025-04-10T11:23:00.452759Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjU2ZTQ5NTgtNmI5ZGIwNzctYjM0ZTdjMWMtMWMyNTk1OTQ=, TxId: 2025-04-10T11:23:00.452851Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjU2ZTQ5NTgtNmI5ZGIwNzctYjM0ZTdjMWMtMWMyNTk1OTQ=, TxId: 2025-04-10T11:23:00.453727Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:00.468733Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:00.468822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:00.955323Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7727:4700]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:00.955560Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:23:00.955598Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7727:4700], StatRequests.size() = 1 2025-04-10T11:23:02.221641Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7772:4715]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:02.221979Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:23:02.222037Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7772:4715], StatRequests.size() = 1 2025-04-10T11:23:02.883025Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:23:02.883195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:02.883226Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:02.883264Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:23:02.883303Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:23:02.883891Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:23:02.886440Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:02.902083Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODQ5MjFlN2UtNTEzMjY5NzEtNzYxMzlkYy1mM2ZjNmQ4, TxId: 2025-04-10T11:23:02.902170Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODQ5MjFlN2UtNTEzMjY5NzEtNzYxMzlkYy1mM2ZjNmQ4, TxId: 2025-04-10T11:23:02.902796Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:02.917393Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:23:02.917455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:03.520384Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7844:4746]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:03.520741Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:23:03.520795Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7844:4746], StatRequests.size() = 1 2025-04-10T11:23:04.901327Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7893:4762]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:04.901529Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-10T11:23:04.901557Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7893:4762], StatRequests.size() = 1 2025-04-10T11:23:05.571029Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:23:05.571287Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:05.571341Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:23:05.571511Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:05.572195Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:06.154644Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7930:4778]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:06.155022Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-10T11:23:06.155069Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7930:4778], StatRequests.size() = 1 2025-04-10T11:23:06.155883Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7932:3039]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:06.159747Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:23:06.159876Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7942:3043] 2025-04-10T11:23:06.159956Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7942:3043] 2025-04-10T11:23:06.164310Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7950:4780] 2025-04-10T11:23:06.164993Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7942:3043], server id = [2:7950:4780], tablet id = 72075186224037894, status = OK 2025-04-10T11:23:06.165870Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7950:4780], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:23:06.165950Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-04-10T11:23:06.166410Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-10T11:23:06.166526Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7932:3039], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1020541C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFE540AE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24) NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6) NUnitTest::RunMain(int, char**)+5213 (0x106E978D) ??+0 (0x7F2100595D90) __libc_start_main+128 (0x7F2100595E40) _start+41 (0xD7D1029) >> TColumnShardTestSchema::RebootHotTiers [GOOD] |91.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) 
TBackTrace::Capture()+28 (0x1020541C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFE53A28) std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24) NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6) NUnitTest::RunMain(int, char**)+5213 (0x106E978D) ??+0 (0x7FECB5D6CD90) __libc_start_main+128 (0x7FECB5D6CE40) _start+41 (0xD7D1029) >> BasicStatistics::Simple [GOOD] |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1020541C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFE53D38) std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24) NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6) NUnitTest::RunMain(int, char**)+5213 (0x106E978D) ??+0 (0x7FCB14CEED90) __libc_start_main+128 (0x7FCB14CEEE40) _start+41 (0xD7D1029) >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |91.6%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284703.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284703.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284703.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284703.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284703.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284703.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283503.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284703.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284703.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283503.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283503.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283503.000000s;Name=;Codec=}; 2025-04-10T11:21:43.793429Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:43.883406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:43.906385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:43.906691Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:43.914604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:43.914823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:43.915052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:43.915187Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:43.915301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:43.915404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:43.915519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:43.915629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:43.915730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:43.915834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:43.915937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:43.916055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:43.937902Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:43.938286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:43.938350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:43.938501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:43.938653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:43.938717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:43.938747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:43.938807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
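The registration records above show TTxUpdateSchema walking a fixed, ordered chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on), each one logging normalizer_init, a work summary such as "0 chunks found", and normalizer_finished before the next entry is switched in. The following is a minimal sketch of that sequential pattern; the type names and logging helpers (TNormalizer, RunNormalizers) are illustrative assumptions, not YDB's actual classes.

```cpp
// Sketch of the sequential normalizer chain traced above: each normalizer
// logs init, scans for work, and reports finished before the next one is
// switched in. Illustrative only -- not YDB's real implementation.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TNormalizer {
    std::string Name;
    std::function<size_t()> Run;  // returns number of chunks it had to fix
};

void RunNormalizers(const std::vector<TNormalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        const auto& n = chain[i];
        std::cout << "event=normalizer_init;type=" << n.Name << ";\n";
        size_t fixed = n.Run();
        std::cout << "normalizer=" << n.Name << ";message=" << fixed
                  << " chunks found;\n";
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << n.Name << ";\n";
        if (i + 1 < chain.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1].Name << ";\n";
        }
    }
}

int main() {
    RunNormalizers({
        {"Granules", [] { return size_t(0); }},
        {"Chunks", [] { return size_t(0); }},
        {"TablesCleaner", [] { return size_t(0); }},
    });
}
```

A healthy restart, as in this run, reports "0 chunks found" at every step, so the chain degenerates to a fast ordered walk.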
2025-04-10T11:21:43.938854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:43.938898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:43.938921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:43.939031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:43.939086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:43.939115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:43.939134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:43.939208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:43.939249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:43.939284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:43.939306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:43.939372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:43.939409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:43.939428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:43.939457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:43.939483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:43.939510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:43.939834Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-10T11:21:43.939905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:21:43.939975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-10T11:21:43.940036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-10T11:21:43.940163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:43.940210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:43.940234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:43.940394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:43.940434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:43.940453Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... data.cpp:29;EXECUTE:finishLoadingTime=319; 2025-04-10T11:23:08.352237Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=39797; 2025-04-10T11:23:08.359455Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7155; 2025-04-10T11:23:08.366778Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6431; 2025-04-10T11:23:08.366868Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7328; 2025-04-10T11:23:08.366999Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=74; 2025-04-10T11:23:08.367106Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-04-10T11:23:08.367219Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=75; 2025-04-10T11:23:08.367333Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=82; 
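The composite_init records here pair a PRECHARGE and an EXECUTE measurement per stage (insert_table, tx_controller, operations_manager, storages_manager, ...), each ending in a `<stage>LoadingTime=` value. A scoped timer is the simplest way to produce such records; the sketch below assumes microsecond units and invented names (TStageTimer), so treat it as an illustration of the logging pattern rather than the actual implementation.

```cpp
// Minimal sketch of per-stage load timing: each init stage is wrapped in a
// scoped timer that logs "<phase>:<stage>LoadingTime=<usec>" on scope exit.
#include <chrono>
#include <iostream>
#include <string>

class TStageTimer {
public:
    TStageTimer(std::string phase, std::string stage)
        : Phase(std::move(phase))
        , Stage(std::move(stage))
        , Start(std::chrono::steady_clock::now())
    {}

    ~TStageTimer() {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - Start).count();
        // Mirrors the log shape, e.g. EXECUTE:insert_tableLoadingTime=7328;
        std::cout << Phase << ":" << Stage << "LoadingTime=" << us << ";\n";
    }

private:
    std::string Phase;
    std::string Stage;
    std::chrono::steady_clock::time_point Start;
};

int main() {
    {
        TStageTimer t("PRECHARGE", "insert_table");
        // ... fetch the pages the stage will need ...
    }
    {
        TStageTimer t("EXECUTE", "insert_table");
        // ... apply the stage using the precharged data ...
    }
}
```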
2025-04-10T11:23:08.374598Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7208; 2025-04-10T11:23:08.391305Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16609; 2025-04-10T11:23:08.391460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-10T11:23:08.391552Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-10T11:23:08.391607Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-10T11:23:08.391658Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-10T11:23:08.391708Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-10T11:23:08.391797Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-04-10T11:23:08.391848Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:23:08.391943Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-04-10T11:23:08.391992Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-10T11:23:08.392060Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-04-10T11:23:08.392171Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=69; 2025-04-10T11:23:08.392558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=334; 2025-04-10T11:23:08.392602Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=86236; 2025-04-10T11:23:08.392767Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:08.392881Z node 
1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:08.392940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:08.393008Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:08.410426Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:08.410555Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:08.410603Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:08.410661Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:23:08.410712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:08.410746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:08.410786Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:08.410817Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:08.410889Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:08.411275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:08.411333Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2646:4520];tablet_id=9437184;parent=[1:2604:4485];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-10T11:23:08.411763Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:08.412018Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:08.412040Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
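The StartCleanup/StartCleanupStop records just above show the gating that produces `skip_reason=no_changes`: portions are counted against a snapshot barrier (plan_step) and batch limits (max_portions, max_chunks), and the cleanup pass is skipped when nothing is eligible. Below is a hedged sketch of that decision, with invented types (TPortion, StartCleanup) standing in for the real engine.

```cpp
// Illustrative sketch (not YDB's actual code) of the cleanup gating above:
// portions are eligible only below the snapshot barrier, and the pass
// reports skip_reason=no_changes when none qualify.
#include <cstdint>
#include <iostream>
#include <vector>

struct TPortion {
    uint64_t RemovedAtPlanStep = 0;  // 0 => still alive, never droppable
};

bool StartCleanup(const std::vector<TPortion>& portions,
                  uint64_t barrierPlanStep, size_t maxPortions) {
    size_t prepared = 0;
    for (const auto& p : portions) {
        if (p.RemovedAtPlanStep && p.RemovedAtPlanStep < barrierPlanStep) {
            if (++prepared >= maxPortions) break;  // batch cap, like max_portions=1000
        }
    }
    if (prepared == 0) {
        std::cout << "background=cleanup;skip_reason=no_changes\n";
        return false;
    }
    std::cout << "event=StartCleanup;portions_prepared=" << prepared << "\n";
    return true;
}

int main() {
    std::vector<TPortion> portions(7);  // 7 live portions, none removed yet
    StartCleanup(portions, /*barrierPlanStep=*/999700009, /*maxPortions=*/1000);
}
```

With seven live portions and none removed below the barrier, the sketch prints the same no_changes outcome the test logs repeatedly.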
2025-04-10T11:23:08.412059Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:23:08.412106Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:08.412149Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:08.412188Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:23:08.412233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:08.412264Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:08.412311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:08.412342Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:08.412403Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:08.413299Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:23:08.414453Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |91.6%| [TS] 
{asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2025-04-10T11:20:36.135985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:36.136373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:36.136448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c52/r3tmp/tmpMXmLq2/pdisk_1.dat 2025-04-10T11:20:36.523309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17791, node 1 2025-04-10T11:20:36.781025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:36.781081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:36.781117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:36.781542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:36.784157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:36.882623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:36.882775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:36.896270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64413 2025-04-10T11:20:37.455683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:40.731084Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:40.768464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:40.768663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:40.797568Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:40.800001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:41.054438Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.054979Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.055630Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.055806Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.055935Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.056203Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.056313Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.056402Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.056484Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.223140Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:41.223274Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:41.236733Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:41.379334Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:41.435238Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:41.435345Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:41.480345Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:41.481656Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:41.481880Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:41.481954Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:41.482010Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:41.482094Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:41.482167Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:41.482228Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:41.482790Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:41.510845Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:41.510994Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:41.519730Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:20:41.524056Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:20:41.524388Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:20:41.535172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:41.589396Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:41.589471Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:41.589560Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:41.610276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:41.616639Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:41.616783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:41.779289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:41.943813Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:42.021876Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:43.163227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.163423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.182689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:43.461990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2383:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.462140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.463482Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2387:3115]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:43.463698Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:43.463816Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2389:3117] 2025-04-10T11:20:43.463900Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2389:3117] 2025-04-10T11:20:43.464484Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2390:2876] 2025-04-10T11:20:43.464769Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2389:3117], server id = [2:2390:2876], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:43.464989Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2390:2876], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:43.465059Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:43.465313Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:43.465420Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2387:3115], StatRequests.size() = 1 2025-04-10T11:20:43.492736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2394:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.492867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.493286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2399:3126], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:43.500818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:20:43.726797Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:20:43.726894Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:20:43.824099Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2389:3117], schemeshard count = 1 2025-04-10T11:20:44.285356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... eplyToActorId[ [2:6403:4625]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:56.375767Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2025-04-10T11:22:56.375814Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6403:4625], StatRequests.size() = 1 2025-04-10T11:22:57.611042Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6436:4639]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:57.611464Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-10T11:22:57.611518Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6436:4639], StatRequests.size() = 1 2025-04-10T11:22:58.311099Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:22:58.874955Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6471:4655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:22:58.875162Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-10T11:22:58.875189Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6471:4655], StatRequests.size() = 1 2025-04-10T11:22:59.585180Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:22:59.585257Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:59.585313Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:22:59.585347Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:23:00.374899Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6511:4672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:00.375143Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:23:00.375183Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6511:4672], StatRequests.size() = 1 2025-04-10T11:23:01.077904Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:23:01.078025Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:01.078420Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:01.110750Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:23:01.110817Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 
188.000000s, at schemeshard: 72075186224037897 2025-04-10T11:23:01.111068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-10T11:23:01.124317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:01.697038Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6544:4688]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:01.697505Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:23:01.697553Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6544:4688], StatRequests.size() = 1 2025-04-10T11:23:02.356002Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:02.356094Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:02.356159Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:23:02.356207Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:02.356629Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:23:02.369359Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:02.372325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6567:4707], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:02.372437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6576:4712], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:02.372558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:02.382234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:23:02.433807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6581:4715], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:23:02.627202Z node 2 :TX_PROXY ERROR: Actor# [2:6679:4763] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:02.683831Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6708:4778]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:02.684197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:23:02.684268Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6708:4778], StatRequests.size() = 1 2025-04-10T11:23:02.817568Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWYzMDM0NDMtOGY3NjAwZi1jYzA5OWQ0Yy0yYjMxMjFhYw==, TxId: 2025-04-10T11:23:02.817648Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWYzMDM0NDMtOGY3NjAwZi1jYzA5OWQ0Yy0yYjMxMjFhYw==, TxId: 2025-04-10T11:23:02.818251Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:02.832110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:02.832182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:03.290020Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6740:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:03.290347Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:23:03.290390Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6740:4797], StatRequests.size() = 1 2025-04-10T11:23:04.517501Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6779:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:04.517787Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:23:04.517818Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6779:4819], StatRequests.size() = 1 2025-04-10T11:23:05.164882Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:23:05.165020Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:05.165050Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:05.165078Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:23:05.165099Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:23:05.165327Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:23:05.185649Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:05.195215Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmY0ZWJkZDAtZDg3NWU5MDgtM2Y2NzZiYTYtZWVjMzk5MmM=, TxId: 2025-04-10T11:23:05.195268Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmY0ZWJkZDAtZDg3NWU5MDgtM2Y2NzZiYTYtZWVjMzk5MmM=, TxId: 2025-04-10T11:23:05.195603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:05.208720Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:23:05.208760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:05.764665Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6847:4858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:05.764851Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:23:05.764877Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6847:4858], StatRequests.size() = 1 2025-04-10T11:23:07.069328Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6890:4882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:07.069652Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-10T11:23:07.069705Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6890:4882], StatRequests.size() = 1 2025-04-10T11:23:07.730623Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:23:07.730792Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:07.730985Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:07.731013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:07.731299Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:08.279476Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6923:4898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:08.279764Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-10T11:23:08.279808Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6923:4898], StatRequests.size() = 1 |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |91.6%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> TSchemeShardTest::RmDirTwice >> TSchemeShardTest::MkRmDir >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::CreateTable >> TSchemeShardTest::Boot >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TGRpcYdbTest::CreateTableBadRequest >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> YdbYqlClient::CreateTableWithPartitionAtKeys >> YdbYqlClient::DiscoveryLocationOverride >> YdbYqlClient::TestReadTableOneBatch >> YdbYqlClient::TestReadTableMultiShard >> YdbS3Internal::TestS3Listing >> TGRpcAuthentication::ValidCredentials >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::DependentOps >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> HttpRequest::ProbeServerless [GOOD] >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-04-10T11:20:26.531863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:26.532343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:26.532441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000de4/r3tmp/tmpGktT3A/pdisk_1.dat 2025-04-10T11:20:27.010876Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14721, node 1 2025-04-10T11:20:27.615880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:27.615943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:27.615979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:27.616454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:27.619238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:27.713973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:27.714132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:27.727710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6612 2025-04-10T11:20:28.295509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:31.621179Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:31.661192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:31.661398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:31.701006Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:31.703462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:31.969019Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.969667Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.970289Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.970705Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.971004Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.971115Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.971234Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.971322Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.971396Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.152608Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.152757Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.166600Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.330499Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:32.404453Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:32.404563Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:32.444322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:32.444880Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:32.445121Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:32.445186Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:32.445262Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:32.445352Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:32.445415Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:32.445475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:32.446030Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:32.470597Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:32.473023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:20:32.492467Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:32.492533Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:32.492612Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:20:32.498194Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.498352Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.526540Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:32.527541Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:32.564435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:32.573580Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:32.573747Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:32.787465Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:32.990728Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:33.074361Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:34.037006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:34.893284Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:35.063802Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:20:35.063871Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:35.063960Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2600:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:35.072257Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2601:2949] 2025-04-10T11:20:35.072435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2601:2949], schemeshard id = 72075186224037899 2025-04-10T11:20:36.667855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2730:3251], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.668020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.692875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:20:37.084183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:37.084427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:37.084722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:37.084859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:37.085009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:37.085145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:37.085276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:37.085433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:37.085561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2883:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11: ... 37894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765663Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765762Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:23:12.765848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-04-10T11:23:13.662653Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:13.662775Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-10T11:23:13.662813Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:13.663360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:23:13.678082Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:23:13.678462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:23:13.678558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:23:13.679041Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:23:13.704258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:23:13.704470Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:23:13.705395Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9865:7403], server id = [2:9870:7408], tablet id = 72075186224037905, status = OK 2025-04-10T11:23:13.705498Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9865:7403], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.706392Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9866:7404], server id = [2:9871:7409], tablet id = 72075186224037906, status = OK 2025-04-10T11:23:13.706461Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9866:7404], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.707430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9867:7405], server id = [2:9873:7411], tablet id = 72075186224037907, status = OK 2025-04-10T11:23:13.707487Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9867:7405], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.707613Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9868:7406], server id = [2:9872:7410], tablet id = 72075186224037908, status = OK 2025-04-10T11:23:13.707657Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9868:7406], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.708299Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:23:13.709560Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9865:7403], server id = [2:9870:7408], tablet id = 72075186224037905 2025-04-10T11:23:13.709595Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.709721Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9869:7407], server id = [2:9876:7414], tablet id = 72075186224037909, status = OK 2025-04-10T11:23:13.709774Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9869:7407], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.710465Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:23:13.711276Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9866:7404], server id = [2:9871:7409], tablet id = 72075186224037906 2025-04-10T11:23:13.711307Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.711520Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:23:13.712132Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:23:13.712291Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9877:7415], server id = [2:9880:7418], tablet id = 72075186224037910, status = OK 2025-04-10T11:23:13.712345Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9877:7415], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.713161Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9868:7406], server id = [2:9872:7410], tablet id = 72075186224037908 2025-04-10T11:23:13.713190Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.713254Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-10T11:23:13.713988Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9882:7420], server id = [2:9883:7421], tablet id = 72075186224037911, status = OK 2025-04-10T11:23:13.714047Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9882:7420], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.714183Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9867:7405], server id = [2:9873:7411], tablet id = 72075186224037907 2025-04-10T11:23:13.714209Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.714783Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9869:7407], server id = [2:9876:7414], tablet id = 72075186224037909 2025-04-10T11:23:13.714811Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.714902Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9884:7422], server id = [2:9886:7424], tablet id = 72075186224037912, status = OK 2025-04-10T11:23:13.714965Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9884:7422], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.715619Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-10T11:23:13.716040Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9885:7423], server id = [2:9889:7427], tablet id = 72075186224037913, status = OK 2025-04-10T11:23:13.716119Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9885:7423], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.716915Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9877:7415], server id = [2:9880:7418], tablet id = 72075186224037910 2025-04-10T11:23:13.716948Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.717357Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9888:7426], server id = [2:9890:7428], tablet id = 72075186224037914, status = OK 2025-04-10T11:23:13.717413Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9888:7426], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:13.717990Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-10T11:23:13.718138Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-10T11:23:13.718735Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9882:7420], server id = [2:9883:7421], tablet id = 
72075186224037911 2025-04-10T11:23:13.718773Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.718846Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9884:7422], server id = [2:9886:7424], tablet id = 72075186224037912 2025-04-10T11:23:13.718868Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.719039Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-10T11:23:13.719382Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9885:7423], server id = [2:9889:7427], tablet id = 72075186224037913 2025-04-10T11:23:13.719411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.719507Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-10T11:23:13.719548Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:23:13.719769Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:23:13.719930Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:23:13.720158Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:23:13.722458Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9888:7426], server id = [2:9890:7428], tablet id = 72075186224037914 2025-04-10T11:23:13.722504Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:13.723324Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:23:13.745875Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTQ0MGNlNjEtNTY1N2M1ODctNWFhZWYzYmEtNWEwMDAwYTA=, TxId: 2025-04-10T11:23:13.745947Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTQ0MGNlNjEtNTY1N2M1ODctNWFhZWYzYmEtNWEwMDAwYTA=, TxId: 2025-04-10T11:23:13.746522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:13.761065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:13.761131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=pNrV L, ActorId=[1:4602:3502] 2025-04-10T11:23:13.762463Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9917:5745]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:13.762747Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:23:13.762809Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:23:13.763055Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:23:13.763116Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:23:13.763195Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-10T11:23:13.774783Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4'
>> TSchemeShardTest::SchemeErrors [GOOD]
>> TSchemeShardTest::SerializedCellVec [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate
>> TSchemeShardTest::AlterTableSplitSchema [GOOD]
>> TSchemeShardTest::AlterTableSettings
>> TSchemeShardTest::NameFormat [GOOD]
>> TSchemeShardTest::ParallelCreateTable
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate
>> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD]
>> TSchemeShardTest::DropTable
>> TGRpcYdbTest::CreateTableBadRequest [GOOD]
>> TGRpcYdbTest::CreateTableBadRequest2
>> YdbYqlClient::DiscoveryLocationOverride [GOOD]
>> YdbYqlClient::DeleteTableWithDeletedIndex
>> TSchemeShardTest::ParallelCreateTable [GOOD]
>> TSchemeShardTest::ParallelCreateSameTable
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig
>> TSchemeShardTest::AlterTableSettings [GOOD]
>> TSchemeShardTest::AssignBlockStoreVolume
>> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD]
>> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning
>> TTableProfileTests::OverwriteCachingPolicy
>> YdbYqlClient::TestReadTableMultiShard [GOOD]
>> YdbYqlClient::TestReadTableMultiShardUseSnapshot
>> TSchemeShardTest::CopyTableWithAlterConfig [GOOD]
>> TSchemeShardTest::CopyTableOmitFollowers
>> TGRpcAuthentication::ValidCredentials [GOOD]
>> TGRpcAuthentication::NoDescribeRights
>> YdbS3Internal::TestS3Listing [GOOD]
>> YdbS3Internal::TestAccessCheck
>> TSchemeShardTest::AssignBlockStoreVolume [GOOD]
>> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter
>> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD]
>> TSchemeShardTest::RejectAlterSolomon
>> YdbYqlClient::TestReadTableOneBatch [GOOD]
>> YdbYqlClient::TestReadTableNotNullBorder
>> TSchemeShardTest::CreateIndexedTableRejects [GOOD]
>> TSchemeShardTest::CreateIndexedTableAndForceDrop
>> TSchemeShardTest::DropTable [GOOD]
>> TSchemeShardTest::DropTableById
>> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD]
>> TSchemeShardTest::ParallelCreateSameTable [GOOD]
>> TSchemeShardTest::MultipleColumnFamilies
>> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD]
>> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter
>> TSchemeShardTest::RejectAlterSolomon [GOOD]
>> TSchemeShardTest::SimultaneousDropForceDrop
>> TSchemeShardTest::DropPQ [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume
>> TSchemeShardTest::CopyTableOmitFollowers [GOOD]
>> TSchemeShardTest::CreateIndexedTableAfterBackup
>> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD]
>> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously
>> TSchemeShardTest::DropTableById [GOOD]
>> TSchemeShardTest::DropPQFail
>> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD]
>> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284755.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284755.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284755.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284755.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284755.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284755.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144284755.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284755.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283555.000000s;Name=;Codec=}; 2025-04-10T11:22:35.243109Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:35.312234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:35.330981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:35.331298Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:35.339433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:35.339672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:35.339920Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:35.340047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:35.340181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:35.340332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:35.340455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:35.340576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:35.340691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:35.340802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:35.340910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:35.341014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:35.371139Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:35.371507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:35.371590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:35.371760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:35.371956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:35.372045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:35.372093Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:35.372186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:35.372246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:35.372291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:35.372320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:35.372470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:35.372530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:35.372569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:35.372596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:35.372682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:35.372733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:35.372776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:35.372808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:35.372891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:35.372946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:35.372977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:35.373031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:35.373073Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:35.373107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:35.373535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-10T11:22:35.373639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:22:35.373712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-10T11:22:35.373806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-04-10T11:22:35.373988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:35.374046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:35.374082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:35.374316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:35.374367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:35.374399Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:35.374540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:18.952212Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:23:18.952280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:18.952348Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:18.952403Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:18.952529Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:18.952870Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-04-10T11:23:18.953012Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:18.953204Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:18.953293Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:18.953807Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:18.953913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:18.954477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:2005:4014];trace_detailed=; 2025-04-10T11:23:18.954961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:18.955219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:18.955419Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:18.955576Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:18.956023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:18.956161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:18.956304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:18.956352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2005:4014] finished for tablet 9437184 2025-04-10T11:23:18.956868Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:2004:4013];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284198954407,"name":"_full_task","f":1744284198954407,"d_finished":0,"c":0,"l":1744284198956424,"d":2017},"events":[{"name":"bootstrap","f":1744284198954628,"d_finished":985,"c":1,"l":1744284198955613,"d":985},{"a":1744284198955994,"name":"ack","f":1744284198955994,"d_finished":0,"c":0,"l":1744284198956424,"d":430},{"a":1744284198955971,"name":"processing","f":1744284198955971,"d_finished":0,"c":0,"l":1744284198956424,"d":453},{"name":"ProduceResults","f":1744284198955327,"d_finished":557,"c":2,"l":1744284198956333,"d":557},{"a":1744284198956337,"name":"Finish","f":1744284198956337,"d_finished":0,"c":0,"l":1744284198956424,"d":87}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:18.956957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:2004:4013];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:18.957461Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:2004:4013];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284198954407,"name":"_full_task","f":1744284198954407,"d_finished":0,"c":0,"l":1744284198957010,"d":2603},"events":[{"name":"bootstrap","f":1744284198954628,"d_finished":985,"c":1,"l":1744284198955613,"d":985},{"a":1744284198955994,"name":"ack","f":1744284198955994,"d_finished":0,"c":0,"l":1744284198957010,"d":1016},{"a":1744284198955971,"name":"processing","f":1744284198955971,"d_finished":0,"c":0,"l":1744284198957010,"d":1039},{"name":"ProduceResults","f":1744284198955327,"d_finished":557,"c":2,"l":1744284198956333,"d":557},{"a":1744284198956337,"name":"Finish","f":1744284198956337,"d_finished":0,"c":0,"l":1744284198957010,"d":673}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2005:4014]->[1:2004:4013] 2025-04-10T11:23:18.957577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:18.953881Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:18.957631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:18.957759Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2005:4014];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0
>> TSchemeShardTest::DropBlockStoreVolume [GOOD]
>> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions
>> TSchemeShardTest::SimultaneousDropForceDrop [GOOD]
>> TSchemeShardTest::RejectSystemViewPath
>> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD]
>> TSchemeShardTest::CreateTableWithUniformPartitioning
>> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD]
>> TSchemeShardTest::BlockStoreVolumeLimits
>> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD]
>> TSchemeShardTest::CreateFinishedInDescription
>> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume2
>> TGRpcYdbTest::CreateTableBadRequest2 [GOOD]
>> TGRpcYdbTest::CreateTableBadRequest3
>> TSchemeShardTest::RejectSystemViewPath [GOOD]
>> TSchemeShardTest::SplitKey [GOOD]
>> TSchemeShardTest::SplitAlterCopy
>> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD]
>> YdbYqlClient::CreateTableWithUniformPartitions
>> TPersQueueTest::SrcIdCompatibility [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume2 [GOOD]
>> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration
>> TSchemeShardTest::CreateFinishedInDescription [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolume
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts
>> TSchemeShardTest::MultipleColumnFamilies [GOOD]
>> TSchemeShardTest::MultipleColumnFamiliesWithStorage
>> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize
>> TSchemeShardTest::CreateBlockStoreVolume [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles
>> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD]
>> TSchemeShardTest::CreateTableWithSplitBoundaries
>> TSchemeShardTest::SplitAlterCopy [GOOD]
>> TSchemeShardTest::TopicReserveSize
>> TSchemeShardTest::BlockStoreVolumeLimits [GOOD]
>> TSchemeShardTest::BlockStoreNonreplVolumeLimits
>> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD]
>> TSchemeShardTest::CreateWithIntermediateDirs
>> TTableProfileTests::OverwriteCachingPolicy [GOOD]
>> TTableProfileTests::ExplicitPartitionsSimple
>> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD]
>> YdbYqlClient::TestReadTableMultiShardOneRow
>> TGRpcAuthentication::NoDescribeRights [GOOD]
>> TGRpcClientLowTest::BiStreamPing
>> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions

------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD]
Test command err:
=== Start server === Server->StartServer(false); 2025-04-10T11:18:42.533022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642403149889346:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.533649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.605861Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642400905859045:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.605925Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a4c/r3tmp/tmpBtVRIQ/pdisk_1.dat 2025-04-10T11:18:42.875640Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.886988Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:43.137824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.137936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.140612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.140732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.146509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.152274Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.153684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.202808Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18795, node 1 2025-04-10T11:18:43.324547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a4c/r3tmp/yandexp3A96s.tmp 2025-04-10T11:18:43.324586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a4c/r3tmp/yandexp3A96s.tmp 2025-04-10T11:18:43.325367Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a4c/r3tmp/yandexp3A96s.tmp
2025-04-10T11:18:43.325563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.586678Z INFO: TTestServer started on Port 4074 GrpcPort 18795 TClient is connected to server localhost:4074 PQClient connected to localhost:18795 === TenantModeEnabled() = 0 === Init PQ - start server on port 18795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:44.061259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:18:44.061567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.061873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:18:44.062218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:44.064538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.074944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:44.075111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:18:44.075318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.075389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:18:44.075413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:18:44.075425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-10T11:18:44.076646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:44.076692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:18:44.076716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:44.077387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.077428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:18:44.077447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:18:44.079251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.079290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.079318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.079353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.084053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:44.086218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:18:44.086439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:18:44.090184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744283924131, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:44.090380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744283924131 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:18:44.090431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.090762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T11:18:44.090837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.091013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:18:44.091065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:18:44.093767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:18:44.093794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:18:44.093986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:18:44.094001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491642407444857339:2428], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T11:18:44.094071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.094114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T11:18:44.094217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:18:44.094241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.094259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:18:44.094267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.094288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T11:18:44.094306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.094320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-10T11:18:44.094341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-10T11:18:44.094408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [Own ... e: 5 sessionId: partition: 7 expectedGeneration: (NULL) 2025-04-10T11:23:19.479032Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [25:7491643591240649190:2750], now have 1 active actors on pipe 2025-04-10T11:23:19.479189Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037910, NodeId 26, Generation: 1 2025-04-10T11:23:19.479373Z node 26 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-10T11:23:19.479424Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-10T11:23:19.479619Z node 26 :PERSQUEUE INFO: new Cookie test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2025-04-10T11:23:19.479754Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2025-04-10T11:23:19.479842Z node 26 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-10T11:23:19.480383Z node 26 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-10T11:23:19.480413Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-10T11:23:19.480524Z node 26 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-10T11:23:19.480920Z node 25 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 2025-04-10T11:23:19.481805Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744284199481 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:23:19.481969Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. Init response: session_id: "test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T11:23:19.482280Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write 1 messages with Id from 1 to 1 2025-04-10T11:23:19.482802Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: try to update token 2025-04-10T11:23:19.482866Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-10T11:23:19.483342Z node 25 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:23:19.483750Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-10T11:23:19.484197Z node 26 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-10T11:23:19.484233Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-10T11:23:19.484359Z node 26 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-04-10T11:23:19.484589Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::IEventHandle 2025-04-10T11:23:19.485006Z node 26 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-10T11:23:19.485039Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-10T11:23:19.485104Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-04-10T11:23:19.485356Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 
'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-04-10T11:23:19.540694Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-04-10T11:23:19.541719Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1744284199541 2025-04-10T11:23:19.541955Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:23:19.541991Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- delete ---------------- 2025-04-10T11:23:19.542031Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] [x0000000007, x0000000008) 2025-04-10T11:23:19.542064Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- write ----------------- 2025-04-10T11:23:19.542096Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] m0000000007ptest-src-id-compat2 2025-04-10T11:23:19.542116Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] d0000000007_00000000000000000000_00000_0000000001_00000| 2025-04-10T11:23:19.542131Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] i0000000007 2025-04-10T11:23:19.542173Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- rename ---------------- 2025-04-10T11:23:19.542198Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] =========================== 2025-04-10T11:23:19.542272Z node 26 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:23:19.542436Z node 26 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 177 2025-04-10T11:23:19.545738Z node 26 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 7 offset 0 count 1 size 177 actorID [26:7491643584599112138:2433] 2025-04-10T11:23:19.545855Z node 26 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037910' partition 7 offset 0 partno 0 count 1 parts 0 size 177 2025-04-10T11:23:19.545881Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:23:19.545948Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. 
Partition: 7 2025-04-10T11:23:19.546032Z node 26 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-04-10T11:23:19.546036Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T11:23:19.546335Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:23:19.546379Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:23:19.546317Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::IEventHandle 2025-04-10T11:23:19.546612Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T11:23:19.546647Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T11:23:19.546687Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-10T11:23:19.546718Z node 26 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:23:19.546792Z node 26 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1744284199485 queuesize 0 startOffset 0 2025-04-10T11:23:19.547168Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 5 queued_in_partition_duration_ms: 55 } 2025-04-10T11:23:19.547240Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: acknoledged message 1 2025-04-10T11:23:19.547527Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: close. 
Timeout = 0 ms 2025-04-10T11:23:19.547613Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session will now close 2025-04-10T11:23:19.547711Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: aborting 2025-04-10T11:23:19.548388Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:23:19.548461Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0] Write session: destroy 2025-04-10T11:23:19.549633Z node 25 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 grpc read done: success: 0 data: 2025-04-10T11:23:19.549669Z node 25 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 grpc read failed 2025-04-10T11:23:19.549731Z node 25 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 grpc closed 2025-04-10T11:23:19.549763Z node 25 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|7e734342-c8dfbfb5-f29bf2ae-36b6e611_0 is DEAD 2025-04-10T11:23:19.551155Z node 25 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:23:19.551563Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [25:7491643591240649190:2750] destroyed 2025-04-10T11:23:19.551578Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 
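The DumpKeyValueRequest above reveals the flat key layout a PersQueue partition keeps in its tablet's KV store: an 'm' key per source id (m0000000007ptest-src-id-compat2), a 'd' key per data blob, and an 'i' key for partition metadata. Below is a minimal C++ sketch reconstructing the 'd' key format; the field widths and meanings are inferred purely from the single key printed in this log (d0000000007_00000000000000000000_00000_0000000001_00000|), not taken from the YDB sources, so treat the helper as hypothetical.

#include <cstdint>
#include <cstdio>
#include <string>

// Hypothetical reconstruction of the data-blob key seen in the dump above:
//   'd' + partition(10 digits) + '_' + offset(20) + '_' + partNo(5)
//       + '_' + count(10) + '_' + internalParts(5) + '|'
std::string MakeDataBlobKey(uint32_t partition, uint64_t offset,
                            uint32_t partNo, uint32_t count,
                            uint32_t internalParts) {
    char buf[64];
    std::snprintf(buf, sizeof(buf), "d%010u_%020llu_%05u_%010u_%05u|",
                  partition, static_cast<unsigned long long>(offset),
                  partNo, count, internalParts);
    return buf;
}

int main() {
    // Values from the write above: partition 7, offset 0, partNo 0,
    // one message, no internal parts. Prints the exact key the cache
    // proxy passed through to KV.
    std::printf("%s\n", MakeDataBlobKey(7, 0, 0, 1, 0).c_str());
}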
>> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbScripting::BasicV0 >> BasicStatistics::Serverless [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> YdbScripting::Params ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 1648, msgbus: 3433 2025-04-10T11:19:45.672254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642672174114205:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:19:45.676333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001b1e/r3tmp/tmpHt8RLJ/pdisk_1.dat 2025-04-10T11:19:46.377961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:19:46.378075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:19:46.378845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:19:46.397996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1648, node 1 2025-04-10T11:19:46.763944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:19:46.763972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:19:46.763978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:19:46.764086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3433 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:19:47.183562Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Handle TEvNavigate describe path dc-1 2025-04-10T11:19:47.183610Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049411:2450] HANDLE EvNavigateScheme dc-1 2025-04-10T11:19:47.183919Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049411:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.241276Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049411:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T11:19:47.257779Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049411:2450] Handle TEvDescribeSchemeResult Forward to# [1:7491642680764049410:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
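The Ls exchange above is the test harness's TClient sending a DescribePath to the SchemeShard and dumping the raw TEvDescribeSchemeResult. Outside the harness the same lookup goes through the SDK's scheme client; the sketch below assumes the YDB C++ SDK (header paths differ between SDK generations, and the endpoint simply reuses the grpc port 1648 reported for this test run), so it illustrates the shape of the call rather than the harness internals.

#include <ydb-cpp-sdk/client/driver/driver.h>   // header paths are an assumption;
#include <ydb-cpp-sdk/client/scheme/scheme.h>   // older SDKs use ydb/public/sdk/cpp/client/...

#include <cstdio>

int main() {
    // Connect to the single-node server from this test run (grpc: 1648).
    NYdb::TDriver driver(NYdb::TDriverConfig()
                             .SetEndpoint("localhost:1648")
                             .SetDatabase("/dc-1"));
    NYdb::NScheme::TSchemeClient scheme(driver);

    // Public-API analogue of "TClient::Ls request: dc-1".
    auto result = scheme.DescribePath("/dc-1").GetValueSync();
    if (result.IsSuccess()) {
        const auto& entry = result.GetEntry();
        std::printf("name=%s owner=%s\n", entry.Name.c_str(), entry.Owner.c_str());
    }
    driver.Stop(true);
    return 0;
}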
2025-04-10T11:19:47.287984Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.288022Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:19:47.288171Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491642680764049419:2457] 2025-04-10T11:19:47.390906Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.392298Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.392331Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.392479Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.392844Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.393163Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:19:47.393225Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:19:47.393386Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:19:47.402235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.408822Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:19:47.408875Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049419:2457] txid# 281474976710657 SEND to# [1:7491642680764049416:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T11:19:47.432975Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Handle TEvProposeTransaction 2025-04-10T11:19:47.433000Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:19:47.433032Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491642680764049459:2493] 2025-04-10T11:19:47.435330Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T11:19:47.435379Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T11:19:47.435396Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:19:47.435546Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:19:47.435817Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:19:47.435905Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:19:47.435942Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:19:47.436049Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:19:47.436585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:19:47.438851Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T11:19:47.438923Z node 1 :TX_PROXY DEBUG: Actor# [1:7491642680764049459:2493] txid# 281474976710658 SEND to# [1:7491642680764049458:2492] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T11:19:49.436255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642689353984133:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.436354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.440870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491642689353984145:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:19:49.445617Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] Handle TEvProposeTransaction 2025-04-10T11:19:49.445656Z node 1 :TX_PROXY DEBUG: actor# [1:7491642672174114288:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11: ... 65 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T11:23:10.018305Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T11:23:10.018421Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:23:10.018830Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:23:10.018998Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] HANDLE EvNavigateKeySetResult, txid# 281474976715665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-10T11:23:10.019083Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976715665 TabletId# 72075186224037891} 2025-04-10T11:23:10.019842Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 HANDLE EvClientConnected 2025-04-10T11:23:10.022565Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976715665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:58234" , at schemeshard: 72075186224037891 2025-04-10T11:23:10.022924Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976715665:0, at schemeshard: 72075186224037891 2025-04-10T11:23:10.023136Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T11:23:10.023154Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T11:23:10.023399Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-04-10T11:23:10.023434Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72075186224037891 2025-04-10T11:23:10.023531Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T11:23:10.023552Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T11:23:10.023586Z node 60 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-10T11:23:10.023603Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T11:23:10.023660Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-10T11:23:10.023764Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-10T11:23:10.023799Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-10T11:23:10.023816Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-10T11:23:10.023836Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-10T11:23:10.023855Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-10T11:23:10.023870Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-10T11:23:10.026683Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-10T11:23:10.026996Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-10T11:23:10.027288Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-10T11:23:10.027319Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T11:23:10.027285Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715665} 2025-04-10T11:23:10.027390Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397005:2870] txid# 281474976715665 SEND to# [59:7491643551064397004:2359] Source {TEvProposeTransactionStatus txid# 281474976715665 Status# 48} 2025-04-10T11:23:10.027611Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T11:23:10.027722Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-10T11:23:10.027752Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7491643525684397496:2304], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-04-10T11:23:10.027768Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7491643525684397496:2304], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 TEST clusteradmin triggers auth on tenant 2025-04-10T11:23:10.028717Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 
LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-04-10T11:23:10.028854Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-04-10T11:23:10.028881Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2025-04-10T11:23:10.028900Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-04-10T11:23:10.028921Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-10T11:23:10.029010Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, subscribers: 0 2025-04-10T11:23:10.031145Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 TClient is connected to server localhost:4243 TClient::Ls request: /dc-1/tenant-db 2025-04-10T11:23:10.408116Z node 59 :TX_PROXY DEBUG: actor# [59:7491643516704657436:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-10T11:23:10.408211Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397014:2875] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-10T11:23:10.408740Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397014:2875] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:23:10.408942Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397014:2875] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-10T11:23:10.411104Z node 59 :TX_PROXY DEBUG: Actor# [59:7491643551064397014:2875] Handle TEvDescribeSchemeResult Forward to# [59:7491643551064397013:2874] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-04-10T11:23:10.536001Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-04-10T11:23:10.536615Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:23:10.538761Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-04-10T11:20:36.260881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:36.261325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:36.261418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c85/r3tmp/tmpwf6JJa/pdisk_1.dat 2025-04-10T11:20:36.670584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30626, node 1 2025-04-10T11:20:36.907679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:36.907748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:36.907787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:36.908267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:36.918159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:37.002736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:37.002871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:37.017236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16895 2025-04-10T11:20:37.546065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:40.661175Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:40.696834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:40.697040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:40.725839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:40.728505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:40.965699Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.966333Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.966855Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967022Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967260Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967346Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967416Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967497Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:40.967594Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.132273Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:41.132364Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:41.145101Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:41.260224Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:41.320273Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:41.320395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:41.352804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:41.353316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:41.353527Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:41.353591Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:41.353640Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:41.353688Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:41.353763Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:41.353803Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:41.354170Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:41.374900Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:41.377105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:20:41.390384Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:41.390454Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:41.390548Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:20:41.393837Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:41.393964Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:41.410252Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:41.410909Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:41.433837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:41.441184Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:41.441368Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:41.618558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:41.796585Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:41.873892Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:42.735367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:43.544240Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:43.773176Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:20:43.773258Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:43.773400Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2602:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:43.774427Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2604:2951] 2025-04-10T11:20:43.775331Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2604:2951], schemeshard id = 72075186224037899 2025-04-10T11:20:45.102702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2737:3255], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:45.102865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:45.123191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-10T11:20:45.303366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2889:3290], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:45.303529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:45.354947Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2894:3294]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:45.355152Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:45.355351Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T11:20:45.355417Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2897:3297] 2025-04-10T11:20:45.355493Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2897:3297] 2025-04-10T11:20:45.356094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2898:3082] 2025-04-10T11:20:45.356356Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2897:3297], server id = [2:2898:3082], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:45.356604Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2898:3082], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:45.356675Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:45.356987Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:45.357061Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2894:3294], StatRequests.size() = 1 2025-04-10T11:20:45.381196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2902:3301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... 13.883706Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:23:13.883757Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:23:13.883800Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:23:14.211439Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7607:5449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:14.211610Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-10T11:23:14.211639Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7607:5449], StatRequests.size() = 1 2025-04-10T11:23:15.640206Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:23:15.640462Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:15.640818Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:15.717352Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-10T11:23:15.717436Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037897 2025-04-10T11:23:15.717838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-10T11:23:15.742208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:15.766109Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7648:5472]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:15.766366Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-10T11:23:15.766399Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7648:5472], StatRequests.size() = 1 2025-04-10T11:23:17.036927Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:17.037027Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:17.037083Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:23:17.037144Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:17.037695Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:23:17.053912Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:17.058833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7684:5498], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.058958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7694:5503], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.059097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.074719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:23:17.139580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7698:5506], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:23:17.243157Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7795:5555]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:17.243456Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:23:17.243510Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7795:5555], StatRequests.size() = 1 2025-04-10T11:23:17.335030Z node 2 :TX_PROXY ERROR: Actor# [2:7800:5557] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:17.385506Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7829:5572]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:17.385901Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:23:17.386202Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-10T11:23:17.386283Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:23:17.386474Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:17.386554Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7829:5572], StatRequests.size() = 1 2025-04-10T11:23:17.522497Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmY1YTNmZjQtYTc0Y2FhMjQtZWIzNGQwOTgtNzc3ZDNhNmM=, TxId: 2025-04-10T11:23:17.522567Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmY1YTNmZjQtYTc0Y2FhMjQtZWIzNGQwOTgtNzc3ZDNhNmM=, TxId: 2025-04-10T11:23:17.523058Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:17.537020Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:17.537079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:23:17.580699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:23:17.580793Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:23:17.679335Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3075:3115], schemeshard count = 1 2025-04-10T11:23:17.961992Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:23:17.962062Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 204.000000s, at schemeshard: 72075186224037899 2025-04-10T11:23:17.962357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-10T11:23:17.976179Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:18.960229Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7897:5615]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:18.960481Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:23:18.960526Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7897:5615], StatRequests.size() = 1 2025-04-10T11:23:20.282995Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:23:20.294269Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:20.294330Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:20.294386Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-10T11:23:20.294421Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:20.294690Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:23:20.297591Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:20.311797Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-10T11:23:18.000000Z 2025-04-10T11:23:20.312601Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjc5YWI0MjYtODZhNzM4M2QtMzFjZTY0MmUtZDQ1ODQ2YTA=, TxId: 2025-04-10T11:23:20.312654Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yjc5YWI0MjYtODZhNzM4M2QtMzFjZTY0MmUtZDQ1ODQ2YTA=, TxId: 2025-04-10T11:23:20.313209Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:20.327379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:20.327435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
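Both traversal passes above execute the same parameterized cleanup statement, logged as RunDataQuery with two DECLAREd Uint64 parameters. The aggregator runs it internally through TQueryBase; the sketch below expresses the same statement through the public table client as a rough equivalent. The header path, the retry wrapper, and the id values are assumptions or placeholders, not what the test itself does.

#include <ydb-cpp-sdk/client/table/table.h>  // assumption; older SDKs: ydb/public/sdk/cpp/client/ydb_table/table.h

#include <cstdint>
#include <utility>

// The exact statement logged by TQueryBase above.
static const char* kQuery = R"(
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
)";

// ownerId/localPathId correspond to the [OwnerId: ..., LocalPathId: ...]
// pairs the scheduler prints when it picks a table to traverse.
NYdb::TStatus RunStatisticsCleanup(NYdb::NTable::TTableClient& client,
                                   uint64_t ownerId, uint64_t localPathId) {
    return client.RetryOperationSync([&](NYdb::NTable::TSession session) {
        auto params = NYdb::TParamsBuilder()
                          .AddParam("$owner_id").Uint64(ownerId).Build()
                          .AddParam("$local_path_id").Uint64(localPathId).Build()
                          .Build();
        auto result = session.ExecuteDataQuery(
                kQuery,
                NYdb::NTable::TTxControl::BeginTx(
                    NYdb::NTable::TTxSettings::SerializableRW()).CommitTx(),
                std::move(params)).GetValueSync();
        return NYdb::TStatus(result);
    });
}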
2025-04-10T11:23:20.363949Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7968:5658]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:20.364161Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:23:20.364194Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7968:5658], StatRequests.size() = 1 2025-04-10T11:23:21.837039Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8022:5690]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:21.837429Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:23:21.837497Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8022:5690], StatRequests.size() = 1 2025-04-10T11:23:23.110600Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-10T11:23:23.110846Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:23.111240Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:23.122541Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:23.122591Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:23.195319Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8062:5712]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:23.195533Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:23:23.195562Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8062:5712], StatRequests.size() = 1 >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir |91.6%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> YdbMonitoring::SelfCheckWithNodesDying >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.964842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.964927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.964985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.965020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.965857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:23:12.965903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.965977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.966090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.967192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.048694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.048758Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.064880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.065198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.065369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.073458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.073759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.074518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.074982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.077258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.078775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.078848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.079019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.079066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.079114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.079313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.085692Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.189099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.189351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.189584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.189823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.189887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.192016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.192144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.192302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.192357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.192413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.192457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.194293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.194352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-10T11:23:13.194392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.195966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.196004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.196073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.196124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.204636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.206401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.206556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.207274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.207384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.207422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.207661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.207701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.207833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.207891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.209510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.209689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.209917Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.210032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.210061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.210090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.210138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.210199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.210232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.210260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.210284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.210338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.210387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.210425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.211880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.211985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.212019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Step: 5000005 2025-04-10T11:23:25.726509Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-10T11:23:25.726544Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.726568Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-10T11:23:25.726595Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-10T11:23:25.727457Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-10T11:23:25.727559Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-10T11:23:25.727609Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-10T11:23:25.727638Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.727663Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-10T11:23:25.727688Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-10T11:23:25.728456Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-10T11:23:25.728546Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-10T11:23:25.728600Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-10T11:23:25.728634Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.728663Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-10T11:23:25.728865Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-10T11:23:25.729148Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-10T11:23:25.737519Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.737697Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.737761Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.737809Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.737947Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:25.737977Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:23:25.738231Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:25.738262Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-10T11:23:25.738313Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:25.738355Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:23:25.738520Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:23:25.738569Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:23:25.738624Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:23:25.738677Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:23:25.738737Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:23:25.738794Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:23:25.738870Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:23:25.738933Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:23:25.739189Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-04-10T11:23:25.739259Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-10T11:23:25.739303Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:23:25.740036Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:25.740114Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:25.740152Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 
72057594046678944, txId: 104 2025-04-10T11:23:25.740208Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:23:25.740264Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-10T11:23:25.740377Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:23:25.751165Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:23:25.775959Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:23:25.776049Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:23:25.776558Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:23:25.776656Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:23:25.776711Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1481:3282] TestWaitNotification: OK eventTxId 104 2025-04-10T11:23:25.777382Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:25.777591Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 249us result status StatusSuccess 2025-04-10T11:23:25.778170Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { 
PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon >> YdbOlapStore::LogNonExistingRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283542.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284742.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284742.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283542.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283542.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283542.000000s;Name=;Codec=}; 2025-04-10T11:22:22.681111Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:22.768503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:22.791398Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:22.791695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:22.799318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:22.799537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:22.799758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:22.799884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:22.800014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:22.800132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:22.800246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:22.800363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:22.800473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:22.800576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:22.800685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:22.800787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:22.829406Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:22.829760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:22.829844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-10T11:22:22.830020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:22.830230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:22.830321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:22.830362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:22.830454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:22.830513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:22.830555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:22.830585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:22.830744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:22.830829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:22.830872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:22.830902Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:22.830994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:22.831049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:22.831091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:22.831120Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:22.831231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:22.831281Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:22.831311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:22.831367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:22.831412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:22.831450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:22.831865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-10T11:22:22.831944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-10T11:22:22.832017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-10T11:22:22.832100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T11:22:22.832289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:22.832342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:22.832377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:22.832560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:22.832608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:22.832639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
let_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:23:25.812366Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:25.812425Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:25.812489Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:25.812546Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:25.812664Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:25.812959Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:23:25.813107Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:25.813305Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:25.813380Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:25.813903Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:25.814010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:25.814586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1976:3981];trace_detailed=; 2025-04-10T11:23:25.815137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:25.815400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:25.815601Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:25.815749Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:25.816146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:25.816273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:25.816413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:25.816472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1976:3981] finished for tablet 9437184 2025-04-10T11:23:25.816961Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1975:3980];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284205814509,"name":"_full_task","f":1744284205814509,"d_finished":0,"c":0,"l":1744284205816540,"d":2031},"events":[{"name":"bootstrap","f":1744284205814806,"d_finished":978,"c":1,"l":1744284205815784,"d":978},{"a":1744284205816116,"name":"ack","f":1744284205816116,"d_finished":0,"c":0,"l":1744284205816540,"d":424},{"a":1744284205816087,"name":"processing","f":1744284205816087,"d_finished":0,"c":0,"l":1744284205816540,"d":453},{"name":"ProduceResults","f":1744284205815507,"d_finished":546,"c":2,"l":1744284205816450,"d":546},{"a":1744284205816454,"name":"Finish","f":1744284205816454,"d_finished":0,"c":0,"l":1744284205816540,"d":86}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:25.817055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1975:3980];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:25.817523Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1975:3980];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284205814509,"name":"_full_task","f":1744284205814509,"d_finished":0,"c":0,"l":1744284205817110,"d":2601},"events":[{"name":"bootstrap","f":1744284205814806,"d_finished":978,"c":1,"l":1744284205815784,"d":978},{"a":1744284205816116,"name":"ack","f":1744284205816116,"d_finished":0,"c":0,"l":1744284205817110,"d":994},{"a":1744284205816087,"name":"processing","f":1744284205816087,"d_finished":0,"c":0,"l":1744284205817110,"d":1023},{"name":"ProduceResults","f":1744284205815507,"d_finished":546,"c":2,"l":1744284205816450,"d":546},{"a":1744284205816454,"name":"Finish","f":1744284205816454,"d_finished":0,"c":0,"l":1744284205817110,"d":656}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1976:3981]->[1:1975:3980] 2025-04-10T11:23:25.817633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:25.813978Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:25.817686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:25.817811Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TYqlDateTimeTests::SimpleUpsertSelect >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> YdbYqlClient::RetryOperationAsync >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 >> BasicStatistics::TwoServerlessDbs [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword 
>> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-04-10T11:20:26.564592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:26.565021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:26.565106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dd4/r3tmp/tmpVYCPNu/pdisk_1.dat 2025-04-10T11:20:27.011444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17504, node 1 2025-04-10T11:20:27.601808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:27.601886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:27.601923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:27.602719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:27.618988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:27.716423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:27.716590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:27.730338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15980 2025-04-10T11:20:28.310071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:31.607653Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:31.664942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:31.665145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:31.697632Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:31.700681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:31.944296Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.947271Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948076Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948293Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948545Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948627Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948701Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948802Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:31.948923Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.141899Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.141988Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.155176Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.323020Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:32.417336Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:32.417481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:32.458064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:32.458615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:32.458838Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:32.458900Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:32.458967Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:32.459061Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:32.459123Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:32.459176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:32.459633Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:32.482470Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:32.484689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:20:32.498132Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:32.498218Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:32.498293Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:20:32.501582Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.501695Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.520261Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:32.521134Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:32.562369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:32.576006Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:32.576229Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:32.775226Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:32.968940Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:33.048497Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:34.010680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:34.910100Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:35.115156Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:20:35.115238Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:35.115372Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2601:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:35.117595Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2603:2951] 2025-04-10T11:20:35.118558Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2603:2951], schemeshard id = 72075186224037899 2025-04-10T11:20:36.260211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:20:36.843604Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:37.085421Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-10T11:20:37.085475Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-10T11:20:37.085569Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3109:3161], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-10T11:20:37.086972Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3114:3163] 2025-04-10T11:20:37.087496Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3114:3163], schemeshard id = 72075186224037905 2025-04-10T11:20:38.133719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3214:3418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.133886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.153635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-10T11:20:38.313034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3366:3454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.313261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:38.352814Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3371:3458]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:38.352963Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:38.353099Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-10T11:20:38.353163Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3374:3461] 2025-04-10T11:20:38.353252Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3 ... sals. 2025-04-10T11:23:21.233854Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-10T11:23:21.233911Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:21.234413Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:23:21.262664Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:21.267280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8953:6408], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.267390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8963:6413], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.267967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.284002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-10T11:23:21.348213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8967:6416], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-10T11:23:21.541887Z node 2 :TX_PROXY ERROR: Actor# [2:9067:6465] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:21.586358Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9096:6480]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:21.586678Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-10T11:23:21.586910Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-10T11:23:21.586965Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:23:21.587131Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:21.587207Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9096:6480], StatRequests.size() = 1 2025-04-10T11:23:21.697412Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzM3ZTRjNjQtNWZiZWQxMzYtZjc5MzE3ZjQtOGU0MTE2MTQ=, TxId: 2025-04-10T11:23:21.697483Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzM3ZTRjNjQtNWZiZWQxMzYtZjc5MzE3ZjQtOGU0MTE2MTQ=, TxId: 2025-04-10T11:23:21.698107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:21.713553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:21.713634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:23:21.757432Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:23:21.757495Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:23:21.856452Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3739:3403], schemeshard count = 1 2025-04-10T11:23:22.159276Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:23:22.159357Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 223.000000s, at schemeshard: 72075186224037899 2025-04-10T11:23:22.159655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-10T11:23:22.174176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:22.398132Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9134:6505]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:22.398446Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:23:22.398490Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9134:6505], StatRequests.size() = 1 2025-04-10T11:23:23.832919Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9185:6537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:23.833319Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:23:23.833370Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9185:6537], StatRequests.size() = 1 2025-04-10T11:23:24.367192Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-10T11:23:24.367247Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 185.000000s, at schemeshard: 72075186224037905 2025-04-10T11:23:24.367412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-04-10T11:23:24.392826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:24.605521Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:23:24.605718Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:24.605755Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:24.605796Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-10T11:23:24.605833Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:24.606177Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-10T11:23:24.608210Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:24.621428Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmJiYjRlYTYtMTBkMDMzMS0yNzU0MmVmLWNlMzA1ODM1, TxId: 2025-04-10T11:23:24.621495Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmJiYjRlYTYtMTBkMDMzMS0yNzU0MmVmLWNlMzA1ODM1, TxId: 2025-04-10T11:23:24.622187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:24.637099Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:24.637161Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:25.309664Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9265:6590]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:25.310118Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:23:25.310188Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9265:6590], StatRequests.size() = 1 2025-04-10T11:23:26.919291Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9319:6622]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:26.919573Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:23:26.919616Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9319:6622], StatRequests.size() = 1 2025-04-10T11:23:27.690905Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-10T11:23:27.691274Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:27.691311Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:27.691348Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2025-04-10T11:23:27.691383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-10T11:23:27.691736Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-10T11:23:27.694297Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:23:27.694637Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:23:27.695787Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:23:27.707418Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGZhYWYyNWMtZjkyNDI1ZTItNTBkYjRiY2ItODQ2NDM1ODM=, TxId: 2025-04-10T11:23:27.707481Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGZhYWYyNWMtZjkyNDI1ZTItNTBkYjRiY2ItODQ2NDM1ODM=, TxId: 2025-04-10T11:23:27.707960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:27.722838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-10T11:23:27.722896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:28.407499Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9396:6671]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:28.408039Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:23:28.408097Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9396:6671], StatRequests.size() = 1 2025-04-10T11:23:28.408872Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9398:6673]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:28.414277Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:23:28.414350Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9398:6673], StatRequests.size() = 1 >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.964864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.964970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.965056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.965092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.965908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-10T11:23:12.965959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.966039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.966121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.967163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.033237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.033299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.045275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.045557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.045700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.055737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.056930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.060466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.068257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.077056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.083362Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.182579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.182786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.182971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-04-10T11:23:13.183230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.183306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.185463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.185617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.185801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.185848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.185894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.185932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.187936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.188017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.188051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.189509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.189546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.189594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.189632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.192284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.193844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.194093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.195054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.195158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.195202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.195465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.195657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.197778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.197815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.197962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.198340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.199008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.199039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.199105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.199173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.199255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.199305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.199335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.199380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.201353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.201455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.201495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ], 18446744073709551615 2025-04-10T11:23:29.471325Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:29.471418Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:29.471464Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:23:29.471550Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:23:29.471615Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:23:29.472034Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:29.472110Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:29.472142Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:23:29.472174Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:23:29.472208Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:29.472290Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:23:29.475256Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:23:29.475346Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:23:29.475380Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:23:29.475418Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:23:29.476540Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-10T11:23:29.477087Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-10T11:23:29.478564Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:23:29.478878Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:23:29.479512Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-10T11:23:29.479672Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409549 2025-04-10T11:23:29.480294Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:23:29.480522Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:29.480719Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:23:29.480859Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-10T11:23:29.481013Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:23:29.481123Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-10T11:23:29.481679Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:23:29.481794Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:23:29.482490Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:23:29.482560Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:23:29.482646Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:29.484538Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:23:29.484616Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-10T11:23:29.485698Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:23:29.485728Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:23:29.485785Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:23:29.485805Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-10T11:23:29.485968Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-04-10T11:23:29.486010Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:23:29.487079Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:23:29.487499Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:23:29.487577Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:23:29.488243Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:23:29.488386Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:23:29.488452Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:533:2487] TestWaitNotification: OK eventTxId 103 2025-04-10T11:23:29.489246Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:29.489572Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 373us result status StatusPathDoesNotExist 2025-04-10T11:23:29.489810Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-04-10T11:23:29.490411Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-10T11:23:29.490513Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-10T11:23:29.490552Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-04-10T11:23:29.490619Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-04-10T11:23:29.491245Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:29.491493Z node 15 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 287us result status StatusSuccess 2025-04-10T11:23:29.492011Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.964836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.964916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.964957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.964990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.965870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:23:12.965911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.965967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.966052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.967129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.037818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.037861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.048507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.048723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.048832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.056353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.056954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.060468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.068035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.076939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.083031Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.183498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.183698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.183901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.184149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.184195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.186258Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.186367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.186514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.186565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.186596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.186624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.188143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.188190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.188224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.189496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.189527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.189568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.189603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.192306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.193705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.193855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.194617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.194720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.194759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.195317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.195524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.197788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.197840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.198008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.198260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.198892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.198924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.198951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.198978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.199037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.199089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.199138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.199175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.199218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.200761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.200867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.200897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:29.931087Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:29.931338Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 258us result status StatusSuccess 2025-04-10T11:23:29.931776Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:29.932790Z node 15 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:29.933158Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 392us result status StatusSuccess 2025-04-10T11:23:29.933711Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:29.934796Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-10T11:23:29.935049Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 240us result status StatusSuccess 2025-04-10T11:23:29.935394Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:29.936357Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:23:29.936646Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 306us result status StatusSuccess 2025-04-10T11:23:29.937094Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY 
ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::DefaultMeteringMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-04-10T11:23:14.122768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643568615506963:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:14.131108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d02/r3tmp/tmpzulmY5/pdisk_1.dat 2025-04-10T11:23:14.496956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:14.532399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.532535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:14.548892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16007, node 1 2025-04-10T11:23:14.652162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.652195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.652205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.652367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.053292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:17.711110Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643583805245457:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:17.711197Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d02/r3tmp/tmpMNmuTp/pdisk_1.dat 2025-04-10T11:23:17.788829Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:17.818476Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:17.818548Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:17.820670Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16791, node 4 2025-04-10T11:23:17.870399Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:17.870429Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:17.870435Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:17.870543Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14590 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:18.058938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:20.119690Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:20.347285Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-10T11:23:20.374068Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-10T11:23:21.663910Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643600180068046:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:21.663980Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d02/r3tmp/tmpNcWWHR/pdisk_1.dat 2025-04-10T11:23:21.754535Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:21.777505Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:21.777565Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:21.779320Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7973, node 7 2025-04-10T11:23:21.829454Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:21.829476Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:21.829483Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:21.829669Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18366 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:22.073063Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:24.378543Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:26.079915Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643622381926855:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:26.079990Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d02/r3tmp/tmpn3FXtB/pdisk_1.dat 2025-04-10T11:23:26.202249Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:26.247028Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:26.247142Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:26.252420Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7138, node 10 2025-04-10T11:23:26.293051Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:26.293082Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:26.293097Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:26.293306Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:26.556923Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:28.808438Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> YdbYqlClient::BuildInfo >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TGRpcClientLowTest::GrpcRequestProxy >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::CreateSystemColumn >> YdbYqlClient::TestReadTableSnapshot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284763.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284763.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284763.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284763.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283563.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283563.000000s;Name=;Codec=}; 2025-04-10T11:22:45.204220Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:45.309179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:45.327920Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:45.328167Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:45.347101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:45.347465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:45.347833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:45.347990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:45.348122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:45.348287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:45.348421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:45.348526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:45.348614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:45.348694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:45.348804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:45.348913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:45.379647Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:45.380021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:45.380097Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:45.380278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:45.380507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:45.380606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:45.380658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:45.380763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:45.380833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:45.380884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:45.380921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:45.381091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:45.381144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:45.381177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:45.381199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:45.381312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:45.381386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:45.381435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:45.381480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:45.381578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:45.381637Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:45.381669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:45.381727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:45.381768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:45.381799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:45.382339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=90; 2025-04-10T11:22:45.382455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:22:45.382537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-10T11:22:45.382659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=70; 2025-04-10T11:22:45.382878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:45.382944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:45.382988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:45.383226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:45.383284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:45.383328Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:45.383441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:45.383488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:45.383515Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:45.383739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 1756439,"d_finished":0,"c":0,"l":1744284211757015,"d":576},{"name":"task_result","f":1744284211288679,"d_finished":181139,"c":28,"l":1744284211753392,"d":181139}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1280:3287]->[1:1279:3286] 2025-04-10T11:23:31.757480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:31.281489Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-10T11:23:31.757517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:31.757752Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:31.759524Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-10T11:23:31.759823Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-04-10T11:23:31.759962Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:31.760142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:31.760209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:31.760675Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:31.760774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:31.761254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1296:3303];trace_detailed=; 2025-04-10T11:23:31.761694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:31.761913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:31.762081Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:31.762230Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:31.762519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:31.762643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-10T11:23:31.762787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:31.762837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1296:3303] finished for tablet 9437184 2025-04-10T11:23:31.763245Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1295:3302];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284211761186,"name":"_full_task","f":1744284211761186,"d_finished":0,"c":0,"l":1744284211762900,"d":1714},"events":[{"name":"bootstrap","f":1744284211761415,"d_finished":851,"c":1,"l":1744284211762266,"d":851},{"a":1744284211762495,"name":"ack","f":1744284211762495,"d_finished":0,"c":0,"l":1744284211762900,"d":405},{"a":1744284211762478,"name":"processing","f":1744284211762478,"d_finished":0,"c":0,"l":1744284211762900,"d":422},{"name":"ProduceResults","f":1744284211762004,"d_finished":531,"c":2,"l":1744284211762819,"d":531},{"a":1744284211762822,"name":"Finish","f":1744284211762822,"d_finished":0,"c":0,"l":1744284211762900,"d":78}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:31.763319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1295:3302];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:31.763700Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1295:3302];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284211761186,"name":"_full_task","f":1744284211761186,"d_finished":0,"c":0,"l":1744284211763370,"d":2184},"events":[{"name":"bootstrap","f":1744284211761415,"d_finished":851,"c":1,"l":1744284211762266,"d":851},{"a":1744284211762495,"name":"ack","f":1744284211762495,"d_finished":0,"c":0,"l":1744284211763370,"d":875},{"a":1744284211762478,"name":"processing","f":1744284211762478,"d_finished":0,"c":0,"l":1744284211763370,"d":892},{"name":"ProduceResults","f":1744284211762004,"d_finished":531,"c":2,"l":1744284211762819,"d":531},{"a":1744284211762822,"name":"Finish","f":1744284211762822,"d_finished":0,"c":0,"l":1744284211763370,"d":548}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1296:3303]->[1:1295:3302] 2025-04-10T11:23:31.763795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:31.760746Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:31.763846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:31.763956Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TGRpcClientLowTest::SimpleRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] Test command err: 2025-04-10T11:18:42.544984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642400270170461:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.545040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.618079Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642400617591431:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.619155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.795882Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.823110Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a31/r3tmp/tmprKjEfi/pdisk_1.dat 2025-04-10T11:18:43.052326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.052433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.094739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.094815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.097224Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.098226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.098778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.111231Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28086, node 1 2025-04-10T11:18:43.143400Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.144601Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:18:43.326816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a31/r3tmp/yandex7t7k67.tmp 2025-04-10T11:18:43.326839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a31/r3tmp/yandex7t7k67.tmp 2025-04-10T11:18:43.326965Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a31/r3tmp/yandex7t7k67.tmp 2025-04-10T11:18:43.327108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.585357Z INFO: TTestServer started on Port 22984 GrpcPort 28086 TClient is connected to server localhost:22984 PQClient connected to localhost:28086 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:43.949817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:18:44.053783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:18:46.029100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642417797460934:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.029186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491642417797460923:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.029572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:18:46.035215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:18:46.052596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491642417797460946:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:18:46.138204Z node 2 :TX_PROXY ERROR: Actor# [2:7491642417797460974:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:18:46.586715Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491642417797460989:2317], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.588833Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWJhOWJjZTUtYjVmZDc2NjItODhjNzZiZTItN2YwZTMwZjE=, ActorId: [2:7491642417797460920:2306], ActorState: ExecuteState, TraceId: 01jrfprzg70hr8mzp2tsatbka3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.592007Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.592292Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491642417450040871:2348], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:18:46.608722Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM2Y2Y5ZjItYWUxYjVjMWQtN2Q2YWJmZGMtNjcwNDRkYmM=, ActorId: [1:7491642417450040845:2341], ActorState: ExecuteState, TraceId: 01jrfprzjr348ja5myv3j7p3ng, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:18:46.610766Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:18:46.614993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.724146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:18:46.949921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:18:47.337390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfps0kk810t9d0j2q4564cm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU2ZDYzMTctYTM4ZTNiZTctZmI0MTE4NDYtNmIwZmYxZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491642421745008557:3071] 2025-04-10T11:18:47.546389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491642400270170461:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.546464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:18:47.616856Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491642400617591431:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:47.616903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:28086 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:28086 2025-04-10T11:18:53.196529Z node 1 :PERSQUEUE INFO: proxy answer MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 1 Config { Partitio ... ute txs with state CALCULATING 2025-04-10T11:23:29.453391Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State CALCULATING 2025-04-10T11:23:29.453430Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State CALCULATING FrontTxId 281474976710672 2025-04-10T11:23:29.453456Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-10T11:23:29.453490Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState CALCULATED 2025-04-10T11:23:29.453552Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from CALCULATING to CALCULATED 2025-04-10T11:23:29.454210Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] save tx TxId: 281474976710672 State: CALCULATED MinStep: 1744284209402 MaxStep: 18446744073709551615 Step: 1744284209486 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491643578019814636 RawX2: 124554053788 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:23:29.454412Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:23:29.461266Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:23:29.461333Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-10T11:23:29.461370Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State CALCULATED 2025-04-10T11:23:29.461406Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State CALCULATED FrontTxId 281474976710672 2025-04-10T11:23:29.461457Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState WAIT_RS 2025-04-10T11:23:29.461503Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved 
from CALCULATED to WAIT_RS 2025-04-10T11:23:29.461591Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-10T11:23:29.461651Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-10T11:23:29.461756Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState EXECUTING 2025-04-10T11:23:29.461797Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from WAIT_RS to EXECUTING 2025-04-10T11:23:29.461820Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744284209486, TxId 281474976710672 2025-04-10T11:23:29.461829Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-10T11:23:29.462421Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:23:29.462478Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:23:29.462524Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:23:29.462567Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:23:29.462600Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:23:29.462616Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:23:29.462660Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:23:29.462710Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:23:29.462787Z node 30 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:23:29.465958Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1744284209486, TxId 281474976710672, Partition 0 2025-04-10T11:23:29.466007Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-10T11:23:29.466017Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:23:29.466040Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State EXECUTING 2025-04-10T11:23:29.466076Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State EXECUTING FrontTxId 281474976710672 2025-04-10T11:23:29.466102Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-10T11:23:29.466167Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710672 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-10T11:23:29.466215Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710672 2025-04-10T11:23:29.466626Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-10T11:23:29.466715Z node 30 :PERSQUEUE NOTICE: [PQ: 72075186224037892] metering mode METERING_MODE_REQUEST_UNITS 2025-04-10T11:23:29.466843Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710672 2025-04-10T11:23:29.466873Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState EXECUTED 2025-04-10T11:23:29.466908Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from EXECUTING to EXECUTED 2025-04-10T11:23:29.467292Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] save tx TxId: 281474976710672 State: EXECUTED MinStep: 1744284209402 MaxStep: 18446744073709551615 Step: 1744284209486 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { 
PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491643578019814636 RawX2: 124554053788 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:23:29.467641Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:23:29.474352Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:23:29.474411Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-10T11:23:29.474447Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State EXECUTED 2025-04-10T11:23:29.474477Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State EXECUTED FrontTxId 281474976710672 2025-04-10T11:23:29.474518Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:23:29.474548Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState WAIT_RS_ACKS 2025-04-10T11:23:29.474579Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:23:29.474626Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] PredicateAcks: 0/0 2025-04-10T11:23:29.474641Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:23:29.474667Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] PredicateAcks: 0/0 2025-04-10T11:23:29.474696Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710672 to the list for deletion 2025-04-10T11:23:29.474725Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState DELETING 2025-04-10T11:23:29.474771Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710672 2025-04-10T11:23:29.474866Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:23:29.483849Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:23:29.483906Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-10T11:23:29.483944Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State DELETING 2025-04-10T11:23:29.483976Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710672 2025-04-10T11:23:29.487286Z node 29 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-10T11:23:29.487440Z node 29 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/PQ/ttt" 2025-04-10T11:23:29.487550Z node 29 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ/ttt >> YdbTableBulkUpsertOlap::UpsertArrowBatch ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.964858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.964962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.965025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.965059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.965878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:23:12.965910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.965961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.966054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.967166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.033001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.033063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.045268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.045610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.045766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.055145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.056894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.060464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.067949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.076953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.083839Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.196112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.196301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.196479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.196687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.196739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.198996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.199044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.199075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.199103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.200714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.200759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.200789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.202179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.202220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.202270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.202307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.204966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.206373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.206517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.207296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.207451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.207495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.207704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.207744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.207894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.207966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.209505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.209694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.209960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.209998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.210088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.210113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.210140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.210180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-10T11:23:13.210220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.210258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.210285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.210308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.210379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.210414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.210452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.212137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.212231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.212259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5-04-10T11:23:32.230196Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:23:32.230261Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:32.231194Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.231288Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.231319Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:23:32.231349Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:23:32.231379Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:23:32.231455Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-10T11:23:32.234055Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T11:23:32.234262Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 104 at step: 5000005 2025-04-10T11:23:32.235420Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:32.235606Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 64424511597 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:32.235713Z node 15 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:23:32.235807Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Obj type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 104 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:32.235874Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:23:32.236036Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:23:32.236166Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 130 2025-04-10T11:23:32.236382Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:32.236480Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:23:32.237820Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:23:32.238033Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:23:32.239574Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:32.239621Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:32.239770Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:23:32.239954Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:32.239993Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:442:2402], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-10T11:23:32.240038Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:442:2402], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-10T11:23:32.240105Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:23:32.240165Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-04-10T11:23:32.240279Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:23:32.240348Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 
1/1 2025-04-10T11:23:32.240423Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:23:32.240497Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:23:32.240567Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-10T11:23:32.240632Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:23:32.240700Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:23:32.240753Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:23:32.240934Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:23:32.241014Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-10T11:23:32.241102Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:23:32.241162Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-10T11:23:32.241973Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.242067Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.242120Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:23:32.242214Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:23:32.242290Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:23:32.242719Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.242801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:23:32.242833Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:23:32.242869Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:23:32.242901Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:32.242979Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-10T11:23:32.245710Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-10T11:23:32.245996Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:23:32.247488Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-10T11:23:32.249376Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:23:32.249801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:23:32.250312Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:23:32.250393Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:23:32.250507Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:32.250964Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:23:32.252902Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:23:32.253042Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-10T11:23:32.253208Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:23:32.253768Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:23:32.253846Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:23:32.254517Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:23:32.254661Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:23:32.254722Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [15:567:2509] TestWaitNotification: OK eventTxId 104 >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> TSchemeShardTest::CreateSystemColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-04-10T11:23:14.094097Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643569875362466:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:14.094207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001d13/r3tmp/tmpjxTLEj/pdisk_1.dat 2025-04-10T11:23:14.495997Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4017, node 1 2025-04-10T11:23:14.506183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.506304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:14.511700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:14.512694Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:14.653189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.653210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.653219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.653359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.055016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:16.553410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643578465298063:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.554063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.062229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:17.198422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643582760265549:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.198499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.198531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643582760265554:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:17.202049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:17.217650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643582760265556:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:17.272360Z node 1 :TX_PROXY ERROR: Actor# [1:7491643582760265625:2805] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:17.363837Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq18ad32xhhhcam4jvxy10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2MTlmZWEtNjU0Njc4M2UtOWNiNjdmOTMtNWFmMDQ2ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:17.575329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq18g57qt4b1x7txy37fn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2MTlmZWEtNjU0Njc4M2UtOWNiNjdmOTMtNWFmMDQ2ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:17.592902Z node 1 :TX_PROXY ERROR: [ReadTable [1:7491643582760265707:2368] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-10T11:23:17.600712Z node 1 :TX_PROXY ERROR: [ReadTable [1:7491643582760265710:2369] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-10T11:23:19.097798Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643592262225045:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:19.097927Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d13/r3tmp/tmpqhGrRB/pdisk_1.dat 2025-04-10T11:23:19.234208Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:19.268904Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:19.269058Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:19.272618Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29276, node 4 2025-04-10T11:23:19.337966Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:19.337993Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:19.338001Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:19.338194Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6132 WaitRootIsUp 'Root'...
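The PERSQUEUE trace at the top of this section walks TxId 281474976710672 through the tablet's distributed-transaction states in strict order: CALCULATING, CALCULATED, WAIT_RS, EXECUTING, EXECUTED, WAIT_RS_ACKS, DELETING. Below is a minimal C++ sketch of that chain as a reading aid; the types and the MoveTo helper are illustrative inventions, not YDB source.

```cpp
#include <cassert>
#include <initializer_list>
#include <iostream>

// The states exactly as the [PQ: 72075186224037892] log lines name them.
enum class ETxState {
    Calculating, Calculated, WaitRs, Executing, Executed, WaitRsAcks, Deleting
};

const char* Name(ETxState s) {
    switch (s) {
        case ETxState::Calculating: return "CALCULATING";
        case ETxState::Calculated:  return "CALCULATED";
        case ETxState::WaitRs:      return "WAIT_RS";
        case ETxState::Executing:   return "EXECUTING";
        case ETxState::Executed:    return "EXECUTED";
        case ETxState::WaitRsAcks:  return "WAIT_RS_ACKS";
        case ETxState::Deleting:    return "DELETING";
    }
    return "?";
}

struct TTx {
    unsigned long long TxId;
    ETxState State = ETxState::Calculating;

    // Mirrors the "TxId ... moved from X to Y" lines: only the next state
    // in the chain is a legal transition in this simplified model.
    void MoveTo(ETxState next) {
        assert(static_cast<int>(next) == static_cast<int>(State) + 1);
        std::cout << "TxId " << TxId << " moved from " << Name(State)
                  << " to " << Name(next) << "\n";
        State = next;
    }
};

int main() {
    TTx tx{281474976710672ULL};
    for (ETxState s : {ETxState::Calculated, ETxState::WaitRs,
                       ETxState::Executing, ETxState::Executed,
                       ETxState::WaitRsAcks, ETxState::Deleting}) {
        tx.MoveTo(s);
    }
}
```

In the trace above, WAIT_RS is passed through immediately: the transaction has zero read-set participants, which is what the "Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders." line records.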
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:19.575229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:22.036930Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643605147127950:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.037043Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.057608Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:22.142483Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643605147128119:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.142564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643605147128124:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.142568Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.145208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:22.159367Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643605147128126:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:22.248915Z node 4 :TX_PROXY ERROR: Actor# [4:7491643605147128197:2796] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:22.342698Z node 4 :KQP_EXECUTER ERROR: TxId: ... <main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:26.449989Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:26.540228Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643623115125408:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:26.540350Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:26.540500Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643623115125413:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:26.544047Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:26.560892Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643623115125415:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:26.653540Z node 7 :TX_PROXY ERROR: Actor# [7:7491643623115125492:2806] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:26.705654Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1heb7xxx98hym4yenm9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWFhZGU3YWMtZDI4YjMzYmEtZWIzMjRhN2YtYTljZGM5ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:26.828378Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq1hkyccmc8qh4kqr8vf99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWFhZGU3YWMtZDI4YjMzYmEtZWIzMjRhN2YtYTljZGM5ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:26.849726Z node 7 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:23:28.236219Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643631173623739:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:28.236315Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d13/r3tmp/tmpEe9OG1/pdisk_1.dat 2025-04-10T11:23:28.358931Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:28.391345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:28.391441Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:28.393995Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14079, node 10 2025-04-10T11:23:28.454514Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:28.454536Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:28.454546Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:28.454709Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2360 WaitRootIsUp 'Root'...
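The KQP_WORKLOAD_SERVICE lines repeated above for nodes 1, 4, 7 and 10 all trace the same first-use bootstrap of the default resource pool: the fetch fails with NOT_FOUND, a create is issued, a concurrent creator is tolerated ("path exist, request accepts it"), and a scheduled retry doublechecks that the pool is now visible. A compact sketch of that idempotent ensure-exists pattern follows; the TCatalog type and EnsurePool helper are hypothetical stand-ins, not the actual KQP code.

```cpp
#include <iostream>
#include <mutex>
#include <set>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// A toy stand-in for the scheme catalog the pool fetcher/creator actors talk to.
class TCatalog {
    std::mutex Lock;
    std::set<std::string> Pools;

public:
    EStatus Fetch(const std::string& name) {
        std::lock_guard<std::mutex> g(Lock);
        return Pools.count(name) ? EStatus::Ok : EStatus::NotFound;
    }

    EStatus Create(const std::string& name) {
        std::lock_guard<std::mutex> g(Lock);
        // Losing the race is not an error, as the TX_PROXY line above says:
        // "path exist, request accepts it".
        return Pools.insert(name).second ? EStatus::Ok : EStatus::AlreadyExists;
    }
};

// Fetch, create on NOT_FOUND (tolerating AlreadyExists), then fetch again,
// which is the "doublechecking" step from the TPoolCreatorActor retry.
bool EnsurePool(TCatalog& catalog, const std::string& name) {
    if (catalog.Fetch(name) == EStatus::Ok) {
        return true;
    }
    catalog.Create(name);
    return catalog.Fetch(name) == EStatus::Ok;
}

int main() {
    TCatalog catalog;
    std::cout << std::boolalpha
              << EnsurePool(catalog, "default") << "\n"   // creates, then true
              << EnsurePool(catalog, "default") << "\n";  // already there, true
}
```

In the real service the doublecheck is an asynchronous scheduled retry rather than an inline re-fetch; the sketch compresses it into a second Fetch call.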
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:28.722487Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:28.774932Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jrfq1km6c7b96dx5bbxkgj00, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:38592, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998259s 2025-04-10T11:23:28.784092Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jrfq1kmfbxxayw5vc59gnw1d, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:38600, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:31.412836Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01jrfq1p6m5jw3s0q0k1etcyj1, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:48816, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:31.414690Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643644058526649:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:31.414793Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:31.430531Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:31.433889Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:31.433927Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:31.434008Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:31.434035Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:31.505839Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:31.505843Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:31.505952Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:31.505964Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:31.521796Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfq1pa1854czj1xa2ztv2ct, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:48818, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:31.532282Z node 10 :READ_TABLE_API DEBUG: [10:7491643644058526830:2346] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2025-04-10T11:23:31.532333Z node 10 :READ_TABLE_API DEBUG: [10:7491643644058526830:2346] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2025-04-10T11:23:31.533145Z node 10 :READ_TABLE_API DEBUG: [10:7491643644058526830:2346] got stream part, size: 35, RU required: 128 rate limiter absent 2025-04-10T11:23:31.533536Z node 10 :READ_TABLE_API DEBUG: [10:7491643644058526830:2346] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T11:23:31.533737Z node 10 :READ_TABLE_API NOTICE: [10:7491643644058526830:2346] Finish grpc stream, status: 400000 2025-04-10T11:23:31.539971Z node 10 :GRPC_SERVER DEBUG: [0x51a00005e280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.539981Z node 10 :GRPC_SERVER DEBUG: [0x51a000053480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540270Z node 10 :GRPC_SERVER DEBUG: [0x51a000074a80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540287Z node 10 :GRPC_SERVER DEBUG: [0x51a00005d680] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540569Z node 10 :GRPC_SERVER DEBUG: [0x51a000052880] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540575Z node 10 :GRPC_SERVER DEBUG: [0x51a00010c880] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540781Z node 10 :GRPC_SERVER DEBUG: [0x51a000075080] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540817Z node 10 :GRPC_SERVER DEBUG: [0x51a000075c80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.540997Z node 10 :GRPC_SERVER DEBUG: [0x51a00006ba80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541055Z node 10 :GRPC_SERVER DEBUG: [0x51a000050a80] received
request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541202Z node 10 :GRPC_SERVER DEBUG: [0x51a00010da80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541299Z node 10 :GRPC_SERVER DEBUG: [0x51a000051080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541454Z node 10 :GRPC_SERVER DEBUG: [0x51a000050480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541532Z node 10 :GRPC_SERVER DEBUG: [0x51a00010ec80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541691Z node 10 :GRPC_SERVER DEBUG: [0x51a00010e080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541832Z node 10 :GRPC_SERVER DEBUG: [0x51a000051c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:23:31.541925Z node 10 :GRPC_SERVER DEBUG: [0x51a000051680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::DecimalPK >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> YdbYqlClient::TestReadTableBatchLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284760.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284760.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284760.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284760.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283560.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283560.000000s;Name=;Codec=}; 2025-04-10T11:22:42.662216Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:42.761557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:42.792894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:42.793367Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:42.802356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:42.802663Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:42.802920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:42.803041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:42.803163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:42.803302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:42.803443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:42.803569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:42.803681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:42.803799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.803915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:42.804024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:42.834455Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:42.834861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:42.834938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:42.835165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.835381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:42.835476Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:42.835525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:42.835619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:42.835684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:42.835729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:42.835759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:42.835943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.836019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:42.836087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:42.836118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:42.836225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:42.836283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:42.836331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:42.836375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:42.836460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:42.836505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:42.836535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:42.836589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-10T11:22:42.836634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:42.836672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:42.837193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=92; 2025-04-10T11:22:42.837302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:22:42.837385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-10T11:22:42.837474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:22:42.837648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:42.837707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:42.837747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:42.837963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:42.838011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.838043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.838233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:42.838284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:42.838316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:42.838488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
cpp:29;PRECHARGE:finishLoadingTime=15; 2025-04-10T11:23:32.925496Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=287; 2025-04-10T11:23:32.925540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=26729; 2025-04-10T11:23:32.932136Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6516; 2025-04-10T11:23:32.938712Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5656; 2025-04-10T11:23:32.938834Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6600; 2025-04-10T11:23:32.939017Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2025-04-10T11:23:32.939147Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=71; 2025-04-10T11:23:32.939278Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=86; 2025-04-10T11:23:32.939389Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=64; 2025-04-10T11:23:32.945812Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6329; 2025-04-10T11:23:32.956532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10588; 2025-04-10T11:23:32.956681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=41; 2025-04-10T11:23:32.956761Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-04-10T11:23:32.956808Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-10T11:23:32.956853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-10T11:23:32.956899Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-10T11:23:32.956976Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-10T11:23:32.957024Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-10T11:23:32.957126Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-10T11:23:32.957175Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-10T11:23:32.957244Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2025-04-10T11:23:32.957353Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=64; 2025-04-10T11:23:32.957613Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=219; 2025-04-10T11:23:32.957662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=65950; 2025-04-10T11:23:32.957822Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:32.957939Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:32.957995Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:32.958062Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:32.967689Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:32.967855Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:32.967923Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:32.968011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-04-10T11:23:32.968086Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:32.968134Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:32.968190Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:32.968233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:32.968339Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:32.968966Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:32.969664Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1987:3887];tablet_id=9437184;parent=[1:1949:3856];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-04-10T11:23:32.970184Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:32.970461Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:32.970501Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:23:32.970529Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-04-10T11:23:32.970579Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-10T11:23:32.970642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:23:32.970711Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3;
2025-04-10T11:23:32.970779Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003;
2025-04-10T11:23:32.970829Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T11:23:32.970883Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T11:23:32.970930Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:23:32.971035Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T11:23:32.972529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1;
2025-04-10T11:23:32.973899Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1949:3856];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 80000/5203504 0/0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.964852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.964945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.964982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.965032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.965883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:23:12.965910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.965961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.966049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.967142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.035491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.035543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.048205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.048451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.048583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.055769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.056908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.060450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.068232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.076945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.082858Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.200658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.200817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.200974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.201129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.201175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.203137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.203241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.203368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.203403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.203428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.203451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.204871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.204908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.204936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.206115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.206172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.206210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.206240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.208677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-04-10T11:23:13.209962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.210123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.210842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.210945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.210985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.211188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.211223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.211346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.211433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.213208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.213249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.213421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.213473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.213706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.213746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.213848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.213877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.213912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.213955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.213988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.214022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.214063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-04-10T11:23:13.214095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.214170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.214206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.214252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.216390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.216493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.216529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... eadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:23:33.493934Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:23:33.493989Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:23:33.494050Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:23:33.494165Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:338:2317] message: TxId: 102 2025-04-10T11:23:33.494250Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:23:33.494314Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:23:33.494365Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:23:33.494532Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:23:33.496414Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:23:33.496484Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:339:2318] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-10T11:23:33.499906Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:33.500298Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:23:33.500967Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:23:33.501066Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T11:23:33.501127Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:23:33.501259Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:23:33.501401Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:23:33.501645Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:33.502377Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:33.502474Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:23:33.507404Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-10T11:23:33.507671Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-04-10T11:23:33.508023Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:33.508103Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:33.508395Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:23:33.508530Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:33.508607Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T11:23:33.508690Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:23:33.509408Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:23:33.509505Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-04-10T11:23:33.509912Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-10T11:23:33.511178Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 
2025-04-10T11:23:33.511351Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:33.511436Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:23:33.511503Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-10T11:23:33.511574Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:23:33.513730Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:33.513835Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:23:33.513869Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:23:33.513906Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-04-10T11:23:33.513943Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:23:33.514037Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:23:33.515329Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-10T11:23:33.515543Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-04-10T11:23:33.515620Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-10T11:23:33.516209Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-10T11:23:33.516549Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-04-10T11:23:33.517218Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-10T11:23:33.517313Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-10T11:23:33.517516Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 
2 TabletID: 72075186233409547 Origin: 72057594037968897
2025-04-10T11:23:33.517603Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944
2025-04-10T11:23:33.517732Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897
2025-04-10T11:23:33.517877Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3
2025-04-10T11:23:33.519622Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-10T11:23:33.522351Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-10T11:23:33.524959Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-10T11:23:33.525673Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-10T11:23:33.525765Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944
2025-04-10T11:23:33.525857Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944
2025-04-10T11:23:33.530361Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568
2025-04-10T11:23:33.530565Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568
2025-04-10T11:23:33.530645Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547
2025-04-10T11:23:33.530684Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546
TestModificationResult got TxId: 103, wait until txId: 103
>> YdbTableBulkUpsertOlap::UpsertCsvBug
>> YdbScripting::BasicV1 [GOOD]
>> YdbScripting::MultiResults
>> YdbYqlClient::TestTzTypesFullStack
>> YdbLogStore::LogStore
>> TTableProfileTests::ExplicitPartitionsUnordered [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyFormat
>> TGRpcYdbTest::CreateDeleteYqlSession [GOOD]
>> TSchemeShardTest::NewOwnerOnDatabase [GOOD]
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDK
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD]
Test command err:
2025-04-10T11:23:14.075287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643570415058562:2076];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:14.075423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d04/r3tmp/tmpqR0Iej/pdisk_1.dat
2025-04-10T11:23:14.489553Z node 1 :IMPORT WARN: Table profiles were not
loaded 2025-04-10T11:23:14.524053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.524221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11031, node 1 2025-04-10T11:23:14.564569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:14.572732Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:14.572868Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:14.654978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.655007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.655014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.655191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.061703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:15.143040Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jrfq16a397e90xwc4q52ysbz, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51284, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.974158s 2025-04-10T11:23:15.168493Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jrfq16ap7p8qz5cs4pwrr5dv, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51286, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:16.496513Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jrfq17mg0pzy5w73ww9793w5, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51290, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:16.498633Z node 1 :TX_PROXY DEBUG: actor# [1:7491643570415058792:2141] Handle TEvProposeTransaction 2025-04-10T11:23:16.498669Z node 1 :TX_PROXY DEBUG: actor# [1:7491643570415058792:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:23:16.498771Z node 1 :TX_PROXY DEBUG: actor# [1:7491643570415058792:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491643579004994163:2627] 2025-04-10T11:23:16.578671Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:51290" 2025-04-10T11:23:16.578727Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:23:16.579143Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:23:16.579213Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:23:16.579470Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:23:16.579638Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:23:16.579721Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:23:16.579845Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:23:16.581746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:16.583622Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-10T11:23:16.583672Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643579004994163:2627] txid# 281474976710658 SEND to# [1:7491643579004994162:2335] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-10T11:23:16.584276Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:16.584338Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:16.584408Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:16.584424Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:16.626193Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994239:2686], Recipient [1:7491643579004994400:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.626519Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994245:2692], Recipient [1:7491643579004994398:2342]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.627364Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994246:2693], Recipient [1:7491643579004994394:2338]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.627412Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994240:2687], Recipient [1:7491643579004994396:2340]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.627958Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994244:2691], Recipient [1:7491643579004994413:2348]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.628180Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994252:2699], Recipient [1:7491643579004994416:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.628394Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994251:2698], Recipient [1:7491643579004994402:2346]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.628854Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994241:2688], Recipient [1:7491643579004994412:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.628894Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994249:2696], Recipient [1:7491643579004994401:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.629309Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994237:2684], Recipient [1:7491643579004994417:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.629745Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994238:2685], Recipient [1:7491643579004994397:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.629825Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994248:2695], Recipient [1:7491643579004994399:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.630238Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994247:2694], Recipient [1:7491643579004994418:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.630370Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, 
Sender [1:7491643579004994250:2697], Recipient [1:7491643579004994415:2350]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.630599Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994242:2689], Recipient [1:7491643579004994414:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.630983Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643579004994243:2690], Recipient [1:7491643579004994395:2339]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:16.631905Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643579004994239:2686], Recipient [1:7491643579004994400:2344]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:16.631933Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643579004994251:2698], Recipient [1:7491643579004994402:2346]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:16.632337Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7491643579004994400:2344] 2025-04-10T11:23:16.632379Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7491643579004994402:2346] 2025-04-10T11:23:16.632622Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:16.632627Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:16.643492Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643579004994249:2696], Recipient [1:7491643579004994401:2345]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:16.643498Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643579004994246:2693], Recipient [1:7491643579004994394:2338]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:16.643918Z node 1 :TX_DATASHARD INFO: TDataShard: ... 
-10T11:23:33.327941Z node 10 :READ_TABLE_API DEBUG: [10:7491643653000650693:2400] Send zero quota to Shard 0, TxId 281474976715678
2025-04-10T11:23:33.327957Z node 10 :READ_TABLE_API DEBUG: [10:7491643653000650693:2400] Send zero quota to Shard 0, TxId 281474976715678
2025-04-10T11:23:33.327969Z node 10 :READ_TABLE_API DEBUG: [10:7491643653000650693:2400] Send zero quota to Shard 0, TxId 281474976715678
2025-04-10T11:23:33.327981Z node 10 :READ_TABLE_API DEBUG: [10:7491643653000650693:2400] Send zero quota to Shard 0, TxId 281474976715678
2025-04-10T11:23:33.328066Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714295:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328155Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714283:2338]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328192Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714309:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328312Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714297:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328314Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714288:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328425Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714291:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328448Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714285:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
2025-04-10T11:23:33.328555Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643653000650694:2400], Recipient [10:7491643644410714302:2346]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284213336 TxId: 281474976715678
---- batch start ---- [[0u];[0u];["A"]] ---- batch end ----
---- batch start ---- [[1u];[2u];["A"]] ---- batch end ----
---- batch start ---- [[2u];[4u];["A"]] ---- batch end ----
---- batch start ---- [[3u];[6u];["A"]] ---- batch end ----
---- batch start ---- [[4u];[8u];["A"]] ---- batch end ----
---- batch start ---- [[5u];[10u];["A"]] ---- batch end ----
---- batch start ---- [[6u];[12u];["A"]] ---- batch end ----
---- batch start ---- [[7u];[14u];["A"]] ---- batch end ----
---- batch start ---- [[8u];[16u];["A"]] ---- batch end ----
---- batch start ---- [[9u];[18u];["A"]] ---- batch end ----
---- batch start ---- [[10u];[20u];["A"]] ---- batch end ----
---- batch start ---- [[11u];[22u];["A"]] ---- batch end ----
---- batch start ---- [[12u];[24u];["A"]] ---- batch end ----
---- batch start ---- [[13u];[26u];["A"]] ---- batch end ----
---- batch start ---- [[14u];[28u];["A"]] ---- batch end ----
---- batch start ---- [[15u];[30u];["A"]] ---- batch end ----
---- batch start ---- [[16u];[32u];["A"]] ---- batch end ----
---- batch start ---- [[17u];[34u];["A"]] ---- batch end ----
---- batch start ---- [[18u];[36u];["A"]] ---- batch end ----
---- batch start ---- [[19u];[38u];["A"]] ---- batch end ----
---- batch start ---- [[20u];[40u];["A"]] ---- batch end ----
---- batch start ---- [[21u];[42u];["A"]] ---- batch end ----
---- batch start ---- [[22u];[44u];["A"]] ---- batch end ----
---- batch start ---- [[23u];[46u];["A"]] ---- batch end ----
---- batch start ---- [[24u];[48u];["A"]] ---- batch end ----
---- batch start ---- [[25u];[50u];["A"]] ---- batch end ----
---- batch start ---- [[26u];[52u];["A"]] ---- batch end ----
---- batch start ---- [[27u];[54u];["A"]] ---- batch end ----
---- batch start ---- [[28u];[56u];["A"]] ---- batch end ----
---- batch start ---- [[29u];[58u];["A"]] ---- batch end ----
---- batch start ---- [[30u];[60u];["A"]] ---- batch end ----
---- batch start ---- [[31u];[62u];["A"]] ---- batch end ----
---- batch start ---- [[32u];[64u];["A"]] ---- batch end ----
---- batch start ---- [[33u];[66u];["A"]] ---- batch end ----
---- batch start ---- [[34u];[68u];["A"]] ---- batch end ----
---- batch start ---- [[35u];[70u];["A"]] ---- batch end ----
---- batch start ---- [[36u];[72u];["A"]] ---- batch end ----
---- batch start ---- [[37u];[74u];["A"]] ---- batch end ----
---- batch start ---- [[38u];[76u];["A"]] ---- batch end ----
---- batch start ---- [[39u];[78u];["A"]] ---- batch end ----
---- batch start ---- [[40u];[80u];["A"]] ---- batch end ----
---- batch start ---- [[41u];[82u];["A"]] ---- batch end ----
---- batch start ---- [[42u];[84u];["A"]] ---- batch end ----
---- batch start ---- [[43u];[86u];["A"]] ---- batch end ----
---- batch start ---- [[44u];[88u];["A"]] ---- batch end ----
---- batch start ---- [[45u];[90u];["A"]] ---- batch end ----
---- batch start ---- [[46u];[92u];["A"]] ---- batch end ----
---- batch start ---- [[47u];[94u];["A"]] ---- batch end ----
---- batch start ---- [[48u];[96u];["A"]] ---- batch end ----
---- batch start ---- [[49u];[98u];["A"]] ---- batch end ----
---- batch start ---- [[50u];[100u];["A"]] ---- batch end ----
---- batch start ---- [[51u];[102u];["A"]] ---- batch end ----
---- batch start ---- [[52u];[104u];["A"]] ---- batch end ----
---- batch start ---- [[53u];[106u];["A"]] ---- batch end ----
---- batch start ---- [[54u];[108u];["A"]] ---- batch end ----
---- batch start ---- [[55u];[110u];["A"]] ---- batch end ----
---- batch start ---- [[56u];[112u];["A"]] ---- batch end ----
---- batch start ---- [[57u];[114u];["A"]] ---- batch end ----
---- batch start ---- [[58u];[116u];["A"]] ---- batch end ----
---- batch start ---- [[59u];[118u];["A"]] ---- batch end ----
---- batch start ---- [[60u];[120u];["A"]] ---- batch end ----
---- batch start ---- [[61u];[122u];["A"]] ---- batch end ----
---- batch start ---- [[62u];[124u];["A"]] ---- batch end ----
---- batch start ---- [[63u];[126u];["A"]] ---- batch end ----
---- batch start ---- [[64u];[128u];["A"]] ---- batch end ----
---- batch start ---- [[65u];[130u];["A"]] ---- batch end ----
---- batch start ---- [[66u];[132u];["A"]] ---- batch end ----
---- batch start ---- [[67u];[134u];["A"]] ---- batch end ----
---- batch start ---- [[68u];[136u];["A"]] ---- batch end ----
---- batch start ---- [[69u];[138u];["A"]] ---- batch end ----
---- batch start ---- [[70u];[140u];["A"]] ---- batch end ----
---- batch start ---- [[71u];[142u];["A"]] ---- batch end ----
---- batch start ---- [[72u];[144u];["A"]] ---- batch end ----
---- batch start ---- [[73u];[146u];["A"]] ---- batch end ----
---- batch start ---- [[74u];[148u];["A"]] ---- batch end ----
---- batch start ---- [[75u];[150u];["A"]] ---- batch end ----
---- batch start ---- [[76u];[152u];["A"]] ---- batch end ----
---- batch start ---- [[77u];[154u];["A"]] ---- batch end ----
---- batch start ---- [[78u];[156u];["A"]] ---- batch end ----
---- batch start ---- [[79u];[158u];["A"]] ---- batch end ----
---- batch start ---- [[80u];[160u];["A"]] ---- batch end ----
---- batch start ---- [[81u];[162u];["A"]] ---- batch end ----
---- batch start ---- [[82u];[164u];["A"]] ---- batch end ----
---- batch start ---- [[83u];[166u];["A"]] ---- batch end ----
---- batch start ---- [[84u];[168u];["A"]] ---- batch end ----
---- batch start ---- [[85u];[170u];["A"]] ---- batch end ----
---- batch start ---- [[86u];[172u];["A"]] ---- batch end ----
---- batch start ---- [[87u];[174u];["A"]] ---- batch end ----
---- batch start ---- [[88u];[176u];["A"]] ---- batch end ----
---- batch start ---- [[89u];[178u];["A"]] ---- batch end ----
---- batch start ---- [[90u];[180u];["A"]] ---- batch end ----
---- batch start ---- [[91u];[182u];["A"]] ---- batch end ----
---- batch start ---- [[92u];[184u];["A"]] ---- batch end ----
---- batch start ---- [[93u];[186u];["A"]] ---- batch end ----
---- batch start ---- [[94u];[188u];["A"]] ---- batch end ----
---- batch start ---- [[95u];[190u];["A"]] ---- batch end ----
---- batch start ---- [[96u];[192u];["A"]] ---- batch end ----
---- batch start ---- [[97u];[194u];["A"]] ---- batch end ----
---- batch start ---- [[98u];[196u];["A"]] ---- batch end ----
---- batch start ---- [[99u];[198u];["A"]] ---- batch end ----
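The rows dumped above follow a fixed arithmetic pattern: key = i, value = 2*i, and a constant text column "A" for i = 0..99. For reference, a minimal standalone C++ sketch (not taken from the test source; only the framing strings are copied from the dump) that regenerates the same output, e.g. to diff against a captured run:

#include <cstdint>
#include <iostream>

int main() {
    // Re-create the 100 single-row batches printed by the ReadTable dump:
    // [[i u];[2*i u];["A"]] wrapped in the "---- batch start/end ----" markers.
    for (std::uint64_t i = 0; i < 100; ++i) {
        std::cout << "---- batch start ---- [[" << i << "u];[" << 2 * i
                  << "u];[\"A\"]] ---- batch end ----\n";
    }
    return 0;
}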
2025-04-10T11:23:33.332136Z node 10 :GRPC_SERVER DEBUG: [0x51a00019a480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332143Z node 10 :GRPC_SERVER DEBUG: [0x51a000179480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332373Z node 10 :GRPC_SERVER DEBUG: [0x51a000179a80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332445Z node 10 :GRPC_SERVER DEBUG: [0x51a000198c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332543Z node 10 :GRPC_SERVER DEBUG: [0x51a000184280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332629Z node 10 :GRPC_SERVER DEBUG: [0x51a000181e80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332685Z node 10 :GRPC_SERVER DEBUG: [0x51a000045c80] received request Name# ResolveNode ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332763Z node 10 :GRPC_SERVER DEBUG: [0x51a00003de80] received request Name# FillNode ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332825Z node 10 :GRPC_SERVER DEBUG: [0x51a0000fd280] received request Name# DrainNode ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332908Z node 10 :GRPC_SERVER DEBUG: [0x51a000022280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.332953Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333038Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a0880] received request Name# TestShardControl ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333077Z node 10 :GRPC_SERVER DEBUG: [0x51a00018a880] received request Name# RegisterNode ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333157Z node 10 :GRPC_SERVER DEBUG: [0x51a000180c80] received request Name# CmsRequest ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333200Z node 10 :GRPC_SERVER DEBUG: [0x51a000180680] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333320Z node 10 :GRPC_SERVER DEBUG: [0x51a0000fcc80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0
2025-04-10T11:23:33.333363Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a0280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
>> YdbYqlClient::CreateTableWithMESettings [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts
>> YdbTableBulkUpsert::ValidRetry
>> YdbYqlClient::BuildInfo [GOOD]
>> YdbYqlClient::AlterTableAddIndexWithDataColumn
>> TColumnShardTestSchema::RebootHotTiersTtl [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:23:12.964866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:23:12.965020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:23:12.965072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:23:12.965109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:23:12.965917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:23:12.965972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:23:12.966061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:23:12.966177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:23:12.967213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:23:13.033120Z node 1
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.033168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.045259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.045544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.045686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.055343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.056942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.060465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.068134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.076922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.082651Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.174608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.175820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.177501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.178579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.178651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.181397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.182194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.182396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.182459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.182505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.182543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.184627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.184701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.184742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.186385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.186424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.186481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.186529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.190265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.191855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.192009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.193858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.193972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.194019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.195193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.195374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.195460Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.197791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.197839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.197982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.198257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.198789Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.198879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.198914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.198945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.198988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.199075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.199112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.199137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.199214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.199244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.199295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.200878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.200973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.201003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
594037968897 2025-04-10T11:23:35.255876Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 2 -> 3 2025-04-10T11:23:35.256692Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:23:35.257691Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-10T11:23:35.261270Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:23:35.261523Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:23:35.261594Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 107:0 ProgressState at tabletId# 72057594046678944 2025-04-10T11:23:35.261711Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 seqNo: 2:1 2025-04-10T11:23:35.262182Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 122 RawX2: 64424511588 } TxBody: "\n\223\004\n\005Table\020\005\032\r\n\003key\030\004 \001(\000@\000\032\020\n\005value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\r/MyRoot/Table\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\005:\004\010\002\020\001" TxId: 107 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } SubDomainPathId: 1 2025-04-10T11:23:35.266303Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269549568 2025-04-10T11:23:35.266521Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409549 TestModificationResult got TxId: 107, wait until txId: 107 TestModificationResults wait txId: 108 2025-04-10T11:23:35.349551Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } ApplyIf { PathTypes: 
EPathTypeSubDomain PathTypes: EPathTypeExtSubDomain } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:35.349853Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-10T11:23:35.350023Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, at schemeshard: 72057594046678944 2025-04-10T11:23:35.352918Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:35.353161Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2025-04-10T11:23:35.356630Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:35.356860Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-10T11:23:35.356991Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 5] name: Table type: EPathTypeTable state: EPathStateCreate stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:35.357047Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-10T11:23:35.357269Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:35.357361Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 109:0, at schemeshard: 72057594046678944 2025-04-10T11:23:35.357502Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-10T11:23:35.357560Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-10T11:23:35.357629Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-10T11:23:35.357683Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-10T11:23:35.357775Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:23:35.357868Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:23:35.357906Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: false 2025-04-10T11:23:35.357965Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T11:23:35.358012Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-10T11:23:35.358058Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-04-10T11:23:35.358116Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 109, publications: 2, subscribers: 0 2025-04-10T11:23:35.358188Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-04-10T11:23:35.358228Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-04-10T11:23:35.360682Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSuccess TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:35.360897Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 2025-04-10T11:23:35.361152Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:35.361219Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-10T11:23:35.361394Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:35.361595Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:35.361665Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 109, path id: 5 2025-04-10T11:23:35.361742Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 109, path id: 1 2025-04-10T11:23:35.362544Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-04-10T11:23:35.362693Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-04-10T11:23:35.362756Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 109 2025-04-10T11:23:35.362829Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-10T11:23:35.362890Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:23:35.363411Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 
PathOwnerId: 72057594046678944, cookie: 109 2025-04-10T11:23:35.363487Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 109 2025-04-10T11:23:35.363515Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 109 2025-04-10T11:23:35.363542Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-04-10T11:23:35.363570Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-10T11:23:35.363640Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 109, subscribers: 0 2025-04-10T11:23:35.366374Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-04-10T11:23:35.366961Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 TestModificationResult got TxId: 109, wait until txId: 109 >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> TGRpcClientLowTest::ChangeAcl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-04-10T11:23:14.103837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643570934684844:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:14.103903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d10/r3tmp/tmp36gsma/pdisk_1.dat 2025-04-10T11:23:14.464713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:14.497103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.497227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17456, node 1 2025-04-10T11:23:14.530238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:14.652335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.652362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.652368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.652476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20953 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.053458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:17.752654Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643581886467091:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:17.752732Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d10/r3tmp/tmpgtDpvd/pdisk_1.dat 2025-04-10T11:23:17.844511Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:17.867128Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:17.867207Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:17.869585Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3387, node 4 2025-04-10T11:23:17.916297Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:17.916318Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:17.916324Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:17.916444Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:18.091030Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:18.155110Z node 4 :TX_PROXY ERROR: Actor# [4:7491643586181435297:2599] txid# 281474976715658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-04-10T11:23:21.429739Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643602199865051:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:21.429833Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d10/r3tmp/tmpIhdtau/pdisk_1.dat 2025-04-10T11:23:21.537275Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:21.554059Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:21.554131Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:21.557094Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30140, node 7 2025-04-10T11:23:21.604779Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:21.604812Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:21.604820Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:21.604972Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13241 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:21.835492Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:25.193332Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643618143710684:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:25.193408Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d10/r3tmp/tmp7RBYfA/pdisk_1.dat 2025-04-10T11:23:25.294525Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:25.331173Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:25.331264Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:25.333609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13550, node 10 2025-04-10T11:23:25.386831Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:25.386855Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:25.386863Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:25.387011Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11384 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:25.655902Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:25.712840Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:25.867768Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:28.276921Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643631028614076:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.276921Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643631028614066:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.277043Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.281186Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:23:28.300226Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643631028614082:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:23:28.402097Z node 10 :TX_PROXY ERROR: Actor# [10:7491643631028614168:3077] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:29.158051Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq1k4j3xdzfvjw3cpd663x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGY3NjA0ZDUtYmRmZTI2NDUtNDQxOTUxMjgtYmUyMmQ4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:29.173181Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq1k4j3xdzfvjw3cpd663x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGY3NjA0ZDUtYmRmZTI2NDUtNDQxOTUxMjgtYmUyMmQ4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:29.357048Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq1m1d80pfvg85fqxsdjby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MjNhNWIwMGUtNTIyYzhkYmYtZTMyZjRhM2ItM2Q5Mjcw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:30.999175Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643639353405120:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:30.999357Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d10/r3tmp/tmpUrTVcq/pdisk_1.dat 2025-04-10T11:23:31.146467Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:31.187161Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:31.187257Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:31.190442Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25080, node 13 2025-04-10T11:23:31.250127Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:31.250175Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:31.250188Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:31.250353Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:23:31.529298Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD]
Test command err:
2025-04-10T11:23:14.067550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643568819863722:2206];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:14.068330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d08/r3tmp/tmpGE4nXQ/pdisk_1.dat
2025-04-10T11:23:14.490217Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:23:14.528133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:14.528324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:14.535747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15642, node 1
2025-04-10T11:23:14.670726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:23:14.670765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:23:14.670773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:23:14.670933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2598
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.064956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:16.616825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:18.198925Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643587889779190:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:18.199015Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d08/r3tmp/tmpTcTK3K/pdisk_1.dat 2025-04-10T11:23:18.287932Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:18.318749Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:18.318839Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:18.320884Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10640, node 4 2025-04-10T11:23:18.363408Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:18.363426Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:18.363432Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:18.363543Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:18.554703Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:21.043505Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:22.564318Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643606627112400:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:22.564400Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d08/r3tmp/tmpTB5luI/pdisk_1.dat 2025-04-10T11:23:22.665002Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:22.702501Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:22.702588Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:22.704886Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6638, node 7 2025-04-10T11:23:22.745840Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:22.745865Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:22.745874Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:22.746019Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18332 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:22.945303Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:25.190567Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:25.289085Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:25.326136Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:23:25.367195Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:23:26.758491Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643620650698517:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:26.758579Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d08/r3tmp/tmpIqMLxl/pdisk_1.dat 2025-04-10T11:23:26.870846Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:26.892586Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:26.892676Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:26.895667Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24087, node 10 2025-04-10T11:23:26.946137Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:26.946178Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:26.946186Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:26.946359Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:24804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:27.182950Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:29.503875Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:29.604427Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:29.611989Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-10T11:23:29.612086Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-10T11:23:31.125827Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643641882486860:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:31.126102Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d08/r3tmp/tmpNlxVn0/pdisk_1.dat 2025-04-10T11:23:31.300581Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:31.338600Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:31.338719Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:31.343195Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9210, node 13 2025-04-10T11:23:31.406933Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:31.406957Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:31.406966Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:31.407147Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7322 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:31.744444Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:34.715347Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService >> TColumnShardTestSchema::ColdTiers [GOOD] >> TGRpcNewClient::YqlQueryWithParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284761.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284761.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284761.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284761.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284761.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284761.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=144284761.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284761.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283561.000000s;Name=;Codec=}; 2025-04-10T11:22:42.062672Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:42.137154Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:42.155209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:42.155416Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:42.161367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:42.161568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:42.161746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:42.161822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:42.161894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:42.161973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:42.162030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:42.162102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:42.162198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:42.162271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.162335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:42.162390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:42.181565Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:42.181823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-04-10T11:22:42.181870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:42.181981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.182094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:42.182189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:42.182229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:42.182291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:42.182333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:42.182359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:42.182376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:42.182500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.182540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:42.182565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:42.182581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:42.182634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:42.182665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:42.182690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:42.182708Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:42.182760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-04-10T11:22:42.182795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:42.182813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:42.182847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:42.182871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:42.182889Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:42.183202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-04-10T11:22:42.183291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-04-10T11:22:42.183341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-10T11:22:42.183395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-04-10T11:22:42.183511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:42.183554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:42.183576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:42.183686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:42.183711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.183729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.183833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
nes;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=421; 2025-04-10T11:23:35.986376Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=39681; 2025-04-10T11:23:35.996024Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9530; 2025-04-10T11:23:36.004869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7792; 2025-04-10T11:23:36.004977Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8842; 2025-04-10T11:23:36.005123Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=80; 2025-04-10T11:23:36.005226Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=54; 2025-04-10T11:23:36.005399Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=106; 2025-04-10T11:23:36.005515Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2025-04-10T11:23:36.014461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8863; 2025-04-10T11:23:36.029926Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15323; 2025-04-10T11:23:36.030072Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-04-10T11:23:36.030165Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-04-10T11:23:36.030227Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-10T11:23:36.030272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-10T11:23:36.030317Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-10T11:23:36.030399Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2025-04-10T11:23:36.030446Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-10T11:23:36.030538Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-10T11:23:36.030583Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-10T11:23:36.030639Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=25; 2025-04-10T11:23:36.030724Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=48; 2025-04-10T11:23:36.031069Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=275; 2025-04-10T11:23:36.031112Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=92190; 2025-04-10T11:23:36.031288Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:36.031402Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:36.031453Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:36.031516Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:36.048464Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:36.048617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:36.048678Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:36.048761Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:36.048824Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:23:36.048869Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:36.048912Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:36.048948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:36.049053Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:36.049816Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:36.049899Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2595:4497];tablet_id=9437184;parent=[1:2545:4454];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-04-10T11:23:36.050646Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:36.050958Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:36.050994Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:23:36.051027Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:23:36.051066Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:36.051117Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:36.051168Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:36.051221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-10T11:23:36.051261Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:36.051302Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:36.051337Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:36.051411Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:36.052405Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1; 2025-04-10T11:23:36.053423Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2545:4454];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2025-04-10T11:23:14.080470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643571022933242:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:14.080540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d0f/r3tmp/tmp5gaOez/pdisk_1.dat 2025-04-10T11:23:14.487060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:14.497445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.497561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:14.504750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3617, node 1 2025-04-10T11:23:14.518433Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:14.522667Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:14.653273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.653315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.653328Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.653479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24174 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.053490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24174 TClient is connected to server localhost:24174 2025-04-10T11:23:15.526846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:16.632907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643579612868892:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.632909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643579612868866:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.633075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.637945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:23:16.654506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643579612868895:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:23:16.734931Z node 1 :TX_PROXY ERROR: Actor# [1:7491643579612868968:2712] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:24174 TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284195122 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284196690 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED) 2025-04-10T11:23:18.921726Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643586536349517:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:18.921854Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d0f/r3tmp/tmpTMTg1V/pdisk_1.dat 2025-04-10T11:23:19.088691Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:19.132424Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:19.132525Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:19.138996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10641, node 4 2025-04-10T11:23:19.205491Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:19.205515Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:19.205548Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:19.205752Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24902 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:19.479844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24902 TClient is connected to server localhost:24902 2025-04-10T11:23:20.039216Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:21.768239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643599421252444:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.768264Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643599421252467:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.768336Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:21.772272Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:23:21.791863Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643599421252472:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:23:21.862527Z node 4 :TX_PROXY ERROR: Actor# [4:7491643599421252547:2712] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:24902 TClient::Ls request: Root 2025-04-10T11:23:22.168835Z node 4 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied" 2025-04-10T11:23:23.468087Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643609618668938:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:23.468186Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d0f/r3tmp/tmpvooB42/pdisk_1.dat 2025-04-10T11:23:23.575362Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:23.601562Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:23.601625Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:23.605215Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10622, node 7 2025-04-10T11:23:23.654538Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:23.654569Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:23.654576Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:23.654730Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:23.929222Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:23.980830Z node 7 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator 2025-04-10T11:23:23.981402Z node 7 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0 2025-04-10T11:23:27.850027Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643624562150266:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:27.850101Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d0f/r3tmp/tmpZ1TgUu/pdisk_1.dat 2025-04-10T11:23:27.956502Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:27.986548Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:27.986629Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:27.989210Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3247, node 10 2025-04-10T11:23:28.044673Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:28.044706Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:28.044714Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:28.044863Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:28.337455Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:32.261792Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643647786475114:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:32.261906Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d0f/r3tmp/tmpNRCnbz/pdisk_1.dat 2025-04-10T11:23:32.372930Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:32.410386Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:32.410494Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:32.413656Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21760, node 13 2025-04-10T11:23:32.473038Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:32.473066Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:32.473075Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:32.473237Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:32.790889Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:18008 2025-04-10T11:23:33.160052Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284757.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284757.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284757.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284757.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283557.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284757.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283557.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283557.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283557.000000s;Name=;Codec=}; 2025-04-10T11:22:37.761938Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:37.840539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:37.858069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:37.858345Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:37.864874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:37.865018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:37.865185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:37.865309Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:37.865393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:37.865486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:37.865553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:37.865634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:37.865731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:37.865799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:37.865868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:37.865931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:37.885520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:37.885775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:37.885817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:37.885932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:37.886050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:37.886122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:37.886175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:37.886234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-10T11:22:37.886271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:37.886296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:37.886314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:37.886418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:37.886492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:37.886519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:37.886538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:37.886599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:37.886632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:37.886658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:37.886675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:37.886728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:37.886762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:37.886784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:37.886814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:37.886837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:37.886877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:37.887158Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-04-10T11:22:37.887215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-04-10T11:22:37.887266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-04-10T11:22:37.887334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-10T11:22:37.887457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:37.887498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:37.887521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:37.887650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:37.887684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:37.887706Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:37.887791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:37.887815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:23:37.294700Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:37.294744Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:37.294794Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:37.294839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:37.294953Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:37.295147Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: {1000000007:max} at tablet 9437184 2025-04-10T11:23:37.295254Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:37.295392Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:37.295443Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:37.295832Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:37.295905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:37.296356Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1388:3393];trace_detailed=; 2025-04-10T11:23:37.296700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:37.296898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:37.297037Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:37.297150Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:37.297400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:37.297491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:37.297594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:37.297628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1388:3393] finished for tablet 9437184 2025-04-10T11:23:37.297965Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1387:3392];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284217296288,"name":"_full_task","f":1744284217296288,"d_finished":0,"c":0,"l":1744284217297677,"d":1389},"events":[{"name":"bootstrap","f":1744284217296455,"d_finished":719,"c":1,"l":1744284217297174,"d":719},{"a":1744284217297380,"name":"ack","f":1744284217297380,"d_finished":0,"c":0,"l":1744284217297677,"d":297},{"a":1744284217297366,"name":"processing","f":1744284217297366,"d_finished":0,"c":0,"l":1744284217297677,"d":311},{"name":"ProduceResults","f":1744284217296973,"d_finished":391,"c":2,"l":1744284217297614,"d":391},{"a":1744284217297616,"name":"Finish","f":1744284217297616,"d_finished":0,"c":0,"l":1744284217297677,"d":61}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:37.298025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1387:3392];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:37.298430Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1387:3392];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284217296288,"name":"_full_task","f":1744284217296288,"d_finished":0,"c":0,"l":1744284217298063,"d":1775},"events":[{"name":"bootstrap","f":1744284217296455,"d_finished":719,"c":1,"l":1744284217297174,"d":719},{"a":1744284217297380,"name":"ack","f":1744284217297380,"d_finished":0,"c":0,"l":1744284217298063,"d":683},{"a":1744284217297366,"name":"processing","f":1744284217297366,"d_finished":0,"c":0,"l":1744284217298063,"d":697},{"name":"ProduceResults","f":1744284217296973,"d_finished":391,"c":2,"l":1744284217297614,"d":391},{"a":1744284217297616,"name":"Finish","f":1744284217297616,"d_finished":0,"c":0,"l":1744284217298063,"d":447}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1388:3393]->[1:1387:3392] 2025-04-10T11:23:37.298542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:37.295879Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:37.298595Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:37.298706Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1388:3393];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> YdbYqlClient::TestYqlIssues >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> TGRpcNewCoordinationClient::CreateDropDescribe >> YdbTableBulkUpsert::DecimalPK [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> YdbYqlClient::TestReadTableMultiShardWholeTable >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> TGRpcYdbTest::MakeListRemoveDirectory >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> YdbScripting::MultiResults [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Types >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::MultiResults [GOOD] Test command err: 2025-04-10T11:23:14.081759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643571913915702:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:14.082234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001d03/r3tmp/tmpyc9xK8/pdisk_1.dat 2025-04-10T11:23:14.458235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:14.510733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:14.510847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:14.516520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28022, node 1 2025-04-10T11:23:14.652233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:14.652258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:14.652265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:14.652420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:15.074207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:16.547239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:16.973969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643580503853091:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.973972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643580503853083:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.974057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:16.977112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:16.993145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643580503853097:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:17.051462Z node 1 :TX_PROXY ERROR: Actor# [1:7491643584798820463:4165] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:17.490019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq183banmg0y70cbzw33dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3MDhlNTMtNWIwM2FmMzAtMWRhN2MwYWYtODExZGZjM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS 2025-04-10T11:23:18.899143Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643585529061988:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:18.899282Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d03/r3tmp/tmpuXJxlF/pdisk_1.dat 2025-04-10T11:23:19.089173Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:19.140136Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:19.140222Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:19.144296Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23018, node 4 2025-04-10T11:23:19.208120Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:19.208150Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:19.208158Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:19.208333Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:19.482049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:21.945548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:22.327139Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643602708934143:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.327170Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643602708934135:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.327236Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:22.329809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:22.342944Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643602708934149:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:22.445208Z node 4 :TX_PROXY ERROR: Actor# [4:7491643602708934241:4166] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:22.549880Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1dap68xytt619d3hxgmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWEyOTQzNTctZWE1YzA1YTAtNzQ3Y2E1MTctMWFiMDk0MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:22.581021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:18465 2025-04-10T11:23:24.395913Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643612614656240:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:24.396018Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d03/r3tmp/tmpk9Z9ti/pdisk_1.dat 2025-04-10T11:23:24.550370Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:24.581836Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:24.581937Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:24.584352Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31181, node 7 2025-04-10T11:23:24.636442Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:24.636464Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:24.636470Z node 7 :NET_CLASSIFIER WARN: ... SIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:29.796619Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:29.796630Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:29.796830Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:29.979262Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:33.188654Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643650194405257:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:33.188754Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:33.262229Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:33.347611Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643650194405420:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:33.347705Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:33.347798Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643650194405425:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:33.352034Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:33.373540Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643650194405427:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:33.470947Z node 10 :TX_PROXY ERROR: Actor# [10:7491643650194405506:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:33.517343Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq1mvpd3m9h3angzm3zkjq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGZkNDRmMTQtNGIzZTQyYzItNjBiZDFkMzUtNDM2NjE4M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:33.612400Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq1r8r5xtgt8wffh7sa8jc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MjkxNWRlNS1kNzg1NjllNi00OGVhZjA1MC0yZDlmNzEwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:33.616372Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284213651, txId: 281474976710662] shutting down 2025-04-10T11:23:34.996265Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643657597221919:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:34.996376Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d03/r3tmp/tmpVe7FPm/pdisk_1.dat 2025-04-10T11:23:35.123253Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:35.158473Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:35.158565Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:35.160722Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12920, node 13 2025-04-10T11:23:35.217198Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:35.217231Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:35.217241Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:35.217418Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22730 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:35.472360Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:38.504754Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643674777092106:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.504852Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.787481Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:38.870266Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643674777092294:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.870346Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.870501Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643674777092299:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.873404Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:38.888941Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643674777092301:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:38.956398Z node 13 :TX_PROXY ERROR: Actor# [13:7491643674777092372:2797] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:39.000675Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1t762yk66v79rqtfg611, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTAxZDQ5YjgtN2JmMTNmYWEtMWZhODQ4My1lOTBiYzE5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:39.107563Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq1t762yk66v79rqtfg611, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ODNiMDllYi00NmIxOWJmNi0zZjY1NzA5Ni1mOTc2NzM0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:39.162498Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq1t762yk66v79rqtfg611, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjE2YTQyMWUtMjQ5YmYxNjAtMWI2MjBiZC1lNGU5MTYxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:39.307136Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq1t762yk66v79rqtfg611, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTdmNjcxYi03Mjk1NDllYy1lYWNhMjBkOS04N2MzZGQyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144284759.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284759.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284759.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283559.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283559.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283559.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283559.000000s;Name=;Codec=}; 2025-04-10T11:22:40.094061Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:40.163096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:40.180994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:40.181215Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:40.187459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:40.187618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:40.187785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:40.187873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:40.187946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:40.188129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:40.188194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:40.188271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:40.188341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-04-10T11:22:40.188413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:40.188479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:40.188543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:40.206540Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:40.206836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:40.206900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:40.207057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:40.207186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:40.207256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:40.207293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:40.207366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:40.207408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:40.207440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:40.207463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:40.207585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:40.207656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:40.207689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:40.207710Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:40.207779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:40.207825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:40.207857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:40.207890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:40.207945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:40.207974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:40.207998Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:40.208045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:40.208076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:40.208100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:40.208428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-10T11:22:40.208490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-04-10T11:22:40.208556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-10T11:22:40.208631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-10T11:22:40.208757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:40.208821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:40.208864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:40.209040Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:40.209077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:40.209100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... et_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:41.610910Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:41.610973Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:41.611041Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:41.611098Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:41.611229Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:41.611541Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:23:41.611704Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:41.611904Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:41.611979Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:41.612498Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:41.612611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:41.613185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1962:3967];trace_detailed=; 2025-04-10T11:23:41.613729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:41.613995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:41.614214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:41.614354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:41.614784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:41.614911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-10T11:23:41.615061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:41.615113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1962:3967] finished for tablet 9437184 2025-04-10T11:23:41.615626Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1961:3966];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284221613107,"name":"_full_task","f":1744284221613107,"d_finished":0,"c":0,"l":1744284221615191,"d":2084},"events":[{"name":"bootstrap","f":1744284221613397,"d_finished":994,"c":1,"l":1744284221614391,"d":994},{"a":1744284221614753,"name":"ack","f":1744284221614753,"d_finished":0,"c":0,"l":1744284221615191,"d":438},{"a":1744284221614731,"name":"processing","f":1744284221614731,"d_finished":0,"c":0,"l":1744284221615191,"d":460},{"name":"ProduceResults","f":1744284221614101,"d_finished":563,"c":2,"l":1744284221615094,"d":563},{"a":1744284221615096,"name":"Finish","f":1744284221615096,"d_finished":0,"c":0,"l":1744284221615191,"d":95}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:41.615710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1961:3966];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:41.616188Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1961:3966];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284221613107,"name":"_full_task","f":1744284221613107,"d_finished":0,"c":0,"l":1744284221615765,"d":2658},"events":[{"name":"bootstrap","f":1744284221613397,"d_finished":994,"c":1,"l":1744284221614391,"d":994},{"a":1744284221614753,"name":"ack","f":1744284221614753,"d_finished":0,"c":0,"l":1744284221615765,"d":1012},{"a":1744284221614731,"name":"processing","f":1744284221614731,"d_finished":0,"c":0,"l":1744284221615765,"d":1034},{"name":"ProduceResults","f":1744284221614101,"d_finished":563,"c":2,"l":1744284221615094,"d":563},{"a":1744284221615096,"name":"Finish","f":1744284221615096,"d_finished":0,"c":0,"l":1744284221615765,"d":669}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1962:3967]->[1:1961:3966] 2025-04-10T11:23:41.616297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:41.612574Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:41.616350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:41.616472Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TColumnShardTestSchema::OneColdTier [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> YdbOlapStore::LogNonExistingRequest 
[GOOD] >> YdbOlapStore::LogNonExistingUserId >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed >> TGRpcYdbTest::DropTableBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284773.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=144284773.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284773.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124284773.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283573.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=124283573.000000s;Name=;Codec=}; 2025-04-10T11:22:54.879102Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:54.948033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:54.963305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:54.963522Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:54.970028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:54.970272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:54.970512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:54.970639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:54.970759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:54.970888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:54.970983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:54.971113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:54.971224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:54.971330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:54.971431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:54.971523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:54.999938Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:55.000260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:55.000318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:55.000487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:55.000656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:55.000739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:55.000785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:55.000879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:55.000944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:55.000990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:55.001028Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:55.001183Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:55.001242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:55.001303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:55.001336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:55.001453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:55.001523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:55.001618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:55.001660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:55.001755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:55.001801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:55.001836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:55.001887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:55.001933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:55.001970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:55.002380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:22:55.002462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:22:55.002533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-10T11:22:55.002624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-10T11:22:55.002799Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:55.002881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:55.002922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:55.003120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:55.003163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:55.003190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:55.003348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:55.003388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:55.003415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:55.003674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
2995289,"d_finished":0,"c":0,"l":1744284222995934,"d":645},{"name":"task_result","f":1744284222419109,"d_finished":238626,"c":28,"l":1744284222991745,"d":238626}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1280:3287]->[1:1279:3286] 2025-04-10T11:23:42.996459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:42.410397Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-10T11:23:42.996500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:42.996760Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1280:3287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:42.998728Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-10T11:23:42.999034Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-04-10T11:23:42.999172Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:23:42.999349Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:42.999418Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:42.999915Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:43.000016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:43.000555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1296:3303];trace_detailed=; 2025-04-10T11:23:43.001001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:43.001258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:43.001473Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.001621Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.001957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:43.002077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-04-10T11:23:43.002242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.002304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1296:3303] finished for tablet 9437184 2025-04-10T11:23:43.002772Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1295:3302];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284223000485,"name":"_full_task","f":1744284223000485,"d_finished":0,"c":0,"l":1744284223002379,"d":1894},"events":[{"name":"bootstrap","f":1744284223000688,"d_finished":967,"c":1,"l":1744284223001655,"d":967},{"a":1744284223001930,"name":"ack","f":1744284223001930,"d_finished":0,"c":0,"l":1744284223002379,"d":449},{"a":1744284223001909,"name":"processing","f":1744284223001909,"d_finished":0,"c":0,"l":1744284223002379,"d":470},{"name":"ProduceResults","f":1744284223001381,"d_finished":566,"c":2,"l":1744284223002281,"d":566},{"a":1744284223002285,"name":"Finish","f":1744284223002285,"d_finished":0,"c":0,"l":1744284223002379,"d":94}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.002855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1295:3302];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:43.003316Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1295:3302];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284223000485,"name":"_full_task","f":1744284223000485,"d_finished":0,"c":0,"l":1744284223002907,"d":2422},"events":[{"name":"bootstrap","f":1744284223000688,"d_finished":967,"c":1,"l":1744284223001655,"d":967},{"a":1744284223001930,"name":"ack","f":1744284223001930,"d_finished":0,"c":0,"l":1744284223002907,"d":977},{"a":1744284223001909,"name":"processing","f":1744284223001909,"d_finished":0,"c":0,"l":1744284223002907,"d":998},{"name":"ProduceResults","f":1744284223001381,"d_finished":566,"c":2,"l":1744284223002281,"d":566},{"a":1744284223002285,"name":"Finish","f":1744284223002285,"d_finished":0,"c":0,"l":1744284223002907,"d":622}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1296:3303]->[1:1295:3302] 2025-04-10T11:23:43.003426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:42.999986Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:43.003481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:43.003605Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1296:3303];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284759.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284759.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283559.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284759.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284759.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283559.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283559.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283559.000000s;Name=;Codec=}; 2025-04-10T11:22:39.791675Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:39.860985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:39.876957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:39.877173Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:39.883934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:39.884131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:39.884343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:39.884463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:39.884574Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:39.884671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:39.884773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:39.884883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:39.884978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:39.885076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.885173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:39.885265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:39.907713Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:39.908074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:39.908131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:39.908294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.908451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:39.908525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:39.908567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:39.908649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:39.908703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-10T11:22:39.908740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:39.908771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:39.908909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.908991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:39.909030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:39.909061Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:39.909140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:39.909188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:39.909239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:39.909274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:39.909366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:39.909403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:39.909433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:39.909477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:39.909512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:39.909552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:39.909937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-04-10T11:22:39.910007Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-10T11:22:39.910078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-10T11:22:39.910175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-10T11:22:39.910327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:39.910402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:39.910440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:39.910627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:39.910669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.910697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... =9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:23:43.314433Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:43.314501Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:43.314570Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.314635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.314786Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:43.315090Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-10T11:23:43.315248Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 
2025-04-10T11:23:43.315442Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:43.315522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-10T11:23:43.316087Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-04-10T11:23:43.316202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:23:43.316833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:1976:3981];trace_detailed=; 2025-04-10T11:23:43.317424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-10T11:23:43.317727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-10T11:23:43.317938Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.318102Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.318531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:23:43.318680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.318842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.318899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1976:3981] finished for tablet 9437184 2025-04-10T11:23:43.319462Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1975:3980];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1744284223316753,"name":"_full_task","f":1744284223316753,"d_finished":0,"c":0,"l":1744284223318983,"d":2230},"events":[{"name":"bootstrap","f":1744284223317044,"d_finished":1094,"c":1,"l":1744284223318138,"d":1094},{"a":1744284223318498,"name":"ack","f":1744284223318498,"d_finished":0,"c":0,"l":1744284223318983,"d":485},{"a":1744284223318473,"name":"processing","f":1744284223318473,"d_finished":0,"c":0,"l":1744284223318983,"d":510},{"name":"ProduceResults","f":1744284223317837,"d_finished":607,"c":2,"l":1744284223318878,"d":607},{"a":1744284223318881,"name":"Finish","f":1744284223318881,"d_finished":0,"c":0,"l":1744284223318983,"d":102}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:23:43.319556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1975:3980];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:23:43.320050Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1975:3980];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1744284223316753,"name":"_full_task","f":1744284223316753,"d_finished":0,"c":0,"l":1744284223319613,"d":2860},"events":[{"name":"bootstrap","f":1744284223317044,"d_finished":1094,"c":1,"l":1744284223318138,"d":1094},{"a":1744284223318498,"name":"ack","f":1744284223318498,"d_finished":0,"c":0,"l":1744284223319613,"d":1115},{"a":1744284223318473,"name":"processing","f":1744284223318473,"d_finished":0,"c":0,"l":1744284223319613,"d":1140},{"name":"ProduceResults","f":1744284223317837,"d_finished":607,"c":2,"l":1744284223318878,"d":607},{"a":1744284223318881,"name":"Finish","f":1744284223318881,"d_finished":0,"c":0,"l":1744284223319613,"d":732}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1976:3981]->[1:1975:3980] 2025-04-10T11:23:43.320160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:43.316163Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:23:43.320221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:23:43.320354Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1976:3981];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284756.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284756.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284756.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284756.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283556.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284756.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284756.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283556.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283556.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283556.000000s;Name=;Codec=}; 2025-04-10T11:22:36.826296Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:36.916047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:36.935585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:36.935840Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:36.942537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:36.942733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:36.942925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:36.943018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:36.943115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:36.943215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:36.943282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:36.943385Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:36.943471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:36.943549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:36.943623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:36.943694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:36.968151Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:36.968511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:36.968587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:36.968733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:36.968883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:36.968966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:36.969002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:36.969069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:36.969116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:36.969150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:36.969171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:36.969326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:36.969398Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:36.969437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:36.969475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:36.969546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:36.969589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:36.969621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:36.969652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:36.969730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:36.969768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:36.969800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:36.969840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:36.969870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:36.969907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:36.970406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=98; 2025-04-10T11:22:36.970493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:22:36.970582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-10T11:22:36.970676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-10T11:22:36.970862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:36.970922Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:36.970952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:36.971154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:36.971192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:36.971218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:36.971387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:36.971441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... _data.cpp:29;EXECUTE:finishLoadingTime=290; 2025-04-10T11:23:43.188059Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=31373; 2025-04-10T11:23:43.194565Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6427; 2025-04-10T11:23:43.201158Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5573; 2025-04-10T11:23:43.201270Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6613; 2025-04-10T11:23:43.201444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=95; 2025-04-10T11:23:43.201555Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2025-04-10T11:23:43.201713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=105; 2025-04-10T11:23:43.201838Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=77; 2025-04-10T11:23:43.208922Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7009; 2025-04-10T11:23:43.218803Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9756; 2025-04-10T11:23:43.218948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-04-10T11:23:43.219033Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-04-10T11:23:43.219086Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-10T11:23:43.219135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-10T11:23:43.219182Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:23:43.219270Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-04-10T11:23:43.219317Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-10T11:23:43.219415Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=59; 2025-04-10T11:23:43.219463Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-10T11:23:43.219537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-10T11:23:43.219631Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2025-04-10T11:23:43.219987Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=319; 2025-04-10T11:23:43.220033Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=71181; 2025-04-10T11:23:43.220190Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:43.220306Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:43.220364Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:43.220434Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:43.241769Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:43.241930Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:43.241994Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:43.242076Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:23:43.242162Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:43.242211Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:43.242261Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.242302Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.242403Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:43.242888Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:43.242965Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2208:4081];tablet_id=9437184;parent=[1:2168:4048];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-04-10T11:23:43.243507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:43.243756Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:43.243789Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:23:43.243833Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:23:43.243894Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:43.243959Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:43.244018Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:23:43.244080Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:43.244127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:43.244176Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.244215Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:43.244300Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:43.245662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-04-10T11:23:43.246672Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TGRpcYdbTest::ExecuteQueryBadRequest >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> 
YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> TTableProfileTests::UseTableProfilePreset >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> YdbQueryService::TestCreateAndAttachSession >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::CheckUnauthorized >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::CreateYqlSession >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::OperationTimeout >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::AlterTableAddIndex >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-04-10T11:23:18.453477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643586269198639:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:18.453663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001cff/r3tmp/tmp6ftSCN/pdisk_1.dat 2025-04-10T11:23:18.769760Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31876, node 1 2025-04-10T11:23:18.803475Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:18.803514Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:18.824509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:18.824625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:18.826374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:18.826399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:18.826417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:18.826554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:18.828522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:19.133303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:25074 2025-04-10T11:23:19.375598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:19.404175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:19.908746Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643590866917823:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:19.908802Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:19.924015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:19.924091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:19.926970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:23:19.927955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25074 2025-04-10T11:23:20.177368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25074 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284200700 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:23:21.028635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25074 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744284201250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-10T11:23:21.521115Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:23:21.524489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:23:23.304249Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643610434812134:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:23.304398Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cff/r3tmp/tmp7EPC0i/pdisk_1.dat 2025-04-10T11:23:23.436064Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:23.469053Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:23.469121Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:23.471922Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27974, node 4 2025-04-10T11:23:23.528597Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:23.528628Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:23.528636Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:23.528826Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:23.801001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19964 2025-04-10T11:23:24.079913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:24.096322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:24.600534Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491643614023002189:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:24.600608Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:24.618472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:24.618566Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:24.621368Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-10T11:23:24.622278Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19964 2025-04-10T11:2 ... .506125Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:32.506741Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:33.507222Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:35.953648Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643662018773059:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:35.953750Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cff/r3tmp/tmpTDVNyC/pdisk_1.dat 2025-04-10T11:23:36.097435Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:36.136806Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:36.136910Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:36.140328Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17293, node 10 2025-04-10T11:23:36.200837Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:36.200863Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:36.200872Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:36.201063Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2871 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:36.532566Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2871 2025-04-10T11:23:36.915234Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:36.941796Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:37.447352Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491643668797986876:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:37.447447Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:37.451177Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:37.451292Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:37.454497Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-10T11:23:37.467514Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2871 2025-04-10T11:23:37.804585Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-10T11:23:37.804987Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:23:38.448994Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:39.449615Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:40.450847Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:42.541491Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643690191814961:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.542113Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cff/r3tmp/tmpvOMNZu/pdisk_1.dat 2025-04-10T11:23:42.703764Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.753471Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.753575Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.755754Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3756, node 13 2025-04-10T11:23:42.822866Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.822891Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.822903Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.823085Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:43.213380Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21029 2025-04-10T11:23:43.648914Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:43.680900Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:44.195260Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7491643698017906881:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:44.195383Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:44.228321Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.228497Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:44.240853Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-10T11:23:44.247313Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21029 2025-04-10T11:23:44.779082Z node 13 :TX_PROXY ERROR: Actor# [13:7491643698781750840:2897] txid# 281474976715660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-04-10T11:23:44.787688Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-10T11:23:44.788334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:23:45.198347Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:46.199218Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:47.199670Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:48.204004Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TYqlDecimalTests::DecimalKey >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-04-10T11:23:25.212840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643615853275782:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:25.212955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfd/r3tmp/tmpAbn9Xg/pdisk_1.dat 
2025-04-10T11:23:25.561657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6311, node 1 2025-04-10T11:23:25.586746Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:25.586847Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:25.607308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:25.607460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:25.616606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:25.616635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:25.616649Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:25.616819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:25.617997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:25.936151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:27.730734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643624443211388:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:27.730849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.104192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:28.234929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643628738178874:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.234981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.235018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643628738178879:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:28.238945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:28.256438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643628738178881:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:28.361045Z node 1 :TX_PROXY ERROR: Actor# [1:7491643628738178958:2820] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:28.411827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq1gy27v07p03ryk7bezz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYyNzAzNDYtMTU4ZjUzMjctZDgwMGVmZDItYmNmNDAzZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:28.535219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq1k9z5vfnpnxz8v9s0m3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjYWYxMjYtNmE4ZjQ2M2EtNDg0MjNjMmUtOTRiYjRjZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:28.540085Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284208576, txId: 281474976710662] shutting down 2025-04-10T11:23:30.150463Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643638667314873:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:30.150964Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfd/r3tmp/tmpVRbBtB/pdisk_1.dat 2025-04-10T11:23:30.293053Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:30.328823Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:30.328927Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:30.331443Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17281, node 4 2025-04-10T11:23:30.400984Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:30.401008Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:30.401013Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:30.401126Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10389 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:30.609777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:32.866260Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:17281 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation 2025-04-10T11:23:34.462993Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643655228361789:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:34.463063Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfd/r3tmp/tmpWabnXE/pdisk_1.dat 2025-04-10T11:23:34.562296Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:34.589600Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:34.589671Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:34.592123Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14900, node 7 2025-04-10T11:23:34.641639Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:34.641664Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:34.641672Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:34.641805Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:34.829878Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:37.519841Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:37.633167Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643668113264869:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.633203Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643668113264863:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.633312Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.636816Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:37.656228Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643668113264880:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:37.754366Z node 7 :TX_PROXY ERROR: Actor# [7:7491643668113264961:2800] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:37.953254Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1w8yf2j289abrje9drr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGJhZjU0ZWYtNmUwYzk3YzAtYzg3MWNkMzItMzI1ODZmOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:39.316422Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643677579017389:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:39.317039Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfd/r3tmp/tmp4UZwTC/pdisk_1.dat 2025-04-10T11:23:39.459772Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:39.500211Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:39.500307Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:39.503148Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14928, node 10 2025-04-10T11:23:39.558361Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:39.558384Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:39.558393Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:39.558544Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:39.936521Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:43.014511Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
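The message above comes from YdbTableBulkUpsert::AsyncIndexShouldSucceed: BulkUpsert refuses a table whose secondary index is synchronous, and the next message shows that writing straight into the index implementation table is likewise rejected. Together with the BAD_REQUEST run earlier (invalid Decimal, Date, Datetime, Timestamp, Interval, Yson, Json, JsonDocument and DyNumber values), this outlines the validation BulkUpsert performs. A hedged sketch with the ydb Python SDK, with endpoint, paths and values as assumptions:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
    driver.wait(timeout=5)

    # Column types are declared up front; a value that cannot be encoded as the
    # declared type is rejected with BAD_REQUEST, as in the log above.
    column_types = (
        ydb.BulkUpsertColumns()
        .add_column("Key", ydb.OptionalType(ydb.PrimitiveType.Uint64))
        .add_column("Doc", ydb.OptionalType(ydb.PrimitiveType.Json))
    )
    rows = [{"Key": 1, "Doc": '{"a": 1}'}]  # well-formed JSON text for the Json column
    driver.table_client.bulk_upsert("/Root/TestInvalidData", rows, column_types)

    # For a table with a secondary index, BulkUpsert requires the index to be
    # asynchronous, e.g. (sketch): ALTER TABLE `/Root/ui8` ADD INDEX Value_index
    # GLOBAL ASYNC ON (Value);
    driver.stop()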
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfd/r3tmp/tmpVj6vGz/pdisk_1.dat 2025-04-10T11:23:44.655935Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:23:44.734467Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:44.773508Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.773621Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:44.777217Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5285, node 13 2025-04-10T11:23:44.827715Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:44.827740Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:44.827749Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:44.827909Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:45.029840Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:48.155962Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:49.375523Z node 13 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table
>> YdbYqlClient::CheckDefaultTableSettings2 [GOOD]
>> YdbYqlClient::CheckDefaultTableSettings3
>> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD]
>> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore
>> TTableProfileTests::UseTableProfilePreset [GOOD]
>> TTableProfileTests::UseTableProfilePresetViaSdk
>> TGRpcYdbTest::CreateYqlSession [GOOD]
>> TGRpcYdbTest::ExecuteDmlQuery
>> TGRpcYdbTest::GetOperationBadRequest
>> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD]
>> TGRpcYdbTest::ExecuteQueryExplicitSession
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts
>> YdbImport::Simple
>> TGRpcYdbTest::OperationTimeout [GOOD]
>> TGRpcYdbTest::OperationCancelAfter
>> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD]
>> YdbYqlClient::TestYqlLongSessionMultipleErrors
>> TColumnShardTestSchema::RebootColdTiers [GOOD]
>> YdbYqlClient::TestReadWrongTable [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer
>> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD]
>> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit
>> TSchemeShardTest::DropPQAbort [GOOD]
>> TSchemeShardTest::ManyDirs
>> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::RebootColdTiers [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284766.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284766.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284766.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284766.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284766.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284766.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283566.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284766.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284766.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283566.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283566.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283566.000000s;Name=;Codec=};
2025-04-10T11:22:46.933149Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T11:22:47.009662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T11:22:47.029976Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:47.030219Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:47.036185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:47.036337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:47.036521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:47.036604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:47.036682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:47.036744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:47.036802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:47.036891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:47.036961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:47.037027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:47.037090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:47.037148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:47.056076Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:47.056442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:47.056497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
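The Tiers=...EvictAfter=... strings at the start of this block are the test's own dump of its tiering configuration: rows migrate from tier0 to tier1 and are eventually dropped once the timestamp column crosses the EvictAfter threshold. Outside this unit test, plain row TTL is expressed in YQL roughly as below (a sketch via the ydb Python SDK; the endpoint, table and column names are assumptions, and the test-internal tier plumbing is not reproduced):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def set_ttl(session):
        # Rows whose `timestamp` value is older than one hour become eligible for eviction.
        session.execute_scheme("""
            ALTER TABLE `/Root/ttl_demo` SET (TTL = Interval("PT1H") ON timestamp);
        """)

    pool.retry_operation_sync(set_ttl)
    driver.stop()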
2025-04-10T11:22:47.056658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:47.056817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:47.056898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:47.056939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:47.057024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:47.057076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:47.057115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:47.057146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:47.057306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:47.057396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:47.057440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:47.057470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:47.057557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:47.057606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:47.057642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:47.057668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:47.057748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:47.057798Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:47.057825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:47.057872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:47.057910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:47.057942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:47.058383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-10T11:22:47.058468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:22:47.058542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-10T11:22:47.058615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-10T11:22:47.058785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:47.058845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:47.058879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:47.059054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:47.059094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:47.059123Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:47.059278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:47.059325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
ta.cpp:29;EXECUTE:finishLoadingTime=255; 2025-04-10T11:23:54.391690Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=32441; 2025-04-10T11:23:54.398435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6647; 2025-04-10T11:23:54.405639Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5950; 2025-04-10T11:23:54.405778Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7216; 2025-04-10T11:23:54.405996Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2025-04-10T11:23:54.406140Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=80; 2025-04-10T11:23:54.406329Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-04-10T11:23:54.406465Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=86; 2025-04-10T11:23:54.415154Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8605; 2025-04-10T11:23:54.426250Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10950; 2025-04-10T11:23:54.426442Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=63; 2025-04-10T11:23:54.426542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-04-10T11:23:54.426595Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-10T11:23:54.426649Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-10T11:23:54.426698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:23:54.426783Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-10T11:23:54.426836Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-10T11:23:54.426948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=71; 2025-04-10T11:23:54.427012Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-10T11:23:54.427090Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-04-10T11:23:54.427207Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=73; 2025-04-10T11:23:54.427604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=346; 2025-04-10T11:23:54.427657Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=77150; 2025-04-10T11:23:54.427829Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:54.427963Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:54.428025Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:54.428103Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:54.448556Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:54.448748Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:54.448819Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:54.448902Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:23:54.448976Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:54.449027Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:54.449083Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:54.449127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:54.449234Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:54.449822Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:54.449913Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2208:4081];tablet_id=9437184;parent=[1:2168:4048];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-04-10T11:23:54.451026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:54.452197Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:54.452238Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:23:54.452269Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:23:54.452321Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:54.452400Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:54.452491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-04-10T11:23:54.452572Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:54.452625Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:54.452687Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:54.452735Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:23:54.452846Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T11:23:54.453207Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1;
2025-04-10T11:23:54.454396Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2168:4048];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0
>> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword
>> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD]
>> YdbQueryService::TestCreateDropAttachSession
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
>> YdbYqlClient::TestReadWrongTable [GOOD]
Test command err:
2025-04-10T11:23:35.087382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643659951625094:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:35.087518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ced/r3tmp/tmpZXt83x/pdisk_1.dat
2025-04-10T11:23:35.384287Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11351, node 1
2025-04-10T11:23:35.442667Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:23:35.442705Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:23:35.444575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:35.444685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:35.467993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:23:35.481432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:23:35.481457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:23:35.481465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:23:35.481580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected
to server localhost:4750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:35.793653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:38.182770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643672836528010:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.182771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643672836528018:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.182865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.185966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:23:38.202211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643672836528024:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:23:38.297028Z node 1 :TX_PROXY ERROR: Actor# [1:7491643672836528099:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:40.306511Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643681130431002:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.345290Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ced/r3tmp/tmpqNXqpV/pdisk_1.dat 2025-04-10T11:23:40.476186Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:40.513424Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:40.513502Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:40.517948Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13977, node 4 2025-04-10T11:23:40.698966Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:40.699009Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:40.699018Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:40.699182Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:40.982176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:43.338989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643694015333814:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.338997Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643694015333819:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.339070Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.341827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:23:43.360529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643694015333828:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:23:43.429938Z node 4 :TX_PROXY ERROR: Actor# [4:7491643694015333910:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:44.979529Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643698933727639:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:44.979859Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ced/r3tmp/tmpTXeYad/pdisk_1.dat 2025-04-10T11:23:45.159078Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:45.198526Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:45.198621Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:45.201635Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16547, node 7 2025-04-10T11:23:45.283632Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:45.283671Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:45.283680Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:45.283839Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:45.619307Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:48.293627Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643716113597732:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:48.293717Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:48.362339Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:48.523485Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643716113597902:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:48.523556Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:48.523781Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643716113597907:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:48.528191Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:48.563095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643716113597909:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:48.644047Z node 7 :TX_PROXY ERROR: Actor# [7:7491643716113597984:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:48.712400Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq26xacqqwkzxn8vwnnwa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NGE2MjU1OTAtYWMxZDNiODYtYmMzYTc0NzktMjI5YTM4MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:48.792367Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq27460r7djenc88x7nz5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ODcwNDAzMDEtODg0YmJlMDYtNjU3Y2ZlNzAtODQzY2RhZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:48.865146Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NGE2MjU1OTAtYWMxZDNiODYtYmMzYTc0NzktMjI5YTM4MTA=, ActorId: [7:7491643716113597729:2335], ActorState: ExecuteState, TraceId: 01jrfq2766cghx30tjx37c7mgj, Create QueryResponse for error on request, msg: 2025-04-10T11:23:50.565517Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643726591532174:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:50.568766Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ced/r3tmp/tmp7HC9Y1/pdisk_1.dat 2025-04-10T11:23:50.802128Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27801, node 10 2025-04-10T11:23:50.892164Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:50.892283Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:50.911665Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:50.926794Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:50.926819Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:50.926828Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:50.926998Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22375 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:51.221203Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:51.242573Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:23:51.283810Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jrfq29kkepmgsy8kf9g8qhw8, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47816, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998377s 2025-04-10T11:23:51.295595Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jrfq29kzamzeg79tfwc1m64f, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47824, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:54.140313Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfq2ccw58wf24r2y9j038df, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47832, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:54.141840Z node 10 :TX_PROXY ERROR: [ReadTable [10:7491643743771402453:2336] TxId# 281474976710658] Navigate request failed for table 'Root/NoTable' 2025-04-10T11:23:54.141969Z node 10 :TX_PROXY ERROR: [ReadTable [10:7491643743771402453:2336] TxId# 281474976710658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-04-10T11:23:54.142266Z node 10 :READ_TABLE_API NOTICE: [10:7491643743771402452:2336] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-04-10T11:23:54.146193Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.146482Z node 10 :GRPC_SERVER DEBUG: [0x51a000041480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.146714Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.146909Z node 10 :GRPC_SERVER DEBUG: [0x51a000043880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.147131Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.147368Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f6c80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.147551Z node 10 :GRPC_SERVER DEBUG: [0x51a00009d880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.147721Z node 10 :GRPC_SERVER DEBUG: [0x51a0000af280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.147884Z node 10 :GRPC_SERVER DEBUG: [0x51a0000af880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148060Z node 10 :GRPC_SERVER DEBUG: [0x51a000127e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148235Z node 10 :GRPC_SERVER DEBUG: [0x51a000095a80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148405Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f5480] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148578Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f5a80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148752Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f6080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.148929Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f6680] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.149086Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f4e80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:23:54.149265Z node 10 :GRPC_SERVER DEBUG: [0x51a000096080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-04-10T11:23:33.439156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643649951324208:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:33.439221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf2/r3tmp/tmpK6THC4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22304, node 1 2025-04-10T11:23:33.787568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:33.787930Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-04-10T11:23:33.787956Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:33.831934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:33.831984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:33.832014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:33.832243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:33.834736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:33.834827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:33.844487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:34.110092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:37.612380Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643668083871231:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:37.612475Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf2/r3tmp/tmp65kFOH/pdisk_1.dat 2025-04-10T11:23:37.729377Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:37.762126Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:37.762245Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:37.764766Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16901, node 4 2025-04-10T11:23:37.811192Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:37.811217Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:37.811226Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:37.811357Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:38.112912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:42.012889Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643689632957553:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.012955Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf2/r3tmp/tmpno0EB9/pdisk_1.dat 2025-04-10T11:23:42.163888Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.184308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.184400Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.187552Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8978, node 7 2025-04-10T11:23:42.251095Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.251119Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.251128Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.251279Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:42.480463Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:46.107085Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643706341602527:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:46.107169Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf2/r3tmp/tmpnkQ3Yd/pdisk_1.dat 2025-04-10T11:23:46.235044Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:46.262611Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:46.262697Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:46.267145Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4354, node 10 2025-04-10T11:23:46.323580Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:46.323605Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:46.323613Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:46.323750Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:46.589070Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:50.570865Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643724226088900:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:50.573220Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf2/r3tmp/tmpzLwExc/pdisk_1.dat 2025-04-10T11:23:50.774491Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:50.821793Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:50.821897Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:50.829044Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27720, node 13 2025-04-10T11:23:51.000470Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:51.000504Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:51.000515Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:51.000708Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:51.341835Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
>> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=144284754.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284754.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284754.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284754.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284754.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284754.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284754.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283554.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284754.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284754.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283554.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283554.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283554.000000s;Name=;Codec=}; 2025-04-10T11:22:34.411100Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:34.484855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:34.499597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:34.499823Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:34.506506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:34.506661Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:34.506846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:34.506939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:34.507015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:34.507115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:34.507210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:34.507307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:34.507395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:34.507487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:34.507550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:34.507642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:34.531386Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:34.531741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:34.531808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:34.531979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:34.532141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:34.532231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:34.532273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:34.532367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:34.532425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:34.532467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:34.532496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:34.532644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:34.532730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:34.532773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:34.532807Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:34.532918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:34.532967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:34.533025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:34.533062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:34.533136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:34.533173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:34.533201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:34.533252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-10T11:22:34.533305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:34.533345Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:34.533735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-10T11:22:34.533815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:22:34.533887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-10T11:22:34.533962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-10T11:22:34.534112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:34.534192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:34.534229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:34.534423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:34.534463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:34.534492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... 
p:29;EXECUTE:finishLoadingTime=479; 2025-04-10T11:23:56.302773Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=51394; 2025-04-10T11:23:56.313105Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=10254; 2025-04-10T11:23:56.323726Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=9619; 2025-04-10T11:23:56.323827Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=10633; 2025-04-10T11:23:56.324012Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=115; 2025-04-10T11:23:56.324139Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=73; 2025-04-10T11:23:56.324296Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2025-04-10T11:23:56.324418Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-04-10T11:23:56.336233Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11745; 2025-04-10T11:23:56.352375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16034; 2025-04-10T11:23:56.352511Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-10T11:23:56.352590Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-04-10T11:23:56.352648Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-10T11:23:56.352705Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-10T11:23:56.352766Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-10T11:23:56.352862Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2025-04-10T11:23:56.352919Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-10T11:23:56.353049Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=80; 2025-04-10T11:23:56.353112Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:23:56.353196Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-04-10T11:23:56.353315Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=75; 2025-04-10T11:23:56.353749Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=384; 2025-04-10T11:23:56.353805Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=109759; 2025-04-10T11:23:56.353970Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:23:56.354083Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:23:56.354135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:23:56.354228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:23:56.373104Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:23:56.373272Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:56.373351Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:56.373425Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:56.373491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:56.373536Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:56.373584Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:56.373624Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:56.373744Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:56.374309Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:56.374391Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2617:4491];tablet_id=9437184;parent=[1:2577:4458];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-04-10T11:23:56.375377Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:23:56.375715Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:23:56.375750Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:23:56.375776Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:23:56.375820Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:23:56.375878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:23:56.375935Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-04-10T11:23:56.375998Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:23:56.376048Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:23:56.376110Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:56.376163Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:23:56.376264Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:23:56.377353Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:23:56.378918Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2577:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery >> YdbYqlClient::TestColumnOrder >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> TYqlDecimalTests::DecimalKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf6/r3tmp/tmpcfP21P/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18198, node 1 TClient is connected to server localhost:11352 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:23:36.617675Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643666651507558:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:36.617735Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf6/r3tmp/tmpJ5POhC/pdisk_1.dat 2025-04-10T11:23:36.719758Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:36.743840Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:36.743903Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:36.745818Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7931, node 4 2025-04-10T11:23:36.797829Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:36.797850Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:36.797861Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:36.797996Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:23:36.978893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:39.666809Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643679536410471:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:39.666923Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:39.946520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.088460Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643683831377953:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.088541Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.088624Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643683831377958:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.092458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:40.110995Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643683831377960:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:40.168841Z node 4 :TX_PROXY ERROR: Actor# [4:7491643683831378037:2811] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:40.272036Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1ynqcfrtd91dctvz3wc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGFmMGFmNjctZDU4ZjRjYzQtMThjMmM4NjItYWQ1NTQ3ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:40.317362Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:23:40.402295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.555927Z node 4 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-10T11:23:42.010375Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643691944125428:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.010426Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf6/r3tmp/tmpEf2mJH/pdisk_1.dat 2025-04-10T11:23:42.191732Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.230469Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.230562Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.232770Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5533, node 7 2025-04-10T11:23:42.302803Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.302833Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.302844Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.302994Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:42.617841Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:45.412501Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:47.002884Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643706107896154:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:47.005116Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf6/r3tmp/tmpGFKEna/pdisk_1.dat 2025-04-10T11:23:47.168855Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:47.208773Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:47.208874Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:47.214824Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7604, node 10 2025-04-10T11:23:47.339595Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.339634Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.339650Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.339855Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:47.641296Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:50.762102Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:52.506561Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643734391241547:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:52.506651Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf6/r3tmp/tmp9KJfrZ/pdisk_1.dat 2025-04-10T11:23:52.734541Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12673, node 13 2025-04-10T11:23:52.833002Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.833103Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.884380Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:52.901112Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:52.901143Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:52.901154Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:52.901326Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:53.231961Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.491396Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2025-04-10T11:23:32.849704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643647210742104:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:32.849821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf3/r3tmp/tmpNJAwb7/pdisk_1.dat 2025-04-10T11:23:33.221812Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7354, node 1 2025-04-10T11:23:33.249328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:33.249443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:33.249925Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:33.249957Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:33.275909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:33.281763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:33.281790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:33.281803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:33.282003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:33.514911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-10T11:23:35.528487Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:35.536295Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:36.980582Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643666412933664:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:36.980655Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf3/r3tmp/tmpcwwg1B/pdisk_1.dat 2025-04-10T11:23:37.107452Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:37.128968Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:37.129059Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:37.131124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14716, node 4 2025-04-10T11:23:37.233405Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:37.233429Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:37.233436Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:37.233631Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23745 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:37.480331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:37.551960Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:37.563573Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:41.520511Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643684281857597:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:41.520609Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf3/r3tmp/tmpuzYD1Q/pdisk_1.dat 2025-04-10T11:23:41.630683Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:41.665427Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:41.665525Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:41.669526Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9850, node 7 2025-04-10T11:23:41.720202Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:41.720235Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:41.720243Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:41.720391Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:41.997493Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-10T11:23:44.612405Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:44.625317Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:44.635027Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} 2025-04-10T11:23:44.648058Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:44.665179Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:46.274972Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643707048122098:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:46.275060Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf3/r3tmp/tmpm6b9GU/pdisk_1.dat 2025-04-10T11:23:46.416715Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:46.451623Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:46.451717Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:46.454746Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30951, node 10 2025-04-10T11:23:46.500753Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:46.500778Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-10T11:23:46.500787Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:46.500942Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:23:46.670794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-10T11:23:49.606631Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:49.617785Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:49.627529Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:49.640893Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:49.650289Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-10T11:23:51.366901Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643729018828329:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:51.367070Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf3/r3tmp/tmpkxDnUI/pdisk_1.dat 2025-04-10T11:23:51.520239Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:51.552734Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:51.552834Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:51.555958Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 25330, node 13 2025-04-10T11:23:51.639971Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:51.639993Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:51.640001Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:51.640158Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:51.931863Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.364609Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643729018828329:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:56.364686Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> YdbImport::Simple [GOOD] >> YdbImport::EmptyData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-04-10T11:20:36.582707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:603:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:36.583075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:36.583180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c68/r3tmp/tmpeUB8iQ/pdisk_1.dat 2025-04-10T11:20:36.975080Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14172, node 1 2025-04-10T11:20:37.213520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:37.213586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:37.213616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:37.214357Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:37.216686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:37.311999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:37.312148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:37.326578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16158 2025-04-10T11:20:37.895498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:41.354275Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-04-10T11:20:41.389985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:41.390112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:41.422891Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:20:41.425416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:41.695873Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.696616Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.697436Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.697682Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.697813Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.698118Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.698283Z node 4 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.698388Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.698505Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:41.871329Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:41.871452Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:41.885096Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:42.049896Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:42.086380Z node 4 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:42.086477Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:42.179232Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:42.179473Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:42.179726Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:42.179803Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:42.179863Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:42.179937Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:42.179993Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:42.180051Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:42.180517Z node 4 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:42.204275Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:42.204407Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2030:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:42.214003Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2044:2608] 2025-04-10T11:20:42.221734Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2074:2624] 2025-04-10T11:20:42.221841Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2074:2624], schemeshard id = 72075186224037897 2025-04-10T11:20:42.227250Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-10T11:20:42.267223Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:42.267306Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:42.267378Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-10T11:20:42.286087Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:42.301747Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:42.301878Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:42.539617Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:42.743789Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:42.821991Z node 4 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:43.701016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:47.270131Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-10T11:20:47.360019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:47.360151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:47.360367Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:47.360432Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:47.400022Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:47.401623Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:47.402244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:47.404037Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:47.519296Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:47.698261Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-10T11:20:47.698334Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:47.698452Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3088:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-10T11:20:47.701078Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:3091:2951] 2025-04-10T11:20:47.701324Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3091:2951], schemeshard id = 72075186224037899 2025-04-10T11:20:48.812406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:20:52.267584Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:52.312962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:52.313083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:52.313476Z node 4 :HIVE WARN: HIVE#72075186224037888 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:52.313549Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:52.339937Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:52.341309Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:52.342459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:52.344698Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:52.431732Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:52.712345Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-10T11:20:52.712436Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-10T11:20:52.712559Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3934:3159], at schemeshard: 72075186224037905, StatisticsAggregatorId: 720751862 ... DNmZg==, TxId: 2025-04-10T11:23:49.516051Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:49.533165Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-10T11:23:49.533250Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:49.587738Z node 4 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:23:49.587856Z node 4 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:23:49.657772Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11496:7102], schemeshard count = 1 2025-04-10T11:23:51.502083Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:23:51.502189Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 192.000000s, at schemeshard: 72075186224037899 2025-04-10T11:23:51.502667Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-10T11:23:51.518805Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:52.581479Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:52.581553Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:52.581595Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-04-10T11:23:52.581656Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:52.586759Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:23:52.608716Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:23:52.609776Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:23:52.609906Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:23:52.612592Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:23:52.667892Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:23:52.668236Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-04-10T11:23:52.669189Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11660:7195], server id = [4:11661:7196], tablet id = 72075186224037911, status = OK 2025-04-10T11:23:52.669630Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11660:7195], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-10T11:23:52.674938Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-10T11:23:52.675060Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-10T11:23:52.675406Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:23:52.675632Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:23:52.675957Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-10T11:23:52.678440Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11660:7195], server id = [4:11661:7196], tablet id = 72075186224037911 2025-04-10T11:23:52.678492Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:52.679122Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:23:52.719203Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11681:7215]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:52.719593Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:23:52.719650Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11681:7215], StatRequests.size() = 1 2025-04-10T11:23:52.854749Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OTk1YzM0YzctZWRhOTM0ZjctM2Y3MzRhZjctOGI3OWFhMTI=, TxId: 2025-04-10T11:23:52.854821Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTk1YzM0YzctZWRhOTM0ZjctM2Y3MzRhZjctOGI3OWFhMTI=, TxId: 2025-04-10T11:23:52.855572Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:52.872223Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:23:52.872289Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:23:53.484735Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-10T11:23:53.484844Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-10T11:23:53.964081Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-10T11:23:53.964161Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 193.000000s, at schemeshard: 72075186224037905 2025-04-10T11:23:53.964458Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-04-10T11:23:53.984541Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:23:55.918894Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:23:55.918977Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:23:55.919022Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-04-10T11:23:55.919062Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-10T11:23:55.923649Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:23:55.943735Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:23:55.944262Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:23:55.944329Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:23:55.945029Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-10T11:23:55.964061Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:23:55.964307Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-04-10T11:23:55.965100Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11824:7289], server id = [4:11825:7290], tablet id = 72075186224037912, status = OK 2025-04-10T11:23:55.965201Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11824:7289], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-04-10T11:23:55.969249Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-10T11:23:55.969369Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-10T11:23:55.969635Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:23:55.969809Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:23:55.970071Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-10T11:23:55.972503Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11824:7289], server id = [4:11825:7290], tablet id = 72075186224037912 2025-04-10T11:23:55.972540Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:23:55.973334Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:23:56.011961Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZWE0MTQ1MmUtYmUzNmJjNDktNmZlMDc1MjktN2QxOGFjMzA=, TxId: 2025-04-10T11:23:56.012028Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZWE0MTQ1MmUtYmUzNmJjNDktNmZlMDc1MjktN2QxOGFjMzA=, TxId: 2025-04-10T11:23:56.012449Z node 4 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 4, interval end# 1970-01-01T00:02:05.000000Z, event interval end# 2025-04-10T11:23:54.000000Z 2025-04-10T11:23:56.013618Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:23:56.015070Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11855:5965]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:56.015424Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:23:56.015504Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:23:56.018940Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:23:56.019006Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:23:56.019065Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:23:56.035392Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-10T11:23:56.036747Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:11855:5965]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-10T11:23:56.037104Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:23:56.037168Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-10T11:23:56.037590Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:23:56.037677Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-10T11:23:56.037741Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-10T11:23:56.042452Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TTableProfileTests::UseDefaultProfile >> 
YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-04-10T11:23:28.994616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643628369821322:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:28.994715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf8/r3tmp/tmpc3N7bm/pdisk_1.dat 2025-04-10T11:23:29.341679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:29.383638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:29.383773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61917, node 1 2025-04-10T11:23:29.412544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:29.430027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:29.430050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:29.430059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:29.430271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:29.734486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:31.842719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:31.994010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643641254724387:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:31.994417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643641254724378:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:31.994531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:31.998508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:32.019276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643641254724392:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:32.084637Z node 1 :TX_PROXY ERROR: Actor# [1:7491643645549691763:2803] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:32.410643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq1prqeyhyaf2kkfkbwks8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYmMyYmMtMmI3N2M4YTItNzU4M2JlN2QtNmJhNjZlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:32.578812Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq1q6gep92qmebc4ne4gv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYmMyYmMtMmI3N2M4YTItNzU4M2JlN2QtNmJhNjZlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:32.685644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq1qb8cpvzfmeetth2fz65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYmMyYmMtMmI3N2M4YTItNzU4M2JlN2QtNmJhNjZlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:32.784943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfq1qek6tj144a3x05tq090, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYmMyYmMtMmI3N2M4YTItNzU4M2JlN2QtNmJhNjZlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:32.886236Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq1qhpdfh8bd39ndj8pass, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYmMyYmMtMmI3N2M4YTItNzU4M2JlN2QtNmJhNjZlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:23:34.333911Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643655270842915:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:34.334060Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf8/r3tmp/tmps3aKg8/pdisk_1.dat 2025-04-10T11:23:34.428849Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:34.453527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:34.453617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:34.455966Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18494, node 4 2025-04-10T11:23:34.505020Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:34.505042Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:34.505050Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:34.505204Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:34.688775Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:36.956346Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:37.045105Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643668155745971:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.045126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643668155745979:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.045195Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.048473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:37.067892Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643668155745985:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:37.151144Z node 4 :TX_PROXY ERROR: Actor# [4:7491643668155746058:2805] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:37.218710Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq1vpk1wknwzwybv7cstpx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjRiNjRiOTUtMzk4ZjFhOGUtZjkzMjIyZmUtNTM3NzkyYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:37.329947Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq1vwa3qx09a4k1qrxx9ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjRiNjRiOTUtMzk4ZjFhOGUtZjkzMjIyZmUtNTM3NzkyYWU=, CurrentExecutionId: , CustomerSupplied ... 6715666. Ctx: { TraceId: 01jrfq27fyfh35kpv4hbzj68ga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:49.539413Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfq27fyfh35kpv4hbzj68ga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:49.698697Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491643697708293728:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:49.698768Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:23:49.704479Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfq27xt2v3qyazpaekxb63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:49.815280Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfq282f914yxp82e96wbcwp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:49.908968Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfq285y6axv7yr7g9d9fqwk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:50.015295Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfq288t4wx7234ex7a7w0t9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:50.130734Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715672. 
Ctx: { TraceId: 01jrfq28c43k2b9f9bbww6da24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:50.445174Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfq28fscyjxznea18rw0gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:50.450412Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfq28fscyjxznea18rw0gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjA1NzZlYTEtODdmN2QxODktYTcxYTY2YzEtMTgwNGNhNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:52.194861Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643735545579062:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:52.195011Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf8/r3tmp/tmpyQgWIQ/pdisk_1.dat 2025-04-10T11:23:52.362736Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:52.403137Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.403229Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.406848Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2983, node 13 2025-04-10T11:23:52.498817Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:52.498839Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:52.498846Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:52.498966Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:23:52.819683Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:55.643549Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:55.757500Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643748430482123:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:55.757501Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643748430482115:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:55.757573Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:55.761065Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:55.778752Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643748430482129:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:55.848123Z node 13 :TX_PROXY ERROR: Actor# [13:7491643748430482200:2780] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:55.959259Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2dzb60zekdamtrtr9feh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.112192Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq2e661x4vmng2kjxmdv4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.295653Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq2eap3tmvgtepytj5ckpe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.424577Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq2egc6qckp007as6fmf15, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.557173Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfq2emd82zkj7qtnmzfhnr7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.683848Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfq2erjc0wy5wnv0cwxhms9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:56.818175Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfq2ewm0sq4h69ytebh2fv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:57.055683Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfq2f0pcdzm7emrgjae3acr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:23:57.195271Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643735545579062:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:57.195322Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:23:57.331385Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfq2f8a677w9h075h41rbv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:57.700949Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfq2fgt1sgffg2hbjqvfw49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2IyYmI1NmYtZTMxZDBhODctYmQxMjE2ZjktMTg3NjE5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::WrongTableProfile >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-04-10T11:23:39.062275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643677804242490:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:39.062387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb4/r3tmp/tmpAGJYqR/pdisk_1.dat 2025-04-10T11:23:39.474094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:39.496662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:39.496879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:39.500649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64456, node 1 2025-04-10T11:23:39.550909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:39.550953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:39.550965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:39.551074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24127 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:39.816781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:41.892216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643686394178098:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:41.892384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:42.171980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:42.302215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643690689145564:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:42.302342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643690689145569:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:42.302352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:42.305573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:42.322348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643690689145571:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:42.410297Z node 1 :TX_PROXY ERROR: Actor# [1:7491643690689145646:2804] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:42.474388Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643690689145664:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:23:42.474744Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzY1M2E2MTAtZGExMWVlODgtODkxZmFjZmYtYmZhMmFiMGI=, ActorId: [1:7491643686394178095:2334], ActorState: ExecuteState, TraceId: 01jrfq20tx1rgmhjz5d5f2c326, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:23:44.135247Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643700779349196:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:44.135393Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb4/r3tmp/tmpl4mLsq/pdisk_1.dat 2025-04-10T11:23:44.320766Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:44.359862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.359932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:44.368018Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24179, node 4 2025-04-10T11:23:44.482909Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:44.482945Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:44.482960Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:44.483141Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:44.735732Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:47.406732Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643713664252142:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:47.406852Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:49.053313Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643718771899799:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:49.053463Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb4/r3tmp/tmpZZfSeS/pdisk_1.dat 2025-04-10T11:23:49.259608Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:49.311607Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:49.311717Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:49.315622Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9803, node 7 2025-04-10T11:23:49.504932Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:49.504956Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:49.504964Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:49.505143Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:49.782281Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:52.574337Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643731656802738:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:52.574460Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:52.594016Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:52.697002Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643731656802905:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:52.697083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:52.697419Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643731656802910:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:52.701230Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:52.728305Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643731656802912:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:52.823743Z node 7 :TX_PROXY ERROR: Actor# [7:7491643731656802985:2817] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:52.894865Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq2azr5yqz9aq3dgnbvbtv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGEyMTExMmItMmYzOGRiYTUtMTMyYjMzZWUtNWYwMmFiNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:52.955281Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7491643731656803031:2362], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:23:52.956629Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZGEyMTExMmItMmYzOGRiYTUtMTMyYjMzZWUtNWYwMmFiNzY=, ActorId: [7:7491643731656802720:2335], ActorState: ExecuteState, TraceId: 01jrfq2b6mepnata1j53c7dm2k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:23:53.021145Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq2b860z4amn99j2etm8ad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGEyMTExMmItMmYzOGRiYTUtMTMyYjMzZWUtNWYwMmFiNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:53.171741Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq2bag1y53zv1a8s7kns6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGEyMTExMmItMmYzOGRiYTUtMTMyYjMzZWUtNWYwMmFiNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:54.864757Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643741823019427:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:54.864861Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb4/r3tmp/tmpo54Gvq/pdisk_1.dat 2025-04-10T11:23:55.024997Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:55.059134Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:55.059197Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:55.061988Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25002, node 10 2025-04-10T11:23:55.169920Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:55.169942Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:55.169949Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:55.170063Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1760 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:55.497130Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:58.334558Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643759002889637:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:58.334661Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:58.355029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:58.456509Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643759002889799:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:58.456573Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:58.456656Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643759002889804:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:58.459435Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:58.475606Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643759002889806:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:58.551880Z node 10 :TX_PROXY ERROR: Actor# [10:7491643759002889882:2787] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:58.566424Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491643759002889900:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:23:58.566632Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MjA5NTE3MmQtOGUwMmNlN2UtZjA2MzI4NDYtMmZiYjM1Nzg=, ActorId: [10:7491643759002889634:2335], ActorState: ExecuteState, TraceId: 01jrfq2gkqbgjejhwwh65y5tdt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:23:58.589694Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491643759002889918:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:23:58.591460Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MjA5NTE3MmQtOGUwMmNlN2UtZjA2MzI4NDYtMmZiYjM1Nzg=, ActorId: [10:7491643759002889634:2335], ActorState: ExecuteState, TraceId: 01jrfq2gqfbfrdypnnjyhfvfjh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284758.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284758.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283558.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-10T11:22:39.707747Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:39.788181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:39.812014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:39.812327Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:39.820690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:39.820908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:39.821155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:39.821278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:39.821415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:39.821549Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:39.821658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:39.821786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:39.821899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:39.822029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.822138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:39.822257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:39.849830Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:39.850197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:39.850257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:39.850436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.850594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:39.850678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:39.850720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:39.850816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:39.850878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:39.850922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:39.850955Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:39.851123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.851199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:39.851239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:39.851271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:39.851385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:39.851442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:39.851487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:39.851518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:39.851615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:39.851667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:39.851697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:39.851751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:39.851795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:39.851828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:39.852255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-10T11:22:39.852352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-10T11:22:39.852434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-10T11:22:39.852515Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-10T11:22:39.852706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:39.852791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:39.852833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:39.853039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:39.853086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.853124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.853266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:39.853326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:39.853358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:39.853560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:1:8688:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:35:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:24:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:81:8336:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:5:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:29:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:14:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:97:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:89:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:61:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:63:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:80:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:56:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:13:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:478:9640:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve 
host name;storage_id=/cold;blob=[9437184:7:1:255:32:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:59:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:74:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:30:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:94:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:4:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:44:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:33:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:28:8712:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:93:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:41:2848:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:88:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:6:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:47:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:50:8536:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:69:2768:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:87:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:96:8328:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:79:8408:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:45:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:27:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:64:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:20:2840:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:58:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:49:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:72:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:18:8592:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:84:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:66:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:98:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:3:8680:0]; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:86:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:70:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:77:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:11:8672:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:36:8624:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-04-10T11:23:39.863055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643677809141037:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:39.863098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ca2/r3tmp/tmpde5bBr/pdisk_1.dat 2025-04-10T11:23:40.204023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:40.258545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:40.258724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:40.261043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20010, node 1 2025-04-10T11:23:40.323468Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:40.323493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:40.323521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:40.323637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23381 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:40.639468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:40.711034Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jrfq1z951z802xb43eqg4fmg, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46076, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.997142s 2025-04-10T11:23:40.731019Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jrfq1z9hbm3vz18y7f301jn3, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46084, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:42.763097Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jrfq219adkdbgdkw5f6xz7qb, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46090, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:42.763870Z node 1 :TX_PROXY DEBUG: actor# [1:7491643677809141267:2141] Handle TEvProposeTransaction 2025-04-10T11:23:42.763912Z node 1 :TX_PROXY DEBUG: actor# [1:7491643677809141267:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:23:42.764011Z node 1 :TX_PROXY DEBUG: actor# [1:7491643677809141267:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491643690694043948:2629] 2025-04-10T11:23:42.815979Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:46090" 2025-04-10T11:23:42.816031Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:23:42.816502Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:23:42.816584Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 
281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:23:42.816806Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:23:42.816957Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:23:42.817023Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:23:42.817197Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:23:42.819928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:42.825182Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-10T11:23:42.825311Z node 1 :TX_PROXY DEBUG: Actor# [1:7491643690694043948:2629] txid# 281474976710658 SEND to# [1:7491643690694043947:2335] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-10T11:23:42.826263Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:42.826408Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:42.826850Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:42.826911Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:42.889729Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044024:2700], Recipient [1:7491643690694044181:2350]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.889746Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044030:2706], Recipient [1:7491643690694044169:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.890707Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044029:2705], Recipient [1:7491643690694044162:2339]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.890728Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044033:2709], Recipient [1:7491643690694044174:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.891505Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044034:2710], Recipient [1:7491643690694044177:2346]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.891520Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044036:2712], Recipient [1:7491643690694044173:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.892200Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044023:2699], Recipient [1:7491643690694044179:2348]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.892217Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender 
[1:7491643690694044032:2708], Recipient [1:7491643690694044185:2354]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.892806Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044031:2707], Recipient [1:7491643690694044182:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.892830Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044038:2714], Recipient [1:7491643690694044163:2340]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.893201Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044025:2701], Recipient [1:7491643690694044180:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.893348Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044026:2702], Recipient [1:7491643690694044183:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.893614Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044028:2704], Recipient [1:7491643690694044184:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.893786Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044037:2713], Recipient [1:7491643690694044178:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.894211Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044027:2703], Recipient [1:7491643690694044168:2342]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.894317Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7491643690694044035:2711], Recipient [1:7491643690694044167:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:23:42.894609Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643690694044027:2703], Recipient [1:7491643690694044168:2342]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:42.894935Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643690694044035:2711], Recipient [1:7491643690694044167:2341]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:42.895115Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037903 actor [1:7491643690694044168:2342] 2025-04-10T11:23:42.895257Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:7491643690694044167:2341] 2025-04-10T11:23:42.895379Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:42.895444Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:42.905501Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643690694044037:2713], Recipient [1:7491643690694044178:2347]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:42.905937Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7491643690694044178:2347] 2025-04-10T11:23:42.905939Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7491643690694044029:2705], Recipient [1:7491643690694044162:2339]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:23:42.906138Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:2 ... 
435072, Sender [10:7491643757549164847:2339], Recipient [10:7491643757549164847:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:23:59.696930Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:23:59.696969Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-10T11:23:59.696984Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:23:59.697003Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for WaitForStreamClearance 2025-04-10T11:23:59.697015Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit WaitForStreamClearance 2025-04-10T11:23:59.697034Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-04-10T11:23:59.697046Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-10T11:23:59.697060Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-04-10T11:23:59.697071Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-04-10T11:23:59.697082Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-10T11:23:59.697269Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-04-10T11:23:59.697296Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:23:59.697307Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-10T11:23:59.697318Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-10T11:23:59.697330Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-10T11:23:59.697360Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-10T11:23:59.697597Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7491643761844133187:2140], Recipient [10:7491643757549164847:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T11:23:59.697617Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T11:23:59.697645Z node 10 :READ_TABLE_API DEBUG: [10:7491643761844133167:2401] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-04-10T11:23:59.697679Z node 10 :READ_TABLE_API DEBUG: [10:7491643761844133167:2401] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-04-10T11:23:59.697824Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-04-10T11:23:59.698063Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-04-10T11:23:59.698113Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-04-10T11:23:59.698138Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, 
MessageQuota: 4 2025-04-10T11:23:59.698246Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-10T11:23:59.698260Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-04-10T11:23:59.698272Z node 10 :READ_TABLE_API DEBUG: [10:7491643761844133167:2401] got stream part, size: 75, RU required: 128 rate limiter absent 2025-04-10T11:23:59.698363Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7491643761844133168:2401], Recipient [10:7491643757549164847:2339]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-04-10T11:23:59.698390Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-10T11:23:59.698402Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-04-10T11:23:59.698430Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-04-10T11:23:59.698519Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7491643761844133168:2401], Recipient [10:7491643757549164847:2339]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-04-10T11:23:59.698538Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-10T11:23:59.698613Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7491643761844133168:2401], Recipient [10:7491643757549164847:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1744284239740 TxId: 281474976715680 2025-04-10T11:23:59.698707Z node 10 :READ_TABLE_API DEBUG: [10:7491643761844133167:2401] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T11:23:59.698789Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7491643757549164847:2339], Recipient [10:7491643757549164847:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:23:59.698810Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:23:59.698834Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-10T11:23:59.698851Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:23:59.698878Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-04-10T11:23:59.698894Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-10T11:23:59.698911Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-04-10T11:23:59.698933Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-10T11:23:59.698952Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-04-10T11:23:59.698963Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-04-10T11:23:59.698972Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-10T11:23:59.699008Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 
72075186224037897 is DelayCompleteNoMoreRestarts 2025-04-10T11:23:59.699039Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-04-10T11:23:59.699050Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-04-10T11:23:59.699058Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-04-10T11:23:59.699105Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-10T11:23:59.699120Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-04-10T11:23:59.699130Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-04-10T11:23:59.699141Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:23:59.699150Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-10T11:23:59.699160Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-10T11:23:59.699169Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-10T11:23:59.700608Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-10T11:23:59.700649Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-10T11:23:59.700670Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 2 ms, propose latency: 4 ms, status: COMPLETE 2025-04-10T11:23:59.700716Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-10T11:23:59.701954Z node 10 :READ_TABLE_API NOTICE: [10:7491643761844133167:2401] Finish grpc stream, status: 400000 2025-04-10T11:23:59.705063Z node 10 :GRPC_SERVER DEBUG: [0x51a00006ea80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705074Z node 10 :GRPC_SERVER DEBUG: [0x51a000083480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705374Z node 10 :GRPC_SERVER DEBUG: [0x51a000086a80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705373Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ed080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705572Z node 10 :GRPC_SERVER DEBUG: [0x51a000021c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705596Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e8880] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705763Z node 10 :GRPC_SERVER DEBUG: [0x51a00004a480] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705775Z node 10 :GRPC_SERVER DEBUG: [0x51a000083a80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705937Z node 10 :GRPC_SERVER DEBUG: [0x51a000109280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.705950Z node 10 :GRPC_SERVER DEBUG: 
[0x51a000100880] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706108Z node 10 :GRPC_SERVER DEBUG: [0x51a000000080] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706116Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f1280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706341Z node 10 :GRPC_SERVER DEBUG: [0x51a000078680] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706341Z node 10 :GRPC_SERVER DEBUG: [0x51a0001a2e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706517Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ed680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706520Z node 10 :GRPC_SERVER DEBUG: [0x51a000119a80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:59.706688Z node 10 :GRPC_SERVER DEBUG: [0x51a000126c80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> TGRpcAuthentication::InvalidPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] Test command err: 2025-04-10T11:23:38.836571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643672874811530:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:38.836649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cc5/r3tmp/tmpm1oR2g/pdisk_1.dat 2025-04-10T11:23:39.228823Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24385, node 1 2025-04-10T11:23:39.248390Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:39.248496Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:39.267345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:39.268828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:39.273303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:39.306246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:39.306271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:39.306278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:39.306484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:39.549494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:42.905370Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643690660070962:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.905442Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cc5/r3tmp/tmpdQjazc/pdisk_1.dat 2025-04-10T11:23:43.018119Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:43.052843Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:43.052944Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:43.056952Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16393, node 4 2025-04-10T11:23:43.111991Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:43.112021Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:43.112029Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:43.112174Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:43.321066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:47.289361Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643713036513989:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:47.294252Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cc5/r3tmp/tmprVr27Z/pdisk_1.dat 2025-04-10T11:23:47.496289Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:47.528250Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:47.528340Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:47.531900Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22577, node 7 2025-04-10T11:23:47.609008Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.609039Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.609047Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.609184Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30362 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:47.899749Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:51.904107Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643729637058224:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:51.904209Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cc5/r3tmp/tmpOqWXS4/pdisk_1.dat 2025-04-10T11:23:52.048793Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:52.070965Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.071062Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.077624Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4896, node 10 2025-04-10T11:23:52.166359Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:52.166384Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:52.166393Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:52.166543Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:52.381255Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.549309Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643751943621105:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:56.549375Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cc5/r3tmp/tmp1SzWP8/pdisk_1.dat 2025-04-10T11:23:56.724767Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:56.765782Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:56.765896Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:56.770545Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4705, node 13 2025-04-10T11:23:56.843236Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:56.843264Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:56.843272Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:56.843411Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17352 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:57.120107Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2025-04-10T11:23:37.897697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643670857751038:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:37.897837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cd3/r3tmp/tmpNqzS6M/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28834, node 1 2025-04-10T11:23:38.239477Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:38.241466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:38.241504Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:38.272046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:38.272168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:38.274790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:38.274842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:38.274857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:38.274976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:38.277166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:38.565685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:40.690127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643683742653951:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.690135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643683742653942:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.690213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.693646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:23:40.708039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643683742653956:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:23:40.803360Z node 1 :TX_PROXY ERROR: Actor# [1:7491643683742654029:2679] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:42.584787Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643692559179705:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.586401Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cd3/r3tmp/tmpLYyOJM/pdisk_1.dat 2025-04-10T11:23:42.748014Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.785791Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.785954Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.792075Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19840, node 4 2025-04-10T11:23:42.874182Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.874207Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.874214Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.874392Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:43.129611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:45.591973Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643705444082642:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.592055Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.628449Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:45.768964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643705444082810:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.769035Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643705444082815:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.769060Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.772787Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:45.792322Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643705444082817:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:45.851410Z node 4 :TX_PROXY ERROR: Actor# [4:7491643705444082890:2802] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:47.458191Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643714093908602:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:47.485715Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cd3/r3tmp/tmpcSWHgS/pdisk_1.dat 2025-04-10T11:23:47.681262Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62097, node 7 2025-04-10T11:23:47.781308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:47.781399Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:47.794446Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:47.800610Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.800629Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.800637Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.800769Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:48.086531Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:48.170316Z node 7 :TX_PROXY ERROR: Actor# [7:7491643718388876731:2596] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-04-10T11:23:48.170496Z node 7 :TX_PROXY ERROR: Actor# [7:7491643718388876731:2596] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-10T11:23:52.226862Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643732209016179:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:52.227035Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cd3/r3tmp/tmpQfXMcy/pdisk_1.dat 2025-04-10T11:23:52.401613Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:52.442296Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.442395Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.445055Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29183, node 10 2025-04-10T11:23:52.568720Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:52.568746Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:52.568754Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:52.568871Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:52.850038Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:52.920540Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:53.031120Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:53.064190Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:23:53.100388Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:23:57.239370Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643754287554145:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:57.239439Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cd3/r3tmp/tmprnaWnF/pdisk_1.dat 2025-04-10T11:23:57.435577Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:57.473010Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:57.473122Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:57.476283Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21501, node 13 2025-04-10T11:23:57.606041Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:57.606067Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:57.606077Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:57.606258Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:23:57.917272Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:58.031425Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-04-10T11:23:39.274050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643679197160107:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:39.275532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb3/r3tmp/tmphpqu5k/pdisk_1.dat 2025-04-10T11:23:39.674675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:39.718837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:39.718951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14436, node 1 2025-04-10T11:23:39.723944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:39.788030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:39.788053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:39.788062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:39.788178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:40.042547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:40.117935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.204912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.274020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.289032Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:23:40.316103Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-10T11:23:43.576689Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643693975095374:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:43.576777Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb3/r3tmp/tmpQJdIr6/pdisk_1.dat 2025-04-10T11:23:43.779037Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25400, node 4 2025-04-10T11:23:43.876480Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:43.876499Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:43.876504Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:43.876674Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:43.910575Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:43.910666Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:43.913774Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:23:44.130654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:48.188081Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643717255026468:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:48.188128Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb3/r3tmp/tmpFWPWgc/pdisk_1.dat 2025-04-10T11:23:48.340022Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:48.362535Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:48.362607Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:48.368060Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2662, node 7 2025-04-10T11:23:48.445999Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:48.446021Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:48.446029Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:48.446189Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:48.708250Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:48.794273Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:52.805343Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643733098208799:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:52.805427Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb3/r3tmp/tmpGHUWTd/pdisk_1.dat 2025-04-10T11:23:52.939598Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:52.975731Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.975819Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.978245Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30532, node 10 2025-04-10T11:23:53.072402Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:53.072426Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:53.072435Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:53.072592Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:53.332012Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:53.404523Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:57.425826Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643755162592100:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:57.425891Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cb3/r3tmp/tmpEkp9hn/pdisk_1.dat 2025-04-10T11:23:57.719516Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:57.762453Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:57.762545Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:57.765619Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64938, node 13 2025-04-10T11:23:57.828880Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:57.828908Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:57.828918Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:57.829077Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:58.135518Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:58.202646Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] >> YdbImport::EmptyData [GOOD] >> YdbImport::ImportFromS3ToExistingTable >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> TGRpcYdbTest::KeepAlive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-04-10T11:23:34.976885Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643656855563015:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:34.977426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cee/r3tmp/tmp93Gdfe/pdisk_1.dat 2025-04-10T11:23:35.316024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26549, node 1 2025-04-10T11:23:35.373044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:35.373235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:35.387535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:35.401325Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:35.401349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:35.401369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:35.401491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:35.684396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:37.748765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T11:23:37.897887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643669740466097:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.897896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643669740466102:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.897980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:37.901370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:37.921742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643669740466111:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:37.998860Z node 1 :TX_PROXY ERROR: Actor# [1:7491643669740466189:2818] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:38.310366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq1wh58j8p92yb0km1vcwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJhYmQ2M2QtNjQ5ZTNmNzYtNDdhNjNlZGYtZTUzYWJlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:38.321993Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284218355, txId: 281474976710661] shutting down 2025-04-10T11:23:38.354728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:23:38.356587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 BAD_REQUEST 2025-04-10T11:23:38.454573Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T11:23:38.456490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-10T11:23:38.542234Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T11:23:40.142251Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643683297790430:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.142424Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cee/r3tmp/tmp8wVOhm/pdisk_1.dat 2025-04-10T11:23:40.302668Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:40.342804Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:40.342935Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:40.346812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18793, node 4 2025-04-10T11:23:40.461069Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:40.461100Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:40.461120Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:40.461315Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10066 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:40.735256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:43.276171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:43.295961Z node 4 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [4:7491643696182693391:2339] 2025-04-10T11:23:43.296166Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:43.307079Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:43.307147Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:23:43.309013Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:23:43.309064Z node 4 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:23:43.309117Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:23:43.309498Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:23:43.309565Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:23:43.309593Z node 4 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [4:7491643696182693416:2339] in generation 1 2025-04-10T11:23:43.311808Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:23:43.311852Z node 4 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:23:43.311949Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:23:43.311993Z node 4 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [4:7491643696182693419:2340] 2025-04-10T11:23:43.312008Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:23:43.312021Z node 4 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:23:43.312045Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:23:43.312193Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:23:43.312278Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037888 2025-04-10T11:23:43.312312Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:23:43.312330Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:23:43.312356Z node 4 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:23:43.312381Z n ... nitialize from file: (empty maybe) 2025-04-10T11:23:51.562078Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:51.562291Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:51.871951Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:54.648060Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643744000996266:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:54.648171Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:54.687412Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:54.800429Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643744000996432:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:54.800520Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:54.800578Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643744000996437:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:54.804568Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:54.828216Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643744000996439:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:54.890281Z node 10 :TX_PROXY ERROR: Actor# [10:7491643744000996510:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:54.974334Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq2d1f4v1mnqc6hqxb1dzy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODdjZTk2ZTMtNGMxZjk2NDktNGIxNzY1MzItNWUyNGU5NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:55.049055Z node 10 :TX_PROXY WARN: [AlterTableAddIndex [10:7491643748295963859:2369] TxId# 281474976710663] Access check failed 2025-04-10T11:23:55.079690Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:23:55.181652Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T11:23:55.273988Z node 10 :TX_PROXY ERROR: [AlterTableAddIndex [10:7491643748295964230:2383] TxId# 281474976710665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-04-10T11:23:55.377648Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-10T11:23:57.210993Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643755188796058:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:57.211072Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cee/r3tmp/tmpAkyxw2/pdisk_1.dat 2025-04-10T11:23:57.355713Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:57.394256Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:57.394362Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:57.397425Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18100, node 13 2025-04-10T11:23:57.534943Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:57.534970Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:57.534979Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:57.535137Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22871 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:57.863755Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:00.866864Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643768073698983:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:00.866980Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:00.886042Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:01.006348Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643772368666447:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.006450Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.006742Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643772368666452:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.010773Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:01.035634Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643772368666454:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:01.114954Z node 13 :TX_PROXY ERROR: Actor# [13:7491643772368666525:2797] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:01.187928Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2k3d43dm5wm9nanjbbeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWY3ZWYxMDEtYTU4ZDVkZmUtZTQzYjQ4MDQtNDUwOTZlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:01.245669Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:24:01.332948Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:24:01.503477Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 >> YdbYqlClient::RenameTables [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-04-10T11:23:36.749864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643664257228696:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:36.750036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmp2i0LXq/pdisk_1.dat 2025-04-10T11:23:37.137338Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12276, node 1 2025-04-10T11:23:37.182884Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:37.182920Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:37.194952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:37.195031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:37.204686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:37.216479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:37.216497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:37.216502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:37.216604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:5754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:37.525187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:37.667761Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:55522) has now valid token of root@builtin 2025-04-10T11:23:37.723212Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:37.723257Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:37.723278Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:37.723327Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:40.438462Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643680210573784:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.438547Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmpkBg6fK/pdisk_1.dat 2025-04-10T11:23:40.575958Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:40.603569Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:40.603655Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:40.606792Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10282, node 4 2025-04-10T11:23:40.702782Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:40.702806Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:40.702814Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:40.702941Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13341 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:40.907809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:41.103063Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:56432) has now valid token of root@builtin 2025-04-10T11:23:41.158444Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:41.158478Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:41.158488Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:41.158525Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:44.644857Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643698564930080:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:44.646328Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmp94jz1T/pdisk_1.dat 2025-04-10T11:23:44.838487Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:44.859687Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.859780Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:44.862179Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9871, node 7 2025-04-10T11:23:44.972369Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:44.972393Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:44.972402Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:44.972565Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17574 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:45.237895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744291424819874 Nodes { NodeId: 1024 Host: "localhost" Port: 27322 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744291424819874 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-10T11:23:49.277921Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643722578068267:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:49.278069Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmpFoMw7F/pdisk_1.dat 2025-04-10T11:23:49.423361Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:49.472144Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:49.472249Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:49.476993Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1911, node 10 2025-04-10T11:23:49.583715Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:49.583739Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:49.583749Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:49.584003Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:10236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:49.878041Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. Access denied" } 2025-04-10T11:23:53.935197Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643738269247967:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:53.935278Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmpLw5vk6/pdisk_1.dat 2025-04-10T11:23:54.100642Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:54.140622Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:54.140722Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:54.143663Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13536, node 13 2025-04-10T11:23:54.223476Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:54.223497Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:54.223512Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:54.223666Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:54.506021Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1744291434095774 Nodes { NodeId: 1024 Host: "localhost" Port: 3181 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1744291434095774 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-10T11:23:59.014708Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7491643763428271430:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:59.015968Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce4/r3tmp/tmpiW3Z2E/pdisk_1.dat 2025-04-10T11:23:59.222322Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:59.275407Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.275525Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:59.284219Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63467, node 16 2025-04-10T11:23:59.396101Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:59.396132Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:59.396145Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:59.396354Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:12903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:59.725169Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-04-10T11:23:59.948129Z node 16 :TICKET_PARSER ERROR: Ticket 66B87E1A513E7B6CDBC776BBA2A80AD43CB3EB586D072AEBC698971EDF2EFA76: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSystemView::Sessions >> KqpSysColV1::UpdateAndDelete >> KqpSysColV1::SelectRowById ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-04-10T11:23:40.698002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643681527950693:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.698082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c7f/r3tmp/tmpYWFwVo/pdisk_1.dat 2025-04-10T11:23:41.072671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27415, node 1 2025-04-10T11:23:41.119408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:41.119526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:41.137829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:41.169296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:41.169336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:41.169348Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:23:41.169526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:41.428697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:41.522255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:44.659495Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643697016123885:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:44.659641Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c7f/r3tmp/tmpDtd5HX/pdisk_1.dat 2025-04-10T11:23:44.790051Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:44.821940Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.822027Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:44.825646Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23035, node 4 2025-04-10T11:23:44.893006Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:44.893034Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:44.893042Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:44.893164Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:45.132827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:45.296979Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jrfq23rg0bwy3a90wt1jjbm7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33148, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:45.300141Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:45.304213Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:45.304330Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:45.304375Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:45.304419Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:45.390270Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:45.390395Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:45.390408Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:45.390473Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:45.450381Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jrfq23xa262tf2c7d5hnfew9, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33148, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:47.731043Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jrfq264jfakn3mtbsdyrb4r8, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33148, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:47.733330Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643709901026852:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:47.733462Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:47.733816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643709901026864:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:47.744242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:47.753333Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:47.753447Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:47.753467Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:47.753548Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:47.771269Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:47.771393Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:47.771409Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:23:47.771457Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:23:47.779397Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643709901026866:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:47.860274Z node 4 :TX_PROXY ERROR: Actor# [4:7491643709901026944:2801] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:48.298240Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq264jfakn3mtbsdyrb4r8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTU5NDBhMWYtYjZmZDEwNWYtZjg3MTZhN2ItNWViNmExMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:48.315744Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfq26pvd215ezcz75m1e9m7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33148, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:48.316226Z node 4 :READ_TABLE_API NOTICE: [4:7491643714195994302:2351] Finish grpc stream, status: 400010 2025-04-10T11:23:48.321604Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jrfq26q17c96b5jngxgehkfx, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33148, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:23:48.331696Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994303:2352] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-04-10T11:23:48.331783Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994303:2352] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-04-10T11:23:48.332701Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994303:2352] got stream part, size: 290, RU required: 128 rate limiter absent 2025-04-10T11:23:48.333049Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994303:2352] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T11:23:48 ... 
am quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-04-10T11:23:48.416206Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994349:2356] got stream part, size: 244, RU required: 128 rate limiter absent 2025-04-10T11:23:48.416593Z node 4 :READ_TABLE_API DEBUG: [4:7491643714195994349:2356] Starting inactivity timer for 600.000000s with tag 3 2025-04-10T11:23:48.457376Z node 4 :READ_TABLE_API NOTICE: [4:7491643714195994349:2356] Finish grpc stream, status: 400000 2025-04-10T11:23:48.459485Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a9e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.459509Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.459725Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d1a80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.459726Z node 4 :GRPC_SERVER DEBUG: [0x51a0000aa480] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.459901Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a8c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.459909Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ab080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460084Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d0280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460093Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d0e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460287Z node 4 :GRPC_SERVER DEBUG: [0x51a000105680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460297Z node 4 :GRPC_SERVER DEBUG: [0x51a00009f080] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460436Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d3280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460498Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ab680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460593Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ac280] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460648Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ac880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460705Z node 4 :GRPC_SERVER DEBUG: [0x51a0000abc80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460798Z node 4 :GRPC_SERVER DEBUG: [0x51a000099c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:23:48.460846Z node 4 :GRPC_SERVER DEBUG: [0x51a000106e80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-10T11:23:49.856924Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643720063012411:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:49.857160Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001c7f/r3tmp/tmphwHknY/pdisk_1.dat 2025-04-10T11:23:49.971948Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:50.003764Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:50.003855Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:50.006323Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27800, node 7 2025-04-10T11:23:50.095010Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:50.095030Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:50.095038Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:50.095189Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:50.349982Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-04-10T11:23:54.473515Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643743694052445:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:54.473610Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c7f/r3tmp/tmpaAxCyg/pdisk_1.dat 2025-04-10T11:23:54.618910Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:54.656227Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:54.656322Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:54.660120Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15397, node 10 2025-04-10T11:23:54.727141Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:54.727163Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:54.727172Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:54.727317Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:54.961908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-04-10T11:23:59.329522Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643763144083874:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:59.329595Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c7f/r3tmp/tmpOMpl8b/pdisk_1.dat 2025-04-10T11:23:59.526753Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:59.568551Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.568663Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:59.573236Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8382, node 13 2025-04-10T11:23:59.647635Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:59.647660Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:59.647670Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:59.647841Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:59.962242Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
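Editor's note: the ": Error: Operation timeout." and ": Error: Operation cancelled." lines above are the two expected outcomes of the KeepAlive test — a client that stops waiting for an operation versus a client that asks the server to abort it. The following is a minimal self-contained sketch of the first limit only, using standard C++ rather than the YDB SDK (the SDK's actual setting names are not shown in this log, so none are claimed here):

```cpp
#include <chrono>
#include <future>
#include <iostream>
#include <thread>

// Simulated long-running server-side operation.
static int SlowOperation() {
    std::this_thread::sleep_for(std::chrono::seconds(5));
    return 42;
}

int main() {
    // Launch the work asynchronously, then bound how long we wait for it.
    auto fut = std::async(std::launch::async, SlowOperation);
    if (fut.wait_for(std::chrono::seconds(1)) == std::future_status::timeout) {
        // The client gives up waiting; the server-side work may still finish.
        // That is why a separate cancel-after limit (the "Operation cancelled."
        // case above) also exists: it aborts the work on the server itself.
        std::cout << ": Error: Operation timeout.\n";
    } else {
        std::cout << "Result: " << fut.get() << "\n";
    }
    fut.wait();  // join the async task before exiting
    return 0;
}
```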
>> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> YdbOlapStore::LogNonExistingUserId [GOOD] >> YdbOlapStore::LogPagingBefore >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> KqpSystemView::ReadSuccess >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbS3Internal::BadRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-04-10T11:23:29.102438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643634073177394:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:29.102497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf7/r3tmp/tmpphHeCl/pdisk_1.dat 2025-04-10T11:23:29.417924Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30838, node 1 2025-04-10T11:23:29.445752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:29.445858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:29.449135Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:29.449201Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:29.495147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:29.501703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:29.501733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:29.501741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:29.501941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:29.803660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-04-10T11:23:34.102922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643634073177394:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:34.103034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-04-10T11:23:36.234426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643664137949604:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:36.234430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643664137949612:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:36.234549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:36.237932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:23:36.253830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643664137949618:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:23:36.329514Z node 1 :TX_PROXY ERROR: Actor# [1:7491643664137949697:2715] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-10T11:23:42.569370Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643691558347287:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.569434Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf7/r3tmp/tmp0BpWjy/pdisk_1.dat 2025-04-10T11:23:42.736639Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.772016Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.772119Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.774515Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28444, node 4 2025-04-10T11:23:42.842825Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.842847Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.842855Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.842999Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16929 WaitRootIsUp 'Root'... 
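Editor's note: the "Previous query attempt was finished with unsuccessful status ...: Sending retry attempt N of 5" trace above shows the retry policy walking through transient statuses (OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY), while UNDETERMINED and TRANSPORT_UNAVAILABLE get a single "attempt 1 of 1" retry. A rough sketch of that policy shape follows; the status names come from the log, but the backoff values, helper names, and idempotency rule are assumptions, not the SDK's actual implementation:

```cpp
#include <chrono>
#include <functional>
#include <thread>

enum class EStatus {
    Success, Overloaded, ClientResourceExhausted,
    Unavailable, BadSession, SessionBusy, Undetermined
};

// Statuses the trace above retries up to five times with backoff.
static bool IsFastRetryable(EStatus s) {
    return s == EStatus::Overloaded || s == EStatus::ClientResourceExhausted ||
           s == EStatus::Unavailable || s == EStatus::BadSession ||
           s == EStatus::SessionBusy;
}

EStatus RunWithRetries(const std::function<EStatus()>& op, bool idempotent) {
    const int maxRetries = 5;                        // "retry attempt N of 5"
    auto delay = std::chrono::milliseconds(50);      // assumed backoff base
    EStatus s = op();
    for (int retry = 1; retry <= maxRetries && IsFastRetryable(s); ++retry) {
        std::this_thread::sleep_for(delay);
        delay *= 2;                                  // exponential backoff
        s = op();
    }
    // Statuses with side-effect uncertainty get one retry, and only when the
    // operation is known to be idempotent: "retry attempt 1 of 1".
    if (s == EStatus::Undetermined && idempotent) {
        s = op();
    }
    return s;
}
```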
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:43.063004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:45.654699Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643704443250189:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.654831Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.689987Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:45.814419Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643704443250358:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.814509Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.814593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643704443250363:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:45.818718Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:45.834640Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643704443250365:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:45.933589Z n ... 11:24:03.605311Z node 13 :TX_PROXY DEBUG: actor# [13:7491643764971689139:2138] Cookie# 0 userReqId# "" txid# 281474976715672 SEND to# [13:7491643782151560270:3567] 2025-04-10T11:24:03.607356Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:35322" 2025-04-10T11:24:03.607382Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:24:03.607435Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:24:03.607724Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:24:03.607804Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:24:03.607841Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-04-10T11:24:03.608005Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 HANDLE EvClientConnected 2025-04-10T11:24:03.608184Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:24:03.608355Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:03.610385Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-04-10T11:24:03.610449Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-04-10T11:24:03.610502Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560270:3567] txid# 281474976715672 SEND to# [13:7491643782151560269:2391] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-04-10T11:24:03.611572Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.611677Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.611689Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.611834Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.621484Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284243667, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:03.625095Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 
1 2025-04-10T11:24:03.628886Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.628900Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.629023Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.629024Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.630357Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-04-10T11:24:03.638996Z node 13 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jrfq2nnpc88exr0psd4t0z2g, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35332, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:24:03.639287Z node 13 :TX_PROXY DEBUG: actor# [13:7491643764971689139:2138] Handle TEvProposeTransaction 2025-04-10T11:24:03.639319Z node 13 :TX_PROXY DEBUG: actor# [13:7491643764971689139:2138] TxId# 281474976715673 ProcessProposeTransaction 2025-04-10T11:24:03.639363Z node 13 :TX_PROXY DEBUG: actor# [13:7491643764971689139:2138] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [13:7491643782151560358:3649] 2025-04-10T11:24:03.639430Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037890 not found 2025-04-10T11:24:03.641358Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:24:03.641899Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:35332" 2025-04-10T11:24:03.641923Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:24:03.641980Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:24:03.642266Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:24:03.642349Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:24:03.642379Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-04-10T11:24:03.642518Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 HANDLE EvClientConnected 2025-04-10T11:24:03.642715Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:24:03.642857Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046644480 2025-04-10T11:24:03.644675Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-04-10T11:24:03.644745Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-04-10T11:24:03.644786Z node 13 :TX_PROXY DEBUG: Actor# [13:7491643782151560358:3649] txid# 281474976715673 SEND to# [13:7491643782151560355:2395] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-04-10T11:24:03.645881Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.645961Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.646002Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.646039Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.654523Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284243702, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:03.660286Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-04-10T11:24:03.665317Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.665332Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:24:03.665415Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.665418Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:24:03.667169Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-04-10T11:24:03.673411Z node 13 :GRPC_SERVER DEBUG: [0x51a00011be80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.673719Z node 13 :GRPC_SERVER DEBUG: [0x51a000087680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.673913Z node 13 :GRPC_SERVER DEBUG: [0x51a00007c880] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.674163Z node 13 :GRPC_SERVER DEBUG: [0x51a0000a3280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.674410Z node 13 :GRPC_SERVER DEBUG: [0x51a000098480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.674631Z node 13 :GRPC_SERVER DEBUG: [0x51a000183680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.674830Z node 13 :GRPC_SERVER DEBUG: [0x51a00007da80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675023Z node 13 :GRPC_SERVER DEBUG: [0x51a00000b480] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675314Z node 13 :GRPC_SERVER DEBUG: [0x51a0000a1a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675364Z node 13 :GRPC_SERVER DEBUG: [0x51a000058280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675524Z node 13 :GRPC_SERVER DEBUG: [0x51a000119480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 
2025-04-10T11:24:03.675618Z node 13 :GRPC_SERVER DEBUG: [0x51a000008a80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675726Z node 13 :GRPC_SERVER DEBUG: [0x51a000179a80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675891Z node 13 :GRPC_SERVER DEBUG: [0x51a0000e9480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.675935Z node 13 :GRPC_SERVER DEBUG: [0x51a000174c80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.676127Z node 13 :GRPC_SERVER DEBUG: [0x51a00009ae80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.676203Z node 13 :GRPC_SERVER DEBUG: [0x51a000182480] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-10T11:24:03.677403Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found 2025-04-10T11:24:03.678822Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> KqpSystemView::FailResolve >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> KqpSysColV1::StreamSelectRowById >> KqpSysColV0::InnerJoinTables >> KqpSysColV0::InnerJoinSelect >> KqpSystemView::PartitionStatsRange1 >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284762.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284762.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284762.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284762.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284762.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=144284762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283562.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284762.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124284762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283562.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283562.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=124283562.000000s;Name=;Codec=}; 2025-04-10T11:22:43.306564Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:43.390804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:43.416496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:43.416853Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:43.425408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:43.425638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:43.425895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:43.426029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:43.426170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:43.426298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:43.426420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:43.426548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:43.426662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:43.426773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-10T11:22:43.426884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:43.426986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:43.456505Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:43.456866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:43.456954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:43.457138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:43.457335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:43.457428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:43.457474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:43.457568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:43.457630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:43.457673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:43.457702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:43.457861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:43.457944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:43.457988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:43.458020Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:43.458111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:43.458187Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:43.458234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:43.458264Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:43.458351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:43.458401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:43.458433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:43.458487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:43.458533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:43.458568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:43.459014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=69; 2025-04-10T11:22:43.459099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:22:43.459197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-10T11:22:43.459297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-04-10T11:22:43.459471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:43.459528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:43.459569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:43.459766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:43.459813Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:43.459844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... :29;EXECUTE:finishLoadingTime=577; 2025-04-10T11:24:06.303098Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54366; 2025-04-10T11:24:06.314804Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11620; 2025-04-10T11:24:06.327391Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11518; 2025-04-10T11:24:06.327503Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12606; 2025-04-10T11:24:06.327698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=120; 2025-04-10T11:24:06.327832Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2025-04-10T11:24:06.327984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-04-10T11:24:06.328119Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=87; 2025-04-10T11:24:06.342073Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13878; 2025-04-10T11:24:06.359370Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17170; 2025-04-10T11:24:06.359518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=51; 2025-04-10T11:24:06.359598Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-04-10T11:24:06.359655Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-10T11:24:06.359708Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-10T11:24:06.359765Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 
2025-04-10T11:24:06.359851Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-04-10T11:24:06.359907Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-10T11:24:06.360016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2025-04-10T11:24:06.360071Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:24:06.360149Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-10T11:24:06.360254Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2025-04-10T11:24:06.360634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=336; 2025-04-10T11:24:06.360680Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=119717; 2025-04-10T11:24:06.360845Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:24:06.360957Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:24:06.361010Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:24:06.361079Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:24:06.380909Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:24:06.381072Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:24:06.381134Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:06.381209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:24:06.381287Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:24:06.381334Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:24:06.381385Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:06.381423Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:06.381521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:24:06.382061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:06.382142Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2646:4520];tablet_id=9437184;parent=[1:2604:4485];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-10T11:24:06.382751Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:24:06.382970Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:24:06.383001Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:24:06.383025Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:24:06.383068Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:24:06.383122Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:06.383179Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:24:06.383236Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:24:06.383280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:24:06.383328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:06.383368Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:06.383453Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:24:06.384045Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:24:06.385585Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpSystemView::PartitionStatsSimple >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> YdbYqlClient::TestDecimal [GOOD] >> 
YdbYqlClient::TestBusySession >> YdbImport::ImportFromS3ToExistingTable [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> KqpSystemView::PartitionStatsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-04-10T11:23:45.215128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643705305642388:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:45.220386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c65/r3tmp/tmpy9w4on/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1197, node 1 2025-04-10T11:23:45.594262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:45.596949Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:45.596966Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:45.638750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:45.638969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:45.664005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:45.664031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:45.664040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:45.664207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:45.700796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:45.938579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:49.069997Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643718498076607:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:49.070090Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c65/r3tmp/tmp0RoGWB/pdisk_1.dat 2025-04-10T11:23:49.161447Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:49.182092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:49.182227Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:49.185320Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61852, node 4 2025-04-10T11:23:49.244022Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:49.244041Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:49.244048Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:49.244160Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:49.475642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:53.470250Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643735816383694:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:53.472368Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c65/r3tmp/tmpmNU7nU/pdisk_1.dat 2025-04-10T11:23:53.633769Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:53.663242Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:53.663339Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:53.672027Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3176, node 7 2025-04-10T11:23:53.738682Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:53.738709Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:53.738718Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:53.738849Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:53.972010Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.347510Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643748701286548:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.347511Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643748701286556:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.347653Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.351212Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:23:56.366879Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643748701286562:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:23:56.449873Z node 7 :TX_PROXY ERROR: Actor# [7:7491643748701286637:2686] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:58.250410Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643758057128357:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:58.250695Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c65/r3tmp/tmpv5uYSv/pdisk_1.dat 2025-04-10T11:23:58.393113Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:58.429297Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:58.429397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:58.432205Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22866, node 10 2025-04-10T11:23:58.486861Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:58.486887Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:58.486897Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:58.487044Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:58.763293Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:01.659622Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643770942031278:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.659733Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643770942031273:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.659932Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.663469Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:01.685054Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643770942031287:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:01.781756Z node 10 :TX_PROXY ERROR: Actor# [10:7491643770942031358:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:03.416360Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643782627246799:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:03.416419Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c65/r3tmp/tmpRIBPrA/pdisk_1.dat 2025-04-10T11:24:03.558900Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:03.595402Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:03.595510Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:03.599299Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11302, node 13 2025-04-10T11:24:03.672639Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:03.672666Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:03.672676Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:03.672842Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:04.072818Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.178342Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643799807117031:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.178431Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643799807117023:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.182003Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.185550Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:07.224222Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643799807117037:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:07.311499Z node 13 :TX_PROXY ERROR: Actor# [13:7491643799807117111:2686] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2025-04-10T11:23:36.636842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643665546526077:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:36.637106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cdf/r3tmp/tmp6SCWiq/pdisk_1.dat 2025-04-10T11:23:36.995896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:37.042646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:37.042804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12717, node 1 2025-04-10T11:23:37.055291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:37.094761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:37.094789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:37.094797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:37.094928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:37.368918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:39.551519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 CLIENT_DEADLINE_EXCEEDED 2025-04-10T11:23:40.037739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643682726398166:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.037739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643682726398161:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.037831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:40.041800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:40.059763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643682726398175:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:40.154113Z node 1 :TX_PROXY ERROR: Actor# [1:7491643682726398329:4194] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:40.613571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq1ym4fzwwhdgje9hvqfr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjUyOGJkYS03MDcwNDYzMy00NmY5ZTRjNy0zMDcwMjNjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:42.206240Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643689484030658:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:42.206309Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cdf/r3tmp/tmpiw8KC8/pdisk_1.dat 2025-04-10T11:23:42.319061Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:42.353127Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:42.353217Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:42.356848Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13687, node 4 2025-04-10T11:23:42.416282Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:42.416306Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:42.416313Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:42.416463Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:42.647170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:45.125044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:46.769955Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643707446705327:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:46.770071Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cdf/r3tmp/tmpCqKTaF/pdisk_1.dat 2025-04-10T11:23:46.953622Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:46.989993Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:46.990091Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:46.993505Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4898, node 7 2025-04-10T11:23:47.081925Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.081958Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.081968Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.082138Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:47.345194Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:50.220711Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:51.770175Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491643707446705327:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:51.770250Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:23:58.456118Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643760638279326:2072];sen ... 9];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-10T11:24:07.305083Z node 13 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-10T11:24:07.305377Z node 13 :TX_COLUMNSHARD DEBUG: TxWriteIndex[5] (CS::INDEXATION) apply at tablet 72075186224037888 2025-04-10T11:24:07.307584Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643796367207378:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.307672Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.307964Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643796367207390:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.310328Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-10T11:24:07.310487Z node 13 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 2025-04-10T11:24:07.311565Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:07.318712Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-04-10T11:24:07.320615Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-10T11:24:07.320665Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;fline=with_appended.cpp:65;portions=1,;task_id=50f9096a-15fe11f0-8358d782-d7376f1; 2025-04-10T11:24:07.320929Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::50f9096a-15fe11f0-8358d782-d7376f1; 2025-04-10T11:24:07.321007Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:24:07.321087Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-10T11:24:07.321185Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:24:07.321635Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:24:07.321686Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:07.321727Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:07.321846Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=50f9096a-15fe11f0-8358d782-d7376f1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 
2025-04-10T11:24:07.322250Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Delete Blob DS:2181038080:[72075186224037888:1:1:3:0:3864:0] 2025-04-10T11:24:07.322310Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-10T11:24:07.322419Z node 13 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=50f9096a-15fe11f0-8358d782-d7376f1;mem=3380;cpu=0; 2025-04-10T11:24:07.322566Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-10T11:24:07.339033Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-04-10T11:24:07.345067Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643796367207392:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:07.430609Z node 13 :TX_PROXY ERROR: Actor# [13:7491643796367207538:2913] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:07.622272Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7491643796367207191:2339];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:24:07.792562Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq2s85fqh19djq5qpy4b6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjMxMDMzNjQtNjNjNmU4ZGQtNWM4YmY0ZjMtNTE0OTc2ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:07.866038Z node 13 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715662 scanId: 1 version: {1744284247377:max} readable: {1744284247832:max} at tablet 72075186224037888 2025-04-10T11:24:07.867070Z node 13 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715662 scanId: 1 at tablet 72075186224037888 2025-04-10T11:24:07.867665Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744284247377:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-10T11:24:07.997143Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744284247377:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-10T11:24:07.999102Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1744284247377:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":8},{"from":12},{"from":4},{"from":6},{"from":2},{"from":14},{"from":10}]},{"owner_id":2,"inputs":[{"from":15}]},{"owner_id":4,"inputs":[{"from":15}]},{"owner_id":6,"inputs":[{"from":15}]},{"owner_id":8,"inputs":[{"from":15}]},{"owner_id":10,"inputs":[{"from":15}]},{"owner_id":12,"inputs":[{"from":15}]},{"owner_id":14,"inputs":[{"from":15}]},{"owner_id":15,"inputs":[]}],"nodes":{"15":{"p":{"p":{"data":[{"name":"stringToString","id":7},{"name":"id","id":1},{"name":"timestamp","id":2},{"name":"dateTimeS","id":3},{"name":"dateTimeU","id":4},{"name":"date","id":5},{"name":"utf8ToString","id":6}]},"o":"7,1,2,3,4,5,6","t":"FetchOriginalData"},"w":14,"id":15},"2":{"p":{"i":"5","p":{"address":{"name":"date","id":5}},"o":"5","t":"AssembleOriginalData"},"w":19,"id":2},"8":{"p":{"i":"1","p":{"address":{"name":"id","id":1}},"o":"1","t":"AssembleOriginalData"},"w":19,"id":8},"0":{"p":{"i":"5,3,4,1,7,2,6","t":"Projection"},"w":133,"id":0},"4":{"p":{"i":"3","p":{"address":{"name":"dateTimeS","id":3}},"o":"3","t":"AssembleOriginalData"},"w":19,"id":4},"14":{"p":{"i":"6","p":{"address":{"name":"utf8ToString","id":6}},"o":"6","t":"AssembleOriginalData"},"w":19,"id":14},"10":{"p":{"i":"7","p":{"address":{"name":"stringToString","id":7}},"o":"7","t":"AssembleOriginalData"},"w":19,"id":10},"6":{"p":{"i":"4","p":{"address":{"name":"dateTimeU","id":4}},"o":"4","t":"AssembleOriginalData"},"w":19,"id":6},"12":{"p":{"i":"2","p":{"address":{"name":"timestamp","id":2}},"o":"2","t":"AssembleOriginalData"},"w":19,"id":12}}}; 2025-04-10T11:24:08.007508Z node 13 :TX_COLUMNSHARD INFO: self_id=[13:7491643796367207217:2343];tablet_id=72075186224037888;parent=[13:7491643796367207191:2339];fline=manager.cpp:82;event=ask_data;request=request_id=3;3={portions_count=1};; 2025-04-10T11:24:08.012275Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-10T11:24:08.013965Z node 13 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-10T11:24:08.026467Z node 13 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-10T11:24:08.049472Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284247377, txId: 18446744073709551615] shutting down 2025-04-10T11:24:08.124110Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7491643796367207191:2339];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:24:08.171668Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7491643796367207191:2339];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-04-10T11:23:44.150119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643697466478084:2073];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:23:44.150347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c79/r3tmp/tmpqMOtn7/pdisk_1.dat 2025-04-10T11:23:44.540089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:44.548577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:44.548704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31868, node 1 2025-04-10T11:23:44.570045Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:44.570066Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:44.588894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:44.662792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:44.662823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:44.662835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:44.662938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:44.968369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:45.084389Z node 1 :TX_PROXY ERROR: Actor# [1:7491643701761446303:2603] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:23:48.299538Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643718036494617:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:48.299589Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c79/r3tmp/tmpABPTbs/pdisk_1.dat 2025-04-10T11:23:48.415201Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:48.430745Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:48.430818Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:48.433857Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28182, node 4 2025-04-10T11:23:48.517151Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:48.517171Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:48.517176Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:48.517289Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:48.829526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:53.039546Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643736251227625:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:53.039664Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c79/r3tmp/tmpxJmwps/pdisk_1.dat 2025-04-10T11:23:53.180151Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:53.215793Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:53.215883Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:53.218647Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23278, node 7 2025-04-10T11:23:53.303740Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:53.303762Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:53.303770Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:53.303907Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:53.513779Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.282139Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643749136130541:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.282292Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.567268Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:56.739323Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643749136130717:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.739411Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.739750Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643749136130722:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:56.744937Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:56.766717Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643749136130724:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:23:56.859980Z node 7 :TX_PROXY ERROR: Actor# [7:7491643749136130800:2808] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:57.101877Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2f7v8x9kv76w8t7k1gwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWJhYWQ0ZDEtYTM0NDc5YzAtZTQxNDQ2NmItZmEwNGU2NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:57.292547Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq2ffa5wyg1vxtz3b7846t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWJhYWQ0ZDEtYTM0NDc5YzAtZTQxNDQ2NmItZmEwNGU2NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:23:58.931542Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643758918985626:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:58.931621Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c79/r3tmp/tmpLRZ8wP/pdisk_1.dat 2025-04-10T11:23:59.117486Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18534, node 10 2025-04-10T11:23:59.206395Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:59.206418Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:59.206426Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:59.206956Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:59.246994Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.247097Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:59.251720Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:59.536280Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:02.436997Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643776098855880:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.437132Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643776098855872:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.437363Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.441674Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:02.458949Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643776098855886:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:02.555335Z node 10 :TX_PROXY ERROR: Actor# [10:7491643776098855965:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:02.700934Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491643776098855994:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:02.701178Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NGE5NzI2ZjItNjkxZTM3Yi1hMDk4NWI1ZS1iOTg0MmEzZQ==, ActorId: [10:7491643776098855845:2331], ActorState: ExecuteState, TraceId: 01jrfq2mqf1rrjqbgzewm253p5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:04.453662Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643786172384084:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:04.453919Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c79/r3tmp/tmpZZ6jGk/pdisk_1.dat 2025-04-10T11:24:04.603687Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:04.643078Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:04.643219Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:04.653226Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11979, node 13 2025-04-10T11:24:04.714511Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:04.714543Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:04.714552Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:04.714731Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:05.078209Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
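The SCHEME_ERROR above (issue code 2003, reported by KQP_COMPILE_ACTOR) is the compile-time response to a query that references a path absent from the scheme. A minimal reproduction would be a statement like the following; the sketch is hypothetical, as the test's exact query text is not shown:

    -- YQL sketch (assumed): reading a path that does not exist under /Root
    SELECT * FROM `Root/NotFound`;
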
2025-04-10T11:24:05.150938Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:05.247707Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:08.046780Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643803352254571:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:08.046895Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:08.046947Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643803352254583:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:08.051585Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:24:08.076114Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643803352254585:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:24:08.172132Z node 13 :TX_PROXY ERROR: Actor# [13:7491643803352254658:2911] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:08.289295Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq2sza5ss3qby8e6k8zv9f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGIyZjdhN2YtMzk2NGZlOTMtMzU3YjkxMjItYTU4NDk4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:08.401170Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq2t7e3d9cnjb7b2g4dnak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGIyZjdhN2YtMzk2NGZlOTMtMzU3YjkxMjItYTU4NDk4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> TGRpcYdbTest::ExplainQuery >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword >> KqpSystemView::PartitionStatsParametricRanges >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> KqpSystemView::ReadSuccess [GOOD] >> KqpSysColV1::SelectRowById [GOOD] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> TGRpcAuthentication::NoConnectRights >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] >> KqpSystemView::FailResolve [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk >> KqpSysColV1::UpdateAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 9110, MsgBus: 30897 2025-04-10T11:24:06.341492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643795306921654:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:06.341559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aec/r3tmp/tmpXmpEyT/pdisk_1.dat 2025-04-10T11:24:06.858798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.873732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.873825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.875518Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9110, node 1 2025-04-10T11:24:06.986855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.986874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.986882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.986996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30897 TClient is connected to server localhost:30897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.651542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:07.682661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:07.936694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.122082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.240583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.900585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643808191825318:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.900697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.206909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.279536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.320181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.351282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.385651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.419524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.507488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643812486793130:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.507550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.507788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643812486793135:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.512253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:10.521885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643812486793137:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:10.599022Z node 1 :TX_PROXY ERROR: Actor# [1:7491643812486793193:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:11.343444Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643795306921654:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:11.346521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:11.395616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.539841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfq2x8zdwhgmxqxbxm0g7jq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYzYjgwYmUtM2MxZDM1MjYtZTRiNDJmYjItNWUwNWQ0ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:11.551267Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284251537, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 16037, MsgBus: 28141 2025-04-10T11:24:05.643262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643790763881835:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:05.643752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000af2/r3tmp/tmpsQJxRl/pdisk_1.dat 2025-04-10T11:24:06.088723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.115935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.116058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.132328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16037, node 1 2025-04-10T11:24:06.266750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.266781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.266791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.266926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28141 TClient is connected to server localhost:28141 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.154231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.202772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.377384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.553426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.637136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.033660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643807943752716:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.033776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.620137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.663665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.693874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.718881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.787933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.885821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.977479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643807943753242:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.977568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.977804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643807943753247:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.981046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.990262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643807943753249:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:10.054467Z node 1 :TX_PROXY ERROR: Actor# [1:7491643812238720599:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.643425Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643790763881835:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:10.643529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> YdbS3Internal::BadRequests [GOOD] >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7138, MsgBus: 12629 2025-04-10T11:24:05.642101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643788244704612:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:05.642779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000af0/r3tmp/tmpMYnEde/pdisk_1.dat 2025-04-10T11:24:06.061927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.098443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.099257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.103609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7138, node 1 2025-04-10T11:24:06.266775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.266802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.266811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.266939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12629 TClient is connected to server localhost:12629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.052186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.079292Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:07.099237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.334840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:07.527946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:07.611249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.004567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643801129608273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.004687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.617311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.655036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.687912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.719446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.763451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.817063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.909011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643805424576082:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.909115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.909123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643805424576087:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.912151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.920376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643805424576089:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:10.018797Z node 1 :TX_PROXY ERROR: Actor# [1:7491643809719543443:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.641778Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643788244704612:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:10.641846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 24193, MsgBus: 28712 2025-04-10T11:24:05.642860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643787567860335:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:05.642986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aff/r3tmp/tmpTYvVVD/pdisk_1.dat 2025-04-10T11:24:06.108033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.131817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.131927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24193, node 1 2025-04-10T11:24:06.136040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:06.269044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.269074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.269080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.269179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28712 TClient is connected to server localhost:28712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
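The recurring sequence above — TPoolFetcherActor warning "Resource pool default not found", then an ESchemeOpCreateResourcePool suboperation, a "Transaction ... completed, doublechecking" retry, and finally a TX_PROXY ERROR saying the path /Root/.metadata/workload_manager/pools/default already exists and "request accepts it" — is the workload service lazily bootstrapping the default resource pool on first query; the final error is benign (the tests still report [GOOD]) because the creation merely raced with a concurrent creator. Creating such a pool explicitly would look roughly like the sketch below; the statement syntax and the setting shown are assumptions, not taken from this log:

    -- YQL sketch (assumed CREATE RESOURCE POOL syntax and setting)
    CREATE RESOURCE POOL default WITH (CONCURRENT_QUERY_LIMIT = 10);
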
2025-04-10T11:24:07.128207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.145225Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:07.159161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.325006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.520991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:07.611843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.002453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643800452763989:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.002552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.618534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.663703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.733854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.800136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.833311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.908412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.959381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643804747731806:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.959447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.959514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643804747731811:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.963344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.972977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643804747731813:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:10.058962Z node 1 :TX_PROXY ERROR: Actor# [1:7491643809042699164:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.646363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643787567860335:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:10.655283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:11.808612Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284251815, txId: 281474976710671] shutting down >> KqpSysColV0::InnerJoinTables [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-10T11:23:22.496080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643606335402766:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:22.496224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfe/r3tmp/tmpC1hnnR/pdisk_1.dat 2025-04-10T11:23:22.784831Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4087, node 1 2025-04-10T11:23:22.868796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:22.868931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:22.871408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:22.879971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:22.879991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:22.879996Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:22.880100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:23.162091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:23.242877Z node 1 :TICKET_PARSER DEBUG: Ticket 1584618F485526E5D7EC40D71F8B2067AE55BE60D59A5DDC44B8F115DF1A0A7C (ipv6:[::1]:55426) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-10T11:23:23.243337Z node 1 :TICKET_PARSER ERROR: Ticket 1584618F485526E5D7EC40D71F8B2067AE55BE60D59A5DDC44B8F115DF1A0A7C: Cannot create token from certificate. Client certificate failed verification 2025-04-10T11:23:23.306195Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:55436) has now valid token of root@builtin 2025-04-10T11:23:23.378240Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:23.378300Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:23.378325Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:23.378361Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:25.954741Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643616942483010:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:25.954808Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfe/r3tmp/tmphPzI1a/pdisk_1.dat 2025-04-10T11:23:26.041693Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:26.065323Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:26.065407Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:26.067532Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15827, node 4 2025-04-10T11:23:26.115423Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:26.115445Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:26.115452Z node 4 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:26.115589Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:26.318526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:26.385224Z node 4 :TICKET_PARSER DEBUG: Ticket 1584618F485526E5D7EC40D71F8B2067AE55BE60D59A5DDC44B8F115DF1A0A7C (ipv6:[::1]:60086) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-10T11:23:26.385750Z node 4 :TICKET_PARSER ERROR: Ticket 1584618F485526E5D7EC40D71F8B2067AE55BE60D59A5DDC44B8F115DF1A0A7C: Cannot create token from certificate. 
Client certificate failed verification 2025-04-10T11:23:26.444122Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:60100) has now valid token of root@builtin 2025-04-10T11:23:26.500949Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:26.500977Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:26.500986Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:26.501016Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:29.933386Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643634440099336:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:29.933456Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfe/r3tmp/tmpIvIQAb/pdisk_1.dat 2025-04-10T11:23:30.051718Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:30.092955Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:30.093038Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:30.095711Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6734, node 7 2025-04-10T11:23:30.161704Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:30.161728Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:30.161761Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:30.161895Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:30.362492Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0410 11:23:30.407150456 1619781 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. 
E0410 11:23:30.410110373 1619834 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0410 11:23:30.417035122 1619780 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0410 11:23:30.419527287 1619834 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0410 11:23:30.426063276 1619837 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0410 11:23:30.428207744 1619780 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0410 11:23:30.434327653 1619644 ssl_transport_security.cc:1431] Handshake failed ... ENT. E0410 11:23:55.761776692 1633405 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9129'; Got args: {grpc.client_channel_factory=0x502000086850, grpc.default_authority=localhost:9129, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000b85e60, grpc.internal.event_engine=0x502000a95970, grpc.internal.subchannel_pool=0x504000079390, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002dd610, grpc.server_uri=dns:///localhost:9129} E0410 11:23:55.780258069 1633405 ssl_transport_security.cc:791] Invalid private key. E0410 11:23:55.780445056 1633405 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0410 11:23:55.780632704 1633405 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9129'; Got args: {grpc.client_channel_factory=0x502000086850, grpc.default_authority=localhost:9129, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600093eae0, grpc.internal.event_engine=0x502000c9fa10, grpc.internal.subchannel_pool=0x504000079390, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002dd610, grpc.server_uri=dns:///localhost:9129} E0410 11:23:55.786278571 1633405 ssl_transport_security.cc:791] Invalid private key. E0410 11:23:55.786557768 1633405 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0410 11:23:55.786798685 1633405 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9129'; Got args: {grpc.client_channel_factory=0x502000086850, grpc.default_authority=localhost:9129, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600093eae0, grpc.internal.event_engine=0x50200072cc90, grpc.internal.subchannel_pool=0x504000079390, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002dd610, grpc.server_uri=dns:///localhost:9129} E0410 11:23:55.792797967 1633405 ssl_transport_security.cc:791] Invalid private key. E0410 11:23:55.792973573 1633405 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0410 11:23:55.793162830 1633405 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9129'; Got args: {grpc.client_channel_factory=0x502000086850, grpc.default_authority=localhost:9129, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600004a1e0, grpc.internal.event_engine=0x5020009b3650, grpc.internal.subchannel_pool=0x504000079390, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002dd610, grpc.server_uri=dns:///localhost:9129} E0410 11:23:55.794860908 1633405 ssl_transport_security.cc:791] Invalid private key. E0410 11:23:55.794994567 1633405 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0410 11:23:55.795155496 1633405 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9129'; Got args: {grpc.client_channel_factory=0x502000086850, grpc.default_authority=localhost:9129, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600004a1e0, grpc.internal.event_engine=0x50200059afd0, grpc.internal.subchannel_pool=0x504000079390, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002dd610, grpc.server_uri=dns:///localhost:9129} 2025-04-10T11:24:00.992818Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7491643767682491665:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:00.993377Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfe/r3tmp/tmp0REDTL/pdisk_1.dat 2025-04-10T11:24:01.152750Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:01.191390Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:01.191495Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:01.200339Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6224, node 25 2025-04-10T11:24:01.320699Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:01.320728Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:01.320737Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:01.320881Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:01.738631Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
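[Annotation] The run of `ssl_transport_security.cc` / `chttp2_connector.cc` errors above is the expected output of the negative TLS cases in this suite: "wrong version number" is the classic symptom of a TLS client handshaking against a plaintext (or wrong-protocol) endpoint, and "Invalid private key" means the channel was handed a key that fails PEM parsing, so the TSI handshaker factory cannot be created and subchannel creation aborts. A minimal sketch of the client-side setup these tests exercise, using gRPC's public C++ API (file paths are placeholders, not from the test source):

```cpp
// Sketch only: builds a TLS channel the way the tests above do.
#include <fstream>
#include <sstream>
#include <string>
#include <grpcpp/grpcpp.h>

static std::string ReadFile(const std::string& path) {
    std::ifstream in(path);
    std::ostringstream ss;
    ss << in.rdbuf();
    // An empty or garbled PEM here reproduces "Invalid private key" above.
    return ss.str();
}

int main() {
    grpc::SslCredentialsOptions opts;
    opts.pem_root_certs  = ReadFile("ca.pem");      // trust anchor
    opts.pem_private_key = ReadFile("client.key");  // must be valid PEM
    opts.pem_cert_chain  = ReadFile("client.crt");
    // Pointing a TLS channel at a port that speaks plaintext yields the
    // "wrong version number" handshake failures seen in the log.
    auto channel = grpc::CreateChannel("localhost:9129",
                                       grpc::SslCredentials(opts));
    channel->WaitForConnected(gpr_time_add(
        gpr_now(GPR_CLOCK_REALTIME),
        gpr_time_from_seconds(5, GPR_TIMESPAN)));
    return 0;
}
```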
2025-04-10T11:24:01.927642Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57572) has now valid token of root@builtin 2025-04-10T11:24:02.010553Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:02.010594Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:02.010609Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:02.010671Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:06.753956Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7491643791729857321:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:06.778558Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfe/r3tmp/tmpDsddgY/pdisk_1.dat 2025-04-10T11:24:06.934653Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.984372Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.984493Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.992981Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11543, node 28 2025-04-10T11:24:07.143329Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.143360Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.143368Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.143550Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.640621Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:07.934290Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52174) has now valid token of root@builtin 2025-04-10T11:24:08.027080Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:08.027118Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:08.027132Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:08.027184Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 8709, MsgBus: 6963 2025-04-10T11:24:07.115848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643796636801641:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:07.115889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ae5/r3tmp/tmpwcOLmO/pdisk_1.dat 2025-04-10T11:24:07.589333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:07.589436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.590899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:07.623088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8709, node 1 2025-04-10T11:24:07.806547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.806569Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.806574Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.806974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6963 TClient is connected to server localhost:6963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
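[Annotation] The recurring sequence above — `Resource pool default not found` (WARN), `Scheduled retry for error: Transaction ... completed, doublechecking`, then a TX_PROXY ERROR of `path exist, request accepts it` — is the workload service lazily creating the default resource pool on first use. The final "error" is benign: the create is idempotent, so losing the race to a concurrent creator still counts as success. A generic illustration of that create-then-doublecheck pattern (not YDB's actual implementation):

```cpp
// Generic idempotent-create-with-retry sketch, assuming a create() callback
// that distinguishes success, already-exists, and transient failures.
#include <chrono>
#include <functional>
#include <thread>

enum class CreateStatus { Ok, AlreadyExists, TransientError };

bool EnsureExists(const std::function<CreateStatus()>& create,
                  int max_attempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 0; attempt < max_attempts; ++attempt) {
        switch (create()) {
            case CreateStatus::Ok:
            case CreateStatus::AlreadyExists:  // a concurrent creator won the
                return true;                   // race; the resource is there
            case CreateStatus::TransientError:
                std::this_thread::sleep_for(delay);
                delay *= 2;  // exponential backoff between retries
        }
    }
    return false;
}
```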
2025-04-10T11:24:08.430013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.461516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.602266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.773981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.855748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:10.715686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643809521705317:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.715792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.000249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.026127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.051909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.123573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.154961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.187359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.248868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643813816673130:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.248979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.249048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643813816673135:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.253773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.265991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643813816673137:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:11.368905Z node 1 :TX_PROXY ERROR: Actor# [1:7491643813816673193:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:12.116392Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643796636801641:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.116483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:12.254805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.427629Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7491643818111640849:3708], for# user0@builtin, access# SelectRow 2025-04-10T11:24:12.427799Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-10T11:24:12.437368Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUwMzczZmItMWI2NjU4ODgtNmQxMTJkOWQtNzEwMTEyYw==, ActorId: [1:7491643818111640810:2501], ActorState: ExecuteState, TraceId: 01jrfq2y3ffs0vnkn6fnbqemez, Create QueryResponse for error on request, msg: 2025-04-10T11:24:12.437744Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284252426, txId: 281474976710672] shutting down 2025-04-10T11:24:12.438191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfq2y3ffs0vnkn6fnbqemez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGUwMzczZmItMWI2NjU4ODgtNmQxMTJkOWQtNzEwMTEyYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 6469, MsgBus: 28035 2025-04-10T11:24:07.241052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643798129715648:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:07.241104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ae8/r3tmp/tmpFw9fYK/pdisk_1.dat 2025-04-10T11:24:07.790510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:07.793672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:07.793778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.799870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6469, node 1 2025-04-10T11:24:07.876037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.876057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.876068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.876184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28035 TClient is connected to server localhost:28035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.477034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.526964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.691168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
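[Annotation] In the `KqpSystemView::FailResolve` output above, the `Access denied ... for# user0@builtin, access# SelectRow` and `Error resolving keys ... Status: AccessDenied` entries are the point of the test: the user lacks SelectRow on the root, so key resolution fails before execution and the session returns an error response. A sketch of the test-side expectation, with all names invented for illustration (the real suite uses YDB's internal test harness):

```cpp
// Hypothetical expected-failure check; ExecuteAs and the result type are
// stand-ins, not the real SDK API.
#include <cassert>
#include <string>

enum class EStatus { Success, AccessDenied };

struct TQueryResult {
    EStatus Status;
    std::string Issues;
};

TQueryResult ExecuteAs(const std::string& user, const std::string& query) {
    // Stub: simulates the scheme cache denying SelectRow for this user.
    if (user == "user0@builtin")
        return {EStatus::AccessDenied, "Access denied for SelectRow"};
    return {EStatus::Success, ""};
}

int main() {
    auto result = ExecuteAs("user0@builtin",
                            "SELECT * FROM `/Root/.sys/partition_stats`");
    assert(result.Status == EStatus::AccessDenied);  // denial is expected
    return 0;
}
```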
2025-04-10T11:24:08.857521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:08.929682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:10.582893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643811014619103:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.583037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.954347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.985200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.015012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.058127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.093783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.161698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.201065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643815309586915:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.201142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.201153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643815309586920:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.205069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.214688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643815309586922:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:11.307473Z node 1 :TX_PROXY ERROR: Actor# [1:7491643815309586977:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:12.238295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643798129715648:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.238405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:12.379647Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284252410, txId: 281474976710671] shutting down >> KqpSystemView::PartitionStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-04-10T11:23:33.505948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643653908665006:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:33.506058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf1/r3tmp/tmpczlqYg/pdisk_1.dat 2025-04-10T11:23:33.819007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:33.860840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:33.860963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:33.864087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8293, node 1 2025-04-10T11:23:33.912136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:33.912156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:33.912164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:33.912307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:34.159054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21203 2025-04-10T11:23:34.345782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:34.415461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:23:34.434654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:23:34.434811Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-10T11:23:34.446377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:23:34.446623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:23:34.447048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:23:34.447206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:23:34.447294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:23:34.447374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:23:34.447459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:23:34.447558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:23:34.447685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:23:34.447767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:23:34.447850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:23:34.447943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643658203633337:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:23:34.450654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:23:34.473147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:23:34.473336Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-10T11:23:34.480953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:23:34.481014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:23:34.481302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:23:34.481427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:23:34.481544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:23:34.481645Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:23:34.481770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:23:34.481887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:23:34.481991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:23:34.482176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:23:34.482293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:23:34.482391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491643658203633341:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:23:34.485545Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-10T11:23:34.485665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:23:34.485699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:23:34.485879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:23:34.486022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:23:34.486088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:23:34.486114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04 ... 
ready operation [1744284250632:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:10.595959Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744284250632:281474976710658 keys extracted: 0 2025-04-10T11:24:10.596106Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:10.596228Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:10.596292Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:10.596859Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:10.597331Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:10.599960Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744284250631 2025-04-10T11:24:10.599991Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:10.600028Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744284250639 2025-04-10T11:24:10.601345Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744284250632} 2025-04-10T11:24:10.601416Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:10.602791Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:10.602824Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:10.602860Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:10.602916Z node 13 :TX_DATASHARD DEBUG: Complete [1744284250632 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7491643793557606006:2201], exec latency: 0 ms, propose latency: 6 ms 2025-04-10T11:24:10.602966Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710658 state Ready TxInFly 0 2025-04-10T11:24:10.603011Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:10.605329Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710658 datashard 72075186224037888 state Ready 2025-04-10T11:24:10.605391Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-04-10T11:24:10.634331Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:7491643810737476060:2743], serverId# [13:7491643810737476061:2744], sessionId# [0:0:0] 2025-04-10T11:24:10.634473Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-10T11:24:10.642299Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T11:24:10.642353Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 SUCCESS Upsert done: 0.030972s 2025-04-10T11:24:10.656753Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643810737476069:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.656851Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.656933Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643810737476081:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.661606Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:10.668605Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:10.684221Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:10.687916Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643810737476083:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:24:10.769089Z node 13 :TX_PROXY ERROR: Actor# [13:7491643810737476156:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.887500Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:10.887610Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710661 at tablet 72075186224037888 2025-04-10T11:24:10.890820Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:10.894309Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710661 at step 1744284250940 at tablet 72075186224037888 { Transactions { TxId: 281474976710661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744284250940 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:10.894342Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:10.894511Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:10.894534Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:10.894556Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1744284250940:281474976710661] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:10.894736Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744284250940:281474976710661 keys extracted: 0 2025-04-10T11:24:10.895068Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:10.897340Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744284250940} 2025-04-10T11:24:10.897398Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:10.897440Z node 13 :TX_DATASHARD DEBUG: Complete [1744284250940 : 281474976710661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7491643810737476185:2822], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:24:10.897462Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:10.899031Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq2wgvessj2x9h03h7zg8p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjUyYjJiNmUtZDkzY2VjNGYtMjJmOGM4MjUtNmZmMmZjOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:10.903144Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7491643810737476214:2146], tablet: [13:7491643810737475957:2340], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-04-10T11:34:10.902229Z 2025-04-10T11:24:10.903837Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7491643810737476214:2146], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7491643810737475957:2340], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-04-10T11:24:10.903863Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7491643810737476214:2146] 2025-04-10T11:24:10.905388Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. 
table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-04-10T11:24:10.905425Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7491643810737476214:2146], scanId: 4, table: /Root/LogsX 2025-04-10T11:24:10.905467Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7491643810737476214:2146], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-04-10T11:24:10.905504Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7491643810737476214:2146], to: [13:7491643810737476208:2359], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-04-10T11:24:10.905964Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-10T11:24:10.906045Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:10.906061Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:10.906089Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:10.906173Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:10.924973Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284250940, txId: 281474976710661] shutting down 2025-04-10T11:24:11.182737Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643793557605671:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:11.182847Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:11.496161Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq2wsz3vgszavw2ashm4sj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjViMzNlMDYtYzgwZTVlYjMtMjQ5MTljNzYtZDE3ZTcwMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
SUCCESS count returned 100 rows
Negative (wrong format): BAD_REQUEST
Negative (wrong data): SCHEME_ERROR
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9;
2025-04-10T11:24:11.546239Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id;
Negative (less columns): BAD_REQUEST
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10;
2025-04-10T11:24:11.557908Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id;
Negative (reordered columns): BAD_REQUEST
>> KqpSystemView::PartitionStatsRange2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD]
Test command err:
Trying to start YDB, gRPC: 24442, MsgBus: 2071
2025-04-10T11:24:07.168724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643798944906675:2201];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:07.169138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aea/r3tmp/tmpquUhNv/pdisk_1.dat
2025-04-10T11:24:07.676501Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:24:07.687729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:24:07.687842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:24:07.692129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24442, node 1
2025-04-10T11:24:07.830571Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:24:07.830595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:24:07.830603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:24:07.830763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2071
TClient is connected to server localhost:2071
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.478750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.496789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:08.501042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.669862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.856368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.941828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:10.652219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643811829810210:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.652347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.952727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.019369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.054381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.085617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.113434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.186762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.242427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643816124778025:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.242493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.242668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643816124778030:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.245636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.255223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643816124778032:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:24:11.349331Z node 1 :TX_PROXY ERROR: Actor# [1:7491643816124778087:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:24:12.164792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643798944906675:2201];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:12.164863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:24:12.836289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284252830, txId: 281474976710671] shutting down
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD]
Test command err:
Trying to start YDB, gRPC: 19681, MsgBus: 17518
2025-04-10T11:24:05.643326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643787472027894:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:05.644858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000af9/r3tmp/tmpaZppVX/pdisk_1.dat
2025-04-10T11:24:06.065248Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:24:06.107220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:24:06.107366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:24:06.109943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19681, node 1
2025-04-10T11:24:06.266723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:24:06.266745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:24:06.266754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:24:06.266880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17518
TClient is connected to server localhost:17518
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.046185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.074087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:07.102680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.305264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.511367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.594917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.009694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643804651898843:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.009849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.618764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.696104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.765096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.794769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.829513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.874190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.936659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643804651899360:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.936733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.936930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643804651899365:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.940194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.949184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643804651899367:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:24:10.015391Z node 1 :TX_PROXY ERROR: Actor# [1:7491643808946866718:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:24:10.641880Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643787472027894:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:10.641985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD]
Test command err:
Trying to start YDB, gRPC: 1739, MsgBus: 14455
2025-04-10T11:24:07.887928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643797017031627:2079];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:07.888827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000adc/r3tmp/tmpeUEN4G/pdisk_1.dat
2025-04-10T11:24:08.336348Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:24:08.370775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:24:08.370897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 1739, node 1
2025-04-10T11:24:08.372713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:24:08.446256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:24:08.446277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:24:08.446285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:24:08.446393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14455
TClient is connected to server localhost:14455
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:09.091999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.131308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:09.149597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.291633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.459278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:09.540841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.146419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643814196902569:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.146542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.450412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.478785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.505633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.536149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.564754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.609822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.655690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643814196903079:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.655781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.655991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643814196903084:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.659329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.669087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643814196903086:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:24:11.758654Z node 1 :TX_PROXY ERROR: Actor# [1:7491643814196903140:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:24:12.855879Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284252829, txId: 281474976710671] shutting down
2025-04-10T11:24:12.889388Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643797017031627:2079];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:12.889465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD]
Test command err:
Trying to start YDB, gRPC: 29087, MsgBus: 19611
2025-04-10T11:24:07.320970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643798253702688:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:07.321025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ade/r3tmp/tmpjEwVVM/pdisk_1.dat
2025-04-10T11:24:07.858618Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:24:07.861333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:24:07.861435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:24:07.865322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29087, node 1
2025-04-10T11:24:07.986606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:24:07.986624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:24:07.986635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:24:07.986738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19611
TClient is connected to server localhost:19611
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.593174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.612655Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:08.625478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.813394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.007613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.083476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:10.771412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643811138606363:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.771567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.109098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.142545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.171563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.200595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.230173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.264794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.301629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643815433574167:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.301724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.301749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643815433574172:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.305924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.314069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643815433574174:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:24:11.372533Z node 1 :TX_PROXY ERROR: Actor# [1:7491643815433574230:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:24:12.321080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643798253702688:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:12.321168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbS3Internal::BadRequests [GOOD]
Test command err:
2025-04-10T11:23:47.130047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643711733162082:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:47.130203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c08/r3tmp/tmpauUb8O/pdisk_1.dat
2025-04-10T11:23:47.578747Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:23:47.588251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:47.588359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:47.603587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 14016, node 1
2025-04-10T11:23:47.763938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:23:47.763961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:23:47.763969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:23:47.764074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21740
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:23:48.037812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:50.179377Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2025-04-10T11:23:51.870174Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643730629053766:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:51.870347Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c08/r3tmp/tmpOBLfZR/pdisk_1.dat 2025-04-10T11:23:52.073957Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:52.115719Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:52.115855Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:52.121025Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14565, node 4 2025-04-10T11:23:52.247082Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:52.247110Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:52.247118Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:52.247317Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:52.523243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:56.680025Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643749942777266:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:56.680107Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c08/r3tmp/tmpsmMqrR/pdisk_1.dat 2025-04-10T11:23:56.872471Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:56.907124Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:56.907206Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:56.911679Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14637, node 7 2025-04-10T11:23:57.004320Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:57.004343Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:57.004350Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:57.004497Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:57.304124Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:01.332598Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643770239045560:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:01.332822Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c08/r3tmp/tmp1DWfuj/pdisk_1.dat 2025-04-10T11:24:01.497906Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:01.531571Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:01.531663Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:01.533970Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19727, node 10 2025-04-10T11:24:01.594448Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:01.594471Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:01.594479Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:01.594618Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:01.880687Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:04.873501Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:24:04.874741Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:24:04.875766Z node 10 :KQP_PROXY DEBUG: TraceId: "01jrfq2m0cfzqacmfg9z2qmb79", Request has 18444999789464.675885s seconds to be completed 2025-04-10T11:24:04.879121Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ= 2025-04-10T11:24:04.879209Z node 10 :KQP_PROXY DEBUG: TraceId: "01jrfq2m0cfzqacmfg9z2qmb79", Created new session, sessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, workerId: [10:7491643783123948482:2332], database: , longSession: 1, local sessions count: 1 2025-04-10T11:24:04.879254Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:24:04.879456Z node 10 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jrfq2m0cfzqacmfg9z2qmb79 2025-04-10T11:24:04.879511Z node 10 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:24:04.879537Z node 10 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:24:04.879559Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:24:04.879608Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-10T11:24:04.880346Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:24:04.880421Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:24:04.880571Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:24:04.880607Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:24:04.881287Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: unknown state, session actor bootstrapped 2025-04-10T11:24:04.882343Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:24:04.895203Z node 10 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, rpc ctrl: [10:7491643783123948498:2333], sameNode: 1, trace_id: 2025-04-10T11:24:04.895239Z node 10 :KQP_PROXY TRACE: Attach local session: [10:7491643783123948482:2332] to rpc: [10:7491643783123948498:2333] on same node 2025-04-10T11:24:04.900698Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T11:24:04.900766Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T11:24:04.900812Z node 10 
:KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T11:24:04.900845Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T11:24:04.900937Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, ActorId: [10:7491643783123948482:2332], ActorState: unknown state, Session actor destroyed 2025-04-10T11:24:04.901076Z node 10 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ=, workerId: [10:7491643783123948482:2332], local sessions count: 0 2025-04-10T11:24:04.906502Z node 10 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [10:7491643783123948501:2335], trace_id: 2025-04-10T11:24:04.906673Z node 10 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=10&id=YTdhNWJlOTQtMmZiMzRjMGEtMmIzZTk4OGQtNDI0Mzc5NTQ= 2025-04-10T11:24:04.906785Z node 10 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [10:7491643783123948501:2335], selfId: [10:7491643770239045774:2279], source: [10:7491643770239045774:2279] 2025-04-10T11:24:06.581176Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643793504590511:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:06.581233Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c08/r3tmp/tmpATKWY7/pdisk_1.dat 2025-04-10T11:24:06.783337Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65241, node 13 2025-04-10T11:24:06.965426Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.971099Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.072197Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:07.216600Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.216625Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.216635Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.216786Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7415 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.622259Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:10.507442Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.061889Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643814979429996:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.061901Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643814979430004:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.062019Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.066482Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.103007Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643814979430010:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-10T11:24:11.175256Z node 13 :TX_PROXY ERROR: Actor# [13:7491643814979430138:4257] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:24:11.581927Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643793504590511:2075];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:11.582007Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:24:11.694769Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2wxm03hwv61x7ypxnba2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGVjNGZhZTQtMzhiMmVlNWUtNWVmOWJhM2UtM2VhMjBmY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD]
Test command err:
Trying to start YDB, gRPC: 29423, MsgBus: 19640
2025-04-10T11:24:07.314858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643796126401231:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:24:07.314924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ae3/r3tmp/tmpDdx6om/pdisk_1.dat
2025-04-10T11:24:07.853243Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:24:07.856879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:24:07.856978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:24:07.860396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29423, node 1
2025-04-10T11:24:08.064716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:24:08.064750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:24:08.064763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:24:08.064924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19640
TClient is connected to server localhost:19640
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.767701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.780089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:08.787959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.949828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.109467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:09.187656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:10.959880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643809011304904:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.960030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.287206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.322346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.348946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.379108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.407624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.452570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:11.531532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643813306272719:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.531606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.531659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643813306272724:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.535540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:11.547234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643813306272726:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:11.617964Z node 1 :TX_PROXY ERROR: Actor# [1:7491643813306272779:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:12.315004Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643796126401231:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.315094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> KqpSystemView::QueryStatsScan >> KqpSystemView::FailNavigate >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> KqpSystemView::NodesSimple >> KqpSysColV1::StreamSelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 15957, MsgBus: 8206 2025-04-10T11:24:08.468477Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643803465673638:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:08.468607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ad8/r3tmp/tmpwxZPZv/pdisk_1.dat 2025-04-10T11:24:08.944563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:08.951185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:08.951322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:08.961508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15957, node 1 2025-04-10T11:24:09.072427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:09.072455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:09.072463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:09.072570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8206 TClient is connected to server localhost:8206 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:09.582650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.607764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.743527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:09.894987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.968011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:11.848492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643816350577154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:11.848669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.204220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.246414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.275450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.343757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.379682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.414015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.462427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643820645544962:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.462519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.464939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643820645544967:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.468462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:12.478022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643820645544969:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:12.541388Z node 1 :TX_PROXY ERROR: Actor# [1:7491643820645545023:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:13.455664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643803465673638:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:13.455807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:13.632919Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284253622, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: 2025-04-10T11:21:47.293175Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:47.392725Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:21:47.397483Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:21:47.397917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:47.422181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:47.422512Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:47.431479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:47.431703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:47.431942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:47.432079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:47.432212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:47.432367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:47.432485Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:47.432624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:47.432750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:21:47.432873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:47.432995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:47.433131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:47.459109Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:21:47.463321Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:47.463690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:47.463758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:47.463960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:47.464154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:47.464238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:47.464283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:47.464377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:47.464460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:47.464516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:47.464552Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:47.464726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:47.464797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:47.464838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:47.464867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:47.464973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:47.465029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:47.465073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:47.465102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:47.465175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:47.465232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:47.465263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:47.465335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:47.465378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:47.465405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:47.465824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-10T11:21:47.465931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-10T11:21:47.466024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-10T11:21:47.466119Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-10T11:21:47.466305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:47.466375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:47.466411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:47.466669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:47.466716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:47.466754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:47.466909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:21:47.466958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:21:47.466993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:21:47.467188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:21:47.467229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:21:47.467259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... 
names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817024Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817054Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:11.817084Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:24:11.817151Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:11.817218Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817242Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:11.817318Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-10T11:24:11.817357Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-10T11:24:11.817480Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-10T11:24:11.817553Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817635Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817781Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.817879Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:11.817944Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.818005Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.818032Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:858:2850] finished for tablet 9437184 2025-04-10T11:24:11.818619Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:851:2843];stats={"p":[{"events":["f_bootstrap"],"t":0.067},{"events":["f_ProduceResults"],"t":0.588},{"events":["l_bootstrap"],"t":0.843},{"events":["f_processing","f_task_result"],"t":0.867},{"events":["l_task_result"],"t":8.071},{"events":["f_ack"],"t":8.121},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":9.072}],"full":{"a":1744284242745479,"name":"_full_task","f":1744284242745479,"d_finished":0,"c":0,"l":1744284251818095,"d":9072616},"events":[{"name":"bootstrap","f":1744284242813325,"d_finished":775823,"c":1,"l":1744284243589148,"d":775823},{"a":1744284251817866,"name":"ack","f":1744284250866866,"d_finished":884702,"c":904,"l":1744284251817822,"d":884931},{"a":1744284251817859,"name":"processing","f":1744284243613154,"d_finished":3932075,"c":4520,"l":1744284251817824,"d":3932311},{"name":"ProduceResults","f":1744284243333657,"d_finished":1591194,"c":5426,"l":1744284251818018,"d":1591194},{"a":1744284251818020,"name":"Finish","f":1744284251818020,"d_finished":0,"c":0,"l":1744284251818095,"d":75},{"name":"task_result","f":1744284243613191,"d_finished":2958449,"c":3616,"l":1744284250817090,"d":2958449}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.818738Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:24:11.819479Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:851:2843];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.067},{"events":["f_ProduceResults"],"t":0.588},{"events":["l_bootstrap"],"t":0.843},{"events":["f_processing","f_task_result"],"t":0.867},{"events":["l_task_result"],"t":8.071},{"events":["f_ack"],"t":8.121},{"events":["l_ProduceResults","f_Finish"],"t":9.072},{"events":["l_ack","l_processing","l_Finish"],"t":9.073}],"full":{"a":1744284242745479,"name":"_full_task","f":1744284242745479,"d_finished":0,"c":0,"l":1744284251818805,"d":9073326},"events":[{"name":"bootstrap","f":1744284242813325,"d_finished":775823,"c":1,"l":1744284243589148,"d":775823},{"a":1744284251817866,"name":"ack","f":1744284250866866,"d_finished":884702,"c":904,"l":1744284251817822,"d":885641},{"a":1744284251817859,"name":"processing","f":1744284243613154,"d_finished":3932075,"c":4520,"l":1744284251817824,"d":3933021},{"name":"ProduceResults","f":1744284243333657,"d_finished":1591194,"c":5426,"l":1744284251818018,"d":1591194},{"a":1744284251818020,"name":"Finish","f":1744284251818020,"d_finished":0,"c":0,"l":1744284251818805,"d":785},{"name":"task_result","f":1744284243613191,"d_finished":2958449,"c":3616,"l":1744284250817090,"d":2958449}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:11.819598Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:24:02.665271Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-10T11:24:11.819680Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:24:11.820009Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpSystemView::PartitionStatsRange3 >> 
KqpSystemView::PartitionStatsRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284769.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284769.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=144284769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=144284769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283569.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124284769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124284769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283569.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=124283569.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=124283569.000000s;Name=;Codec=}; 2025-04-10T11:22:49.738597Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:49.825678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:49.849221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:49.849467Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:49.855509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:49.855653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:49.855826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:49.855913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:49.855990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:49.856052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:49.856122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:49.856243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:49.856328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:49.856411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:49.856488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:49.856547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:49.877682Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:49.878036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:49.878096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:49.878273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:49.878448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:49.878540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:49.878591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:49.878693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-10T11:22:49.878758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:49.878810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:49.878849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:49.879036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:49.879131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:49.879173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:49.879206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:49.879301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:49.879351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:49.879407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:49.879458Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:49.879548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:49.879584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:49.879611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:49.879654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:49.879690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:49.879719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:49.880129Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-10T11:22:49.880214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-10T11:22:49.880298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-10T11:22:49.880380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-10T11:22:49.880563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:49.880636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:49.880673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:49.880845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:49.880886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:49.880912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... ata.cpp:29;EXECUTE:finishLoadingTime=515; 2025-04-10T11:24:14.207139Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=57143; 2025-04-10T11:24:14.217148Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9932; 2025-04-10T11:24:14.226101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=8024; 2025-04-10T11:24:14.226213Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8979; 2025-04-10T11:24:14.226365Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=90; 2025-04-10T11:24:14.226458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=50; 2025-04-10T11:24:14.226563Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=70; 2025-04-10T11:24:14.226674Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=65; 
2025-04-10T11:24:14.235741Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9003; 2025-04-10T11:24:14.248981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=13132; 2025-04-10T11:24:14.249144Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=59; 2025-04-10T11:24:14.249234Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=32; 2025-04-10T11:24:14.249310Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=26; 2025-04-10T11:24:14.249366Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T11:24:14.249420Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:24:14.249519Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=53; 2025-04-10T11:24:14.249579Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:24:14.249696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2025-04-10T11:24:14.249755Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-10T11:24:14.249812Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-04-10T11:24:14.249900Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2025-04-10T11:24:14.250223Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=285; 2025-04-10T11:24:14.250257Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=108210; 2025-04-10T11:24:14.250380Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:24:14.250471Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:24:14.250513Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:24:14.250567Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:24:14.267075Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-04-10T11:24:14.267238Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:24:14.267291Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:14.267362Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:24:14.267416Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:24:14.267450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:24:14.267495Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:14.267532Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:14.267628Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:24:14.268157Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:14.268229Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2646:4520];tablet_id=9437184;parent=[1:2604:4485];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-04-10T11:24:14.268810Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:24:14.269063Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:24:14.269086Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:24:14.269104Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:24:14.269133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:24:14.269182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:24:14.269234Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-04-10T11:24:14.269297Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-10T11:24:14.269329Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:24:14.269364Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:14.269388Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:24:14.269454Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:24:14.270416Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-04-10T11:24:14.271955Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2604:4485];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpSysColV1::SelectRange >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey >> KqpSysColV1::InnerJoinTables >> 
KqpSystemView::QueryStatsSimple >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] >> KqpSystemView::Join >> KqpSysColV0::UpdateAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T11:21:20.619074Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:21:20.723637Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:21:20.729017Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:21:20.729408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:21:20.747868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:21:20.748124Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:21:20.756372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:21:20.756574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:21:20.756779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:21:20.756901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:21:20.756997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:21:20.757087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:21:20.757155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:21:20.757252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:21:20.757350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-04-10T11:21:20.757492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:21:20.757590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:21:20.757692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:21:20.776177Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:21:20.780170Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:21:20.780508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:21:20.780560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:21:20.780731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:20.780896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:21:20.780986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:21:20.781032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:21:20.781144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:21:20.781224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:21:20.781271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:21:20.781317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:21:20.781502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:21:20.781581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:21:20.781623Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:21:20.781665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:21:20.781793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:21:20.781851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:21:20.781899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:21:20.781928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:21:20.782000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:21:20.782063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:21:20.782094Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:21:20.782164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:21:20.782209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:21:20.782237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:21:20.782665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-10T11:21:20.782749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:21:20.782839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-10T11:21:20.782954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=58; 2025-04-10T11:21:20.783120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:21:20.783171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:21:20.783207Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:21:20.783449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:21:20.783502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:21:20.783535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:21:20.783675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:21:20.783716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:21:20.783747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:21:20.783939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:21:20.783987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:21:20.784022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... 
in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5 #27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13 #28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5 #29 0xff9888b in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #30 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #31 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a1b7206 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a1b7206 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a1b7206 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a1b7206 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x2a1b7206 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25 #6 0x2a1b7206 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51 #7 0x2a1a9544 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5 #8 0x2a1a9544 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23 #9 0x2a18eb4d in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30 #10 0x2a18eb4d in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #11 0x2a1842ba in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #12 0x2a1842ba in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 
0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #13 0x2a1842ba in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #14 0x2a1842ba in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #15 0x2a1842ba in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12 #16 0x25bd53a5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23 #17 0x25c2d860 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53 #18 0x25de8b94 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21 #19 0x25dea128 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15 #20 0x25d99e92 in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:615:22 #21 0x25d1cbc6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13 #22 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #23 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #24 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #25 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #26 0x3148fad2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13 #27 0x3146f664 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20 #28 0x3146f664 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:102:26 #29 0x31470de1 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:128:16 #30 0x31471e10 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:143:16 #31 0xff9c125 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9 #32 0xff9888b in void 
NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7fa053764d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 3120168 byte(s) leaked in 55709 allocation(s). 
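The leak report above uses LeakSanitizer's two categories: a "direct leak" is a heap block whose last pointer was dropped, while an "indirect leak" is a block still referenced, but only from other leaked blocks (here the deque nodes allocated under TBlobManager::StartBlobBatch are indirect because their owning object was itself leaked). A minimal standalone C++ sketch of that classification, assuming only clang with AddressSanitizer; this is illustrative code, not YDB source:

// Minimal sketch, not YDB code: reproduces the direct/indirect split seen in
// the ASAN summary above. Build the way the CI builds asan targets, e.g.:
//   clang++ -g -fsanitize=address leak_demo.cpp -o leak_demo && ./leak_demo
#include <deque>
#include <string>

struct TBatch {
    std::deque<std::string> Blobs; // heap nodes reachable only through the TBatch
};

int main() {
    TBatch* batch = new TBatch();                  // becomes a direct leak: the only pointer is dropped below
    batch->Blobs.push_back(std::string(64, 'x'));  // deque buffer and string body become indirect leaks
    batch = nullptr;                               // never deleted; LSAN reports everything at exit
    return 0;
}

On exit LSAN prints one "Direct leak" entry for the TBatch allocation and "Indirect leak" entries for the deque's internal buffers, the same shape as the "Indirect leak of 8 byte(s)" entry above whose allocation stack runs through std::deque::push_back.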
>> KqpSystemView::NodesRange1 >> KqpSysColV0::SelectRowAsterisk >> BasicStatistics::TwoDatabases [GOOD] >> KqpSystemView::PartitionStatsFollower >> TGRpcYdbTest::ExplainQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 9414, MsgBus: 17980 2025-04-10T11:24:10.185635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643809627127077:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:10.185751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ad5/r3tmp/tmpmaoMNe/pdisk_1.dat 2025-04-10T11:24:10.594732Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:10.602040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:10.602625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9414, node 1 2025-04-10T11:24:10.607241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:10.656297Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:10.656325Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:10.656341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:10.656533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17980 TClient is connected to server localhost:17980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:11.194421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:11.236078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:11.382992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:11.523665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:11.586944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.307395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643822512030752:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.307613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.585152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.622899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.652664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.684253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.715916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.753638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.816467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643822512031266:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.816576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.816889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643822512031271:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.820715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:13.833310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643822512031273:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:13.908616Z node 1 :TX_PROXY ERROR: Actor# [1:7491643822512031326:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:15.185468Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643809627127077:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:15.185581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:15.314845Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284255301, txId: 281474976710671] shutting down |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 17928, MsgBus: 20163 2025-04-10T11:24:05.646455Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643788606186492:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:05.646495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aee/r3tmp/tmpoCaOD3/pdisk_1.dat 2025-04-10T11:24:06.066527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.098930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.099243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.100933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17928, node 1 2025-04-10T11:24:06.267902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.267926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.267939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.268082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20163 TClient is connected to server localhost:20163 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.043853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.074881Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:07.096381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:07.103790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:07.266220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:07.454009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.554403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.003700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643801491090164:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.003819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.617209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.653136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.719505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.750193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.786471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.862353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.935870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643805786057980:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.935959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.935990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643805786057985:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.940326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.953757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643805786057987:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-10T11:24:10.018464Z node 1 :TX_PROXY ERROR: Actor# [1:7491643810081025338:3455] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.646800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643788606186492:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:10.646899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/dev 2025-04-10T11:24:15.776536Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284255762, txId: 281474976710684] shutting down >> KqpSystemView::PartitionStatsParametricRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-04-10T11:23:53.370863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643739782228275:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:53.370956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c05/r3tmp/tmp2sxBeS/pdisk_1.dat 2025-04-10T11:23:53.746836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:53.775215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:53.775312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:53.780180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23253, node 1 2025-04-10T11:23:53.889872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:53.889892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:53.889903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:53.890002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5867 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:54.127330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:57.349424Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643756546529648:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:57.349506Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c05/r3tmp/tmpv5qzx4/pdisk_1.dat 2025-04-10T11:23:57.483860Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:57.509457Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:57.509535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:57.513265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28653, node 4 2025-04-10T11:23:57.594376Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:57.594398Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:57.594407Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:57.594570Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:57.805542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:00.063330Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643769431432592:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:00.063331Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643769431432581:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:00.063430Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:00.067375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:00.094183Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643769431432595:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:00.171363Z node 4 :TX_PROXY ERROR: Actor# [4:7491643769431432674:2681] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:01.837462Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643772339174704:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:01.837516Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c05/r3tmp/tmp5gKGW2/pdisk_1.dat 2025-04-10T11:24:01.983562Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:02.011258Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:02.011341Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:02.014393Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8724, node 7 2025-04-10T11:24:02.086441Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:02.086469Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:02.086478Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:02.086663Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:02.348198Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1744284244.518079 1632074 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
2025-04-10T11:24:04.521427Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643785224077606:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:04.521532Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643785224077598:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:04.521950Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:04.526284Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:04.545626Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643785224077612:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 2814749767 ... 057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:04.716499Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YzNmODc2YzItMjYyMmNhNmYtOTk4ODNlOWEtNzY3NjA4M2E=, ActorId: [7:7491643785224077595:2331], ActorState: ExecuteState, TraceId: 01jrfq2ph71rzd99pjqjxpc6xe, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-10T11:24:04.717407Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfq2ph71rzd99pjqjxpc6xe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzNmODc2YzItMjYyMmNhNmYtOTk4ODNlOWEtNzY3NjA4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
<main>: Error: Unsupported protobuf type:
<main>: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1744284244.719951 1632074 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-04-10T11:24:04.774552Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YzNmODc2YzItMjYyMmNhNmYtOTk4ODNlOWEtNzY3NjA4M2E=, ActorId: [7:7491643785224077595:2331], ActorState: ExecuteState, TraceId: 01jrfq2pqg0ybprzyttxmeg618, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-10T11:24:04.776232Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2pqg0ybprzyttxmeg618, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzNmODc2YzItMjYyMmNhNmYtOTk4ODNlOWEtNzY3NjA4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
<main>: Error: Unsupported protobuf type:
<main>: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-10T11:24:06.517012Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643795032193732:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:06.517110Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c05/r3tmp/tmp3U49TX/pdisk_1.dat 2025-04-10T11:24:06.869882Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.893646Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.893740Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.902706Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24374, node 10 2025-04-10T11:24:07.038756Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.038797Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.038806Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.038963Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.427586Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:09.897984Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643807917096633:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.897995Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643807917096644:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.898053Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:09.901589Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:09.920233Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643807917096647:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:10.008737Z node 10 :TX_PROXY ERROR: Actor# [10:7491643812212064034:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:11.749216Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643814778091689:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:11.749324Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c05/r3tmp/tmpZgeixY/pdisk_1.dat 2025-04-10T11:24:11.952930Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6496, node 13 2025-04-10T11:24:12.053989Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:12.054012Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:12.054022Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:12.054186Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:12.059463Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:12.059549Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:12.062642Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:12.309240Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:12.359829Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.081079Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643831957962110:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.081179Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643831957962099:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.081271Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.085202Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:15.107470Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643831957962113:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:15.199770Z node 13 :TX_PROXY ERROR: Actor# [13:7491643831957962194:2817] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:15.291255Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq30v67aghdnzr00y5z6yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTk5ZGE4OGEtNTg3MDI1MDItYjhkOThkZmQtYmZkNTkxN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-04-10T11:20:39.557043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:329:2219], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:39.557433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:20:39.557500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000bc3/r3tmp/tmpaFSZgn/pdisk_1.dat 2025-04-10T11:20:39.963440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4808, node 1 2025-04-10T11:20:40.210581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:40.210650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:40.210693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:40.210951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:40.214096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:40.303426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:40.303544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:40.317634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4763 2025-04-10T11:20:40.844638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:44.154229Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-10T11:20:44.188843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:44.189028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:44.233660Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:44.239901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:44.508385Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.509007Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.509649Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.509804Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.510041Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.510121Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.510257Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.510362Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.510439Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.679360Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:44.679493Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:44.692833Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:44.856304Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:44.916812Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:44.916935Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:44.949695Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:44.951505Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:44.951744Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:44.951811Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:44.951873Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:44.951936Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:44.951994Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:44.952048Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:44.952742Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:44.982513Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:44.982665Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1947:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:44.988532Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1959:2607] 2025-04-10T11:20:44.992509Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1977:2617] 2025-04-10T11:20:44.992841Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1977:2617], schemeshard id = 72075186224037897 2025-04-10T11:20:44.997875Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-04-10T11:20:45.067943Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:45.068020Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:45.068101Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-04-10T11:20:45.083910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:45.091809Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:45.091949Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:45.292626Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:45.472437Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:45.539539Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:46.488289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:49.762628Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:49.798757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:49.798854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:49.883193Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:49.885172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:50.098027Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.098605Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099173Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099336Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099577Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099684Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099779Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099867Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.099958Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:50.263851Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:50.263974Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:50.283258Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:50.453371Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:50.501015Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-10T11:20:50.501120Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-10T11:20:50.554984Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-10T11:20:50.556054Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-10T11:20:50.556255Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-10T11:20:50.556300Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:50.556344Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:50.556404Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:50.556458Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:50.556501Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-10T11:20:50.557126Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-10T11:20:50.594334Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-10T11:20:50.594487Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3215:2596], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-10T11:20:50.604527Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3230:2608] 2025-04-10T11:20:50.611036Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3251:2618] ... eStatsToSA in 239.000000s, at schemeshard: 72075186224038898 2025-04-10T11:24:08.040758Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038898, stats size# 49 2025-04-10T11:24:08.060073Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-10T11:24:09.260607Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10326:4779]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:09.261112Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:24:09.261166Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10326:4779], StatRequests.size() = 1 2025-04-10T11:24:10.047224Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-10T11:24:10.047293Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:10.047334Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 3] is data table. 2025-04-10T11:24:10.047370Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-10T11:24:10.047694Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-04-10T11:24:10.051114Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:10.055282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10350:4465], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.055378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10360:4470], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.055477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:10.071919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-10T11:24:10.139049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10364:4473], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-10T11:24:10.303507Z node 2 :TX_PROXY ERROR: Actor# [2:10452:4521] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Database2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:10.328814Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10481:4536]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:10.329128Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:24:10.329266Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10483:4538] 2025-04-10T11:24:10.329360Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10483:4538] 2025-04-10T11:24:10.329996Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:10484:4539] 2025-04-10T11:24:10.330228Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:10484:4539], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:24:10.330289Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:24:10.330465Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10483:4538], server id = [2:10484:4539], tablet id = 72075186224038895, status = OK 2025-04-10T11:24:10.330642Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:24:10.330746Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10481:4536], StatRequests.size() = 1 2025-04-10T11:24:10.448865Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzhjNDU3NTAtNGRjOTA2YWItNGNmNGIzODYtNzMwMDYwNjU=, TxId: 2025-04-10T11:24:10.448930Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzhjNDU3NTAtNGRjOTA2YWItNGNmNGIzODYtNzMwMDYwNjU=, TxId: 2025-04-10T11:24:10.449501Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-10T11:24:10.472076Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-10T11:24:10.472145Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:24:10.555261Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-10T11:24:10.555328Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:24:10.637539Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10483:4538], schemeshard count = 1 2025-04-10T11:24:11.422948Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:24:11.434502Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:24:11.434564Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:11.434604Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-10T11:24:11.434641Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:24:11.435114Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database1 2025-04-10T11:24:11.437593Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:11.454017Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ODc2OWQyMTQtODFkZTljMTQtNmJhMWQ3NzItN2Y2YzBiNTE=, TxId: 2025-04-10T11:24:11.454097Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ODc2OWQyMTQtODFkZTljMTQtNmJhMWQ3NzItN2Y2YzBiNTE=, TxId: 2025-04-10T11:24:11.454762Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:24:11.470700Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:24:11.470759Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:24:11.639355Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10584:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:11.639753Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-10T11:24:11.639803Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10584:4819], StatRequests.size() = 1 2025-04-10T11:24:13.679961Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10655:4845]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:13.680329Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-10T11:24:13.680381Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10655:4845], StatRequests.size() = 1 2025-04-10T11:24:14.368882Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-10T11:24:14.368949Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:14.368991Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 4] is data table. 2025-04-10T11:24:14.369032Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-10T11:24:14.369377Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-04-10T11:24:14.372523Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:14.389640Z node 2 :SYSTEM_VIEWS WARN: [72075186224038891] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-04-10T11:24:14.390331Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmIzM2M1MzgtYWYxMjhjODctNWEwYTUyZDQtYjdhZjZkZmY=, TxId: 2025-04-10T11:24:14.390389Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmIzM2M1MzgtYWYxMjhjODctNWEwYTUyZDQtYjdhZjZkZmY=, TxId: 2025-04-10T11:24:14.391050Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-10T11:24:14.406687Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-10T11:24:14.406747Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-10T11:24:15.374186Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-10T11:24:15.374467Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-10T11:24:15.374904Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:24:15.386481Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:24:15.386552Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:15.561917Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10739:4859]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:15.562308Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-10T11:24:15.562353Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10739:4859], StatRequests.size() = 1 2025-04-10T11:24:15.563003Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10741:4620]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:15.567297Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:24:15.567366Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10741:4620], StatRequests.size() = 1 |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] >> KqpSysColV1::SelectRowAsterisk |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T11:20:40.446937Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:40.635267Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:20:40.651581Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:20:40.654111Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:40.681814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:40.682676Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:40.694705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:40.694943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:40.695180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:40.695315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:40.695448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:40.695591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:40.695703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:40.695823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:40.695936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:40.696073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.696205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:40.696322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:40.724639Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:20:40.729219Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:40.729618Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:40.729683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:40.729870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:40.730052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:40.730134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:40.730200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:40.730303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:40.730392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:40.730442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:40.730473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:40.730655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:40.730739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:40.730788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:40.730817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:40.730952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:40.731022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:40.731067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:40.731097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:40.731168Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:40.731228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:40.731261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:40.731315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:40.731358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:40.731390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:40.731829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:20:40.731911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-10T11:20:40.731999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-10T11:20:40.732111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-10T11:20:40.732290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:40.732343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:40.732377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:40.732595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:40.732643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.732671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:40.732817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:40.732858Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:40.732890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:40.733129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:20:40.733176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:20:40.733205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... lumn_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.171556Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.171616Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:15.171677Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:24:15.171827Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:15.171945Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.171992Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:15.172095Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-10T11:24:15.172146Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-10T11:24:15.172307Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-04-10T11:24:15.172419Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.172556Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.172731Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.172875Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:15.172985Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.173090Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.173134Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:592:2608] finished for tablet 9437184 2025-04-10T11:24:15.173832Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:585:2601];stats={"p":[{"events":["f_bootstrap"],"t":0.192},{"events":["f_ProduceResults"],"t":1.46},{"events":["l_bootstrap"],"t":2.999},{"events":["f_processing","f_task_result"],"t":3.105},{"events":["l_task_result"],"t":20.495},{"events":["f_ack"],"t":20.532},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":25.494}],"full":{"a":1744284229679016,"name":"_full_task","f":1744284229679016,"d_finished":0,"c":0,"l":1744284255173210,"d":25494194},"events":[{"name":"bootstrap","f":1744284229871122,"d_finished":2807877,"c":1,"l":1744284232678999,"d":2807877},{"a":1744284255172852,"name":"ack","f":1744284250211841,"d_finished":4885383,"c":903,"l":1744284255172765,"d":4885741},{"a":1744284255172837,"name":"processing","f":1744284232784256,"d_finished":16916229,"c":4515,"l":1744284255172768,"d":16916602},{"name":"ProduceResults","f":1744284231139950,"d_finished":9190454,"c":5420,"l":1744284255173111,"d":9190454},{"a":1744284255173114,"name":"Finish","f":1744284255173114,"d_finished":0,"c":0,"l":1744284255173210,"d":96},{"name":"task_result","f":1744284232784291,"d_finished":11932931,"c":3612,"l":1744284250174396,"d":11932931}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.173940Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:24:15.174575Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:585:2601];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.192},{"events":["f_ProduceResults"],"t":1.46},{"events":["l_bootstrap"],"t":2.999},{"events":["f_processing","f_task_result"],"t":3.105},{"events":["l_task_result"],"t":20.495},{"events":["f_ack"],"t":20.532},{"events":["l_ProduceResults","f_Finish"],"t":25.494},{"events":["l_ack","l_processing","l_Finish"],"t":25.495}],"full":{"a":1744284229679016,"name":"_full_task","f":1744284229679016,"d_finished":0,"c":0,"l":1744284255174034,"d":25495018},"events":[{"name":"bootstrap","f":1744284229871122,"d_finished":2807877,"c":1,"l":1744284232678999,"d":2807877},{"a":1744284255172852,"name":"ack","f":1744284250211841,"d_finished":4885383,"c":903,"l":1744284255172765,"d":4886565},{"a":1744284255172837,"name":"processing","f":1744284232784256,"d_finished":16916229,"c":4515,"l":1744284255172768,"d":16917426},{"name":"ProduceResults","f":1744284231139950,"d_finished":9190454,"c":5420,"l":1744284255173111,"d":9190454},{"a":1744284255173114,"name":"Finish","f":1744284255173114,"d_finished":0,"c":0,"l":1744284255174034,"d":920},{"name":"task_result","f":1744284232784291,"d_finished":11932931,"c":3612,"l":1744284250174396,"d":11932931}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:15.174691Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:23:49.593569Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-10T11:24:15.174766Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:24:15.175076Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpSystemView::NodesRange2 ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 5498, MsgBus: 64069 2025-04-10T11:24:12.159258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643818753160621:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.159373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ad4/r3tmp/tmpiDpIRh/pdisk_1.dat 2025-04-10T11:24:12.458820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5498, node 1 2025-04-10T11:24:12.545998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:12.546129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:12.546560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:12.546584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:12.546592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:12.546724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:12.548435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64069 TClient is connected to server localhost:64069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:13.098454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.135040Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:13.142969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:13.322197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.498739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.577469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:15.321780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643831638064278:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.321894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.719761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.786338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.811334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.838613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.864837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.935560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:15.987942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643831638064795:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.988022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.988215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643831638064800:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:15.991664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:16.006603Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:24:16.007285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643831638064802:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:16.089647Z node 1 :TX_PROXY ERROR: Actor# [1:7491643835933032151:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:17.159358Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643818753160621:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.159451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:17.538337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284257526, txId: 281474976710671] shutting down |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSysColV0::SelectRange >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] Test command err: 2025-04-10T11:23:40.295053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643681444504107:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.295171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c98/r3tmp/tmpIHcubg/pdisk_1.dat 2025-04-10T11:23:40.683584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:40.730524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:40.730632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:40.735694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29332, node 1 2025-04-10T11:23:40.798713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:40.798730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:40.798737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:40.798828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11192 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:41.079270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11192 TClient is connected to server localhost:11192 2025-04-10T11:23:41.524601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:43.282577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643694329407064:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.282677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643694329407059:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.282818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:43.287524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:23:43.302459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643694329407074:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:23:43.387410Z node 1 :TX_PROXY ERROR: Actor# [1:7491643694329407147:2716] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:11192 2025-04-10T11:23:43.856197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:23:48.616092Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:23:48.616449Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:23:48.616594Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c98/r3tmp/tmpIvBkHt/pdisk_1.dat 2025-04-10T11:23:49.162539Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:23:49.240606Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:49.240755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:49.255541Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:49.556027Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1002:2806], Recipient [4:552:2466]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:23:49.556122Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:23:49.556160Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T11:23:49.556307Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:999:2804], Recipient [4:552:2466]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:23:49.556342Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:23:49.651611Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:23:49.651859Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:23:49.651960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:23:49.652119Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T11:23:49.652272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:23:49.652387Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:23:49.652443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-04-10T11:23:49.652522Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T11:23:49.652589Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:23:49.652640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:23:49.655922Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T11:23:49.656115Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-04-10T11:23:49.656190Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T11:23:49.656238Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-04-10T11:23:49.656600Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:552:2466], Recipient [4:552:2466]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:23:49.656647Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T11:23:49.656904Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:23:49.656963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:23:49.657175Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:23:49.657306Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:23:49.657381Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:769:2617], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-10T11:23:49.657438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:769:2617], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-04-10T11:23:49.657951Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:23:49.658031Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-04-10T11:23:49.658080Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-10T11:23:49.658122Z node 4 ... 
diateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T11:24:00.959729Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:1639:2457], Recipient [4:552:2466]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 3 Generation: 1 Round: 2 TableStats { DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2046 LastUpdateTime: 2046 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T11:24:00.959798Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:24:00.959872Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-04-10T11:24:00.960047Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2046 LastUpdateTime: 2046 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:24:00.960107Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T11:24:01.025297Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq2k23bbjrrzjk8dz2wcmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZjBkZGY0MmQtMjExM2QyYzAtMTQ4NzJjNDUtMmQ2NzVjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:02.678546Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491643774652622715:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:02.678627Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c98/r3tmp/tmpSEwpeO/pdisk_1.dat 2025-04-10T11:24:02.793045Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:02.831780Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:02.831877Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:02.835855Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3758, node 6 2025-04-10T11:24:02.922512Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:02.922558Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:02.922570Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:02.922722Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1839 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:03.228664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:1839 2025-04-10T11:24:07.352291Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491643797607563015:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:07.352997Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c98/r3tmp/tmpUXV4nX/pdisk_1.dat 2025-04-10T11:24:07.666488Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:07.696119Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:07.696225Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.708230Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26342, node 9 2025-04-10T11:24:07.903076Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.903102Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.903110Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.903273Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.333340Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:13.000055Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7491643818579416833:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:13.000147Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c98/r3tmp/tmpFWCq38/pdisk_1.dat 2025-04-10T11:24:13.270317Z node 12 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9219, node 12 2025-04-10T11:24:13.401815Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:13.401942Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:13.425940Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:13.448807Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:13.448836Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:13.448847Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:13.449032Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:13.816949Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:3435 >> YdbYqlClient::TestConstraintViolation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 21001, MsgBus: 20022 2025-04-10T11:24:13.418874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643824033186408:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:13.419068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000acb/r3tmp/tmp4aofdj/pdisk_1.dat 2025-04-10T11:24:13.807447Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21001, node 1 2025-04-10T11:24:13.878071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:13.878256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:13.880372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:13.880961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:13.880994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:13.881003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:13.881130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20022 TClient is connected to server localhost:20022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:14.493892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:14.538659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:14.666606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:14.832946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:14.908083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.819011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643836918089867:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:16.819161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:17.204518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.254839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.289375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.328441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.364036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.441616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.554222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643841213057690:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:17.554305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:17.554873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643841213057695:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:17.559523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:17.574388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643841213057697:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:17.670335Z node 1 :TX_PROXY ERROR: Actor# [1:7491643841213057753:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:18.418805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643824033186408:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:18.418899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:18.908005Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284258934, txId: 281474976710671] shutting down >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> TYqlDecimalTests::NegativeValues [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSysColV1::StreamInnerJoinTables >> KqpSystemView::FailNavigate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-04-10T11:23:59.007682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643764671256083:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:59.007765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf5/r3tmp/tmpfFQ8Mh/pdisk_1.dat 2025-04-10T11:23:59.390171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:59.451549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.451691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:59.455441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13391, node 1 2025-04-10T11:23:59.530758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:59.530785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:59.530792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:59.531311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30151 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:59.833410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:01.917796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643773261191686:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:01.917892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.215940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:02.355896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643777556159152:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.355973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643777556159157:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.355980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:02.359554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:02.376496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643777556159159:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:02.435387Z node 1 :TX_PROXY ERROR: Actor# [1:7491643777556159230:2797] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:02.556998Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq2mdk0mbepyb3kq2qm7gq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5YjJhZjMtMTY2MDc2ZTYtYjk3ZGIzMDctNzE4OTI2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:02.707344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq2mmg6efs0x1ewenbnvdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5YjJhZjMtMTY2MDc2ZTYtYjk3ZGIzMDctNzE4OTI2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:04.143674Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643783429603556:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:04.143740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf5/r3tmp/tmpkpqxlo/pdisk_1.dat 2025-04-10T11:24:04.266129Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:04.302532Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:04.302617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:04.305943Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17249, node 4 2025-04-10T11:24:04.400466Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:04.400500Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:04.400508Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:04.400640Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:04.652011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.342396Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643796314506469:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.342478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.342878Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643796314506481:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:07.347397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:07.380740Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491643796314506483:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:07.447105Z node 4 :TX_PROXY ERROR: Actor# [4:7491643796314506567:2683] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:09.239955Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643806377322551:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:09.240036Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf5/r3tmp/tmpaPjQFc/pdisk_1.dat 2025-04-10T11:24:09.469417Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14310, node 7 2025-04-10T11:24:09.566390Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:09.566498Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:09.599274Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:09.602000Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:09.602024Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty may ... revious query completion proxyRequestId: 9 2025-04-10T11:24:12.682590Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzUwODA5OGUtODUxYjMwMzMtOTRhNjk3ZTktMzcxMWNiNGY=, ActorId: [7:7491643819262225456:2335], ActorState: ExecuteState, TraceId: 01jrfq2yg62kak2xhnfs1a1tsm, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-10T11:24:12.683131Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.683935Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzUwODA5OGUtODUxYjMwMzMtOTRhNjk3ZTktMzcxMWNiNGY=, ActorId: [7:7491643819262225456:2335], ActorState: ExecuteState, TraceId: 01jrfq2yg62kak2xhnfs1a1tsm, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-10T11:24:12.685794Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643819262225520:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.685928Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.690272Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:12.692132Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzUwODA5OGUtODUxYjMwMzMtOTRhNjk3ZTktMzcxMWNiNGY=, ActorId: [7:7491643819262225456:2335], ActorState: ExecuteState, TraceId: 01jrfq2yg62kak2xhnfs1a1tsm, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-10T11:24:12.692863Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7491643819262225540:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.692963Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.724531Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7491643819262225498:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:24:12.785885Z node 7 :TX_PROXY ERROR: Actor# [7:7491643819262225590:2697] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:14.506528Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643828438857757:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:14.506584Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf5/r3tmp/tmpP9CbJg/pdisk_1.dat 2025-04-10T11:24:14.677844Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:14.729311Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:14.729445Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:14.736021Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3701, node 10 2025-04-10T11:24:14.857017Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:14.857043Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:14.857051Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:14.857202Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:15.177519Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.256623Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643845618727977:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.256707Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.278282Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.422477Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643845618728137:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.422567Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.422786Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643845618728142:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.427341Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:18.469226Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643845618728144:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:18.575494Z node 10 :TX_PROXY ERROR: Actor# [10:7491643845618728228:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:18.672745Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq343m8k027j77wsmq8qz1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZmI2OGFlZDEtYjhhYWI2NTItNDdlNTVhZTktNDJjODljMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:18.806410Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq34cj45t1e11ybpddy8d5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZmI2OGFlZDEtYjhhYWI2NTItNDdlNTVhZTktNDJjODljMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:18.811846Z node 10 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:24:18.833209Z node 10 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:24:18.833463Z node 10 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:24:18.833757Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7491643845618728288:2335], Table: `Root/Test` ([72057594046644480:2:1]), SessionActorId: [10:7491643845618727974:2335]Got CONSTRAINT VIOLATION for table `Root/Test`. ShardID=72075186224037888, Sink=[10:7491643845618728288:2335].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:24:18.834362Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7491643845618728281:2335], SessionActorId: [10:7491643845618727974:2335], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[10:7491643845618727974:2335]. isRollback=0 2025-04-10T11:24:18.834670Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZmI2OGFlZDEtYjhhYWI2NTItNDdlNTVhZTktNDJjODljMmQ=, ActorId: [10:7491643845618727974:2335], ActorState: ExecuteState, TraceId: 01jrfq34cj45t1e11ybpddy8d5, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [10:7491643845618728282:2335] from: [10:7491643845618728281:2335] 2025-04-10T11:24:18.834795Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7491643845618728282:2335] TxId: 281474976715662. Ctx: { TraceId: 01jrfq34cj45t1e11ybpddy8d5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZmI2OGFlZDEtYjhhYWI2NTItNDdlNTVhZTktNDJjODljMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:24:18.835054Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZmI2OGFlZDEtYjhhYWI2NTItNDdlNTVhZTktNDJjODljMmQ=, ActorId: [10:7491643845618727974:2335], ActorState: ExecuteState, TraceId: 01jrfq34cj45t1e11ybpddy8d5, Create QueryResponse for error on request, msg: >> KqpSysColV1::StreamSelectRange [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 62692, MsgBus: 18619 2025-04-10T11:24:14.898806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643830018164243:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:14.899195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac6/r3tmp/tmptajImx/pdisk_1.dat 2025-04-10T11:24:15.204599Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62692, node 1 2025-04-10T11:24:15.292574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:15.292793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:15.300179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:15.338520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:15.338550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:15.338571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:15.338685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18619 TClient is connected to server localhost:18619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:15.825455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:15.850344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.020500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.190822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.288314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.368615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643847198035192:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.368766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.700723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.744085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.782531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.865174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.913013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.000669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.093181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643851493003012:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.093303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.093476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643851493003017:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.099343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:19.113787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643851493003019:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:19.183956Z node 1 :TX_PROXY ERROR: Actor# [1:7491643851493003073:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:19.896133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643830018164243:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:19.896225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:20.440545Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284260426, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::NegativeValues [GOOD] Test command err: 2025-04-10T11:23:54.513532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643740533658742:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:54.513604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c03/r3tmp/tmpnMhVho/pdisk_1.dat 2025-04-10T11:23:54.880795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:54.927026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:54.927133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:54.929436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63491, node 1 2025-04-10T11:23:55.006574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:55.006596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:55.006604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:55.006700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16906 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:55.309101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:57.354656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.026038s 2025-04-10T11:23:57.569553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643753418561825:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:57.569640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643753418561817:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:57.569993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:57.574212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:23:57.601824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643753418561831:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:23:57.700852Z node 1 :TX_PROXY ERROR: Actor# [1:7491643753418561904:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:23:58.422501Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq2fqyajbvqeh7rcygpqyy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY4MmQ5MTctNGU4MDMyMjYtZDEzNjhlZjMtNmJiMTVlOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-04-10T11:23:59.791623Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643764383244396:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:59.791672Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c03/r3tmp/tmpY2wfM4/pdisk_1.dat 2025-04-10T11:23:59.943398Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:59.970909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.970972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:59.979652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14814, node 4 2025-04-10T11:24:00.071291Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:00.071318Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:00.071325Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:00.071492Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:00.314114Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:02.707867Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:04.345699Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643783614834141:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:04.346196Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c03/r3tmp/tmpO9nByJ/pdisk_1.dat 2025-04-10T11:24:04.532293Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:04.565322Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:04.565405Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:04.568254Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14292, node 7 2025-04-10T11:24:04.626224Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:04.626246Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:04.626253Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:04.626417Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:04.903924Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
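
Note on the recurring startup noise above: every test case boots a fresh single-node cluster, so the METADATA_PROVIDER, NET_CLASSIFIER and IMPORT warnings emitted before the server is reachable are expected, and Hive walks each node through the volatile-state sequence Unknown -> Disconnected -> Connecting -> Connected before placing tablets on it. The sketch below is an illustrative Python model of that progression for log-scraping or assertions; the transition graph is an assumption reconstructed from what this log shows (plus a disconnect edge), not YDB source.

    # Illustrative only: models the Hive volatile-state transitions seen in
    # this log (Unknown -> Disconnected -> Connecting -> Connected).
    from enum import Enum

    class VolatileState(Enum):
        UNKNOWN = "Unknown"
        DISCONNECTED = "Disconnected"
        CONNECTING = "Connecting"
        CONNECTED = "Connected"

    # Assumed transition graph: the path observed in the log, plus falling
    # back to Disconnected from the two live states.
    ALLOWED = {
        VolatileState.UNKNOWN: {VolatileState.DISCONNECTED},
        VolatileState.DISCONNECTED: {VolatileState.CONNECTING},
        VolatileState.CONNECTING: {VolatileState.CONNECTED, VolatileState.DISCONNECTED},
        VolatileState.CONNECTED: {VolatileState.DISCONNECTED},
    }

    def check_transitions(states):
        """Validate an observed state sequence against the assumed graph."""
        for prev, nxt in zip(states, states[1:]):
            if nxt not in ALLOWED[prev]:
                raise ValueError(f"illegal transition {prev.value} -> {nxt.value}")

    check_transitions([VolatileState.UNKNOWN, VolatileState.DISCONNECTED,
                       VolatileState.CONNECTING, VolatileState.CONNECTED])
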
2025-04-10T11:24:07.496538Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.416550Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643804492330776:2099];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:09.417292Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c03/r3tmp/tmpg55gEd/pdisk_1.dat 202 ... fig is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:09.798883Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:09.798891Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:09.799074Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:10.084941Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:12.808479Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:12.909821Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643817377233801:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.909902Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643817377233809:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.909956Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:12.914648Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:12.932953Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643817377233815:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:24:13.006886Z node 10 :TX_PROXY ERROR: Actor# [10:7491643821672201186:2792] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:13.121207Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq2yqb9aq0zxtc8g5ratsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTM2MDZhYi1lNDc4MGRiZi0yZTQyMWYwYi1hYTI2NDA2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:13.262231Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq2yybfb5yte34nhc4mp56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTM2MDZhYi1lNDc4MGRiZi0yZTQyMWYwYi1hYTI2NDA2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:13.496184Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq2z2w2c06m0yvrepb6f21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTM2MDZhYi1lNDc4MGRiZi0yZTQyMWYwYi1hYTI2NDA2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:13.610585Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfq2za29smvyz866chhpv70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTM2MDZhYi1lNDc4MGRiZi0yZTQyMWYwYi1hYTI2NDA2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:13.764035Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq2zdf9a1b0mt83ns4ng7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MTM2MDZhYi1lNDc4MGRiZi0yZTQyMWYwYi1hYTI2NDA2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:15.483413Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643830940541352:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:15.483461Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c03/r3tmp/tmpcWjzIv/pdisk_1.dat 2025-04-10T11:24:15.658964Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:15.703908Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:15.703998Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:15.708100Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19252, node 13 2025-04-10T11:24:15.777808Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:15.777833Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:15.777841Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:15.778012Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:16.105027Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.828137Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.941515Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643843825444403:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.941621Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643843825444412:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.941668Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:18.945075Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:18.971409Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643843825444417:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:19.026922Z node 13 :TX_PROXY ERROR: Actor# [13:7491643848120411789:2791] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:19.155553Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq34kvfh39gaa64vccg6gf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Yjg0ZDhhNGMtN2I2YjYzMDctZTUyMzUyYjgtODBjMjQ2MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:19.287360Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq34v18yxq8bszdk06g4q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Yjg0ZDhhNGMtN2I2YjYzMDctZTUyMzUyYjgtODBjMjQ2MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:19.490551Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq34yx0n8f6etxxvq7endj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Yjg0ZDhhNGMtN2I2YjYzMDctZTUyMzUyYjgtODBjMjQ2MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:19.603427Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq355a5eq9g73h3gb9q405, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Yjg0ZDhhNGMtN2I2YjYzMDctZTUyMzUyYjgtODBjMjQ2MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:19.771226Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfq358sc289qgjq8ysv441m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Yjg0ZDhhNGMtN2I2YjYzMDctZTUyMzUyYjgtODBjMjQ2MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpSystemView::QueryStatsScan [GOOD] >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> KqpSysColV1::SelectRange [GOOD] >> BindQueue::Basic >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage >> TBlobStorageWardenTest::TestDeleteStoragePool >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 13819, MsgBus: 12942 2025-04-10T11:24:15.606838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643833201075429:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:15.606902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab9/r3tmp/tmpm9Xh7M/pdisk_1.dat 2025-04-10T11:24:15.951384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13819, node 1 2025-04-10T11:24:16.029421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:16.032333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:16.050842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:16.074804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:16.074827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:16.074835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:16.074979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12942 TClient is connected to server localhost:12942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
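
A pattern repeated throughout this run: the first query against a fresh database makes TPoolFetcherActor report NOT_FOUND for the `default` resource pool, TPoolCreatorActor then creates it and logs "Transaction ... completed, doublechecking", and a racing create resolves as "path exist, request accepts it", i.e. creation is idempotent and the warnings are benign bootstrap noise. Below is a minimal sketch of the same ensure-exists idiom; fetch_pool, create_pool and the exception types are hypothetical stand-ins for illustration, not YDB SDK calls.

    # Sketch of the fetch -> create -> tolerate-AlreadyExists -> refetch idiom
    # the workload service logs follow. All names here are hypothetical.
    import time

    class NotFound(Exception): pass
    class AlreadyExists(Exception): pass

    def ensure_default_pool(fetch_pool, create_pool, retries=5, backoff=0.1):
        for attempt in range(retries):
            try:
                return fetch_pool("default")      # fast path: pool already visible
            except NotFound:
                pass
            try:
                create_pool("default")            # first caller creates the pool
            except AlreadyExists:
                pass                              # a concurrent caller won the race
            time.sleep(backoff * (2 ** attempt))  # "doublecheck" by refetching
        raise TimeoutError("default pool did not become visible")
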
2025-04-10T11:24:16.805440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.834417Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:16.846903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.852967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:17.033757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.252431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:17.339533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.216783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643850380946186:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.216906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.643292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.691132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.718940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.753993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.782309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.814090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.882410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643850380946698:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.882483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.882552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643850380946704:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.887227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-10T11:24:19.898006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643850380946706:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-10T11:24:19.992773Z node 1 :TX_PROXY ERROR: Actor# [1:7491643850380946761:3457] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:20.606424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643833201075429:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.606542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:20.924924Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7491643854675914360:3663], for# user0@builtin, access# DescribeSchema 2025-04-10T11:24:20.924959Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7491643854675914360:3663], for# user0@builtin, access# DescribeSchema 2025-04-10T11:24:20.937714Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643854675914355:2502], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:20.938010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGYyN2RkNDktNzQ4ZmMzY2QtYWQ1ZWY4YzYtNmVmMjAwNWI=, ActorId: [1:7491643854675914348:2498], ActorState: ExecuteState, TraceId: 01jrfq36ggcvthxbsxszz6t0fc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 9391, MsgBus: 21827 2025-04-10T11:24:15.951700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643833126943977:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:15.958820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab1/r3tmp/tmpT8o2KT/pdisk_1.dat 2025-04-10T11:24:16.405750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:16.411046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:16.411195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:16.416078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9391, node 1 2025-04-10T11:24:16.530836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:16.530859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:16.530866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:16.530965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21827 TClient is connected to server localhost:21827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:17.217470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
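
The FailNavigate case above is a negative test: user0@builtin lacks DescribeSchema, so scheme-cache navigation is denied and the query fails at compile time with SCHEME_ERROR, issue code 1030 for the type-annotation wrapper and 2003 for the unresolved `/Root/.sys/partition_stats` table. When asserting on such failures it is more robust to match the nested issue code than the message text. A hedged sketch follows; the dict shape of an issue is an assumption for illustration, as real SDKs expose structured issue objects with their own field names.

    # Sketch: classify a compile-time failure by issue code, not message text.
    RESOLVE_ERROR = 2003  # "Cannot find table ... or you do not have access"

    def has_resolve_error(issues):
        stack = list(issues)
        while stack:
            issue = stack.pop()
            if issue.get("code") == RESOLVE_ERROR:
                return True
            stack.extend(issue.get("subissues", []))  # codes may be nested
        return False

    # Mirrors the 1030 -> 2003 nesting printed in the log above.
    assert has_resolve_error(
        [{"code": 1030, "subissues": [{"code": 2003, "subissues": []}]}]
    )
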
2025-04-10T11:24:17.266449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.430840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.680476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.766507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.488542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643850306814915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.488675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:19.749205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.778722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.810348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.841943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.909029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.982419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.090875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643854601782728:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.090953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.091013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643854601782733:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.094919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:20.107013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643854601782735:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:20.212205Z node 1 :TX_PROXY ERROR: Actor# [1:7491643854601782791:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:20.950277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643833126943977:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.950376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.316917Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284261349, txId: 281474976710671] shutting down |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 10209, MsgBus: 14479 2025-04-10T11:24:16.121853Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643838514573271:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.121900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aae/r3tmp/tmpAdL0sq/pdisk_1.dat 2025-04-10T11:24:16.566081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:16.567118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:16.572393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:16.600774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10209, node 1 2025-04-10T11:24:16.688932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:16.688957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:16.688964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:16.689088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14479 TClient is connected to server localhost:14479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:17.389151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.412571Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:17.426906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.612742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.749651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.837422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.560192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643851399476868:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.560312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.908538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.952471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.982735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.024012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.065168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.148953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.251495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855694444686:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.251589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.251963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855694444691:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.256277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:24:20.277562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643855694444693:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:20.376226Z node 1 :TX_PROXY ERROR: Actor# [1:7491643855694444749:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:21.122052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643838514573271:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.122126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.554974Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284261541, txId: 281474976710671] shutting down |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-04-10T11:23:58.147350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643760251765567:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:58.147396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c00/r3tmp/tmpCNQdh0/pdisk_1.dat 2025-04-10T11:23:58.544788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:58.561602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:58.561722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:58.567307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62650, node 1 2025-04-10T11:23:58.643022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:58.643053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:58.643061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:58.643171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24894 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:58.961242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:02.578887Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643775081799880:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:02.584381Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c00/r3tmp/tmpLmerw0/pdisk_1.dat 2025-04-10T11:24:02.723675Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:02.754535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:02.754675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:02.758019Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10299, node 4 2025-04-10T11:24:02.827356Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:02.827386Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:02.827392Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:02.827556Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12857 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:03.103985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.025857Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643799598227786:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:07.026003Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c00/r3tmp/tmpTm5STk/pdisk_1.dat 2025-04-10T11:24:07.235946Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:07.281523Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:07.281607Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.285784Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20129, node 7 2025-04-10T11:24:07.415080Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:07.415106Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:07.415114Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:07.415247Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.679555Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:12.002818Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643817335535739:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.002886Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c00/r3tmp/tmpMwHrOv/pdisk_1.dat 2025-04-10T11:24:12.129362Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:12.161892Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:12.161974Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:12.166500Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16018, node 10 2025-04-10T11:24:12.242634Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:12.242657Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:12.242663Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:12.242807Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:12.502743Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.864289Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643835514304573:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.864366Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c00/r3tmp/tmpbP69CL/pdisk_1.dat 2025-04-10T11:24:17.181679Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.229891Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.229987Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.233323Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6874, node 13 2025-04-10T11:24:17.442754Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.442786Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.442798Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.442984Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:17.760700Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 27562, MsgBus: 6229 2025-04-10T11:24:15.471578Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643830348417926:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:15.471884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac0/r3tmp/tmpnMCOya/pdisk_1.dat 2025-04-10T11:24:15.923874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:15.927797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:15.927906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:15.931402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27562, node 1 2025-04-10T11:24:16.028959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:16.028986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:16.028995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:16.029102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6229 TClient is connected to server localhost:6229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:16.765655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.794516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:16.812404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:16.994379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.147154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.221216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.934294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643843233321600:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:18.934400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.214655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.286166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.319374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.396825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.433756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.485398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:24:19.581729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643847528289421:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.581819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.582007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643847528289426:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:19.590449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:24:19.614465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T11:24:19.614993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643847528289428:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:19.678109Z node 1 :TX_PROXY ERROR: Actor# [1:7491643847528289484:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:20.471242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643830348417926:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.471303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.831356Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284260978, txId: 281474976710671] shutting down 2025-04-10T11:24:21.964631Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284261951, txId: 281474976710674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 18012, MsgBus: 6724 2025-04-10T11:24:16.420954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643836306971039:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.420992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aa2/r3tmp/tmp5Tk2xM/pdisk_1.dat 2025-04-10T11:24:16.930035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:16.938907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:16.939026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:16.941672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18012, node 1 2025-04-10T11:24:17.044067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.044103Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.044118Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.044231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6724 TClient is connected to server localhost:6724 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:17.745966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.770357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:17.971645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.136896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.247332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.051494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643853486841981:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.051637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.380619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.417590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.459007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.533523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.570505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.620818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.714722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643853486842504:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.714812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.715050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643853486842509:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.718511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:24:20.729214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643853486842511:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:20.816555Z node 1 :TX_PROXY ERROR: Actor# [1:7491643853486842567:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:21.421317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643836306971039:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.421407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 8839, MsgBus: 15881 2025-04-10T11:24:16.538244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643838718864587:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.543021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a97/r3tmp/tmpdMyJ3M/pdisk_1.dat 2025-04-10T11:24:17.125813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.127618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.127722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.138276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8839, node 1 2025-04-10T11:24:17.252826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.252857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.252867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.253182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15881 TClient is connected to server localhost:15881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:17.808631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:24:17.829533Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:24:17.849757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:24:17.993418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:24:18.201929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:24:18.280900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:24:20.027031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855898735399:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.027149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.334818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.405199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.440167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.472503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.518712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.576048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:24:20.660119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855898735918:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.660186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.660265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855898735923:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.664522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:24:20.677624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643855898735925:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:20.735524Z node 1 :TX_PROXY ERROR: Actor# [1:7491643855898735977:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:21.531531Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643838718864587:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.564672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 13003, MsgBus: 6987 2025-04-10T11:24:17.100648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643841080344011:2247];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.107931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a7c/r3tmp/tmpOqPsVa/pdisk_1.dat 2025-04-10T11:24:17.668368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.668471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.671354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.687890Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13003, node 1 2025-04-10T11:24:17.795998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.796022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.796029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.796144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6987 TClient is connected to server localhost:6987 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:18.509885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.539861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:18.552807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:18.692013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.880671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.991119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.781580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643853965247479:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:20.781722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:21.113337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.157865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.187570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.225738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.258274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.302966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:24:21.394205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643858260215291:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:21.394282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:21.394518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643858260215296:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:24:21.398113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:24:21.412497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643858260215298:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:21.506228Z node 1 :TX_PROXY ERROR: Actor# [1:7491643858260215353:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:22.097371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643841080344011:2247];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.097486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSysColV0::UpdateAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-04-10T11:24:23.318278Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.318484Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.318576Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.322850Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.323758Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.323840Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.323902Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002097/r3tmp/tmpTlGo8Z/pdisk_1.dat 2025-04-10T11:24:24.038599Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# 
MinLatency RestartCounter# 0 Marker# BPP13
2025-04-10T11:24:24.038756Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038801Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038829Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038858Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038886Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038914Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.038955Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55
2025-04-10T11:24:24.039032Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33
2025-04-10T11:24:24.039078Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32
2025-04-10T11:24:24.039126Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33
2025-04-10T11:24:24.039155Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32
2025-04-10T11:24:24.039187Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33
2025-04-10T11:24:24.039214Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32
2025-04-10T11:24:24.039386Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0
2025-04-10T11:24:24.039463Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0
2025-04-10T11:24:24.039513Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0
2025-04-10T11:24:24.042144Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01
2025-04-10T11:24:24.042410Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01
2025-04-10T11:24:24.042550Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01
2025-04-10T11:24:24.042647Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12
2025-04-10T11:24:24.042716Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21
2025-04-10T11:24:24.042932Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.119 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.12 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.121 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.822 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.024 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.159 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] }
2025-04-10T11:24:24.107344Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:525:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13
2025-04-10T11:24:24.107498Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107551Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107579Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107605Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107630Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107658Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-10T11:24:24.107697Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.107761Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-10T11:24:24.107809Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-10T11:24:24.107855Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-10T11:24:24.107884Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-04-10T11:24:24.107916Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-10T11:24:24.107943Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-10T11:24:24.108093Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.108159Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.108205Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.109953Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.110130Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 ... 
ROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:10:0:0:238:1] Marker# BPG32 2025-04-10T11:24:24.144621Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG33 2025-04-10T11:24:24.144648Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG32 2025-04-10T11:24:24.144682Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG33 2025-04-10T11:24:24.144708Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG32 2025-04-10T11:24:24.144861Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:3] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.144966Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:2] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.145018Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:1] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.146742Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 11 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 12 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T11:24:24.146981Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-10T11:24:24.147103Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T11:24:24.147183Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Result# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T11:24:24.147271Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:24.147465Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ 
TEvVPut{ TimestampMs# 1.11 sample PartId# [72057594037932033:2:10:0:0:238:3] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.111 sample PartId# [72057594037932033:2:10:0:0:238:2] QueryCount# 1 VDiskId# [2000000:1:0:2:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.112 sample PartId# [72057594037932033:2:10:0:0:238:1] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.866 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.094 VDiskId# [2000000:1:0:2:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.198 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.149018Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:24:24.149072Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:24:24.151213Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:530:2503] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151372Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:531:2504] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151483Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:532:2505] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151592Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:533:2506] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151709Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:534:2507] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151817Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:535:2508] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151925Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:536:2509] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.151953Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:24:24.152568Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.152683Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.152752Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.152841Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.152901Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.152960Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.153010Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.153059Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-10T11:24:24.153102Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-10T11:24:24.153248Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] bootstrap ActorId# [1:537:2510] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-10T11:24:24.153320Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-10T11:24:24.153531Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 10702016950815215785 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-10T11:24:24.154629Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-10T11:24:24.154690Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-10T11:24:24.155052Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-04-10T11:24:24.156731Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-10T11:24:24.157458Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] bootstrap ActorId# [1:539:2512] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-10T11:24:24.157515Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-10T11:24:24.157679Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 16408073987892534985 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2025-04-10T11:24:24.158964Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-10T11:24:24.159044Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-10T11:24:24.159475Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] bootstrap ActorId# [1:540:2513] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-10T11:24:24.159535Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-10T11:24:24.159699Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 1212158853261244891 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-10T11:24:24.160412Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-10T11:24:24.160466Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Result# TEvBlockResult {Status# OK} Marker# DSPB04 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2025-04-10T11:24:23.321318Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.321459Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.322933Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.323532Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.327479Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.329784Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00209a/r3tmp/tmpXbZ2pl/pdisk_1.dat 2025-04-10T11:24:23.969796Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:544:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:23.969977Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown 
Marker# BPG51 2025-04-10T11:24:23.970025Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.970204Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.970237Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.970266Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.970295Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.970345Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:23.970424Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG33 2025-04-10T11:24:23.970475Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG32 2025-04-10T11:24:23.970523Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG33 2025-04-10T11:24:23.970561Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG32 2025-04-10T11:24:23.970593Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG33 2025-04-10T11:24:23.970624Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG32 2025-04-10T11:24:23.970820Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.970899Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.970955Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.973805Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:23.974076Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:3] {MsgQoS MsgId# { 
SequenceId: 1 MsgId: 10 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T11:24:23.974191Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T11:24:23.974288Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T11:24:23.974356Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:23.974554Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.365 sample PartId# [72057594037932033:2:8:0:0:1294:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.366 sample PartId# [72057594037932033:2:8:0:0:1294:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.366 sample PartId# [72057594037932033:2:8:0:0:1294:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.257 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.489 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.601 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.044116Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:589:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:24.044299Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044359Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044394Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044429Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044465Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044498Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.044543Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 
2025-04-10T11:24:24.044629Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-10T11:24:24.044689Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-10T11:24:24.044754Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-10T11:24:24.044787Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-10T11:24:24.044826Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-10T11:24:24.044862Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-10T11:24:24.045043Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.045142Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.045213Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.047770Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.048041Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-10T11:24:24.048215Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutL ... 
wn Marker# BPG51 2025-04-10T11:24:24.109507Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.109551Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-10T11:24:24.109582Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-10T11:24:24.109705Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.114019Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.114140Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-10T11:24:24.114227Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:24.114368Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.485 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.838 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.114824Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:24:24.114873Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-10T11:24:24.114967Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-10T11:24:24.115652Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2gt4/00209a/r3tmp/tmpXbZ2pl//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-10T11:24:24.116453Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-10T11:24:24.116494Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:24:24.118582Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:607:2106] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.118713Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:608:2107] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.118795Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:609:2108] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.118926Z node 
2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:610:2109] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.119047Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:611:2110] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.119166Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:612:2111] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.119286Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:613:2112] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.119306Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:24:24.120293Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.120542Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.120619Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.120796Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.120899Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.120969Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.121013Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.121032Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-10T11:24:24.121057Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 
2025-04-10T11:24:24.121176Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [2:614:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-10T11:24:24.121224Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-10T11:24:24.121401Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:607:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 10522518470934023623 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-10T11:24:24.122452Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-10T11:24:24.122498Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-10T11:24:24.122733Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-10T11:24:24.122869Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-10T11:24:24.123131Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [1:615:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:24.123245Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.123287Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.123347Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-10T11:24:24.123382Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-10T11:24:24.123481Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.123684Z node 1 :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:24:24.123900Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.123975Z node 1 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-10T11:24:24.124019Z node 1 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:24.124098Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.491 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] } 2025-04-10T11:24:24.124446Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:607:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-04-10T11:24:23.322698Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.322851Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: 
(33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.324378Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.324826Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.328723Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.331380Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f60/r3tmp/tmpkLd7kD/pdisk_1.dat 2025-04-10T11:24:23.993200Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:544:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1289:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:23.993424Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993472Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993502Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993541Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993571Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993599Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.993645Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1289:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:23.993729Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1289:1] Marker# BPG33 2025-04-10T11:24:23.993785Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1289:1] Marker# BPG32 2025-04-10T11:24:23.993830Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1289:2] Marker# BPG33 2025-04-10T11:24:23.993859Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1289:2] Marker# BPG32 2025-04-10T11:24:23.993896Z node 1 
:BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1289:3] Marker# BPG33 2025-04-10T11:24:23.993923Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1289:3] Marker# BPG32 2025-04-10T11:24:23.994120Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:3] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.994222Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:2] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.994272Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:1] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.996258Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:23.996464Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T11:24:23.996567Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T11:24:23.996649Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T11:24:23.996736Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:23.996939Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.271 sample PartId# [72057594037932033:2:8:0:0:1289:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.272 sample PartId# [72057594037932033:2:8:0:0:1289:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.273 sample PartId# [72057594037932033:2:8:0:0:1289:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.302 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.468 VDiskId# 
[2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.566 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.049378Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:589:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:24.049546Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049589Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049616Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049647Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049674Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049699Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.049740Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.049811Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-10T11:24:24.049863Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-10T11:24:24.049912Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-10T11:24:24.049941Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-10T11:24:24.049974Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-10T11:24:24.050001Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-10T11:24:24.050177Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.050257Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.050307Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.052267Z node 1 :BS_PROXY_PUT 
DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.052469Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-10T11:24:24.052563Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutL ... ] Create Queue# [1:595:2504] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093412Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:596:2505] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093532Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:597:2506] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093643Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:598:2507] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093753Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:599:2508] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093866Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:600:2509] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.093894Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:24:24.094645Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.094794Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.094880Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.094949Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.095013Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# 
{VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.095067Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.095121Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.095182Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-10T11:24:24.095223Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-10T11:24:24.095281Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-10T11:24:24.096119Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] bootstrap ActorId# [1:601:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:24.096257Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.096300Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.096356Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-10T11:24:24.096390Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-10T11:24:24.096514Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.099735Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.099860Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-10T11:24:24.099922Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# 
true Marker# BPP21 2025-04-10T11:24:24.100046Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.549 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.792 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.100789Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:24:24.100841Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-10T11:24:24.100937Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2025-04-10T11:24:24.101592Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2gt4/001f60/r3tmp/tmpkLd7kD//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-10T11:24:24.103008Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-10T11:24:24.103061Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:24:24.105114Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:605:2106] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105258Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:606:2107] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105396Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:607:2108] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105526Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:608:2109] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105643Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:609:2110] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105759Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:610:2111] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105874Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:611:2112] targetNodeId# 1 Marker# DSP01 2025-04-10T11:24:24.105904Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:24:24.107314Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.107589Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 
2025-04-10T11:24:24.107653Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.107863Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.107961Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.108039Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.108094Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:24.108122Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-10T11:24:24.108158Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-10T11:24:24.108355Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:605:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> KqpSystemView::NodesSimple [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DatetimeKey [GOOD] Test command err: 2025-04-10T11:23:46.933666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643708127082783:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:46.933867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c0d/r3tmp/tmptdmgY8/pdisk_1.dat 2025-04-10T11:23:47.278999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:47.323731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:47.323831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10442, node 1 2025-04-10T11:23:47.355959Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:47.394683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.394708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.394714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.394810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:47.665202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4842 2025-04-10T11:23:47.958606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:47.974727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:48.480585Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643715888430681:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:48.480634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:48.507706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:48.507829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:48.514962Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:23:48.515816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4842 2025-04-10T11:23:48.756028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4842 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284229190 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:23:49.645169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4842 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744284230240 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-10T11:23:50.777944Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:23:50.784660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:23:53.013759Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643739469458130:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:53.013805Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c0d/r3tmp/tmpwJl2UY/pdisk_1.dat 2025-04-10T11:23:53.174661Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:53.205830Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:53.205909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:53.208999Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29850, node 4 2025-04-10T11:23:53.266881Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:53.266906Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:53.266915Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:53.267091Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:53.478127Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23780 2025-04-10T11:23:53.745311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:53.765722Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:23:54.275363Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491643741020200965:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:54.275479Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:23:54.291328Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:54.291417Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:54.294388Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-10T11:23:54.295475Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23780 2025-04-10T11:23:56.577420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at ... ARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:08.951179Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:08.951358Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:09.404608Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.320908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:13.402286Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491643804177184119:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:13.402412Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:13.492840Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643825652021855:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.492911Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7491643825652021849:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.493111Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:13.498936Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:13.525680Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7491643825652021863:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:24:13.588132Z node 10 :TX_PROXY ERROR: Actor# [10:7491643825652021938:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:13.772657Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq2z9jem5y7fgaxbq5pe1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmZlMzAwZS04YjM4MTBiOC0xOWJjZWRlNy03YjIyYWZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:14.029869Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfq2zk55kcf7zxzkvv556q8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmZlMzAwZS04YjM4MTBiOC0xOWJjZWRlNy03YjIyYWZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:14.327895Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfq2ztpb9xnh0gj7qc1q96x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmZlMzAwZS04YjM4MTBiOC0xOWJjZWRlNy03YjIyYWZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:14.540674Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfq303zfddeqber9yz4kx69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmZlMzAwZS04YjM4MTBiOC0xOWJjZWRlNy03YjIyYWZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:16.670287Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643835267622577:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c0d/r3tmp/tmponXIey/pdisk_1.dat 2025-04-10T11:24:16.797406Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:16.870783Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:16.927193Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:16.927316Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:16.932893Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8309, node 13 2025-04-10T11:24:17.018017Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.018049Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.018061Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.018294Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:17.640658Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.081195Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.197054Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643856742460099:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.197167Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491643856742460094:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.197501Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.202070Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:21.231527Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491643856742460108:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:21.294749Z node 13 :TX_PROXY ERROR: Actor# [13:7491643856742460178:2788] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:21.373048Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq36ta7ce39tr4sy103x2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MmIxNDU0M2EtMTM3MDA4N2ItNjY1MTMzMjEtY2JlMGYwMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:21.524118Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq370cbdh2svhdmmypeted, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MmIxNDU0M2EtMTM3MDA4N2ItNjY1MTMzMjEtY2JlMGYwMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:21.660989Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643835267622577:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.661077Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.844515Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfq374y44vr9j3vfzkn841t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MmIxNDU0M2EtMTM3MDA4N2ItNjY1MTMzMjEtY2JlMGYwMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:22.121651Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfq37ez9df6b570t5n8npy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MmIxNDU0M2EtMTM3MDA4N2ItNjY1MTMzMjEtY2JlMGYwMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpSysColV0::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-04-10T11:24:23.319032Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.320602Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.322216Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.322959Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.324146Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.324228Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.325187Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020a5/r3tmp/tmpG0LjVr/pdisk_1.dat 2025-04-10T11:24:24.596394Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:24.598945Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:24.599245Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0020a5/r3tmp/tmpYqGGHJ/pdisk_1.dat 2025-04-10T11:24:24.693228Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:24.693362Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:24.693427Z node 2 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); 
sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7428, MsgBus: 16924 2025-04-10T11:24:18.961358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643846170585977:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:18.961642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a4e/r3tmp/tmpa8VdIf/pdisk_1.dat 2025-04-10T11:24:19.384674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:19.403395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.403498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.407102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7428, node 1 2025-04-10T11:24:19.537552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:19.537582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:19.537591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:19.537720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16924 TClient is connected to server localhost:16924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:20.055763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.073086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:20.085853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:20.232843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:20.398052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.477914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.267340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643863350456942:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.267481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.591866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.618273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.684935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.720248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.753333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.829181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.884458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643863350457460:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.884529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.884698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643863350457466:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.888914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:22.905186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643863350457468:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:23.004383Z node 1 :TX_PROXY ERROR: Actor# [1:7491643863350457525:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:23.961377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643846170585977:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:23.961449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 6222, MsgBus: 29494 2025-04-10T11:24:17.015190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643842148964122:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.015267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a81/r3tmp/tmp3YvI5p/pdisk_1.dat 2025-04-10T11:24:17.564335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.595127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.595256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.597555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6222, node 1 2025-04-10T11:24:17.718732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.718761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.718768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.718874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29494 TClient is connected to server localhost:29494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:18.410561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.449820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.625841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.838910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.930423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.791680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643855033867573:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.791781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.101756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.129969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.159761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.188269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.255575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.299321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.374578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643859328835385:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.374695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.375012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643859328835390:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.379063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:21.389014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643859328835392:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:21.468672Z node 1 :TX_PROXY ERROR: Actor# [1:7491643859328835447:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:22.009990Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643842148964122:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.010080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-04-10T11:24:23.334690Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.337237Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.338946Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.340142Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.342235Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.342305Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:23.343466Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f4b/r3tmp/tmpTqK8oV/pdisk_1.dat 2025-04-10T11:24:23.967052Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1298:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:23.967293Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967332Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967359Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# 
[72057594037932033:2:8:0:0:1298:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967384Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967408Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967432Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:23.967483Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1298:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:23.967553Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1298:1] Marker# BPG33 2025-04-10T11:24:23.967597Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1298:1] Marker# BPG32 2025-04-10T11:24:23.967645Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1298:2] Marker# BPG33 2025-04-10T11:24:23.967669Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1298:2] Marker# BPG32 2025-04-10T11:24:23.967698Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1298:3] Marker# BPG33 2025-04-10T11:24:23.967724Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1298:3] Marker# BPG32 2025-04-10T11:24:23.967908Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:3] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.967972Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:2] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.968019Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:1] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:23.970053Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:23.970290Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T11:24:23.970380Z node 1 
:BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T11:24:23.970461Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T11:24:23.970518Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:23.970724Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.914 sample PartId# [72057594037932033:2:8:0:0:1298:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.915 sample PartId# [72057594037932033:2:8:0:0:1298:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.915 sample PartId# [72057594037932033:2:8:0:0:1298:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.989 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.196 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.279 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-10T11:24:24.037813Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:525:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:24.037971Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038017Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038046Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038071Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038094Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038120Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:24.038180Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:24.038248Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-10T11:24:24.038294Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 
0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-10T11:24:24.038336Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-10T11:24:24.038362Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-04-10T11:24:24.038391Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-10T11:24:24.038418Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-10T11:24:24.038565Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.038627Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.038671Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:24.042726Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:24.042911Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP0 ... 
wn Marker# BPG51 2025-04-10T11:24:25.661007Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:25.661062Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-10T11:24:25.661103Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-10T11:24:25.661228Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:596:2505] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:25.664229Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-10T11:24:25.664329Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-10T11:24:25.664377Z node 2 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:25.664475Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.513 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.547 VDiskId# [82000002:1:0:0:0] NodeId# 2 Status# OK } ] } 2025-04-10T11:24:25.664841Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:24:25.664874Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-10T11:24:25.664971Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-10T11:24:25.665447Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2gt4/001f4b/r3tmp/tmpbA7xzx//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-10T11:24:25.666134Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-10T11:24:25.666197Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:24:25.667906Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:609:2106] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668016Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:610:2107] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668103Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:611:2108] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668173Z node 
3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:612:2109] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668268Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:613:2110] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668405Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:614:2111] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668476Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:607:2105] Create Queue# [3:615:2112] targetNodeId# 2 Marker# DSP01 2025-04-10T11:24:25.668493Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:24:25.669355Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669438Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669548Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669601Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669703Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669734Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669769Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-10T11:24:25.669787Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-10T11:24:25.669810Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 
2025-04-10T11:24:25.669918Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [3:616:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-10T11:24:25.669956Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-04-10T11:24:25.670085Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:609:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 13251969989699524175 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-10T11:24:25.671295Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-04-10T11:24:25.671368Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-10T11:24:25.671681Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-10T11:24:25.671867Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-10T11:24:25.672184Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [2:617:2515] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:25.672333Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.672383Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:25.672446Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-10T11:24:25.672488Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-10T11:24:25.672619Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:596:2505] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:25.672835Z node 2 :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:24:25.673176Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-10T11:24:25.673266Z node 2 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-10T11:24:25.673345Z node 2 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:25.673459Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.603 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } ] } 2025-04-10T11:24:25.673959Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:609:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: Trying to start YDB, gRPC: 8263, MsgBus: 15318 2025-04-10T11:24:16.154526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643838160152569:2140];send_to=[0:7307199536658146131:7762515]; 
2025-04-10T11:24:16.154579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:16.391581Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643834894152042:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.450705Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643836436537889:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab7/r3tmp/tmph7zOXU/pdisk_1.dat 2025-04-10T11:24:16.880839Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:16.931578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:17.214294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:17.543243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.583494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.583603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.599657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.600832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.600941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.602559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.602633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.606942Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:24:17.606970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:17.610313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.611116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8263, node 1 2025-04-10T11:24:17.776842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.776870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.776878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.777028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15318 TClient is connected to server localhost:15318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:19.146306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.192864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.403662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:19.666588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:19.782865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.155034Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643838160152569:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.155104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.274300Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643836436537889:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.274379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.319432Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491643834894152042:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.319507Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.766773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643859634991032:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.766882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.055211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.102769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.159963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.219664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.268361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.357874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.469458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643863929959012:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.469532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.469702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643863929959017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.473228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:22.492946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643863929959019:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:22.585974Z node 1 :TX_PROXY ERROR: Actor# [1:7491643863929959094:4116] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:23.658664Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284263648, txId: 281474976710671] shutting down 2025-04-10T11:24:23.993790Z node 3 :BS_PROXY_PUT ERROR: [6b45ffd9311316d1] Result# TEvPutResult {Id# [72075186224037893:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037893:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:23.994576Z node 2 :BS_PROXY_PUT ERROR: [d6e952406292289b] Result# TEvPutResult {Id# [72075186224037892:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037892:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 15769, MsgBus: 20769 2025-04-10T11:24:20.191568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643851714732462:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.191621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a13/r3tmp/tmpLT3Khw/pdisk_1.dat 2025-04-10T11:24:20.654972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:20.700045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:20.700138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15769, node 1 2025-04-10T11:24:20.707212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:20.788203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:20.788228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:20.788235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:20.788346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20769 TClient is connected to server localhost:20769 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:21.359380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.378735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:21.400904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.561900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:21.735298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.812947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:23.579375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643864599636132:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.579525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.901160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.945183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.987939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.018794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.051393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.094093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.169574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643868894603938:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.169672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.169819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643868894603943:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.173743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:24.185715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643868894603945:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:24.243109Z node 1 :TX_PROXY ERROR: Actor# [1:7491643868894604000:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:25.191989Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643851714732462:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:25.192064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 18143, MsgBus: 7973 2025-04-10T11:24:19.834531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643848432845607:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:19.834603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a1c/r3tmp/tmpfWs7GF/pdisk_1.dat 2025-04-10T11:24:20.285311Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:20.289909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:20.290000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:20.292154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18143, node 1 2025-04-10T11:24:20.403063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:20.403090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:20.403100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:20.403232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7973 TClient is connected to server localhost:7973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:20.931863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.962654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.138324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.301916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.380441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:23.153640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643865612716556:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.153778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.466396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.503104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.539436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.612406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.685903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.756586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.859203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643865612717083:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.859300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.859574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643865612717088:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.864331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:23.876462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643865612717090:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:23.955206Z node 1 :TX_PROXY ERROR: Actor# [1:7491643865612717145:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:24.834863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643848432845607:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:24.834917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::StreamInnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-04-10T11:24:24.998768Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:24.998915Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:25.000847Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:25.001398Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:25.006073Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-10T11:24:25.008965Z node 1 :BS_SYNCLOG WARN: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f36/r3tmp/tmpvXZ3AZ/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-04-10T11:24:25.612010Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/001f36/r3tmp/tmp2QV5ty//new_pdisk.dat": no such file. PDiskId# 1001 2025-04-10T11:24:25.612521Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/001f36/r3tmp/tmp2QV5ty//new_pdisk.dat": no such file. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/001f36/r3tmp/tmp2QV5ty//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10945862519271005707 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1001 2025-04-10T11:24:25.644139Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:543:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:349:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:24:25.644330Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644378Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644404Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644424Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644443Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644460Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:24:25.644489Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:349:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:24:25.644550Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG33 2025-04-10T11:24:25.644590Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG32 2025-04-10T11:24:25.644622Z node 1 :BS_PROXY_PUT DEBUG: 
[e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG33 2025-04-10T11:24:25.644649Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG32 2025-04-10T11:24:25.644677Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG33 2025-04-10T11:24:25.644697Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG32 2025-04-10T11:24:25.644834Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:3] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:25.644888Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:2] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:25.644941Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:1] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:24:25.647883Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-10T11:24:25.648104Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-10T11:24:25.648204Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-10T11:24:25.648285Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-10T11:24:25.648381Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:24:25.648738Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.036 sample PartId# [72057594037932033:2:8:0:0:349:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.037 sample PartId# 
[72057594037932033:2:8:0:0:349:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.038 sample PartId# [72057594037932033:2:8:0:0:349:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.027 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.201 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.299 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> LocalTableWriter::ApplyInCorrectOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 10549, MsgBus: 4484 2025-04-10T11:24:21.519964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643856898544170:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.656290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a0e/r3tmp/tmpvtuhAo/pdisk_1.dat 2025-04-10T11:24:21.828232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.828352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.831074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.842560Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10549, node 1 2025-04-10T11:24:21.932377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:21.932399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:21.932411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:21.932809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4484 TClient is connected to server localhost:4484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:22.426365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.440522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:22.450589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.576449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.757680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.833202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:24.553596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643869783447680:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.553725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.796870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.827433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.854610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.881420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.912567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:24.986761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.043653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643874078415494:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.043762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.044140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643874078415500:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.048125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:25.086287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643874078415502:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:25.149530Z node 1 :TX_PROXY ERROR: Actor# [1:7491643874078415554:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:26.477842Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284266508, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] 2025-04-10T11:24:26.510073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643856898544170:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:26.510217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite >> LocalTableWriter::WriteTable >> LocalTableWriter::SupportedTypes >> LocalTableWriter::DecimalKeys >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> KqpSystemView::NodesRange1 [GOOD] >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] Test command err: 2025-04-10T11:23:58.894366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643758395087140:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:58.894425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf9/r3tmp/tmpo2Pfxy/pdisk_1.dat 2025-04-10T11:23:59.304840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:59.323996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:59.324122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5648, node 1 2025-04-10T11:23:59.370049Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:59.370073Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:59.371716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:59.424256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:59.424278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:59.424291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:59.424429Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:5870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:59.717455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:03.360085Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643782578832172:2102];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:03.360205Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf9/r3tmp/tmplhpTuD/pdisk_1.dat 2025-04-10T11:24:03.533014Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:03.566043Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:03.566131Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:03.569291Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27646, node 4 2025-04-10T11:24:03.639149Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:03.639205Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:03.639216Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:03.639369Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12952 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:03.924597Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.007184Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643803452010375:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:08.008128Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf9/r3tmp/tmpk0Dw66/pdisk_1.dat 2025-04-10T11:24:08.281780Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23650, node 7 2025-04-10T11:24:08.369843Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:08.369970Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:08.390372Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:08.423155Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:08.423179Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:08.423187Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:08.423313Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14110 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.734023Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:14110 2025-04-10T11:24:09.142281Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.142809Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:09.142841Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.147605Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-04-10T11:24:09.157477Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284249204, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:09.159839Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-10T11:24:09.159916Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-10T11:24:09.161128Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-10T11:24:09.165636Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.166356Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:09.166388Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:09.168006Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-04-10T11:24:09.671172Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491643808339446581:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:09.671224Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:09.734110Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:09.734221Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:09.741679Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-10T11:24:09.745222Z node 7 :HIVE WARN: HIVE#72057594037968897 ... , txId: 281474976715659, subscribers: 1 2025-04-10T11:24:18.881383Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.882688Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:18.882714Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:18.882866Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-04-10T11:24:18.883039Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:18.883091Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-04-10T11:24:18.883888Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:18.896102Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-04-10T11:24:19.051056Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284259080, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:19.066602Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-10T11:24:19.066718Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-10T11:24:19.066733Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2025-04-10T11:24:19.109495Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-10T11:24:19.110237Z 
node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:24:20.249176Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:24:20.255754Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491643854694668064:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:20.350427Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491643854694668064:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:20.484378Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7491643854694668064:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:21.629645Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491643859754881074:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.629716Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bf9/r3tmp/tmpzhcnTs/pdisk_1.dat 2025-04-10T11:24:21.872116Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8849, node 13 2025-04-10T11:24:21.957339Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.957443Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:22.001386Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:22.034425Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:22.034455Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:22.034477Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:22.034664Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:22.346019Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:18936 2025-04-10T11:24:22.768631Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.769106Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:22.769145Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.775156Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-04-10T11:24:22.783944Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284262826, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:22.786385Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-10T11:24:22.786443Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-10T11:24:22.787331Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-10T11:24:22.792057Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.792707Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:22.792737Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.796071Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-04-10T11:24:23.304332Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7491643867603965408:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:23.304525Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:23.358654Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:23.358766Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:23.363321Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-10T11:24:23.366003Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:23.739935Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284263785, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:23.742502Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-10T11:24:23.742639Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-04-10T11:24:23.743777Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-04-10T11:24:25.796688Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.798214Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:25.798248Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.800655Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-04-10T11:24:25.872677Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284265910, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:24:25.883154Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-10T11:24:25.901924Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-10T11:24:25.902611Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> Yq_1::Basic >> Yq_1::DescribeJob >> Yq_1::ListConnections >> Yq_1::DescribeConnection >> Yq_1::Basic_Null >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Delete nodeId# 47 Disable nodeId# 8 Disable nodeId# 63 Disable nodeId# 35 Add nodeId# 101 Pick Pick Delete 
nodeId# 6 Enable nodeId# 8 Disable nodeId# 72 Pick Pick Delete nodeId# 90 Pick Add nodeId# 102 Add nodeId# 103 Disable nodeId# 95 Delete nodeId# 84 Pick Enable nodeId# 72 Pick Add nodeId# 104 Delete nodeId# 30 Disable nodeId# 72 Delete nodeId# 79 Disable nodeId# 44 Delete nodeId# 26 Delete nodeId# 2 Enable nodeId# 63 Add nodeId# 105 Delete nodeId# 60 Pick Add nodeId# 106 Enable nodeId# 35 Disable nodeId# 3 Enable nodeId# 44 Add nodeId# 107 Pick Delete nodeId# 44 Delete nodeId# 24 Enable nodeId# 3 Delete nodeId# 92 Delete nodeId# 95 Add nodeId# 108 Delete nodeId# 56 Disable nodeId# 38 Delete nodeId# 107 Enable nodeId# 38 Pick Delete nodeId# 68 Disable nodeId# 106 Delete nodeId# 32 Pick Add nodeId# 109 Enable nodeId# 106 Pick Pick Disable nodeId# 82 Add nodeId# 110 Enable nodeId# 72 Disable nodeId# 88 Pick Delete nodeId# 31 Add nodeId# 111 Disable nodeId# 16 Add nodeId# 112 Enable nodeId# 16 Add nodeId# 113 Add nodeId# 114 Add nodeId# 115 Add nodeId# 116 Pick Enable nodeId# 82 Disable nodeId# 57 Disable nodeId# 33 Enable nodeId# 57 Disable nodeId# 67 Add nodeId# 117 Disable nodeId# 21 Delete nodeId# 67 Add nodeId# 118 Pick Pick Delete nodeId# 16 Delete nodeId# 80 Delete nodeId# 55 Enable nodeId# 21 Delete nodeId# 112 Disable nodeId# 10 Delete nodeId# 59 Disable nodeId# 52 Add nodeId# 119 Disable nodeId# 75 Pick Disable nodeId# 51 Pick Add nodeId# 120 Add nodeId# 121 Delete nodeId# 62 Delete nodeId# 118 Add nodeId# 122 Add nodeId# 123 Delete nodeId# 69 Pick Disable nodeId# 57 Add nodeId# 124 Pick Enable nodeId# 51 Disable nodeId# 58 Delete nodeId# 96 Delete nodeId# 42 Disable nodeId# 15 Delete nodeId# 65 Enable nodeId# 58 Add nodeId# 125 Add nodeId# 126 Enable nodeId# 52 Disable nodeId# 28 Disable nodeId# 1 Disable nodeId# 86 Delete nodeId# 52 Disable nodeId# 41 Enable nodeId# 15 Pick Disable nodeId# 126 Delete nodeId# 66 Disable nodeId# 35 Delete nodeId# 109 Delete nodeId# 119 Delete nodeId# 125 Enable nodeId# 57 Delete nodeId# 87 Pick Enable nodeId# 41 Add nodeId# 127 Delete nodeId# 102 Pick Add nodeId# 128 Pick Add nodeId# 129 Pick Pick Enable nodeId# 33 Add nodeId# 130 Disable nodeId# 9 Delete nodeId# 120 Disable nodeId# 64 Pick Enable nodeId# 126 Add nodeId# 131 Disable nodeId# 130 Add nodeId# 132 Add nodeId# 133 Disable nodeId# 11 Delete nodeId# 73 Delete nodeId# 15 Disable nodeId# 93 Disable nodeId# 127 Pick Add nodeId# 134 Delete nodeId# 127 Delete nodeId# 88 Add nodeId# 135 Enable nodeId# 130 Pick Disable nodeId# 45 Enable nodeId# 9 Delete nodeId# 97 Enable nodeId# 86 Add nodeId# 136 Delete nodeId# 10 Delete nodeId# 133 Delete nodeId# 136 Enable nodeId# 45 Pick Enable nodeId# 64 Enable nodeId# 1 Add nodeId# 137 Add nodeId# 138 Add nodeId# 139 Delete nodeId# 111 Enable nodeId# 11 Enable nodeId# 93 Enable nodeId# 35 Enable nodeId# 28 Pick Add nodeId# 140 Enable nodeId# 75 Pick Pick Disable nodeId# 134 Pick Enable nodeId# 134 Disable nodeId# 114 Delete nodeId# 58 Enable nodeId# 114 Pick Pick Add nodeId# 141 Pick Delete nodeId# 45 Disable nodeId# 20 Add nodeId# 142 Delete nodeId# 1 Pick Enable nodeId# 20 Delete nodeId# 115 Delete nodeId# 126 Disable nodeId# 85 Delete nodeId# 142 Add nodeId# 143 Enable nodeId# 85 Pick Add nodeId# 144 Disable nodeId# 101 Disable nodeId# 141 Add nodeId# 145 Enable nodeId# 141 Disable nodeId# 54 Add nodeId# 146 Disable nodeId# 61 Add nodeId# 147 Delete nodeId# 50 Enable nodeId# 54 Disable nodeId# 91 Add nodeId# 148 Disable nodeId# 141 Disable nodeId# 22 Enable nodeId# 91 Enable nodeId# 22 Enable nodeId# 141 Add nodeId# 149 Pick Add nodeId# 150 Delete nodeId# 
48 Delete nodeId# 134 Delete nodeId# 8 Enable nodeId# 101 Pick Delete nodeId# 145 Disable nodeId# 141 Delete nodeId# 150 Disable nodeId# 149 Add nodeId# 151 Add nodeId# 152 Add nodeId# 153 Delete nodeId# 132 Delete nodeId# 7 Add nodeId# 154 Pick Disable nodeId# 29 Delete nodeId# 104 Enable nodeId# 141 Delete nodeId# 124 Pick Disable nodeId# 28 Delete nodeId# 18 Delete nodeId# 61 Pick Delete nodeId# 83 Enable nodeId# 29 Enable nodeId# 28 Pick Add nodeId# 155 Add nodeId# 156 Enable nodeId# 149 Add nodeId# 157 Disable nodeId# 78 Pick Enable nodeId# 78 Add nodeId# 158 Delete nodeId# 12 Disable nodeId# 39 Delete nodeId# 35 Add nodeId# 159 Delete nodeId# 34 Enable nodeId# 39 Delete nodeId# 89 Pick Delete nodeId# 123 Disable nodeId# 78 Add nodeId# 160 Disable nodeId# 46 Pick Pick Delete nodeId# 116 Disable nodeId# 137 Pick Add nodeId# 161 Delete nodeId# 72 Delete nodeId# 152 Disable nodeId# 129 Disable nodeId# 75 Disable nodeId# 93 Disable nodeId# 143 Enable nodeId# 78 Add nodeId# 162 Disable nodeId# 121 Disable nodeId# 64 Pick Delete nodeId# 144 Delete nodeId# 161 Delete nodeId# 14 Add nodeId# 163 Add nodeId# 164 Disable nodeId# 54 Disable nodeId# 76 Disable nodeId# 71 Delete nodeId# 146 Disable nodeId# 36 Add nodeId# 165 Add nodeId# 166 Delete nodeId# 121 Delete nodeId# 135 Enable nodeId# 54 Disable nodeId# 11 Delete nodeId# 57 Disable nodeId# 5 Pick Add nodeId# 167 Disable nodeId# 159 Add nodeId# 168 Add nodeId# 169 Pick Enable nodeId# 36 Enable nodeId# 75 Disable nodeId# 39 Add nodeId# 170 Disable nodeId# 20 Pick Enable nodeId# 46 Enable nodeId# 129 Pick Enable nodeId# 93 Delete nodeId# 131 Enable nodeId# 64 Disable nodeId# 141 Delete nodeId# 86 Pick Disable nodeId# 108 Enable nodeId# 141 Disable nodeId# 51 Add nodeId# 171 Enable nodeId# 143 Disable nodeId# 128 Pick Add nodeId# 172 Delete nodeId# 140 Enable nodeId# 159 Pick Disable nodeId# 143 Delete nodeId# 113 Enable nodeId# 137 Pick Delete nodeId# 172 Pick Add nodeId# 173 Disable nodeId# 49 Delete nodeId# 39 Pick Enable nodeId# 20 Disable nodeId# 36 Delete nodeId# 93 Enable nodeId# 5 Delete nodeId# 20 Delete nodeId# 29 Pick Add nodeId# 174 Add nodeId# 175 Add nodeId# 176 Add nodeId# 177 Pick Delete nodeId# 147 Disable nodeId# 130 Enable nodeId# 128 Delete nodeId# 171 Pick Enable nodeId# 49 Pick Disable nodeId# 27 Disable nodeId# 74 Add nodeId# 178 Delete nodeId# 162 Disable nodeId# 49 Disable nodeId# 37 Add nodeId# 179 Disable nodeId# 110 Disable nodeId# 128 Enable nodeId# 128 Add nodeId# 180 Pick Disable nodeId# 22 Pick Disable nodeId# 33 Disable nodeId# 170 Pick Pick Enable nodeId# 130 Disable nodeId# 41 Delete nodeId# 54 Add nodeId# 181 Pick Enable nodeId# 27 Delete nodeId# 99 Disable nodeId# 53 Delete nodeId# 122 Pick Pick Disable nodeId# 40 Enable nodeId# 11 Pick Delete nodeId# 13 Disable nodeId# 81 Add nodeId# 182 Disable nodeId# 155 Disable nodeId# 17 Disable nodeId# 182 Enable nodeId# 108 Delete nodeId# 51 Pick Add nodeId# 183 Enable nodeId# 81 Disable nodeId# 167 Delete nodeId# 85 Pick Disable nodeId# 169 Pick Enable nodeId# 143 Disable nodeId# 117 Disable nodeId# 3 Disable nodeId# 164 Add nodeId# 184 Add nodeId# 185 Pick Enable nodeId# 76 Disable nodeId# 27 Delete nodeId# 157 Enable nodeId# 117 Add nodeId# 186 Enable nodeId# 3 Delete nodeId# 76 Enable nodeId# 27 Pick Add nodeId# 187 Pick Delete nodeId# 149 Disable nodeId# 3 Disable nodeId# 63 Add nodeId# 188 Pick Add nodeId# 189 Add nodeId# 190 Enable nodeId# 37 Pick Delete nodeId# 110 Enable nodeId# 49 Disable nodeId# 138 Add nodeId# 191 Pick Add nodeId# 192 Pick Add nodeId# 193 
Pick Delete nodeId# 105 Pick Disable nodeId# 159 Pick Enable nodeId# 71 Disable nodeId# 174 Add nodeId# 194 Delete nodeId# 176 Delete nodeId# 175 Pick Disable nodeId# 128 Delete nodeId# 185 Add nodeId# 195 Enable nodeId# 53 Enable nodeId# 63 Enable nodeId# 17 Disable nodeId# 160 Disable nodeId# 9 Delete nodeId# 137 Enable nodeId# 159 Delete nodeId# 130 Delete nodeId# 98 Disable nodeId# 166 Add nodeId# 196 Add nodeId# 197 Delete nodeId# 165 Add nodeId# 198 Enable nodeId# 174 Delete nodeId# 141 Enable nodeId# 40 Disable nodeId# 195 Delete nodeId# 190 Delete nodeId# 153 Disable nodeId# 40 Enable nodeId# 169 Delete nodeId# 5 Delete nodeId# 128 Delete nodeId# 168 Enable nodeId# 33 Delete nodeId# 75 Pick Delete nodeId# 117 Pick Delete nodeId# 4 Enable nodeId# 195 Disable nodeId# 108 Enable nodeId# 36 Enable nodeId# 9 Disable nodeId# 38 Pick Delete nodeId# 43 Pick Enable nodeId# 182 Add nodeId# 199 Add nodeId# 200 Pick Add nodeId# 201 Add nodeId# 202 Disable nodeId# 178 Add nodeId# 203 Add nodeId# 204 Delete nodeId# 70 Enable nodeId# 3 Pick Enable nodeId# 22 Disable nodeId# 202 Enable nodeId# 41 Add nodeId# 205 Add nodeId# 206 Delete nodeId# 183 Enable nodeId# 138 Pick Delete nodeId# 106 Pick Enable nodeId# 155 Delete nodeId# 71 Enable nodeId# 38 Pick Delete nodeId# 3 Enable nodeId# 160 Disable nodeId# 159 Disable nodeId# 151 Disable nodeId# 182 Delete nodeId# 159 Pick Disable nodeId# 22 Enable nodeId# 166 Enable nodeId# 167 Delete nodeId# 206 Enable nodeId# 178 Add nodeId# 207 Pick Pick Add nodeId# 208 Pick Pick Enable nodeId# 40 Add nodeId# 209 Disable nodeId# 138 Disable nodeId# 155 Disable nodeId# 166 Enable nodeId# 202 Enable nodeId# 164 Add nodeId# 210 Disable nodeId# 36 Pick Pick Disable nodeId# 196 Pick Delete nodeId# 189 Pick Pick Delete nodeId# 91 Enable nodeId# 196 Disable nodeId# 53 Disable nodeId# 154 Add nodeId# 211 Enable nodeId# 108 Delete nodeId# 143 Add nodeId# 212 Add nodeId# 213 Enable nodeId# 138 Disable nodeId# 212 Add nodeId# 214 Disable nodeId# 167 Disable nodeId# 49 Pick Disable nodeId# 148 Pick Pick Add nodeId# 215 Disable nodeId# 207 Enable nodeId# 36 Delete nodeId# 94 Pick Enable nodeId# 166 Enable nodeId# 170 Add nodeId# 216 Delete nodeId# 38 Pick Add nodeId# 217 Disable nodeId# 170 Disable nodeId# 64 Enable nodeId# 207 Delete nodeId# 193 Disable nodeId# 201 Delete nodeId# 78 Delete nodeId# 173 Disable nodeId# 166 Add nodeId# 218 Enable nodeId# 155 Add nodeId# 219 Add nodeId# 220 Enable nodeId# 182 Delete nodeId# 81 Pick Add nodeId# 221 Delete nodeId# 182 Delete nodeId# 23 Delete nodeId# 148 Add nodeId# 222 Enable nodeId# 151 Disable nodeId# 222 Enable nodeId# 64 Delete nodeId# 46 Pick Delete nodeId# 25 Add nodeId# 223 Enable nodeId# 74 Delete nodeId# 138 Pick Enable nodeId# 22 Add nodeId# 224 Disable nodeId# 114 Enable nodeId# 53 Pick Disable nodeId# 163 Pick Delete nodeId# 41 Disable nodeId# 186 Pick Disable nodeId# 180 Delete nodeId# 204 Enable nodeId# 180 Disable nodeId# 17 Disable nodeId# 180 Enable nodeId# 201 Pick Add nodeId# 225 Pick Add nodeId# 226 Pick Disable nodeId# 219 Disable nodeId# 187 Pick Enable nodeId# 212 Delete nodeId# 36 Delete nodeId# 221 Enable nodeId# 222 Add nodeId# 227 Enable nodeId# 170 Enable nodeId# 166 Enable nodeId# 187 Add nodeId# 228 Enable nodeId# 114 Delete nodeId# 164 Delete nodeId# 156 Enable nodeId# 49 Disable nodeId# 211 Add nodeId# 229 Add nodeId# 230 Pick Enable nodeId# 219 Add nodeId# 231 Add nodeId# 232 Enable nodeId# 163 Add nodeId# 233 Delete nodeId# 177 Add nodeId# 234 Enable nodeId# 211 Delete nodeId# 129 Disable nodeId# 
201 Pick Disable nodeId# 11 Delete nodeId# 186 Disable nodeId# 33 Disable nodeId# 179 Pick Pick Add nodeId# 235 Delete nodeId# 215 Pick Disable nodeId# 166 Enable nodeId# 166 Delete nodeId# 21 Add nodeId# 236 Add nodeId# 237 Pick Pick Delete nodeId# 236 Add nodeId# 238 Pick Pick Disable nodeId# 108 Enable nodeId# 154 Add nodeId# 239 Add nodeId# 240 Enable nodeId# 108 Enable nodeId# 167 Delete nodeId# 218 Disable nodeId# 64 Add nodeId# 241 Add nodeId# 242 Delete nodeId# 74 Enable nodeId# 17 Pick Add nodeId# 243 Enable nodeId# 179 Disable nodeId# 101 Disable nodeId# 160 Delete nodeId# 212 Disable nodeId# 209 Enable nodeId# 64 Delete nodeId# 63 Enable nodeId# 160 Add nodeId# 244 Add nodeId# 245 Enable nodeId# 209 Enable nodeId# 11 Enable nodeId# 33 Delete nodeId# 224 Enable nodeId# 101 Add nodeId# 246 Disable nodeId# 27 Enable nodeId# 180 Delete nodeId# 216 Enable nodeId# 201 Enable nodeId# 27 Delete nodeId# 240 Add nodeId# 247 Disable nodeId# 163 Pick Enable nodeId# 163 Pick Pick Pick Disable nodeId# 170 Pick Add nodeId# 248 Enable nodeId# 170 Delete nodeId# 170 Disable nodeId# 202 Add nodeId# 249 Add nodeId# 250 Pick Enable nodeId# 202 Delete nodeId# 139 Delete nodeId# 188 Delete nodeId# 211 Delete nodeId# 213 ... 20183 Disable nodeId# 20253 Add nodeId# 20255 Pick Pick Delete nodeId# 20144 Disable nodeId# 20187 Pick Enable nodeId# 20204 Enable nodeId# 20229 Enable nodeId# 20225 Add nodeId# 20256 Delete nodeId# 20229 Add nodeId# 20257 Enable nodeId# 20181 Pick Delete nodeId# 20243 Pick Enable nodeId# 20220 Delete nodeId# 20089 Add nodeId# 20258 Delete nodeId# 20169 Add nodeId# 20259 Disable nodeId# 20217 Delete nodeId# 20234 Pick Disable nodeId# 20214 Add nodeId# 20260 Add nodeId# 20261 Add nodeId# 20262 Enable nodeId# 20221 Pick Delete nodeId# 20249 Enable nodeId# 20236 Disable nodeId# 20225 Enable nodeId# 20226 Delete nodeId# 20239 Pick Pick Enable nodeId# 20224 Disable nodeId# 20257 Delete nodeId# 20260 Disable nodeId# 20244 Add nodeId# 20263 Delete nodeId# 20210 Pick Enable nodeId# 20198 Pick Enable nodeId# 20225 Add nodeId# 20264 Disable nodeId# 20255 Delete nodeId# 20233 Delete nodeId# 20245 Enable nodeId# 20187 Delete nodeId# 20224 Enable nodeId# 20244 Enable nodeId# 20257 Pick Delete nodeId# 20183 Delete nodeId# 20191 Delete nodeId# 20223 Pick Delete nodeId# 20242 Pick Pick Disable nodeId# 20263 Delete nodeId# 20203 Delete nodeId# 20221 Add nodeId# 20265 Delete nodeId# 20196 Enable nodeId# 20255 Disable nodeId# 20092 Pick Disable nodeId# 20213 Enable nodeId# 20240 Add nodeId# 20266 Enable nodeId# 20253 Add nodeId# 20267 Enable nodeId# 20092 Delete nodeId# 20263 Disable nodeId# 20244 Disable nodeId# 20218 Enable nodeId# 20217 Disable nodeId# 20173 Delete nodeId# 20205 Disable nodeId# 20236 Disable nodeId# 20240 Add nodeId# 20268 Delete nodeId# 20261 Enable nodeId# 20213 Delete nodeId# 20231 Delete nodeId# 20255 Pick Enable nodeId# 20240 Delete nodeId# 20251 Pick Add nodeId# 20269 Delete nodeId# 20225 Delete nodeId# 20244 Delete nodeId# 20254 Delete nodeId# 20267 Disable nodeId# 20240 Disable nodeId# 20204 Enable nodeId# 20214 Pick Disable nodeId# 20258 Disable nodeId# 20213 Pick Add nodeId# 20270 Delete nodeId# 20181 Enable nodeId# 20218 Disable nodeId# 20214 Add nodeId# 20271 Delete nodeId# 20187 Delete nodeId# 20211 Pick Disable nodeId# 20230 Enable nodeId# 20240 Enable nodeId# 20213 Delete nodeId# 20258 Pick Delete nodeId# 20237 Enable nodeId# 20204 Enable nodeId# 20236 Enable nodeId# 20214 Add nodeId# 20272 Pick Enable nodeId# 20173 Disable nodeId# 20218 Enable nodeId# 20218 Pick 
Enable nodeId# 20230 Delete nodeId# 20204 Delete nodeId# 20226 Pick Pick Disable nodeId# 20272 Disable nodeId# 20240 Pick Pick Add nodeId# 20273 Pick Delete nodeId# 20272 Add nodeId# 20274 Delete nodeId# 20262 Delete nodeId# 20092 Pick Pick Pick Enable nodeId# 20240 Disable nodeId# 20273 Pick Pick Disable nodeId# 20217 Enable nodeId# 20217 Add nodeId# 20275 Enable nodeId# 20273 Delete nodeId# 20155 Delete nodeId# 20217 Delete nodeId# 20207 Add nodeId# 20276 Add nodeId# 20277 Add nodeId# 20278 Delete nodeId# 20220 Disable nodeId# 20270 Add nodeId# 20279 Enable nodeId# 20270 Add nodeId# 20280 Add nodeId# 20281 Add nodeId# 20282 Delete nodeId# 20240 Delete nodeId# 20256 Add nodeId# 20283 Delete nodeId# 20264 Add nodeId# 20284 Pick Pick Pick Add nodeId# 20285 Delete nodeId# 20218 Add nodeId# 20286 Add nodeId# 20287 Add nodeId# 20288 Delete nodeId# 20259 Disable nodeId# 20265 Pick Disable nodeId# 20236 Delete nodeId# 20273 Pick Delete nodeId# 20286 Enable nodeId# 20236 Pick Delete nodeId# 20265 Disable nodeId# 20275 Pick Enable nodeId# 20275 Disable nodeId# 20278 Add nodeId# 20289 Pick Enable nodeId# 20278 Pick Add nodeId# 20290 Disable nodeId# 20230 Pick Add nodeId# 20291 Add nodeId# 20292 Delete nodeId# 20279 Pick Enable nodeId# 20230 Delete nodeId# 20266 Add nodeId# 20293 Pick Delete nodeId# 20247 Add nodeId# 20294 Pick Pick Pick Pick Pick Delete nodeId# 20284 Pick Delete nodeId# 20202 Pick Pick Disable nodeId# 20276 Enable nodeId# 20276 Pick Add nodeId# 20295 Add nodeId# 20296 Pick Add nodeId# 20297 Add nodeId# 20298 Disable nodeId# 20214 Disable nodeId# 20285 Pick Delete nodeId# 20230 Pick Pick Disable nodeId# 20194 Enable nodeId# 20194 Enable nodeId# 20214 Delete nodeId# 20269 Add nodeId# 20299 Delete nodeId# 20289 Pick Delete nodeId# 20257 Pick Delete nodeId# 20214 Disable nodeId# 20176 Delete nodeId# 20248 Delete nodeId# 20283 Delete nodeId# 20296 Disable nodeId# 20276 Pick Pick Disable nodeId# 20227 Enable nodeId# 20176 Enable nodeId# 20227 Enable nodeId# 20276 Enable nodeId# 20285 Add nodeId# 20300 Add nodeId# 20301 Add nodeId# 20302 Pick Add nodeId# 20303 Add nodeId# 20304 Disable nodeId# 20302 Add nodeId# 20305 Disable nodeId# 20300 Add nodeId# 20306 Pick Add nodeId# 20307 Delete nodeId# 20268 Add nodeId# 20308 Add nodeId# 20309 Add nodeId# 20310 Enable nodeId# 20300 Disable nodeId# 20298 Pick Add nodeId# 20311 Disable nodeId# 20253 Enable nodeId# 20298 Add nodeId# 20312 Pick Pick Disable nodeId# 20290 Disable nodeId# 20291 Delete nodeId# 20290 Pick Add nodeId# 20313 Delete nodeId# 20287 Enable nodeId# 20302 Enable nodeId# 20253 Pick Add nodeId# 20314 Pick Disable nodeId# 20299 Delete nodeId# 20280 Disable nodeId# 20307 Disable nodeId# 20288 Delete nodeId# 20306 Add nodeId# 20315 Enable nodeId# 20299 Pick Add nodeId# 20316 Add nodeId# 20317 Enable nodeId# 20307 Delete nodeId# 20250 Add nodeId# 20318 Disable nodeId# 20308 Delete nodeId# 20302 Delete nodeId# 20317 Delete nodeId# 20305 Pick Disable nodeId# 20173 Delete nodeId# 20314 Add nodeId# 20319 Delete nodeId# 20173 Pick Enable nodeId# 20308 Add nodeId# 20320 Pick Enable nodeId# 20291 Delete nodeId# 20307 Disable nodeId# 20294 Add nodeId# 20321 Add nodeId# 20322 Pick Delete nodeId# 20198 Pick Pick Add nodeId# 20323 Disable nodeId# 20293 Delete nodeId# 20252 Pick Disable nodeId# 20323 Add nodeId# 20324 Delete nodeId# 20323 Add nodeId# 20325 Delete nodeId# 20227 Delete nodeId# 20213 Enable nodeId# 20288 Disable nodeId# 20304 Disable nodeId# 20206 Enable nodeId# 20293 Pick Add nodeId# 20326 Add nodeId# 20327 Pick Disable nodeId# 
20276 Add nodeId# 20328 Enable nodeId# 20276 Enable nodeId# 20206 Delete nodeId# 20301 Enable nodeId# 20304 Disable nodeId# 20176 Disable nodeId# 20300 Disable nodeId# 20311 Pick Add nodeId# 20329 Delete nodeId# 20316 Enable nodeId# 20311 Delete nodeId# 20206 Pick Enable nodeId# 20176 Pick Pick Pick Add nodeId# 20330 Add nodeId# 20331 Enable nodeId# 20300 Pick Pick Delete nodeId# 20285 Disable nodeId# 20331 Enable nodeId# 20331 Delete nodeId# 20288 Disable nodeId# 20291 Disable nodeId# 20297 Pick Add nodeId# 20332 Disable nodeId# 20320 Add nodeId# 20333 Delete nodeId# 20238 Enable nodeId# 20297 Add nodeId# 20334 Pick Pick Enable nodeId# 20291 Delete nodeId# 20276 Add nodeId# 20335 Add nodeId# 20336 Enable nodeId# 20294 Add nodeId# 20337 Pick Delete nodeId# 20334 Pick Disable nodeId# 20311 Enable nodeId# 20311 Pick Pick Disable nodeId# 20270 Add nodeId# 20338 Pick Add nodeId# 20339 Enable nodeId# 20270 Disable nodeId# 20322 Add nodeId# 20340 Pick Add nodeId# 20341 Pick Delete nodeId# 20340 Pick Add nodeId# 20342 Add nodeId# 20343 Enable nodeId# 20322 Delete nodeId# 20309 Disable nodeId# 20327 Enable nodeId# 20327 Delete nodeId# 20308 Disable nodeId# 20282 Add nodeId# 20344 Pick Pick Add nodeId# 20345 Pick Add nodeId# 20346 Disable nodeId# 20318 Delete nodeId# 20292 Pick Add nodeId# 20347 Enable nodeId# 20318 Add nodeId# 20348 Enable nodeId# 20282 Disable nodeId# 20324 Pick Enable nodeId# 20320 Add nodeId# 20349 Enable nodeId# 20324 Pick Delete nodeId# 20311 Pick Disable nodeId# 20318 Delete nodeId# 20349 Delete nodeId# 20312 Add nodeId# 20350 Add nodeId# 20351 Enable nodeId# 20318 Pick Delete nodeId# 20347 Disable nodeId# 20318 Disable nodeId# 20330 Enable nodeId# 20330 Delete nodeId# 20336 Delete nodeId# 20278 Enable nodeId# 20318 Pick Disable nodeId# 20326 Pick Disable nodeId# 20351 Pick Enable nodeId# 20326 Delete nodeId# 20275 Pick Disable nodeId# 20274 Add nodeId# 20352 Add nodeId# 20353 Enable nodeId# 20351 Add nodeId# 20354 Enable nodeId# 20274 Pick Disable nodeId# 20324 Pick Disable nodeId# 20344 Disable nodeId# 20345 Enable nodeId# 20324 Pick Delete nodeId# 20293 Disable nodeId# 20342 Add nodeId# 20355 Enable nodeId# 20342 Delete nodeId# 20333 Add nodeId# 20356 Delete nodeId# 20351 Delete nodeId# 20294 Disable nodeId# 20315 Pick Enable nodeId# 20345 Pick Enable nodeId# 20315 Pick Delete nodeId# 20322 Enable nodeId# 20344 Add nodeId# 20357 Delete nodeId# 20304 Disable nodeId# 20282 Pick Add nodeId# 20358 Delete nodeId# 20313 Disable nodeId# 20335 Pick Pick Add nodeId# 20359 Pick Add nodeId# 20360 Disable nodeId# 20346 Enable nodeId# 20335 Disable nodeId# 20303 Enable nodeId# 20303 Enable nodeId# 20282 Pick Disable nodeId# 20315 Pick Add nodeId# 20361 Pick Disable nodeId# 20358 Delete nodeId# 20358 Pick Delete nodeId# 20274 Enable nodeId# 20346 Enable nodeId# 20315 Pick Pick Delete nodeId# 20327 Pick Pick Add nodeId# 20362 Add nodeId# 20363 Add nodeId# 20364 Add nodeId# 20365 Disable nodeId# 20331 Delete nodeId# 20298 Enable nodeId# 20331 Delete nodeId# 20319 Disable nodeId# 20277 Delete nodeId# 20360 Pick Delete nodeId# 20282 Delete nodeId# 20291 Enable nodeId# 20277 Delete nodeId# 20338 Disable nodeId# 20318 Add nodeId# 20366 Pick Disable nodeId# 20346 Enable nodeId# 20346 Disable nodeId# 20281 Add nodeId# 20367 Enable nodeId# 20318 Pick Disable nodeId# 20337 Disable nodeId# 20343 Disable nodeId# 20320 Delete nodeId# 20236 Enable nodeId# 20337 Pick Pick Delete nodeId# 20345 Disable nodeId# 20354 Pick Delete nodeId# 20353 Add nodeId# 20368 Pick Enable nodeId# 20343 Disable nodeId# 
20356 Add nodeId# 20369 Pick Disable nodeId# 20332 Pick Disable nodeId# 20194 Pick Pick Pick Delete nodeId# 20369 Delete nodeId# 20367 Disable nodeId# 20167 Add nodeId# 20370 Add nodeId# 20371 Enable nodeId# 20194 Add nodeId# 20372 Delete nodeId# 20167 Add nodeId# 20373 Pick Disable nodeId# 20363 Add nodeId# 20374 Add nodeId# 20375 Add nodeId# 20376 Enable nodeId# 20320 Delete nodeId# 20253 Enable nodeId# 20332 Delete nodeId# 20325 Enable nodeId# 20354 Delete nodeId# 20326 Disable nodeId# 20194 Delete nodeId# 20346 Delete nodeId# 20363 Disable nodeId# 20342 Pick Add nodeId# 20377 Add nodeId# 20378 Add nodeId# 20379 Enable nodeId# 20194 Pick Delete nodeId# 20310 Pick Enable nodeId# 20342 Delete nodeId# 20368 Add nodeId# 20380 Pick Disable nodeId# 20303 Add nodeId# 20381 Pick Add nodeId# 20382 Pick Pick Delete nodeId# 20331 Delete nodeId# 20270 Delete nodeId# 20271 Pick Add nodeId# 20383 Add nodeId# 20384 Pick Delete nodeId# 20277 Disable nodeId# 20300 Disable nodeId# 20365 Add nodeId# 20385 Delete nodeId# 20329 Delete nodeId# 20382 Disable nodeId# 20380 Disable nodeId# 20375 Pick Delete nodeId# 20378 Add nodeId# 20386 Add nodeId# 20387 Disable nodeId# 20342 Pick Add nodeId# 20388 Enable nodeId# 20380 Pick Add nodeId# 20389 Add nodeId# 20390 Enable nodeId# 20356 Disable nodeId# 20335 Enable nodeId# 20335 Disable nodeId# 20385 Disable nodeId# 20377 Disable nodeId# 20364 Disable nodeId# 20366 Delete nodeId# 20386 Pick Add nodeId# 20391 Pick Enable nodeId# 20366 Delete nodeId# 20194 Delete nodeId# 20366 Enable nodeId# 20300 Add nodeId# 20392 Disable nodeId# 20372 Enable nodeId# 20281 Pick Delete nodeId# 20303 Disable nodeId# 20295 Disable nodeId# 20356 Add nodeId# 20393 Add nodeId# 20394 Add nodeId# 20395 Enable nodeId# 20295 Enable nodeId# 20385 Enable nodeId# 20364 Delete nodeId# 20361 Pick Delete nodeId# 20343 Delete nodeId# 20295 Disable nodeId# 20394 Delete nodeId# 20388 Add nodeId# 20396 Disable nodeId# 20315 Add nodeId# 20397 Enable nodeId# 20394 Disable nodeId# 20397 Enable nodeId# 20375 Add nodeId# 20398 Enable nodeId# 20377 Delete nodeId# 20393 Delete nodeId# 20398 Pick Pick Disable nodeId# 20352 Enable nodeId# 20356 Pick Delete nodeId# 20341 Add nodeId# 20399 Disable nodeId# 20321 Delete nodeId# 20372 Disable nodeId# 20396 Delete nodeId# 20373 Delete nodeId# 20321 Enable nodeId# 20342 Disable nodeId# 20364 Add nodeId# 20400 Add nodeId# 20401 Enable nodeId# 20396 Delete nodeId# 20384 Enable nodeId# 20364 Add nodeId# 20402 Enable nodeId# 20365 Add nodeId# 20403 Delete nodeId# 20362 Disable nodeId# 20390 Disable nodeId# 20335 >> AsyncIndexChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::UpsertSingleRow >> CdcStreamChangeCollector::UpsertManyRows >> CdcStreamChangeCollector::UpsertToSameKey >> KqpSystemView::NodesRange2 [GOOD] >> CdcStreamChangeCollector::InsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 9880, MsgBus: 15542 2025-04-10T11:24:17.811502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643839500521687:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.820915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:17.866763Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643842429648276:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.866810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:17.895183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643841113124247:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.895228Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:17.954826Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643842688605779:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.954865Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a75/r3tmp/tmpZWkIz1/pdisk_1.dat 2025-04-10T11:24:18.880716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:18.914311Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:19.002809Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:19.042616Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:19.067266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:19.500647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.500773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.507331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.507456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.516261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.516294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.516615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.516638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.516706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:19.516725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:19.555049Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:24:19.562721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:19.581191Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-10T11:24:19.581343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:19.655895Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:19.655950Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:24:19.656592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:19.662097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:19.662241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:19.706928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:19.886572Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:19.923165Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 9880, node 1 2025-04-10T11:24:20.294943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:20.294976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:20.294984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:20.295126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15542 TClient is connected to server localhost:15542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
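The HIVE warnings above walk every node through the same VolatileState lifecycle: Unknown -> Disconnected -> Connecting -> Connected. A minimal sketch of that transition table as a guard (the state names come from the log; the enum and the checking logic are illustrative assumptions, not YDB's implementation):

from enum import Enum, auto

class VolatileState(Enum):
    UNKNOWN = auto()
    DISCONNECTED = auto()
    CONNECTING = auto()
    CONNECTED = auto()

# Only the moves observed in the HIVE warnings above are allowed.
ALLOWED = {
    (VolatileState.UNKNOWN, VolatileState.DISCONNECTED),
    (VolatileState.DISCONNECTED, VolatileState.CONNECTING),
    (VolatileState.CONNECTING, VolatileState.CONNECTED),
}

def transition(current, target):
    if (current, target) not in ALLOWED:
        raise ValueError(f"illegal transition {current.name} -> {target.name}")
    return target

# Node 4 as logged: Unknown -> Disconnected -> Connecting -> Connected.
state = VolatileState.UNKNOWN
for nxt in (VolatileState.DISCONNECTED, VolatileState.CONNECTING, VolatileState.CONNECTED):
    state = transition(state, nxt)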
2025-04-10T11:24:21.658747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:21.707640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.045766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.274385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.450883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:22.808373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643839500521687:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.808481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:22.870357Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643842429648276:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.870418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:22.898458Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491643841113124247:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.898512Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:22.956776Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491643842688605779:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.956845Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:24.542221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643869565294714:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.542354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:24.926517Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.102892s 2025-04-10T11:24:24.926566Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.102999s 2025-04-10T11:24:24.972507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.064721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.159017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.211734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.276475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.336972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:25.412819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643873860262624:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.412894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.412924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643873860262629:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:25.416392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:25.436561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643873860262631:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:25.511454Z node 1 :TX_PROXY ERROR: Actor# [1:7491643873860262708:4051] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:26.615541Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284266603, txId: 281474976710671] shutting down 2025-04-10T11:24:26.898719Z node 5 :BS_PROXY_PUT ERROR: [1493dc555cd5a325] Result# TEvPutResult {Id# [72075186224037889:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE is not ready", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:26.898718Z node 4 :BS_PROXY_PUT ERROR: [3cf2969209965798] Result# TEvPutResult {Id# [72075186224037893:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037893:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:26.898719Z node 2 :BS_PROXY_PUT ERROR: [81225ee001433f2c] Result# TEvPutResult {Id# [72075186224037894:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037894:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:26.898759Z node 3 :BS_PROXY_PUT ERROR: [71aa40867ca26a6f] Result# TEvPutResult {Id# [72075186224037888:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |92.2%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-04-10T11:20:33.384295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:33.384578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:33.384670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cfe/r3tmp/tmp5md5w3/pdisk_1.dat 2025-04-10T11:20:33.758373Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6317, node 1 2025-04-10T11:20:34.081570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:34.081631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:34.081665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:34.082185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:34.085468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:34.170736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:34.170863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:34.186403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4567 2025-04-10T11:20:34.770972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:38.188636Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-10T11:20:38.225811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:38.225926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:38.268751Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:20:38.270526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:38.527348Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.528126Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.528592Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.528733Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.528987Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.529084Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.529207Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.529320Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.529427Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:38.709492Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:38.709611Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:38.725126Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:38.906984Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:38.954030Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:38.954134Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:38.990255Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:38.990566Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:38.990792Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:38.990860Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:38.990934Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:38.990999Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:38.991052Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:38.991108Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:38.994763Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:39.022306Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:39.022452Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1949:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:39.030554Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1964:2611] 2025-04-10T11:20:39.040142Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1984:2621] 2025-04-10T11:20:39.041094Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1984:2621], schemeshard id = 72075186224037897 2025-04-10T11:20:39.094519Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-04-10T11:20:39.124027Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:39.124098Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:39.124176Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-04-10T11:20:39.141041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:39.149704Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:39.149857Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:39.343452Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:39.505083Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:39.617863Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:40.593688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:20:44.177942Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:44.256659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:44.256804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:44.300865Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:44.303288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:44.520325Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.520843Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.521476Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.521647Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.521902Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.522021Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.522139Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.522260Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.522347Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:44.683712Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:44.683816Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:44.699294Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:44.858048Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:44.951295Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-10T11:20:44.951399Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-10T11:20:44.990693Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-10T11:20:44.990948Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-10T11:20:44.991175Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-10T11:20:44.991242Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:44.991307Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:44.991368Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:44.991433Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:44.991494Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-10T11:20:44.991958Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-10T11:20:45.029989Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-10T11:20:45.030123Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3228:2599], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-10T11:20:45.040512Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3242:2610] 2025-04-10T11:20:45.044015Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3258:2619] 202 ... erId: 72075186224038898, LocalPathId: 3] 2025-04-10T11:24:23.136305Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-10T11:24:23.140596Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:23.146595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12559:5252], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.146706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12569:5257], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.146799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.169335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-10T11:24:23.272311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12573:5260], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-10T11:24:23.421394Z node 2 :TX_PROXY ERROR: Actor# [2:12661:5308] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Shared2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:23.449153Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12690:5323]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:23.449750Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:24:23.449872Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12692:5325] 2025-04-10T11:24:23.449959Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12692:5325] 2025-04-10T11:24:23.450756Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:12693:5326] 2025-04-10T11:24:23.451318Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:12693:5326], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:24:23.451408Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:24:23.451619Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12692:5325], server id = [2:12693:5326], tablet id = 72075186224038895, status = OK 2025-04-10T11:24:23.451834Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:24:23.451910Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12690:5323], StatRequests.size() = 1 2025-04-10T11:24:23.591755Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzFiZjFhZDgtZmUxMGZjM2YtZTZjNDk4Y2YtYTc1YmJlNw==, TxId: 2025-04-10T11:24:23.591832Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzFiZjFhZDgtZmUxMGZjM2YtZTZjNDk4Y2YtYTc1YmJlNw==, TxId: 2025-04-10T11:24:23.592607Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-10T11:24:23.610957Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-10T11:24:23.611020Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
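The statistics records above all share one line shape: ISO-8601 timestamp, node id, component tag, severity, free-form message. A stdlib-only parser for that shape, sketched from the lines in this log (the field names are my own, not anything YDB defines):

import re

# Matches records such as:
# 2025-04-10T11:24:24.249440Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, ...
LOG_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node (?P<node>\d+)\s+"
    r":(?P<component>\w+)\s+"
    r"(?P<severity>[A-Z]+):\s+"
    r"(?P<message>.*)"
)

def parse_record(line):
    m = LOG_RE.match(line.strip())
    return m.groupdict() if m else None

def traversal_completions(lines):
    # Yield only the TTxFinishTraversal::Complete events seen above.
    for line in lines:
        rec = parse_record(line)
        if rec and "TTxFinishTraversal::Complete" in rec["message"]:
            yield rec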
2025-04-10T11:24:23.659541Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-10T11:24:23.659640Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:24:23.718234Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12692:5325], schemeshard count = 1 2025-04-10T11:24:24.249440Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-10T11:24:24.249519Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 198.000000s, at schemeshard: 72075186224037899 2025-04-10T11:24:24.249853Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-10T11:24:24.280782Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:24:24.553137Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:24:24.564787Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:24:24.564864Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:24.564903Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-10T11:24:24.564937Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:24:24.565239Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared1 2025-04-10T11:24:24.568406Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:24.593586Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=Nzg3OGJiN2ItYWY5YWYxNGItYzhlM2M0NC05NzE2M2JiZA==, TxId: 2025-04-10T11:24:24.593656Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=Nzg3OGJiN2ItYWY5YWYxNGItYzhlM2M0NC05NzE2M2JiZA==, TxId: 2025-04-10T11:24:24.594334Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:24:24.614808Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-10T11:24:24.615037Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-10T11:24:24.707305Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12795:5659]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:24.707720Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-10T11:24:24.707771Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12795:5659], StatRequests.size() = 1 2025-04-10T11:24:26.547703Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12878:5693]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:26.548155Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-10T11:24:26.548194Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12878:5693], StatRequests.size() = 1 2025-04-10T11:24:27.070093Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038900 2025-04-10T11:24:27.070183Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 186.000000s, at schemeshard: 72075186224038900 2025-04-10T11:24:27.070569Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038900, stats size# 26 2025-04-10T11:24:27.086622Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-10T11:24:27.291466Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-10T11:24:27.291524Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:27.291565Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-04-10T11:24:27.291596Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-10T11:24:27.292030Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-10T11:24:27.294691Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:24:27.312681Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmNjZDU4NTgtZjRlNzAzNGEtZTg1MDQ1OTgtODUxOGVkMzA=, TxId: 2025-04-10T11:24:27.312761Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmNjZDU4NTgtZjRlNzAzNGEtZTg1MDQ1OTgtODUxOGVkMzA=, TxId: 2025-04-10T11:24:27.313476Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-10T11:24:27.342287Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-10T11:24:27.342346Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
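The RunDataQuery entries above carry the exact YQL the statistics aggregator executes against `.metadata/_statistics`. For illustration only, the same parameterized DELETE issued through the public YDB Python SDK might look like this (a sketch: the endpoint, database, and path ids are stand-ins lifted from the log, and the calls follow the SDK's common sync API rather than anything this test harness uses):

import ydb

# Stand-in connection details; real values depend on the cluster under test.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root/Shared2")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

YQL = """
DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DELETE FROM `.metadata/_statistics`
WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
"""

def delete_stats(session, owner_id, local_path_id):
    # Prepare once per session, then run in a serializable read-write transaction.
    prepared = session.prepare(YQL)
    session.transaction(ydb.SerializableReadWrite()).execute(
        prepared,
        {"$owner_id": owner_id, "$local_path_id": local_path_id},
        commit_tx=True,
    )

# Ids mirror the traversal above: [OwnerId: 72075186224038900, LocalPathId: 2].
pool.retry_operation_sync(lambda s: delete_stats(s, 72075186224038900, 2))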
2025-04-10T11:24:28.188102Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-10T11:24:28.188985Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:24:28.189683Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-10T11:24:28.201607Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:24:28.201665Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:24:28.301807Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:12980:5713]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:28.302206Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-10T11:24:28.302255Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:12980:5713], StatRequests.size() = 1 2025-04-10T11:24:28.303217Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:12982:5424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:24:28.308026Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:24:28.308314Z node 2 :STATISTICS DEBUG: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-10T11:24:28.308350Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-10T11:24:28.308858Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:24:28.308954Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:12982:5424], StatRequests.size() = 1 >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> LocalTableWriter::ConsistentWrite [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> LocalTableWriter::DecimalKeys [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 14222, MsgBus: 22966 2025-04-10T11:24:20.029081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643853380581348:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.030057Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:20.117913Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643851790761368:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.117977Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:20.242423Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491643854315048265:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:20.242495Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:20.279755Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643852993839104:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a37/r3tmp/tmpNnViay/pdisk_1.dat 2025-04-10T11:24:20.814763Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:20.810310Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:21.201897Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:21.270297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:21.271449Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:21.410779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.410936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.412250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.412307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.412698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.412745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.415277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.415335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.415534Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:21.415625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:21.420036Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-10T11:24:21.420200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.422636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.425627Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:24:21.425704Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:21.425724Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:24:21.428951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.429610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.429951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:21.493295Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14222, node 1 2025-04-10T11:24:21.594422Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:21.594485Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:21.761074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:21.761098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:21.761113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:21.761226Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22966 TClient is connected to server localhost:22966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:22.890531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:22.949839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:23.253659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:23.557983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:23.790656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:25.028555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643853380581348:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:25.028676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:25.118238Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491643851790761368:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:25.118316Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:25.214040Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491643852993839104:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:25.214131Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:25.239028Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491643854315048265:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:25.239124Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:26.041087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643879150387028:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:26.041263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:26.355345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.399801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.436794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.505212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.548845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.590716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:26.677705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643879150387663:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:26.677756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:26.677799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643879150387668:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:26.680872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:26.697412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643879150387670:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:26.761221Z node 1 :TX_PROXY ERROR: Actor# [1:7491643879150387750:4024] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:27.987947Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284267978, txId: 281474976710671] shutting down 2025-04-10T11:24:28.274554Z node 5 :BS_PROXY_PUT ERROR: [e58f273473980426] Result# TEvPutResult {Id# [72075186224037897:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037897:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:28.274526Z node 4 :BS_PROXY_PUT ERROR: [7b361fc5acbc0a16] Result# TEvPutResult {Id# [72075186224037915:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:28.274632Z node 2 :BS_PROXY_PUT ERROR: [a00e6547929d4cea] Result# TEvPutResult {Id# [72075186224037894:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037894:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:24:28.274526Z node 3 :BS_PROXY_PUT ERROR: [f4475b0b6856e1e4] Result# TEvPutResult {Id# [72075186224037896:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037896:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-04-10T11:24:28.435149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643888889873262:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.435344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aa5/r3tmp/tmpvkaBSp/pdisk_1.dat 2025-04-10T11:24:28.823226Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.842509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.843274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.846593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20781 TServer::EnableGrpc on GrpcPort 5941, node 1 2025-04-10T11:24:29.168610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.168632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.168639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:29.168783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:29.691865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:29.709811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284269826 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-10T11:24:29.862533Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handshake: worker# [1:7491643893184841157:2294] 2025-04-10T11:24:29.862947Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:24:29.863295Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:24:29.863355Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Send handshake: worker# [1:7491643893184841157:2294] 2025-04-10T11:24:29.863640Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.872084Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-10T11:24:29.873478Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-10T11:24:29.873800Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643893184841252:2354] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T11:24:29.873865Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.873955Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643893184841252:2354] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-10T11:24:29.877542Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643893184841252:2354] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.877651Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.877712Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-10T11:24:29.878102Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.878528Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-10T11:24:29.878624Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-04-10T11:24:29.878781Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643893184841252:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-10T11:24:29.880451Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643893184841252:2354] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.880534Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.880584Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643893184841249:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-04-10T11:24:28.435105Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643888569747456:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.435213Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a6f/r3tmp/tmpYwrrwD/pdisk_1.dat 2025-04-10T11:24:28.828417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.877163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.877299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.878990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11158 TServer::EnableGrpc on GrpcPort 23304, node 1 2025-04-10T11:24:29.165926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.165953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.165960Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:29.166071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:29.683917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:29.706597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284269826 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-10T11:24:29.856924Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handshake: worker# [1:7491643892864715351:2294] 2025-04-10T11:24:29.857207Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:24:29.857447Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:24:29.857491Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Send handshake: worker# [1:7491643892864715351:2294] 2025-04-10T11:24:29.858051Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.858275Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-04-10T11:24:29.858449Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892864715447:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T11:24:29.858494Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.859663Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892864715447:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-10T11:24:29.877127Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892864715447:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.877232Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.877351Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892864715444:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> KqpSystemView::Join [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-04-10T11:24:28.435050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643887171406123:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.435122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab3/r3tmp/tmpVmxCiN/pdisk_1.dat 2025-04-10T11:24:28.788238Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.854250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.854363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.855877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4676 TServer::EnableGrpc on GrpcPort 27494, node 1 2025-04-10T11:24:29.165936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.165979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.165991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-10T11:24:29.166169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:29.706619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:29.724891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284269833 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-10T11:24:29.846303Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handshake: worker# [1:7491643891466374028:2295] 2025-04-10T11:24:29.846818Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:24:29.847174Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:24:29.847234Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Send handshake: worker# [1:7491643891466374028:2295] 2025-04-10T11:24:29.848650Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.855630Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-10T11:24:29.855827Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-04-10T11:24:29.858501Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T11:24:29.858578Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] 
Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.858706Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-10T11:24:29.862073Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.862141Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.862253Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-04-10T11:24:29.862581Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.863517Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.864243Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-04-10T11:24:29.864347Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-04-10T11:24:29.864595Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 
TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-10T11:24:29.867052Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.867121Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.867185Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-04-10T11:24:29.867603Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.867784Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-04-10T11:24:29.867889Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-10T11:24:29.869131Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891466374124:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.869194Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.869226Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-04-10T11:24:29.869565Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891466374121:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-04-10T11:24:28.436413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643887747967158:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.436477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aa1/r3tmp/tmpurmcE2/pdisk_1.dat 2025-04-10T11:24:28.826615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.874417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.874534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.876479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23289 TServer::EnableGrpc on GrpcPort 63260, node 1 2025-04-10T11:24:29.166944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.166983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.167022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:29.167159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:29.708194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:29.732595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284269847 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... 
(TRUNCATED) 2025-04-10T11:24:29.886695Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handshake: worker# [1:7491643892042935050:2294] 2025-04-10T11:24:29.887263Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:24:29.887691Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:24:29.887757Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Send handshake: worker# [1:7491643892042935050:2294] 2025-04-10T11:24:29.890381Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 
CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.891088Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-04-10T11:24:29.891578Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892042935146:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T11:24:29.891641Z node 1 
:REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.891972Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892042935146:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-04-10T11:24:29.961659Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643892042935146:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.961735Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 2][1:7491643892042935143:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.961820Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643892042935143:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-04-10T11:24:28.436896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643887126742445:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.436991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a74/r3tmp/tmpG4nh05/pdisk_1.dat 2025-04-10T11:24:28.846272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.882181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.882287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.884536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30822 TServer::EnableGrpc on GrpcPort 17267, node 1 2025-04-10T11:24:29.166868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.166894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.166930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:29.167101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:29.707220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:24:29.726377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284269840 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... (TRUNCATED) 2025-04-10T11:24:29.889483Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handshake: worker# [1:7491643891421710343:2293] 2025-04-10T11:24:29.889873Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:24:29.890212Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-10T11:24:29.890262Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Send handshake: worker# [1:7491643891421710343:2293] 2025-04-10T11:24:29.891815Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z 
MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-04-10T11:24:29.892029Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-04-10T11:24:29.892210Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891421710439:2354] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-10T11:24:29.892281Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.892411Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891421710439:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-04-10T11:24:29.896370Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7491643891421710439:2354] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-10T11:24:29.896437Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-10T11:24:29.896524Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7491643891421710436:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> KqpSystemView::PartitionStatsFollower [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::ForgetAfterFail [GOOD] |92.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:24:30.644907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:24:30.645129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:24:30.645170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:24:30.645205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:24:30.648187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:24:30.648251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:24:30.648348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:24:30.648448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:24:30.649761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:30.741453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:24:30.741519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:30.771027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:30.771616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:24:30.771987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:24:30.806249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:24:30.806630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:24:30.814245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:24:30.815018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:24:30.826925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:24:30.836349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:24:30.836446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-10T11:24:30.837137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:24:30.837214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:24:30.837311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:24:30.837556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.845660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:24:30.969591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:24:30.970743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.972559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:24:30.973702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:24:30.973809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.977054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:24:30.977232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:24:30.977449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.977525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:24:30.977571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:24:30.977616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:24:30.980181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.980315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:24:30.980369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:24:30.982462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.982514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.982586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:24:30.982681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:24:30.987627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:24:30.989859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:24:30.991109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:24:30.992346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:24:30.992567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:24:30.992629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:24:30.994014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:24:30.994082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:24:30.995005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:24:30.995150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:24:30.998076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:24:30.998172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:24:30.998393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:24:30.998452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:24:30.998655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:24:30.998769Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:24:30.998849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:24:30.998890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:24:30.998922Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:24:30.998955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:24:30.998996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:24:30.999034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:24:30.999075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:24:30.999104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:24:30.999170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:24:30.999222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:24:30.999265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:24:31.001062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:24:31.001155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:24:31.001191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:14856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E11D4F08-B60F-4A97-8608-9D4AE0C9153E amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-10T11:24:31.610009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:24:31.610141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:24:31.610192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:24:31.610277Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:24:31.610335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:24:31.610448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:24:31.614300Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2439], result# PutObjectResult { 
ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:14856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E0D1687F-3C09-4018-A91F-535375C51316 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-04-10T11:24:31.619473Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:485:2442], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-04-10T11:24:31.619536Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:485:2442], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T11:24:31.619833Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:484:2440], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:14856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AA3FE4A0-710E-4969-96C4-CF52208BD1C9 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-10T11:24:31.629217Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2439], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-10T11:24:31.629961Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:479:2438] 2025-04-10T11:24:31.630123Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:480:2439], sender# [1:479:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-10T11:24:31.631716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:14856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F410376B-7EEB-4A90-95BF-C13634C37E3B amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-10T11:24:31.633862Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2439], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-10T11:24:31.633909Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:480:2439], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-10T11:24:31.634080Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:479:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-10T11:24:31.653666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 
72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.653745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:24:31.653901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.654001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.654073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:24:31.654279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:24:31.655343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.655384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:24:31.655492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.655563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-10T11:24:31.655601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:24:31.655645Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:24:31.655683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at 
schemeshard: 72057594046678944 2025-04-10T11:24:31.655719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:24:31.655743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:24:31.655866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:24:31.657995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:24:31.659331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:24:31.659774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:24:31.659819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:24:31.659924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:24:31.659962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:24:31.660002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:24:31.660035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:24:31.660071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:24:31.660154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:373:2340] message: TxId: 102 2025-04-10T11:24:31.660221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:24:31.660266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:24:31.660297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:24:31.660450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:24:31.662367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:24:31.662426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:456:2416] TestWaitNotification: OK eventTxId 102 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple |92.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 12967, MsgBus: 24787 2025-04-10T11:24:17.001878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643834857911712:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.001966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a8a/r3tmp/tmpJtommO/pdisk_1.dat 2025-04-10T11:24:17.442501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.447743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.447823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.456117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12967, node 1 2025-04-10T11:24:17.622960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.622994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.623016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.623164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24787 TClient is connected to server localhost:24787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:18.247821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:18.270406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:24:18.282707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.452958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.650052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.740333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.415243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643852037782455:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.415375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.759746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.836462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.880355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.912698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.948040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.984033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:21.068531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643856332750263:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.068604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.069001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643856332750268:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:21.072589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:21.085719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643856332750270:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:21.154819Z node 1 :TX_PROXY ERROR: Actor# [1:7491643856332750325:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:22.002672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643834857911712:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:22.048171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:22.445266Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284262427, txId: 281474976710671] shutting down waiting... 2025-04-10T11:24:23.619134Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284263603, txId: 281474976710673] shutting down waiting... 2025-04-10T11:24:24.755587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284264750, txId: 281474976710675] shutting down waiting... 2025-04-10T11:24:25.909731Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284265900, txId: 281474976710677] shutting down waiting... 2025-04-10T11:24:27.113259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284267108, txId: 281474976710679] shutting down waiting... 2025-04-10T11:24:28.251257Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284268245, txId: 281474976710681] shutting down waiting... 2025-04-10T11:24:29.385087Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284269378, txId: 281474976710683] shutting down waiting... 2025-04-10T11:24:30.589586Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284270569, txId: 281474976710685] shutting down waiting... 
2025-04-10T11:24:31.798241Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284271786, txId: 281474976710687] shutting down 2025-04-10T11:24:32.163830Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284272142, txId: 281474976710689] shutting down 2025-04-10T11:24:32.440862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:24:32.440895Z node 1 :IMPORT WARN: Table profiles were not loaded |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 64903, MsgBus: 17932 2025-04-10T11:24:17.300572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643840719860595:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:17.300710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a71/r3tmp/tmpSJR44t/pdisk_1.dat 2025-04-10T11:24:17.765562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:17.782947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.783032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.785617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64903, node 1 2025-04-10T11:24:17.952808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:17.952832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:17.952855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:17.952975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17932 TClient is connected to server localhost:17932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:18.517585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:18.786318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:18.786385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:18.786466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:18.786481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:19.786696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:19.786734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:19.786803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:19.786815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:20.583194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643853604763007:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.583323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:20.790391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:20.790436Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:20.790502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:20.790518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:20.861024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491643853604763032:2314], Recipient [1:7491643840719860886:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:24:20.861058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:24:20.861068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T11:24:20.861093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491643853604763028:2311], Recipient [1:7491643840719860886:2196]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:24:20.861104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T11:24:20.947210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:24:20.947737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.947888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-04-10T11:24:20.948496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:24:20.948540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T11:24:20.948585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:24:20.948732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:24:20.948743Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2025-04-10T11:24:20.948757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-10T11:24:20.949569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-04-10T11:24:20.949725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:24:20.949781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:24:20.949904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T11:24:20.949940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:24:20.949979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:24:20.950463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T11:24:20.950655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2025-04-10T11:24:20.950682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T11:24:20.950697Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0 2025-04-10T11:24:20.950923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T11:24:20.950944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, pr ... 
eted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:24:31.036427Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099989s, queue# 1 2025-04-10T11:24:31.086531Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7491643857899730403:2338]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.086572Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.086948Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7491643857899730405:2340]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.086969Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.087023Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 2, TableInfos size = 1 2025-04-10T11:24:31.087096Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 2, tableId 2 2025-04-10T11:24:31.087416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491643900849403676:2533], Recipient [1:7491643840719860886:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:24:31.087444Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7491643900849403675:2412], Recipient [1:7491643857899730405:2340]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [1:7491643900849403676:2533] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:24:31.087449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:24:31.087464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T11:24:31.087466Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:24:31.087676Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7491643857899730405:2340], Recipient [1:7491643840719860886:2196]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1744284261059 TableOwnerId: 72057594046644480 FollowerId: 2 2025-04-10T11:24:31.087699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T11:24:31.087745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 2 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:24:31.087809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at 
tablet 72057594046644480 from shard 72075186224037888 followerId 2 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:24:31.089526Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7491643857899730404:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.089553Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:24:31.136751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:24:31.136796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:24:31.136820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-10T11:24:31.136874Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-10T11:24:31.136900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-10T11:24:31.136958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 800 row count 4 2025-04-10T11:24:31.137023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0, RowCount 4, DataSize 800 2025-04-10T11:24:31.137039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-04-10T11:24:31.137112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-10T11:24:31.137178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-10T11:24:31.137202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:24:31.137210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2025-04-10T11:24:31.137263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T11:24:31.137397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T11:24:31.137414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T11:24:31.137429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T11:24:31.138975Z node 1 :SYSTEM_VIEWS 
TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1744284260983 AccessTime: 1744284261615 UpdateTime: 1744284261422 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:24:31.139143Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1744284261059 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T11:24:31.804961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:31.805015Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:31.805075Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:31.805088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:31.978524Z node 1 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [1:7491643840719860452:2060], database# /Root, no sysview processor ... 
SELECT from partition_stats, attempt 2 2025-04-10T11:24:32.664828Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491643905144370999:2424], owner: [1:7491643905144370996:2422], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:24:32.666565Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491643905144370999:2424], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:24:32.666883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7491643905144370999:2424], Recipient [1:7491643840719860886:2196]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-04-10T11:24:32.666933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-04-10T11:24:32.667106Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491643905144370999:2424], row count: 2, finished: 1 2025-04-10T11:24:32.667148Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491643905144370999:2424], owner: [1:7491643905144370996:2422], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:24:32.677990Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7491643840719860452:2060], database# /Root, query hash# 14960494650040056739, cpu time# 274788 2025-04-10T11:24:32.764474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2025-04-10T11:24:32.764525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:24:32.771362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:32.805909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:32.805973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T11:24:32.806029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7491643840719860886:2196], Recipient [1:7491643840719860886:2196]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T11:24:32.806050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=144284643.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124284643.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=124283443.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-10T11:20:45.247238Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:45.346953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:45.371520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:45.371872Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:45.380667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:45.380922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:45.381184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:45.381322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:45.381447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:45.381582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:45.381690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:45.381811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:45.381931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:45.382046Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.382193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:45.382300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:45.413039Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:45.413399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:45.413486Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:45.413668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:45.413882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:45.413972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:45.414018Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:45.414112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:45.414189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:45.414232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:45.414273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:45.414435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:45.414498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:45.414541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:45.414571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:45.414685Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:45.414745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:45.414790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:45.414819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:45.414912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:45.414964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:45.414995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:45.415051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:20:45.415094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:45.415136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:45.415602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=79; 2025-04-10T11:20:45.415688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:20:45.415770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-10T11:20:45.415863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:20:45.416056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:45.416121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:45.416158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:45.416358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-10T11:20:45.416401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.416431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:45.416594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:45.416642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:45.416677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:45.416878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... _ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.102974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:33.103035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:24:33.103117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:33.103184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 
2025-04-10T11:24:33.103276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-10T11:24:33.103313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-10T11:24:33.103418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1894:3868];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1895:3869]->[1:1894:3868] 2025-04-10T11:24:33.103489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:33.103770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.103849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1895:3869] finished for tablet 9437184 2025-04-10T11:24:33.104240Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1894:3868];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.011},{"events":["f_processing","f_task_result"],"t":0.013},{"events":["f_ack","l_task_result"],"t":0.817},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.825}],"full":{"a":1744284272278368,"name":"_full_task","f":1744284272278368,"d_finished":0,"c":0,"l":1744284273103893,"d":825525},"events":[{"name":"bootstrap","f":1744284272278536,"d_finished":11048,"c":1,"l":1744284272289584,"d":11048},{"a":1744284273103696,"name":"ack","f":1744284273096125,"d_finished":6904,"c":7,"l":1744284273103651,"d":7101},{"a":1744284273103688,"name":"processing","f":1744284272291434,"d_finished":389074,"c":56,"l":1744284273103652,"d":389279},{"name":"ProduceResults","f":1744284272282487,"d_finished":15547,"c":65,"l":1744284273103838,"d":15547},{"a":1744284273103839,"name":"Finish","f":1744284273103839,"d_finished":0,"c":0,"l":1744284273103893,"d":54},{"name":"task_result","f":1744284272291471,"d_finished":381047,"c":49,"l":1744284273095967,"d":381047}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:24:33.104298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1894:3868];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:24:33.104654Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1894:3868];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.011},{"events":["f_processing","f_task_result"],"t":0.013},{"events":["f_ack","l_task_result"],"t":0.817},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.825}],"full":{"a":1744284272278368,"name":"_full_task","f":1744284272278368,"d_finished":0,"c":0,"l":1744284273104329,"d":825961},"events":[{"name":"bootstrap","f":1744284272278536,"d_finished":11048,"c":1,"l":1744284272289584,"d":11048},{"a":1744284273103696,"name":"ack","f":1744284273096125,"d_finished":6904,"c":7,"l":1744284273103651,"d":7537},{"a":1744284273103688,"name":"processing","f":1744284272291434,"d_finished":389074,"c":56,"l":1744284273103652,"d":389715},{"name":"ProduceResults","f":1744284272282487,"d_finished":15547,"c":65,"l":1744284273103838,"d":15547},{"a":1744284273103839,"name":"Finish","f":1744284273103839,"d_finished":0,"c":0,"l":1744284273104329,"d":490},{"name":"task_result","f":1744284272291471,"d_finished":381047,"c":49,"l":1744284273095967,"d":381047}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1895:3869]->[1:1894:3868] 2025-04-10T11:24:33.104716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:24:32.277804Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-10T11:24:33.104750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:24:33.104961Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1895:3869];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::DeleteNothing [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.4%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPersQueueTest::TxCounters [GOOD] >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPQTest::TestPQPartialRead >> TPQTest::TestMessageNo >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTest::TestReadRuleVersions >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPQTabletTests::Multiple_PQTablets_1 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TFetchRequestTests::HappyWay >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount >> TPQTabletTests::Parallel_Transactions_1 >> TListAllTopicsTests::PlainList >> TPQTest::TestPartitionTotalQuota >> TPQTest::TestUserInfoCompatibility >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPQTest::TestSeveralOwners >> TPQTest::TestAccountReadQuota >> TPQTabletTests::DropTablet >> TPQTest::TestWritePQCompact >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTest::TestWriteTimeStampEstimate |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::Parallel_Transactions_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-04-10T11:24:00.270404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643768503967824:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:00.270572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bee/r3tmp/tmpF3d7Zn/pdisk_1.dat 2025-04-10T11:24:00.644454Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18410, node 1 2025-04-10T11:24:00.705758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:00.705949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:00.727134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:00.742463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:00.742492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:00.742506Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-04-10T11:24:00.742715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:01.033087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4236 2025-04-10T11:24:01.328211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:01.346620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:01.852036Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643772591292388:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:01.852095Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:01.857328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:01.857463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:01.864757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:24:01.887074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4236 2025-04-10T11:24:02.172801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4236 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284242372 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:24:02.652647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4236 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744284242757 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-10T11:24:02.972968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4236 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1744284243058 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:24:03.250545Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:24:03.251171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:24:06.416026Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643792339921887:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:06.416131Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bee/r3tmp/tmpHS9Xz3/pdisk_1.dat 2025-04-10T11:24:06.606075Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:06.650054Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:06.650180Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:06.652853Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27107, node 4 2025-04-10T11:24:06.756351Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:06.756375Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:06.756384Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:06.756537Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2954 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:07.095082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2954 2025-04-10T11:24:07.461062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:07.491318Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, ... ct path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001bee/r3tmp/tmp69Oo0O/pdisk_1.dat 2025-04-10T11:24:29.417795Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:29.468358Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:29.468496Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:29.472457Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21487, node 13 2025-04-10T11:24:29.559184Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:29.559222Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:29.559237Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:29.559472Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:30.053119Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:3225 2025-04-10T11:24:30.543192Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:30.578626Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:31.090704Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7491643901864550941:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:31.090804Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:31.094005Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:31.094137Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:31.101823Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-10T11:24:31.110928Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3225 2025-04-10T11:24:31.639151Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3225 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284272080 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:24:32.730119Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3225 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744284273160 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:24:33.903660Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.214324Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491643894559474903:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.214434Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3225 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1744284274440 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-10T11:24:35.037742Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3225 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1744284275460 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-10T11:24:36.090825Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7491643901864550941:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:36.090957Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:36.207781Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3225 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1744284276590 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-10T11:24:37.238757Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-10T11:24:37.253449Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-04-10T11:18:42.523647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491642401724705993:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.523769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.623844Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491642403818540009:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:18:42.624200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:18:42.864349Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:18:42.902645Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a24/r3tmp/tmpHGrqO6/pdisk_1.dat 2025-04-10T11:18:43.175485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:18:43.176675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.176767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.179063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:18:43.179142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:18:43.187190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:18:43.188064Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:18:43.188997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5243, node 1 2025-04-10T11:18:43.330463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000a24/r3tmp/yandexHelevw.tmp 2025-04-10T11:18:43.330489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000a24/r3tmp/yandexHelevw.tmp 2025-04-10T11:18:43.330661Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000a24/r3tmp/yandexHelevw.tmp 2025-04-10T11:18:43.330813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:18:43.585854Z INFO: TTestServer started on Port 62191 GrpcPort 5243 TClient is connected to server localhost:62191 PQClient connected to localhost:5243 === TenantModeEnabled() = 0 
=== Init PQ - start server on port 5243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:18:44.015010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:18:44.015291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.015558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:18:44.015864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:18:44.016039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.018276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:44.018417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:18:44.018573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.018624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:18:44.018635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:18:44.018646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T11:18:44.022418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.022527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:18:44.022552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 
2025-04-10T11:18:44.024455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.024490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.024528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.024575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 2025-04-10T11:18:44.040811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:18:44.041435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:44.041470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:18:44.041500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:18:44.042909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:18:44.043056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:18:44.046190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744283924089, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:18:44.046385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744283924089 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:18:44.046423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.046775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T11:18:44.046819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:18:44.047006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:18:44.047065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:18:44.050495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:18:44.050536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:18:44.050742Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:18:44.050767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491642406019673949:2408], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-10T11:18:44.050811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:18:44.050840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-10T11:18:44.050987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:18:44.050995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.051022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-10T11:18:44.051034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.051045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-10T11:18:44.051069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:18:44.051083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-10T11:18:44.051094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-10T11:18:44.051170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404664 ... Helper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T11:24:37.713088Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7491643926643587095:2494] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-10T11:24:37.713127Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-10T11:24:37.713889Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-10T11:24:37.713994Z node 32 :PERSQUEUE INFO: new Cookie 123|b1209b9c-3803d930-86045117-31aa6f10_0 generated for partition 0 topic 'topic' owner 123 2025-04-10T11:24:37.714511Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|b1209b9c-3803d930-86045117-31aa6f10_0 2025-04-10T11:24:37.720524Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-10T11:24:37.720754Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-04-10T11:24:37.720849Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7491643926643587102:2496]: Bootstrap 2025-04-10T11:24:37.723175Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7491643926643587102:2496]: Request location 2025-04-10T11:24:37.723402Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7491643926643587112:2497] connected; active server actors: 1 2025-04-10T11:24:37.723485Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-04-10T11:24:37.723556Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7491643926643587102:2496]: Got location 2025-04-10T11:24:37.723711Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7491643926643587112:2497] disconnected; active server actors: 1 2025-04-10T11:24:37.723755Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7491643926643587112:2497] disconnected no session 2025-04-10T11:24:37.726315Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|b1209b9c-3803d930-86045117-31aa6f10_0 grpc read done: success: 0 data: 2025-04-10T11:24:37.726345Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|b1209b9c-3803d930-86045117-31aa6f10_0 grpc read failed 2025-04-10T11:24:37.726390Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|b1209b9c-3803d930-86045117-31aa6f10_0 grpc closed 2025-04-10T11:24:37.726411Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|b1209b9c-3803d930-86045117-31aa6f10_0 is DEAD 2025-04-10T11:24:37.727948Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:24:37.728952Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-10T11:24:37.728984Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-10T11:24:37.730029Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-04-10T11:24:37.730205Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:55642 2025-04-10T11:24:37.730235Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:55642 proto=topic topic=topic durationSec=0 2025-04-10T11:24:37.730246Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-04-10T11:24:37.730285Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-04-10T11:24:37.731602Z node 32 
:PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-10T11:24:37.731870Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T11:24:37.731904Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:24:37.731922Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T11:24:37.731944Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7491643926643587117:2499] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-10T11:24:37.731966Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-10T11:24:37.732563Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-10T11:24:37.732662Z node 32 :PERSQUEUE INFO: new Cookie 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 generated for partition 0 topic 'topic' owner 123 2025-04-10T11:24:37.733203Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 2025-04-10T11:24:37.734624Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:24:37.734925Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:24:37.735565Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-10T11:24:37.735710Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:24:37.735921Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-10T11:24:37.736648Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=NjIyY2Q4OTItMmQ5MTQyYWYtZGMwZDhjMDUtOWU2MjZmMmU= TxId: 01jrfq3pvfb76736jvs5x5vnsr WriteId: {32, 281474976715673} 2025-04-10T11:24:37.739952Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715673}, 100000}, State: StateInit] bootstrapping 
{0, {32, 281474976715673}, 100000} [32:7491643926643587129:2501] 2025-04-10T11:24:37.742531Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976715673}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:37.742615Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715673}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976715673}, 100000} generation 1 [32:7491643926643587129:2501] 2025-04-10T11:24:37.743024Z node 32 :PERSQUEUE INFO: new Cookie 123|a3d1713c-1c6387a0-8936a2ec-3cf1e849_0 generated for partition {0, {32, 281474976715673}, 100000} topic 'topic' owner 123 2025-04-10T11:24:37.745268Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.745376Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.745435Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.745480Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.750084Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.754692Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.754735Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.754756Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-10T11:24:37.837289Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read done: success: 0 data: 2025-04-10T11:24:37.837334Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 grpc read failed 2025-04-10T11:24:37.838069Z node 32 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 2025-04-10T11:24:37.838113Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|ab316db6-f2c260bc-cbffd11c-f581f083_0 is DEAD 2025-04-10T11:24:37.838864Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-10T11:24:37.838942Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=api.grpc.topic.stream_write.uncommitted_bytes: 20796
name=api.grpc.topic.stream_write.uncommitted_messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncommitted_bytes: 20796
name=topic.write.uncommitted_messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=200: 0
    bin=500: 0
    bin=1000: 0
    bin=2000: 3
    bin=5000: 1
    bin=10000: 0
    bin=30000: 0
    bin=60000: 0
    bin=180000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=5120: 0
    bin=10240: 2
    bin=20480: 1
    bin=51200: 0
    bin=102400: 0
    bin=204800: 0
    bin=524288: 0
    bin=1048576: 0
    bin=2097152: 0
    bin=5242880: 0
    bin=10485760: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=5: 0
    bin=10: 0
    bin=20: 0
    bin=50: 0
    bin=100: 0
    bin=500: 0
    bin=1000: 0
    bin=2500: 0
    bin=5000: 0
    bin=10000: 0
    bin=999999: 0
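The topic.write.* histograms above are cumulative per-bin counts keyed by an upper bound: the four writes land as three samples in the 2000 ms lag bin and one in the 5000 ms bin. A minimal sketch of this kind of upper-bound bucketing follows; the bin bounds are copied from the log output, but the class and its naming are illustrative assumptions, not YDB's actual counter implementation.

    #include <algorithm>
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <vector>

    // Hypothetical upper-bound histogram, mimicking counters such as
    // topic.write.lag_milliseconds. Illustrative only, not YDB code.
    class TBoundedHistogram {
    public:
        explicit TBoundedHistogram(std::vector<uint64_t> bounds)
            : Bounds(std::move(bounds)) {
            std::sort(Bounds.begin(), Bounds.end());
            for (uint64_t b : Bounds) {
                Counts[b] = 0;
            }
        }

        void Add(uint64_t value) {
            // Attribute the sample to the first bound >= value;
            // anything beyond the last bound falls into the last bin.
            auto it = std::lower_bound(Bounds.begin(), Bounds.end(), value);
            if (it == Bounds.end()) {
                --it;
            }
            ++Counts[*it];
        }

        void Print() const {
            for (const auto& [bound, count] : Counts) {
                std::cout << "    bin=" << bound << ": " << count << "\n";
            }
        }

    private:
        std::vector<uint64_t> Bounds;          // ascending upper bounds
        std::map<uint64_t, uint64_t> Counts;   // bound -> sample count
    };

    int main() {
        TBoundedHistogram lag({100, 200, 500, 1000, 2000, 5000, 10000,
                               30000, 60000, 180000, 999999});
        // Hypothetical lag values that reproduce the counts above:
        // three samples fall into the 2000 bin, one into the 5000 bin.
        for (uint64_t ms : {1500, 1700, 1900, 4200}) {
            lag.Add(ms);
        }
        lag.Print();
        return 0;
    }

Printing from a std::map keyed on the numeric bound yields bins in numeric order, which is how the histogram blocks above are laid out.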
2025-04-10T11:24:37.914083Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 0 >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> TPQTabletTests::Multiple_PQTablets_2 >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPQTabletTests::Parallel_Transactions_2 >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> TPartitionTests::ShadowPartitionCountersFirstClass >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTabletTests::DropTablet_And_Tx >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> TPQTabletTests::Cancel_Tx >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestOwnership ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2025-04-10T11:24:40.338206Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.338280Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.357106Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:40.357220Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.371264Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-04-10T11:24:40.373868Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-04-10T11:24:40.374038Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.375713Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-04-10T11:24:40.375841Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.375907Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.376351Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.376740Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:40.377637Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-04-10T11:24:40.377688Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:40.377743Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.378719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.378846Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:40.378907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:40.378952Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit request with generation 1 2025-04-10T11:24:40.378980Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit with generation 1 done 2025-04-10T11:24:40.379406Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.379462Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.379499Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.379537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.379586Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:40.379617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:40.379638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-04-10T11:24:40.379681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-04-10T11:24:40.379722Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.379776Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.379864Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.379898Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.380034Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.380299Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.380543Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-04-10T11:24:40.381244Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-10T11:24:40.381307Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-04-10T11:24:40.381347Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.382139Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.382289Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-10T11:24:40.382330Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-10T11:24:40.460366Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit request with generation 1 2025-04-10T11:24:40.460467Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit with generation 1 done 2025-04-10T11:24:40.460593Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.460634Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.460668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.460713Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.460739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-10T11:24:40.460767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-10T11:24:40.460791Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-04-10T11:24:40.460812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-04-10T11:24:40.460837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.460881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:40.460964Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.461012Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.461194Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:40.470692Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.470834Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.471200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2208], now have 1 active actors on pipe 2025-04-10T11:24:40.471853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-04-10T11:24:40.471944Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.471995Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.472073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size 7 offset: -1 2025-04-10T11:24:40.472181Z node 1 :PERSQUEUE DEBUG: tablet ... Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T11:24:40.721213Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:40.721253Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:40.721302Z node 1 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:40.721326Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-04-10T11:24:40.721360Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [1:292:2283] 2025-04-10T11:24:40.721394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.765687Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.765833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-10T11:24:40.765904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-10T11:24:40.766365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T11:24:40.766418Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T11:24:40.766490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-10T11:24:40.766530Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:24:40.766668Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 1000000 queuesize 1 startOffset 0 2025-04-10T11:24:40.766715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-10T11:24:40.766762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T11:24:40.767021Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'topic' partition 0 user aaa offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-10T11:24:40.767058Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-10T11:24:40.767092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-10T11:24:40.767130Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:24:40.767202Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user aaa readTimeStamp done, result 1000000 queuesize 0 startOffset 0 2025-04-10T11:24:40.768111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:316:2296], now have 1 active actors on pipe 2025-04-10T11:24:40.768221Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.768261Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.768789Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.769094Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:319:2298], now have 1 active actors on pipe 2025-04-10T11:24:40.769204Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.769256Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.769336Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 1 size 7 offset: -1 2025-04-10T11:24:40.769418Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-10T11:24:40.769502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error new GetOwnership request needed for owner 2025-04-10T11:24:40.769553Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-04-10T11:24:40.769992Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:321:2300], now have 1 active actors on pipe 2025-04-10T11:24:40.770077Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.770117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.770224Z node 1 :PERSQUEUE INFO: new Cookie default|108d6d11-f96c7c4-7b0a0a88-4c4ac563_0 
generated for partition 0 topic 'topic' owner default 2025-04-10T11:24:40.770339Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T11:24:40.770443Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.770762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:323:2302], now have 1 active actors on pipe 2025-04-10T11:24:40.770877Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.770914Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.771000Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 0 size 7 offset: -1 2025-04-10T11:24:40.771106Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 16. Cookie: 1 2025-04-10T11:24:40.771210Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". Partition: 0: Cookie: 1 2025-04-10T11:24:40.771373Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid0' seqNo 2 partNo 0 2025-04-10T11:24:40.772350Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid0' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 79 count 1 nextOffset 2 batches 1 2025-04-10T11:24:40.772933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 65 WTime 2000000 2025-04-10T11:24:40.773074Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.773115Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.773164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:40.773214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.773257Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid0 2025-04-10T11:24:40.773311Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-04-10T11:24:40.773338Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.773371Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.773460Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.773586Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.773678Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 65 2025-04-10T11:24:40.776961Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. 
Partition 0 offset 1 count 1 size 65 actorID [1:281:2274] 2025-04-10T11:24:40.777096Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.777164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:40.777221Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid0', Topic: 'topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-10T11:24:40.777510Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Topic 'topic' counters. CacheSize 65 CachedBlobs 1 2025-04-10T11:24:40.777597Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 1 partno 0 count 1 parts 0 size 65 2025-04-10T11:24:40.777698Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.778055Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:329:2307], now have 1 active actors on pipe 2025-04-10T11:24:40.778163Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.778207Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.778304Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.778663Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:331:2309], now have 1 active actors on pipe 2025-04-10T11:24:40.778776Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.778813Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.778888Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.779189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:333:2311], now have 1 active actors on pipe 2025-04-10T11:24:40.779281Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:40.871297Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:40.871531Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPQTest::TestReserveBytes >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> 
TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] |92.5%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTabletTests::ProposeTx_Missing_Operations >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQTabletTests::Huge_ProposeTransacton >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 Test command err: Size: 8002 Create chunk: 0.000236s Read by index: 0.000023s Iterate: 0.000140s Size: 8256 Create chunk: 0.000195s Read by index: 0.000037s Iterate: 0.000078s Size: 8532 Create chunk: 0.000144s Read by index: 0.000040s Iterate: 0.000036s Size: 7769 Create chunk: 0.000126s Read by index: 0.000045s Iterate: 0.000045s Size: 2853 Create chunk: 0.000123s Read by index: 0.000079s Iterate: 0.000041s Size: 2419 Create chunk: 0.000111s Read by index: 0.000110s Iterate: 0.000047s Size: 2929 Create chunk: 0.000086s Read by index: 0.000081s Iterate: 0.000039s Size: 2472 Create chunk: 0.000095s Read by index: 0.000095s Iterate: 0.000052s Size: 2407 Create chunk: 0.000097s Read by index: 0.000086s Iterate: 0.000043s Size: 2061 Create chunk: 0.000113s Read by index: 0.000091s Iterate: 0.000053s >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State >> TPQTabletTests::One_New_Partition_In_Another_Tablet |92.5%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-04-10T11:24:40.279732Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279835Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.304602Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.310343Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.310870Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-04-10T11:24:40.311901Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 
2025-04-10T11:24:40.311969Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] 2025-04-10T11:24:40.312035Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.312533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.313688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.313748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.313844Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.313902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.313945Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-04-10T11:24:40.313971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-04-10T11:24:40.313998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-04-10T11:24:40.314054Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.314107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:40.314879Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.315318Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-10T11:24:40.315494Z node 1 :PERSQUEUE INFO: new Cookie owner1|f1b8fca6-2176ce40-47a3c23b-6c49c557_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-04-10T11:24:40.316790Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.316927Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-04-10T11:24:40.318287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-04-10T11:24:40.318442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-04-10T11:24:40.318747Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-04-10T11:24:40.319702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-10T11:24:40.320929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-04-10T11:24:40.321104Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.321153Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.321203Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-04-10T11:24:40.321247Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.321323Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-04-10T11:24:40.321357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000| 2025-04-10T11:24:40.321384Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.321418Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.321459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:40.365827Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.365931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-10T11:24:40.366016Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-04-10T11:24:40.366284Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.682499Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-10T11:24:40.717159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-04-10T11:24:40.717287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2025-04-10T11:24:40.717449Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-04-10T11:24:40.717801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-04-10T11:24:40.717895Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2025-04-10T11:24:40.718963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-04-10T11:24:40.719583Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2025-04-10T11:24:40.719755Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.719801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.719853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-04-10T11:24:40.719898Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.719961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000 2025-04-10T11:24:40.720003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-04-10T11:24:40.720044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000200_00000_0000000001_00000| 2025-04-10T11:24:40.720072Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.720101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.720145Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:40.742511Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.742626Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-04-10T11:24:40.742727Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2025-04-10T11:24:40.743042Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.743111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.743158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [d0000000001_00000000000000000100_00000_0000000001_00000|, d0000000001_00000000000000000100_00000_0000000001_00000|] 2025-04-10T11:24:40.743210Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.743252Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.743314Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:2 ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 
311 Iteration 312 ... Iteration 816 Iteration 817 Iteration
818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPartitionTests::UserActCount >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-04-10T11:24:40.297986Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:40.302457Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:40.302784Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:40.302848Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:40.302888Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:40.302924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:40.302983Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.303057Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.333461Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-04-10T11:24:40.333598Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.354766Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.357761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.357893Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.358923Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.359085Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.359481Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.359861Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-04-10T11:24:40.360789Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:40.360845Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-04-10T11:24:40.360909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.361735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.361907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:40.361955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:40.362014Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-10T11:24:40.362045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-10T11:24:40.362228Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.362271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.362310Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.362365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.362394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:40.362420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:40.362442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-04-10T11:24:40.362463Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-04-10T11:24:40.362498Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.362537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.362632Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.362684Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.362861Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.365410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.365839Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-04-10T11:24:40.366491Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-04-10T11:24:40.367296Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-10T11:24:40.367351Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:40.367480Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-10T11:24:40.367563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:40.367599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-10T11:24:40.367642Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:40.367678Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-10T11:24:40.367835Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-10T11:24:40.367924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:40.371564Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:40.371621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-10T11:24:40.371658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-10T11:24:40.371698Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-10T11:24:40.372041Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-10T11:24:40.372078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:40.372127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-10T11:24:40.372164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 
2025-04-10T11:24:40.372202Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-10T11:24:40.372237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:40.372274Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-10T11:24:40.372449Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-10T11:24:40.372531Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:40.376381Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:40.376441Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPA ... t with generation 2 done 2025-04-10T11:24:43.149139Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-04-10T11:24:43.149178Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-04-10T11:24:43.149466Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:43.149517Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:43.149564Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cuser, m0000000000cuser] 2025-04-10T11:24:43.149602Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uuser, m0000000000uuser] 2025-04-10T11:24:43.149636Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:43.149675Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:43.149710Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:43.149739Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-10T11:24:43.149766Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-10T11:24:43.149794Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-3 2025-04-10T11:24:43.149821Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-3 2025-04-10T11:24:43.149848Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:24:43.149882Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:43.149920Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:43.149993Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-10T11:24:43.150217Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: 
StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:43.150255Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:43.150286Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:43.150312Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:43.150339Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] I0000000001 2025-04-10T11:24:43.150366Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-04-10T11:24:43.150393Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-04-10T11:24:43.150416Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-3 2025-04-10T11:24:43.150440Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-3 2025-04-10T11:24:43.150462Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-04-10T11:24:43.150489Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:43.150516Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:43.150666Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:43.150751Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:43.153202Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-10T11:24:43.153261Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-10T11:24:43.155750Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:43.156202Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-10T11:24:43.156254Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-10T11:24:43.156293Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-10T11:24:43.156332Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-10T11:24:43.156369Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-04-10T11:24:43.156407Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-04-10T11:24:43.157771Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:43.157931Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-04-10T11:24:43.157965Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-10T11:24:43.157991Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-10T11:24:43.158022Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-10T11:24:43.158048Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 
2025-04-10T11:24:43.158080Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-10T11:24:43.158164Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-10T11:24:43.158409Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-04-10T11:24:43.158478Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.158551Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-10T11:24:43.158589Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-10T11:24:43.158628Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-10T11:24:43.158892Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 133 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-10T11:24:43.159111Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:43.161904Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:43.161947Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-10T11:24:43.161976Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-10T11:24:43.162006Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-10T11:24:43.162039Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:24:43.162083Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 
2025-04-10T11:24:43.162122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:24:43.162188Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:43.162224Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:43.162255Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:43.166530Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:363:2335], now have 1 active actors on pipe 2025-04-10T11:24:43.166713Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-10T11:24:43.166761Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-04-10T11:24:43.166816Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-10T11:24:43.166916Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 72057594037927937 TabletDest: 22222 TabletConsumer: 22222 Flags: 0 Seqno: 0 2025-04-10T11:24:43.166954Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSetAck 2025-04-10T11:24:43.166990Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicate acks 1/1 2025-04-10T11:24:43.167034Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2025-04-10T11:24:43.167069Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2025-04-10T11:24:43.167114Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-04-10T11:24:43.167139Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:43.167172Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-04-10T11:24:43.167213Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-04-10T11:24:43.167254Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-04-10T11:24:43.167297Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2025-04-10T11:24:43.167367Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] Test command err: 2025-04-10T11:24:40.273399Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:40.279940Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:40.280282Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:40.280342Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:40.280383Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:40.280443Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:40.280506Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.280572Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.307016Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:40.307177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.339158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.342223Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.342376Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.343413Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.343559Z 
node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.343966Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.344411Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:40.345456Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:40.345522Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:40.345597Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.346183Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.346333Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:40.346390Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:40.346536Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.346585Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.346675Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.346737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.346775Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:40.346799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:40.346830Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.346870Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.346961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.347139Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.349902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.350374Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-10T11:24:40.351773Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-10T11:24:40.351867Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet 2025-04-10T11:24:40.916306Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:40.923306Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:40.923652Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:40.923706Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:40.923744Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:40.923782Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:40.923826Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.923880Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.952837Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:40.952970Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.953266Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:40.955918Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:40.956039Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.957245Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:40.957408Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.957489Z node 2 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.957961Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.958293Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-04-10T11:24:40.959286Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:40.959344Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-04-10T11:24:40.959426Z node 2 :PERSQUEUE DEBUG ... DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-04-10T11:24:43.127846Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-04-10T11:24:43.128098Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-04-10T11:24:43.128126Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-04-10T11:24:43.128150Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-04-10T11:24:43.128177Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-10T11:24:43.128202Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-04-10T11:24:43.128229Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-04-10T11:24:43.128263Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-10T11:24:43.128299Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-04-10T11:24:43.128341Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-10T11:24:43.128485Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 152 MaxStep: 30152 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:43.128568Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:43.133700Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:43.133745Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-10T11:24:43.133771Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-10T11:24:43.133808Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-10T11:24:43.133838Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-10T11:24:43.133868Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 
2025-04-10T11:24:43.133899Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-10T11:24:43.133930Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-10T11:24:43.133987Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-04-10T11:24:43.137478Z node 6 :PERSQUEUE DEBUG: Client pipe to tablet 72057594037927937 from 22222 is reset 2025-04-10T11:24:43.156415Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:43.158122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:43.159079Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-04-10T11:24:43.159122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-04-10T11:24:43.159231Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-04-10T11:24:43.159296Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-04-10T11:24:43.159355Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067891, Status 0 2025-04-10T11:24:43.159381Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-04-10T11:24:43.159400Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Fix tx state 2025-04-10T11:24:43.159435Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-04-10T11:24:43.159467Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-04-10T11:24:43.159495Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-04-10T11:24:43.159522Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-04-10T11:24:43.159972Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.160007Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info 2025-04-10T11:24:43.160091Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:43.160317Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:43.160514Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:360:2338] 2025-04-10T11:24:43.161165Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:24:43.162092Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:24:43.162481Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:24:43.162959Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:24:43.163132Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:43.163173Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:43.163204Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:24:43.163234Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:43.163275Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:360:2338] 2025-04-10T11:24:43.163327Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:43.163367Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:43.163423Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-04-10T11:24:43.163595Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-10T11:24:43.163624Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-10T11:24:43.163718Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-10T11:24:43.163749Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-10T11:24:43.163779Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-10T11:24:43.163813Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:24:43.163841Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-10T11:24:43.163868Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:24:43.163897Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:43.163918Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:43.163942Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:43.163966Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-10T11:24:43.163983Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-04-10T11:24:43.164000Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-04-10T11:24:43.164017Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-10T11:24:43.164038Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-04-10T11:24:43.164087Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-04-10T11:24:43.164123Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-04-10T11:24:43.164275Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-04-10T11:24:43.164604Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-04-10T11:24:43.164634Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-04-10T11:24:43.164656Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-04-10T11:24:43.164681Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATING 2025-04-10T11:24:43.164709Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-04-10T11:24:43.164737Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-04-10T11:24:43.164762Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-10T11:24:43.164790Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-04-10T11:24:43.164824Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-10T11:24:43.164971Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 152 MaxStep: 30152 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:43.165055Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:43.165203Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-10T11:24:43.165233Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-10T11:24:43.168186Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:43.168253Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-10T11:24:43.168291Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-10T11:24:43.168351Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-10T11:24:43.168395Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-10T11:24:43.168435Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-04-10T11:24:43.168487Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2025-04-10T11:24:43.168527Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-10T11:24:43.168602Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPartitionTests::CorrectRange_Rollback >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPQTest::TestDirectReadHappyWay >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-04-10T11:24:42.620761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:42.625143Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:42.625507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:42.625581Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:42.625624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:42.625664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:42.625716Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:42.625823Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:42.644099Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:42.644258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:42.661552Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:42.665562Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:42.665717Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:42.666728Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:42.666873Z 
node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:42.667453Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:42.667839Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:42.668815Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:42.668913Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:42.668979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:42.669573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:42.669709Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:42.669778Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:42.669929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:42.669972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:42.670018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:42.670062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:42.670107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:42.670136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:42.670226Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:42.670273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:42.670379Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:42.670564Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:42.673363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:42.673824Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-10T11:24:42.674535Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-10T11:24:42.675393Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 2 Data { Immediate: false } 2025-04-10T11:24:42.675459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-04-10T11:24:42.675523Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-10T11:24:43.043037Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:43.046584Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:43.046828Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:43.046872Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:43.046905Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:43.046942Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:43.046982Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.047029Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:43.064717Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:43.064807Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:43.064995Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:43.066920Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:43.067010Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.067609Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:43.067744Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:43.068071Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:43.068288Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-04-10T11:24:43.068973Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:43.069014Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-04-10T11:24:43.069059Z node 2 :PERSQUEUE DEBUG: [PQ: 7205759403792793 ... TInitInternalFieldsStep 2025-04-10T11:24:44.835037Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:302:2288] 2025-04-10T11:24:44.835860Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:24:44.837052Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:24:44.837365Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:24:44.838132Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:24:44.838550Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:44.838598Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:44.838641Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:44.838678Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:44.838736Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:302:2288] 2025-04-10T11:24:44.838792Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:44.838862Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:24:44.838953Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-04-10T11:24:44.839116Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-10T11:24:44.839157Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-04-10T11:24:44.839195Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State PLANNED FrontTxId 67890 2025-04-10T11:24:44.839251Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-10T11:24:44.839293Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-10T11:24:44.839371Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-10T11:24:44.839419Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-10T11:24:44.839722Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-10T11:24:44.840029Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-10T11:24:44.840077Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-10T11:24:44.840116Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-10T11:24:44.840154Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-10T11:24:44.840196Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-10T11:24:44.840236Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-10T11:24:44.840273Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-10T11:24:44.840321Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-10T11:24:44.840362Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-10T11:24:44.840555Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:44.840661Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:44.849379Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:44.849460Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-10T11:24:44.849526Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-10T11:24:44.849577Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-10T11:24:44.849618Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-10T11:24:44.849664Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-10T11:24:44.849711Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. 
Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-10T11:24:44.849759Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-10T11:24:44.849900Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-04-10T11:24:44.851607Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-10T11:24:44.851671Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-10T11:24:44.853899Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:327:2306], now have 1 active actors on pipe 2025-04-10T11:24:44.854089Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-10T11:24:44.854139Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-10T11:24:44.854205Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-10T11:24:44.854249Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-04-10T11:24:44.854293Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-04-10T11:24:44.854338Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-04-10T11:24:44.854385Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-10T11:24:44.854466Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-10T11:24:44.854515Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-10T11:24:44.854564Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-10T11:24:44.854680Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-10T11:24:44.854747Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-10T11:24:44.854922Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:44.854970Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:44.855022Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:44.855067Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:44.855101Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:44.855126Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:44.855148Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:44.855182Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:44.855226Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:44.855322Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-04-10T11:24:44.855376Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:44.857945Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:44.858065Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-10T11:24:44.858111Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-10T11:24:44.858173Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-10T11:24:44.858214Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-10T11:24:44.858262Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-10T11:24:44.858313Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-10T11:24:44.858359Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-10T11:24:44.858401Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-10T11:24:44.858449Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-10T11:24:44.858507Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-10T11:24:44.858721Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:44.858819Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:44.862197Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:44.862261Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-10T11:24:44.862306Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-10T11:24:44.862353Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-10T11:24:44.862399Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:24:44.862454Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-10T11:24:44.862509Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-10T11:24:44.862548Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:24:44.862610Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:44.862648Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:44.862689Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> 
TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-04-10T11:24:33.965571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.965815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.966038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001009/r3tmp/tmp6LxYQa/pdisk_1.dat 2025-04-10T11:24:34.360279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.398027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.404513Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-10T11:24:34.440657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.440810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.452395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.533351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.567951Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:24:34.568222Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.614278Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.614433Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.616372Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.616496Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.616567Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.617069Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.617248Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.617359Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:24:34.628278Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.654906Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.655186Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.655331Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:24:34.655379Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.655420Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.655459Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-10T11:24:34.656066Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.656176Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.656275Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.656333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.656393Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.656439Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.656551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:24:34.657100Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.657380Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.657475Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.659459Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.670358Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.670491Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.824670Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:24:34.838761Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.838879Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.840086Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.840147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.840228Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.840527Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.840756Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.841715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.841849Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:34.844247Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:34.844815Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.853539Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:34.853613Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.854284Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:34.854386Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.855323Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.855368Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.855433Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:34.855523Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:34.855586Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:34.855679Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.860268Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.863059Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:34.863137Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:34.863844Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:34.874943Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.875083Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.875145Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:34.875183Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:34.876235Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.906787Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:35.170723Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:35.170804Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:35.171232Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.171297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-10T11:24:35.171346Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:35.171589Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:35.171768Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:35.172108Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.172943Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:35.233851Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:35.233971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.234020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.234065Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... ecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:45.029605Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.030036Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:45.030132Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:45.030808Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:45.030846Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:45.030902Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:45.030986Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:45.031041Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:45.031178Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.032716Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:45.035160Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:45.035244Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:45.035458Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:45.047446Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:45.047621Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:45.047680Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 
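The TX_DATASHARD lines above show each scheme transaction being proposed, planned at a mediator step ("Planned transaction txId 281474976715658 at step 1500"), and later picked up as "Found ready operation [1500:281474976715658] in PlanQueue". That implies execution ordered by the (step, txId) pair once mediator time reaches the step. The following is a minimal sketch under that assumption; TPlanQueueSketch is a hypothetical name, not the datashard's real structure.

// Illustrative only: planned transactions become ready in (step, txId) order.
#include <cstdint>
#include <map>
#include <optional>
#include <utility>

class TPlanQueueSketch {
public:
    void Plan(uint64_t step, uint64_t txId) {
        Queue.emplace(std::make_pair(step, txId), /*ready=*/false);
    }

    // Called when the mediator timecast advances, e.g. to step 1500.
    void AdvanceMediatorTime(uint64_t step) {
        for (auto& [key, ready] : Queue)
            if (key.first <= step)
                ready = true;
    }

    // Returns the next ready (step, txId) in strictly increasing order,
    // mirroring "Found ready operation [1500:281474976715658] in PlanQueue".
    std::optional<std::pair<uint64_t, uint64_t>> Next() {
        if (!Queue.empty() && Queue.begin()->second) {
            auto key = Queue.begin()->first;
            Queue.erase(Queue.begin());
            return key;
        }
        return std::nullopt;
    }

private:
    std::map<std::pair<uint64_t, uint64_t>, bool> Queue; // (step, txId) -> ready
};

Under this reading, Plan(1500, 281474976715658) followed by AdvanceMediatorTime(1500) makes Next() yield that operation, matching the order the shard reports before it moves on to CDC change-record persistence.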
2025-04-10T11:24:45.047738Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:45.048859Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:45.074174Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:45.276471Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:45.276557Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.276774Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:45.276856Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:45.276915Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:45.277154Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:45.277334Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:45.277687Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:45.278689Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:45.319975Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:45.320082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:45.320125Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:45.320196Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.320285Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:45.320352Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-10T11:24:45.320487Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.322921Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-10T11:24:45.322994Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:45.330663Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.330775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:895:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.330847Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.336677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:45.343778Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:45.503844Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:45.507343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:45.533627Z node 3 :TX_PROXY ERROR: Actor# [3:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:45.588575Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq3ych4a4tfqjrd83ck6rz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTcwNDhlZmEtMWE2OTQ5MWUtNjU3ZjAxZGMtZWFlZWI1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:45.591152Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:987:2786], serverId# [3:988:2787], sessionId# [0:0:0] 2025-04-10T11:24:45.591586Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-10T11:24:45.591900Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284285591780 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:45.592109Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-10T11:24:45.603047Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:45.603147Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.645106Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq3yn6cakkc4x9s2zsscgf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTE5NzQxOTUtZjVhZjk0MjgtZTYyNzgwMjgtZTBhODRmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:45.646810Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-10T11:24:45.647039Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284285646961 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:45.647174Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-10T11:24:45.658075Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:45.658159Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.725110Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jrfq3ypx1qk441smecr34gjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWRkMTQxNjAtODkxOTgwYmMtNzgyZDk5ZjYtMWVkYzU3ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:45.726798Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-10T11:24:45.727030Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744284285726949 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:45.727133Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:5] at 72075186224037888, row count=1 2025-04-10T11:24:45.738113Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:45.738238Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:45.740161Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1034:2816], serverId# [3:1035:2817], sessionId# [0:0:0] 2025-04-10T11:24:45.746403Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1036:2818], serverId# [3:1037:2819], sessionId# [0:0:0] >> TPartitionTests::Batching >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> TPartitionTests::Batching [GOOD] >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL >> TPartitionTests::CommitOffsetRanges >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPartitionTests::AfterRestart_1 >> TPartitionTests::IncorrectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T11:22:38.344919Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:38.433253Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:22:38.437527Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:22:38.437870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:38.457057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:38.457350Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:38.465170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:38.465351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
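From this point the TX_COLUMNSHARD output switches to a semicolon-delimited "key=value;" format (tablet_id=...;process=...;fline=...;event=...;description=...). When sifting through long runs of these lines it can help to split each tail into fields; the helper below is a small illustrative sketch for that, not a tool shipped with ya or YDB.

// Illustrative only: split one "key=value;key=value;..." log tail into a map.
#include <map>
#include <sstream>
#include <string>

std::map<std::string, std::string> ParseKv(const std::string& tail) {
    std::map<std::string, std::string> out;
    std::stringstream ss(tail);
    std::string field;
    while (std::getline(ss, field, ';')) {
        auto eq = field.find('=');
        if (eq != std::string::npos)
            out[field.substr(0, eq)] = field.substr(eq + 1);
    }
    return out;
}

For example, ParseKv("fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1") yields event -> normalizer_finished, which makes it easy to count lifecycle events per normalizer across a run like the one below.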
2025-04-10T11:22:38.465543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:38.465623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:38.465726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:38.465847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:38.465974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:38.466106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:38.466209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:38.466293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:38.466409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:38.466488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:38.485173Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:22:38.489348Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:38.489704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:38.489764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:38.489911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:38.490064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:38.490132Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:38.490185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:38.490267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:38.490320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:38.490351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:38.490371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:38.490530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:38.490605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:38.490647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:38.490675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:38.490773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:38.490828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:38.490864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:38.490886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:38.490936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:38.490977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:38.491015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:38.491050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
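The normalizer lifecycle recorded in these TTxUpdateSchema lines is strictly sequential: each CLASS_NAME is registered, initialized (normalizer_init), run against the tablet's data ("0 chunks found"), marked normalizer_finished, and then the tablet logs normalizer_switched to the next one (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on). The C++ sketch below mirrors that loop; INormalizerSketch and RunChain are illustrative names only, not the columnshard's actual interfaces.

// Illustrative only: a sequential normalizer chain matching the log's
// init -> run -> finished -> switched pattern.
#include <cstddef>
#include <cstdio>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct INormalizerSketch {
    virtual ~INormalizerSketch() = default;
    virtual std::string ClassName() const = 0;
    virtual void Run() = 0;  // e.g. "TGranulesNormalizer ... 0 chunks found"
};

struct TNoopNormalizer : INormalizerSketch {
    std::string Name;
    explicit TNoopNormalizer(std::string name) : Name(std::move(name)) {}
    std::string ClassName() const override { return Name; }
    void Run() override { std::printf("%s: 0 chunks found\n", Name.c_str()); }
};

// Runs each registered normalizer in order, emitting the same lifecycle the
// tablet does: finish the current one, then switch to the next.
void RunChain(const std::vector<std::unique_ptr<INormalizerSketch>>& chain) {
    for (std::size_t i = 0; i < chain.size(); ++i) {
        std::printf("normalizer_init: %s\n", chain[i]->ClassName().c_str());
        chain[i]->Run();
        std::printf("normalizer_finished: %s\n", chain[i]->ClassName().c_str());
        if (i + 1 < chain.size())
            std::printf("normalizer_switched: %s\n", chain[i + 1]->ClassName().c_str());
    }
}

Seeding the chain with TNoopNormalizer("Granules"), "Chunks", "TablesCleaner", etc. reproduces the ordering visible in this trace; only after the whole chain completes does the tablet proceed past TTxUpdateSchema.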
2025-04-10T11:22:38.491082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:38.491105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:38.491462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:22:38.491541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-10T11:22:38.491640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-10T11:22:38.491741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:22:38.491910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:38.491959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:38.491986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:38.492280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:38.492337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:38.492374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:38.492528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:38.492574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:38.492611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:38.492818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:22:38.492866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:22:38.492904Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.798712Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.798746Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:45.798783Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:24:45.798888Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:45.798976Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.799019Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:24:45.799108Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-10T11:24:45.799158Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-10T11:24:45.799307Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: 
uint64; 2025-04-10T11:24:45.799414Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.799515Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.799685Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.799811Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:24:45.799896Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.799973Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.800025Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:592:2608] finished for tablet 9437184 2025-04-10T11:24:45.800670Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:585:2601];stats={"p":[{"events":["f_bootstrap"],"t":0.064},{"events":["f_ProduceResults"],"t":0.515},{"events":["l_bootstrap"],"t":0.76},{"events":["f_processing","f_task_result"],"t":0.78},{"events":["l_task_result"],"t":7.276},{"events":["f_ack"],"t":7.311},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.193}],"full":{"a":1744284277606497,"name":"_full_task","f":1744284277606497,"d_finished":0,"c":0,"l":1744284285800107,"d":8193610},"events":[{"name":"bootstrap","f":1744284277671125,"d_finished":695662,"c":1,"l":1744284278366787,"d":695662},{"a":1744284285799794,"name":"ack","f":1744284284917882,"d_finished":818973,"c":903,"l":1744284285799720,"d":819286},{"a":1744284285799781,"name":"processing","f":1744284278386668,"d_finished":3662717,"c":4515,"l":1744284285799722,"d":3663043},{"name":"ProduceResults","f":1744284278122289,"d_finished":1510596,"c":5420,"l":1744284285800000,"d":1510596},{"a":1744284285800007,"name":"Finish","f":1744284285800007,"d_finished":0,"c":0,"l":1744284285800107,"d":100},{"name":"task_result","f":1744284278386714,"d_finished":2762310,"c":3612,"l":1744284284882922,"d":2762310}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.800770Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:585:2601];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:24:45.801395Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:585:2601];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.064},{"events":["f_ProduceResults"],"t":0.515},{"events":["l_bootstrap"],"t":0.76},{"events":["f_processing","f_task_result"],"t":0.78},{"events":["l_task_result"],"t":7.276},{"events":["f_ack"],"t":7.311},{"events":["l_ProduceResults","f_Finish"],"t":8.193},{"events":["l_ack","l_processing","l_Finish"],"t":8.194}],"full":{"a":1744284277606497,"name":"_full_task","f":1744284277606497,"d_finished":0,"c":0,"l":1744284285800832,"d":8194335},"events":[{"name":"bootstrap","f":1744284277671125,"d_finished":695662,"c":1,"l":1744284278366787,"d":695662},{"a":1744284285799794,"name":"ack","f":1744284284917882,"d_finished":818973,"c":903,"l":1744284285799720,"d":820011},{"a":1744284285799781,"name":"processing","f":1744284278386668,"d_finished":3662717,"c":4515,"l":1744284285799722,"d":3663768},{"name":"ProduceResults","f":1744284278122289,"d_finished":1510596,"c":5420,"l":1744284285800000,"d":1510596},{"a":1744284285800007,"name":"Finish","f":1744284285800007,"d_finished":0,"c":0,"l":1744284285800832,"d":825},{"name":"task_result","f":1744284278386714,"d_finished":2762310,"c":3612,"l":1744284284882922,"d":2762310}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-10T11:24:45.801491Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:24:37.537332Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-10T11:24:45.801543Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:24:45.801818Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:592:2608];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
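The scan actor's timing profile is embedded in the entries above as inline JSON: the stats={...} payload carries stage-boundary timestamps under "p" and accumulated per-stage durations, in microseconds, under "events". Below is a minimal sketch for pulling that payload out when reading such logs; it assumes the payload is the well-formed JSON object printed between stats= and the ;iterator= marker, as in the sample, and the helper names are illustrative rather than any YDB API.

import json
import re

# Sketch, not YDB code: extract the inline "stats" payload from a
# TX_COLUMNSHARD_SCAN log line of the kind shown above. Assumes the payload
# is the well-formed JSON object between "stats=" and ";iterator=".
STATS_RE = re.compile(r"stats=(\{.*?\});iterator=", re.DOTALL)

def scan_stats(log_line):
    match = STATS_RE.search(log_line)
    return json.loads(match.group(1)) if match else None

def stage_seconds(stats):
    # Each "events" entry carries an accumulated duration in microseconds
    # ("d_finished") and a call count ("c"); convert durations to seconds.
    return {event["name"]: event["d_finished"] / 1e6 for event in stats["events"]}

# Against the sample above: ProduceResults accounts for ~1.51 s across 5420
# calls out of the ~8.19 s total scan ("d":8193610 us in the "full" record).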
>> TPartitionTests::AfterRestart_1 [GOOD]
>> TPartitionTests::TestBatchingWithChangeConfig [GOOD]
>> Yq_1::Basic [GOOD]
>> Yq_1::Basic_EmptyList
>> TPartitionTests::TestBatchingWithProposeConfig
>> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD]
>> TPartitionTests::AfterRestart_2
>> TPartitionTests::IncorrectRange [GOOD]
>> TPartitionTests::GetPartitionWriteInfoSuccess
>> TPartitionTests::AfterRestart_2 [GOOD]
>> TPQTest::TestWaitInOwners [GOOD]
>> TPQTest::TestWriteOffsetWithBigMessage
>> TPartitionTests::ChangeConfig
>> TPartitionTests::DataTxCalcPredicateOk [GOOD]
>> TPartitionTests::DataTxCalcPredicateError
>> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD]
>> TPartitionTests::ConflictingActsInSeveralBatches [GOOD]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD]
>> TPQTest::TestReserveBytes [GOOD]
>> TPQTest::TestSetClientOffset
>> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches
>> TPartitionTests::ChangeConfig [GOOD]
>> TPQTabletTests::UpdateConfig_1
>> TListAllTopicsTests::PlainList [GOOD]
>> TListAllTopicsTests::RecursiveList
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD]
Test command err:
2025-04-10T11:24:34.068598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:34.068866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:34.069095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fda/r3tmp/tmpMhVNm5/pdisk_1.dat 2025-04-10T11:24:34.451193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.491705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.532135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.532298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.543763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.624318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.671348Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T11:24:34.671689Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.719330Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T11:24:34.719519Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.727034Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.727178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.728697Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.728767Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.728813Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.729163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.729362Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.729425Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T11:24:34.729718Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.729775Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.730954Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:24:34.730996Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:24:34.731034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:24:34.731282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.731360Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.731396Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T11:24:34.742287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.771994Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.772281Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.772414Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T11:24:34.772463Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.772504Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.772541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.772649Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.772685Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:24:34.772739Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.772796Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T11:24:34.772821Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:24:34.772863Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:24:34.772888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.773487Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.773623Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.773677Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:24:34.773753Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:24:34.773844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.773882Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.773931Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.773973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.774022Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.774045Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.774081Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:24:34.774113Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:24:34.774218Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T11:24:34.774380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
2025-04-10T11:24:34.774693Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.774810Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:24:34.777130Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:34.787851Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:24:34.787976Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.830802Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T11:24:34.830930Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.831096Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.831156Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.831676Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.842733Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.842836Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.968872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T11:24:34.969022Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T11:24:34.973917Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.973996Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.974329Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T11:24:34.974361Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.974571Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.974618Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.974676Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.974971Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.975129Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T11:24:34.975276Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.975301Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.975328Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T11:24:34.975485Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.975582Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.975713Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.975777Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T11:24:34.977817Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... 3377Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:48.923455Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:48.923935Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:48.924368Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:48.925576Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:48.925614Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:48.925650Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:48.925857Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:48.925987Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:48.926076Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-10T11:24:48.926109Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:48.926295Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:48.926350Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T11:24:48.926735Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:48.927056Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:48.929333Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-10T11:24:48.929415Z node 4 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:24:48.931450Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:24:48.931507Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:24:48.931561Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-10T11:24:48.931655Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:48.931711Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:48.931807Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:48.932533Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:48.932572Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:48.932970Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:48.933020Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:48.934288Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:48.934331Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:48.934370Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:48.934426Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:48.934467Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:48.934531Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:48.935770Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:48.935867Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:48.938656Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-10T11:24:48.938719Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T11:24:48.939252Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:48.939797Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:48.939847Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:48.940146Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:48.948688Z node 
4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.948792Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.948858Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.954337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:48.960924Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:48.961061Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:49.112878Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.112966Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:49.115813Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:49.150270Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:49.217410Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfq41xkeq499n9hn5bebcmv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDM3MDk2LWNiZDBjNTMyLWIwOTk1Yjc0LTFkODQyMDYy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:49.220228Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:949:2744], serverId# [4:950:2745], sessionId# [0:0:0] 2025-04-10T11:24:49.220685Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-10T11:24:49.221028Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284289220910 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.221208Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-10T11:24:49.232162Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T11:24:49.232254Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:49.284208Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq426kbkkgr9pbb4ch58t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmVhMDViYWMtMTkzMzAzNzctNjJmZDZjNDItYzc2MjQ4ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:49.286348Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-04-10T11:24:49.286666Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284289286549 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.286858Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744284289286549 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.286970Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-04-10T11:24:49.298028Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T11:24:49.298095Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:49.301603Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:990:2776], serverId# [4:991:2777], sessionId# [0:0:0] 2025-04-10T11:24:49.307367Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:992:2778], serverId# [4:993:2779], sessionId# [0:0:0]
>> CdcStreamChangeCollector::NewImage [GOOD]
>> CdcStreamChangeCollector::PageFaults [GOOD]
>> CdcStreamChangeCollector::OldImage
>> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD]
>> CdcStreamChangeCollector::DeleteSingleRow [GOOD]
>> TPartitionTests::GetPartitionWriteInfoError
>> TPQTabletTests::UpdateConfig_1 [GOOD]
>> TPQTabletTests::UpdateConfig_2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD]
Test command err:
2025-04-10T11:24:33.998050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.998313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.998536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001027/r3tmp/tmpZsMZFn/pdisk_1.dat 2025-04-10T11:24:34.391767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.437479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.477363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.477512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.488975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.569865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.618897Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T11:24:34.619180Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.672053Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T11:24:34.672288Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.683013Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.683182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.685026Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.685113Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.685176Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.685620Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.685864Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.685951Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T11:24:34.686384Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.686485Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.687860Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:24:34.687922Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:24:34.687974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:24:34.688288Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.688399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.688460Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T11:24:34.699330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.730051Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.730283Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.730378Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T11:24:34.730420Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.730454Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.730484Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.730555Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.730581Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:24:34.730611Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.730641Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T11:24:34.730653Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:24:34.730676Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:24:34.730696Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.731115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.731190Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.731236Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:24:34.731291Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:24:34.731352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.731384Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.731419Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.731452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.731489Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.731504Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.731528Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:24:34.731548Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:24:34.731617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T11:24:34.731773Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
2025-04-10T11:24:34.732031Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.732115Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:24:34.734014Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:34.744563Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:24:34.744662Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.787430Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T11:24:34.787590Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.787800Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.787875Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.788244Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.798975Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.799079Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.918554Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T11:24:34.918708Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T11:24:34.923630Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.923707Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.924037Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T11:24:34.924074Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.924287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.924335Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.924397Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.924677Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.924846Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T11:24:34.924973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.924999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.925041Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T11:24:34.925201Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.925300Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.925452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.925521Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T11:24:34.927667Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... cStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:24:49.516476Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:49.516512Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:49.516552Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:49.516605Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:49.516652Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:49.516716Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:49.516844Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-10T11:24:49.516908Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:24:49.517906Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.517987Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:49.518033Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:24:49.518810Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:24:49.518848Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:24:49.518882Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-10T11:24:49.518951Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:49.518993Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 
72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:49.519062Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:24:49.523741Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:49.524082Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-10T11:24:49.524161Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T11:24:49.524470Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:49.525154Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:49.525579Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:49.525661Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:49.526046Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-10T11:24:49.526080Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-10T11:24:49.535414Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.535528Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:848:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.535603Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.539633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:24:49.544805Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.545119Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:49.545176Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:24:49.734018Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.734197Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:49.734259Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:24:49.737835Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:852:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:24:49.773555Z node 4 :TX_PROXY ERROR: Actor# [4:934:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:49.843187Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfq42fy3rsd3nhbk0t0vyr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2EzMmMxNTYtZTVjNWUwNy04NmQyZjYzNC01NmJjNmE0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:49.845566Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1039:2799], serverId# [4:1040:2800], sessionId# [0:0:0] 2025-04-10T11:24:49.845898Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-04-10T11:24:49.846135Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284289846052 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.846327Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284289846052 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.846405Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-04-10T11:24:49.857579Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T11:24:49.857684Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:49.924109Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq42t59ecxjgtyw9bckxh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzAwYWQ0NjMtZmQ5MGQ0NTgtZGFmNTI2ZjItNzIxYWMwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:49.925996Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-04-10T11:24:49.926315Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1744284289926228 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.926477Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1744284289926228 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.926570Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1744284289926228 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.926646Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1744284289926228 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-10T11:24:49.926731Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-04-10T11:24:49.937750Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-10T11:24:49.937818Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:49.942127Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1088:2839], serverId# [4:1089:2840], sessionId# [0:0:0] 2025-04-10T11:24:49.948355Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1090:2841], serverId# [4:1091:2842], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ChangeConfig [GOOD] Test command err: 2025-04-10T11:24:47.607122Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:47.607230Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:47.626335Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 
2025-04-10T11:24:47.627346Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } 2025-04-10T11:24:48.202737Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:48.202798Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:48.216431Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:179:2194] 2025-04-10T11:24:48.217784Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:48.000000Z 2025-04-10T11:24:48.217830Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\210\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\210\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\200\210\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:49.015398Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:49.015480Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:49.031696Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:177:2192] 2025-04-10T11:24:49.033989Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:49.000000Z 2025-04-10T11:24:49.034068Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\217\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:49.784058Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:49.784129Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:49.800105Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:179:2194] 2025-04-10T11:24:49.802271Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:49.000000Z 2025-04-10T11:24:49.802337Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:179:2194] 2025-04-10T11:24:50.402414Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:50.402494Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:50.417517Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:177:2192] 2025-04-10T11:24:50.420233Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:50.000000Z 2025-04-10T11:24:50.420308Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:177:2192] Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\227\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\227\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-04-10T11:24:29.874379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643891674873326:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:29.874462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 11:24:30.176954656 1648785 dns_resolver.cc:162] no server name supplied in dns URI 
E0410 11:24:30.177138715 1648785 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:30.220278Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3649: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3649 } ] 2025-04-10T11:24:30.879626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:31.244198Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3649: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3649 } ] 2025-04-10T11:24:31.246960Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3649: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3649 2025-04-10T11:24:31.882561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.886802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.141010Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3649: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3649 } ] 2025-04-10T11:24:33.234203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643908854742911:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.234782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a6b/r3tmp/tmp4nquSu/pdisk_1.dat 2025-04-10T11:24:33.330893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643908854742911:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.468624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643908854742911:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.499875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3649, node 1 2025-04-10T11:24:33.685708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:33.685736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:33.685749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:33.686058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:34.158799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:34.295010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.295102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.298013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.868972Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643891674873326:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.869066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 11:24:35.177058524 1649009 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:35.177216814 1649009 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:35.455257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:24:35.461940Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T11:24:35.461973Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.461981Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.462700Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2025-04-10T11:24:35.462730Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.462736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.466468Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T11:24:35.470436Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T11:24:35.470467Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.470481Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.471769Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.471803Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.474655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T11:24:35.474704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T11:24:35.475526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-10T11:24:35.475540Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.475546Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.476329Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T11:24:35.476340Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.476345Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.476963Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T11:24:35.476978Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.476984Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.488322Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T11:24:35.488347Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.494176Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.507260Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T11:24:35.507282Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.507287Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.509522Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". 
Create session OK 2025-04-10T11:24:35.509555Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.509561Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.513134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.514991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.517399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.519367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.519855Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T11:24:35.519876Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.519881Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.521706Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2 ... 4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491643974963195283 RawX2: 4503616807242220 } } DstEndpoint { ActorId { RawX1: 7491643974963195284 RawX2: 4503616807242221 } } InMemory: true DstStageId: 1 } 2025-04-10T11:24:48.007751Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update output channelId: 1, peer: [4:7491643974963195284:2541] 2025-04-10T11:24:48.007765Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:48.007784Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:48.007817Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646926 2025-04-10T11:24:48.007871Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491643974963195283 RawX2: 4503616807242220 } } DstEndpoint { ActorId { RawX1: 7491643974963195284 RawX2: 4503616807242221 } } InMemory: true DstStageId: 1 } 2025-04-10T11:24:48.007889Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:48.007900Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:48.007921Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:48.007931Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:48.007944Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:48.008581Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-10T11:24:48.008613Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. Taken 0 locks 2025-04-10T11:24:48.008627Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. new data for read #0 seqno = 1 finished = 1 2025-04-10T11:24:48.008646Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-10T11:24:48.008662Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:48.008681Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-10T11:24:48.008709Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-10T11:24:48.008742Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-10T11:24:48.008753Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. returned 0 rows; processed 0 rows 2025-04-10T11:24:48.008783Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. dropping batch for read #0 2025-04-10T11:24:48.008797Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. effective maxinflight 1 sorted 1 2025-04-10T11:24:48.008806Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-10T11:24:48.008855Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7491643974963195283:2540]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-10T11:24:48.008934Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:24:48.009022Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195284:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. TraceId : 01jrfq40pncz5r9fnea6v44tqa. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-10T11:24:48.009048Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Finish input channelId: 1, from: [4:7491643974963195283:2540] 2025-04-10T11:24:48.009078Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195284:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. TraceId : 01jrfq40pncz5r9fnea6v44tqa. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:48.009123Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195284:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. TraceId : 01jrfq40pncz5r9fnea6v44tqa. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:24:48.009134Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. 
CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:24:48.009160Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:48.009178Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1. Tasks execution finished 2025-04-10T11:24:48.009190Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195283:2540], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. CustomerSuppliedId : . TraceId : 01jrfq40pncz5r9fnea6v44tqa. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:24:48.009288Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1. pass away 2025-04-10T11:24:48.009367Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715688;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:24:48.009624Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195284:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. TraceId : 01jrfq40pncz5r9fnea6v44tqa. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:48.009654Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T11:24:48.009662Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Tasks execution finished 2025-04-10T11:24:48.009671Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491643974963195284:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjBhZjhiYi1iMjY2YjU2MC03MDU5YTFkMi1hMzY2NjE5Yw==. TraceId : 01jrfq40pncz5r9fnea6v44tqa. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:24:48.009713Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. pass away 2025-04-10T11:24:48.009756Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715688;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:24:48.945550Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:10295: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:10295 >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPartitionTests::GetPartitionWriteInfoError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-04-10T11:24:33.921543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.921803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.922012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fb9/r3tmp/tmporaChH/pdisk_1.dat 2025-04-10T11:24:34.305331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.356075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.360320Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-10T11:24:34.396304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.396445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.408023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.488634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.528158Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:24:34.528488Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.580933Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.581073Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.583006Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.583110Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.583192Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.583577Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.583714Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.583795Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:24:34.594621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.620659Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.620909Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.621029Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:24:34.621084Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.621122Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.621160Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-10T11:24:34.621769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.621906Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.622006Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.622050Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.622107Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.622175Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.622305Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:24:34.622788Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.623052Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.623156Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.625101Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.635768Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.635870Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.785001Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:24:34.799293Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.799378Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.800367Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.800416Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.800469Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.800693Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.800853Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.801605Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.801686Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:34.803472Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:34.803942Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.805180Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:34.805230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.805720Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:34.805776Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.806481Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.806523Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.806587Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:34.806646Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:34.806700Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:34.806783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.811064Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.813450Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:34.813523Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:34.814047Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:34.824542Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.824755Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.824820Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:34.824859Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:34.825919Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.850756Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:35.102244Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:35.102368Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:35.102743Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.102787Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-10T11:24:35.102832Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:35.103028Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:35.103138Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:35.103401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.104008Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:35.185416Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:35.185517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.185578Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.185625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:49.889686Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:49.889736Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:49.890051Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:49.890268Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:49.891167Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:49.891244Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:49.891761Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:49.892207Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:49.893866Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:49.893937Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:49.894547Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:49.894627Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:49.895377Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:49.895423Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:49.895469Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:49.895530Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:49.895588Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:49.895673Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:49.897016Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.899835Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:49.899911Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:49.900446Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:49.912414Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:49.912608Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:49.912665Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:49.912704Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:49.912989Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:49.938822Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:50.161579Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:50.161663Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.161912Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.161955Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:50.161995Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:50.162221Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:50.162333Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:50.162764Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.163444Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:50.200148Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:50.200264Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.200315Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.200375Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.200454Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:50.200523Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-10T11:24:50.200644Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.203431Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-10T11:24:50.203521Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:50.211613Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.211731Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.211797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.217854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:50.225197Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.383426Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.387333Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:50.414032Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:50.469235Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq4351489ng85xfxg7m6zh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzViYzc0MDctOTRiNTg4MzMtZTY2M2ZjNjktNWU2NDdjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:50.471583Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-10T11:24:50.471908Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-10T11:24:50.472160Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284290472061 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:50.472381Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-10T11:24:50.483456Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:50.483536Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.564935Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq43dq46z69kszy64k9wgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODVjM2E4ZjgtM2ZhZGM1MjUtOTY3NDg1MWItZjA4NGYzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:50.567135Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-10T11:24:50.567426Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284290567315 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:50.567569Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-10T11:24:50.578653Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:50.578740Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.580748Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-10T11:24:50.587351Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TPartitionTests::FailedTxsDontBlock >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-04-10T11:24:33.922994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.923223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.923456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fef/r3tmp/tmpBClti5/pdisk_1.dat 2025-04-10T11:24:34.304447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.348903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.388400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.388541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.400129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.482798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.542253Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-04-10T11:24:34.542532Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.594902Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-04-10T11:24:34.595156Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.605113Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.605295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.607147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.607238Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.607309Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.607731Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.607970Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.608054Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:709:2578] in generation 1 2025-04-10T11:24:34.608423Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.608521Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.609836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:24:34.609904Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:24:34.609949Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:24:34.610302Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.610427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.610496Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2580] in generation 1 2025-04-10T11:24:34.621509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.658315Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.658581Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.658714Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-10T11:24:34.658762Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.658804Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.658838Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.658937Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.658972Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:24:34.659031Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.659091Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-10T11:24:34.659115Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:24:34.659159Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:24:34.659185Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.659764Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.659878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.659931Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:24:34.659993Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:24:34.660079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.660124Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.660184Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.660225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.660266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.660289Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.660325Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:24:34.660361Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:24:34.660452Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2575], serverId# [1:693:2588], sessionId# [0:0:0] 2025-04-10T11:24:34.660633Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
2025-04-10T11:24:34.660962Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.661083Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-10T11:24:34.663459Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:24:34.674246Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:24:34.674374Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.717409Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2574], serverId# [1:728:2606], sessionId# [0:0:0] 2025-04-10T11:24:34.717540Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.717696Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.717774Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.718231Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.728809Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.728880Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.847801Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2618], serverId# [1:743:2621], sessionId# [0:0:0] 2025-04-10T11:24:34.847987Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:741:2619], serverId# [1:744:2622], sessionId# [0:0:0] 2025-04-10T11:24:34.854475Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.854573Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.854955Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-10T11:24:34.854989Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:24:34.855242Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.855293Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.855359Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.855653Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.855859Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-10T11:24:34.856002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:24:34.856031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.856061Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-10T11:24:34.856220Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.856310Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.856457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.856543Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-10T11:24:34.858696Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.122808Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:50.122871Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:50.123095Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:50.123233Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:50.123843Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.123896Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:50.124320Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:50.124680Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:50.125915Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:50.125958Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.126472Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:50.126564Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.127326Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.127368Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:50.127423Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:50.127490Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 
2025-04-10T11:24:50.127543Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:50.127644Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.129169Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.131381Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:50.131457Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:50.131863Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:50.142168Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:50.142305Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:50.142360Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:50.142388Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:50.142634Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.167042Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:50.362303Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:50.362397Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.362704Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.362757Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:50.362825Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:50.363043Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:50.363185Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:50.363689Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:50.364336Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:50.402823Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:50.402945Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.403003Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:50.403079Z node 4 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.403155Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:50.403219Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-10T11:24:50.403353Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.457535Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-10T11:24:50.457657Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:50.466132Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.466256Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.466351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:50.471884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:50.478900Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.640403Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:50.643816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:50.670323Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:50.735540Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq43d005yraa2ehyzc1zx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWM0NjBkMDctY2Y0ZTZlNTYtNTFhMDUyNjAtYTZlYTQyYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:50.738562Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-10T11:24:50.738992Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-10T11:24:50.739339Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284290739212 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:50.739579Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-10T11:24:50.750801Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:50.750903Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.819497Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq43p3et9qxhp8x66faj14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDYxYTEyYy05NWQ5ZDEwOS0yZjVlMWFiMS1jMjJjMzk3ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:24:50.821725Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-10T11:24:50.822054Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284290821941 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:50.822262Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-10T11:24:50.833305Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:50.833365Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:50.835120Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-10T11:24:50.841136Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] >> TPartitionTests::CorrectRange_Commit >> TPQTest::DirectReadBadSessionOrPipe >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestLowWatermark >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::SetOffset >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TPartitionTests::ConflictingTxIsAborted >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateError [GOOD] Test command err: 2025-04-10T11:24:42.652639Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:42.652759Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:42.670540Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-10T11:24:42.672249Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:42.000000Z 2025-04-10T11:24:42.672305Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\220\331\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\220\331\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:43.458323Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.458375Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:43.469938Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-04-10T11:24:43.471226Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:43.000000Z 2025-04-10T11:24:43.471284Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\340\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\340\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\340\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\370\340\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: 
"\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-04-10T11:24:44.276142Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.276216Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:44.290131Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:177:2192] 2025-04-10T11:24:44.291759Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:44.000000Z 2025-04-10T11:24:44.291821Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\350\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:45.117803Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.117885Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:45.137024Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:45.137267Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:45.137598Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-04-10T11:24:45.138734Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:45.138958Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:45.139171Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:45.139380Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:45.139857Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:45.139966Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:45.140013Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:45.140064Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys 
completed. Value 2025-04-10T11:24:45.000000Z 2025-04-10T11:24:45.140118Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:45.140173Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-10T11:24:45.140242Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:45.140294Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:46.425910Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client session is set to 0 (startOffset 0) session session 2025-04-10T11:24:46.426113Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:46.426210Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:46.426263Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:46.426311Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:46.426347Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient 2025-04-10T11:24:46.426379Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient 2025-04-10T11:24:46.426417Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:46.426468Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\310\360\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:46.448042Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 2025-04-10T11:24:46.448398Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait first predicate result 2025-04-10T11:24:47.722213Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-04-10T11:24:47.722544Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 Wait second predicate result 2025-04-10T11:24:48.942436Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle 
TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:48.942546Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-10T11:24:48.942606Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-10T11:24:48.942691Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:48.942760Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, New ... State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-10T11:24:48.943043Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-10T11:24:48.943096Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:48.943157Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:24:48.943378Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:48.943446Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:48.943507Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:48.943554Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:24:48.943609Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:48.943638Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:48.943669Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:48.943704Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:48.943753Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-04-10T11:24:48.944019Z node 4 :PERSQUEUE INFO: new Cookie owner1|20c7b80f-2c6d8519-558921f-5ba6f02b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-10T11:24:48.955099Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-04-10T11:24:48.955263Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 2025-04-10T11:24:48.956039Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 5 partNo 0 2025-04-10T11:24:48.956991Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 5 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 118 count 1 nextOffset 52 batches 1 2025-04-10T11:24:48.957729Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 51,1 HeadOffset 50 endOffset 50 curOffset 52 d0000000000_00000000000000000051_00000_0000000001_00000| size 104 WTime 15244 2025-04-10T11:24:48.957902Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:48.957960Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:48.958035Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:48.958075Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:48.958123Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-04-10T11:24:48.958169Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000001_00000| 2025-04-10T11:24:48.958199Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:48.958236Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:48.958279Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2025-04-10T11:24:48.990132Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:48.990271Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:24:48.990369Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 51 is stored on disk Wait third predicate result Create distr tx with id = 4 and act no: 5 2025-04-10T11:24:48.990834Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-10T11:24:50.196630Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-04-10T11:24:50.564275Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:50.564341Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:50.579793Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:50.580091Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:50.580389Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:176:2191] 2025-04-10T11:24:50.581519Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:50.581716Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:50.581917Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:50.582211Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:50.582716Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-10T11:24:50.582834Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:50.582878Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:50.582934Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:50.000000Z 2025-04-10T11:24:50.582977Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:50.583030Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:176:2191] 2025-04-10T11:24:50.583099Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-10T11:24:50.583154Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:24:51.868975Z node 5 :PERSQUEUE INFO: new Cookie SourceId|84874ea8-75e766f-4c8d35a0-fc171415_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId 2025-04-10T11:24:51.869167Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Wait write response Wait kv request 2025-04-10T11:24:51.869557Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-10T11:24:51.870541Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 11 PartNo 0 PackedSize 118 count 1 nextOffset 12 batches 1 2025-04-10T11:24:51.871304Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 11,1 HeadOffset 1 endOffset 1 curOffset 12 d0000000000_00000000000000000011_00000_0000000001_00000| size 104 WTime 5131 2025-04-10T11:24:51.871489Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:51.871544Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:51.871590Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:51.871639Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:51.871690Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-04-10T11:24:51.871726Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000011_00000_0000000001_00000| 2025-04-10T11:24:51.871754Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:51.871789Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:51.871840Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request 2025-04-10T11:24:51.892859Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:51.892989Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:24:51.893090Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 11 is stored on disk Wait second predicate result Create distr tx with id = 0 and act no: 1 2025-04-10T11:24:51.893557Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:24:53.177819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 >> TPartitionTests::ConflictingTxProceedAfterRollback >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] Test command err: Size: 128 Create chunk: 0.000035s Read by index: 0.000014s Iterate: 0.000015s Size: 252 Create chunk: 0.000050s Read by index: 0.000022s Iterate: 0.000025s Size: 1887 Create chunk: 0.000094s Read by index: 0.000162s Iterate: 0.000075s Size: 1658 Create chunk: 0.000080s Read by index: 0.000160s Iterate: 0.000075s Size: 1889 Create chunk: 0.000075s Read by index: 0.000099s Iterate: 0.000046s Size: 1660 Create chunk: 0.000057s Read by index: 0.000054s Iterate: 0.000031s >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-10T11:23:46.766655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643707417778007:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:46.766778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpf5Etn0/pdisk_1.dat 2025-04-10T11:23:47.068234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:47.124342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:47.124447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11943, node 1 2025-04-10T11:23:47.128269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:47.229069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:47.229094Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:47.229105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:47.229207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22873 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:47.532050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:47.711174Z node 1 :TICKET_PARSER DEBUG: Ticket 31A8A206AB7B47C2A201B54F85D663A1074192BF980D981144269C7424AD7CE3 (ipv6:[::1]:54056) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:23:47.855413Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54078) has now valid token of root@builtin 2025-04-10T11:23:47.937585Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:47.937623Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:47.937645Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:47.937686Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:51.324108Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643728395127631:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:51.324220Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpgewISV/pdisk_1.dat 2025-04-10T11:23:51.489094Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:51.520974Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:51.521070Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:51.525205Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27900, node 4 2025-04-10T11:23:51.612730Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:51.612753Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:51.612782Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:51.612932Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:51.867608Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:51.948918Z node 4 :TICKET_PARSER DEBUG: Ticket 31A8A206AB7B47C2A201B54F85D663A1074192BF980D981144269C7424AD7CE3 (ipv6:[::1]:54222) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:23:52.032285Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54236) has now valid token of root@builtin 2025-04-10T11:23:52.116474Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:52.116507Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:52.116515Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:52.116586Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:23:55.690389Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643746203492040:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:55.690487Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpqmo6YX/pdisk_1.dat 2025-04-10T11:23:55.812730Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:55.848326Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:55.848427Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:55.850871Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18664, node 7 2025-04-10T11:23:55.910768Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:55.910795Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:55.910804Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:55.910959Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:56.172868Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:56.278018Z node 7 :TICKET_PARSER DEBUG: Ticket 17E0BA4EDC6B97D143472D983A15D4994008A4C464CA76D6809D23B3A875A594 (ipv6:[::1]:40460) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:23:56.352297Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:40482) has now valid token of root@builtin 2025-04-10T11:23:56.429975Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:23:56.430007Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:23:56.430016Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:23:56.430048Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:00.294475Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643768157934269:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:00.294532Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpPi6z44/pdisk_1.dat 2025-04-10T11:24:00.416015Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:00.450078Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:00.450186Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnect ... at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:24:21.052553Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7491643837798631142:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.052652Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:27.041451Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:39834) has now valid token of root@builtin 2025-04-10T11:24:27.097288Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:27.097320Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:27.097328Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:27.097359Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:28.430298Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7491643886108477718:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:28.430786Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmp0HrLL7/pdisk_1.dat 2025-04-10T11:24:28.591131Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:28.637553Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:28.637662Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:28.642358Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7027, node 22 2025-04-10T11:24:28.800962Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:28.801015Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:28.801030Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:28.801231Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:29.213494Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:33.430903Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7491643886108477718:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:33.431017Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:39.364938Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:39868) has now valid token of root@builtin 2025-04-10T11:24:39.430626Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:39.430663Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:39.430676Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:39.430721Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:41.093971Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7491643944456578288:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:41.094054Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpMvnuzJ/pdisk_1.dat 2025-04-10T11:24:41.406359Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:41.461248Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:41.461397Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:41.464935Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1591, node 25 2025-04-10T11:24:41.638874Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:41.638907Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:41.638980Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:41.639199Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:42.067499Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:42.219121Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:56714) has now valid token of root@builtin 2025-04-10T11:24:42.280852Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:42.280886Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:42.280901Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:42.280942Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:47.194226Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7491643969874981615:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:47.194301Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001c5a/r3tmp/tmpouUCTD/pdisk_1.dat 2025-04-10T11:24:47.337181Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:47.380938Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:47.381058Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:47.390412Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17825, node 28 2025-04-10T11:24:47.462368Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:47.462403Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:47.462417Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:47.462611Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18380 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:47.896880Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:48.044378Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:45028) has now valid token of root@builtin 2025-04-10T11:24:48.098081Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:48.098119Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:48.098134Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:48.098199Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> TPartitionTests::SetOffset [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-04-10T11:24:40.279752Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279845Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.307576Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-10T11:24:40.313017Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:40.000000Z 2025-04-10T11:24:40.313087Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\311\307\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-04-10T11:24:41.137164Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.137246Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:41.157171Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:179:2194] 2025-04-10T11:24:41.159010Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:41.159091Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:179:2194] 2025-04-10T11:24:41.494776Z node 2 :PERSQUEUE INFO: new Cookie owner1|7a023ece-a88029f2-eb5df4b3-e768f1c2_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 2025-04-10T11:24:44.724751Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.724861Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:44.742832Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [3:177:2192] 2025-04-10T11:24:44.747393Z node 3 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
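The CmdWrite/CmdDeleteRange dumps above all address a small family of per-partition keys: 'i'/'I' plus a zero-padded partition for partition info, 'm' plus partition plus 'c'/'u' plus a consumer name for consumer metadata (e.g. m0000000003cclient-1), '_config_N' for the partition config, and 'd' plus partition/offset/part/count for data blobs (e.g. d0000000000_00000000000000000011_00000_0000000001_00000 earlier in this log). A sketch of that layout (Python; the field meanings are inferred from the examples in these dumps rather than taken from a spec, and the trailing _00000 suffix is reproduced verbatim without interpreting it):

    def meta_key(partition: int, kind: str, consumer: str) -> str:
        # 'm' + 10-digit partition + kind ('c' or 'u') + consumer name,
        # as in m0000000003cclient-1 above.
        return f"m{partition:010d}{kind}{consumer}"

    def data_key(partition: int, offset: int, part_no: int, count: int) -> str:
        # 'd' + 10-digit partition + 20-digit offset + 5-digit part number
        # + 10-digit message count + a fixed suffix seen in every example here.
        return f"d{partition:010d}_{offset:020d}_{part_no:05d}_{count:010d}_00000"

    assert meta_key(3, "c", "client-1") == "m0000000003cclient-1"
    assert data_key(0, 11, 0, 1) == "d0000000000_00000000000000000011_00000_0000000001_00000"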
2025-04-10T11:24:44.747487Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:177:2192] 2025-04-10T11:24:45.565060Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.565131Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:45.582463Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:45.582773Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:45.583064Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-04-10T11:24:45.583979Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:45.584157Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:45.584321Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:45.585233Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:45.585727Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:45.585824Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:45.585866Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:45.585914Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:45.000000Z 2025-04-10T11:24:45.585953Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:45.586021Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-10T11:24:45.586091Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:45.586167Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:24:45.586260Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:45.586309Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:45.586352Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:45.586667Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-10T11:24:45.586870Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-10T11:24:45.586925Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Create distr tx with id = 0 and act no: 1 2025-04-10T11:24:46.860378Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait batch completion 2025-04-10T11:24:48.165095Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:48.165286Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:24:48.185667Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:24:48.185939Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:48.185990Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:48.186028Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:48.186065Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:48.186100Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:48.186121Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:24:48.186167Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:24:48.186205Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:48.186246Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-04-10T11:24:49.395696Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:49.395818Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 reinit with generation 7 done 2025-04-10T11:24:49.396019Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:49.396077Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:49.396122Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:49.396164Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:49.396209Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-10T11:24:49.396249Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-10T11:24:49.396271Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:24:49.396293Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:24:49.396316Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:24:49.396341Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:49.396387Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927 ... 27937] doesn't have tx writes info 2025-04-10T11:24:49.786675Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:49.787018Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:49.787314Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:176:2191] 2025-04-10T11:24:49.788367Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:49.788555Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:49.788733Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:49.789590Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:49.790141Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:49.790274Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:49.790327Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:49.790388Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:49.000000Z 2025-04-10T11:24:49.790440Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:49.790496Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:176:2191] 2025-04-10T11:24:49.790569Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:49.790634Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:49.790719Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:49.790772Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:49.790822Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:49.791179Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-10T11:24:49.791395Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-10T11:24:49.791456Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
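The initializer traces above always run the same step sequence before "Initializing completed.": TInitConfigStep, TInitInternalFieldsStep, TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep, TInitEndWriteTimestampStep. A small order check over a single partition's init trace (Python; the step list is read off this log, not the YDB sources, so treat it as an assumption):

    import re

    INIT_STEPS = [
        "TInitConfigStep", "TInitInternalFieldsStep", "TInitDiskStatusStep",
        "TInitMetaStep", "TInitInfoRangeStep", "TInitDataRangeStep",
        "TInitDataStep", "TInitEndWriteTimestampStep",
    ]
    STEP_RE = re.compile(r"Start initializing step (\w+)")

    def init_steps_in_order(log_text: str) -> bool:
        """True if every 'Start initializing step' line in a single partition's
        trace respects the order of INIT_STEPS (unknown steps are ignored).
        Interleaved traces from several partitions would need grouping first."""
        seen = STEP_RE.findall(log_text)
        ranks = [INIT_STEPS.index(s) for s in seen if s in INIT_STEPS]
        return ranks == sorted(ranks)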
Create distr tx with id = 0 and act no: 1 2025-04-10T11:24:51.085254Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:24:51.085392Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1, TxId 3 Wait batch completion 2025-04-10T11:24:52.390143Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:52.390384Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:24:52.390443Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:24:52.390657Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:52.390721Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:52.390775Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:52.390859Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:52.390900Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:52.390959Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:24:52.390988Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:24:52.391026Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:52.391087Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-04-10T11:24:53.620879Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion 2025-04-10T11:24:53.621205Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-04-10T11:24:53.621347Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-04-10T11:24:53.621570Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:53.621614Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:53.621657Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cclient-1, m0000000000cclient-1] 2025-04-10T11:24:53.621692Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uclient-1, m0000000000uclient-1] 2025-04-10T11:24:53.621720Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write 
----------------- 2025-04-10T11:24:53.621761Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:53.621801Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:53.621828Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:24:53.621857Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:24:53.621883Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:24:53.621917Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:53.621960Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-04-10T11:24:53.632912Z node 5 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2025-04-10T11:24:53.633084Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:53.633154Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:24:53.633189Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:24:53.633331Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:53.633370Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:53.633402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:53.633436Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:53.633468Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:24:53.633489Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:24:53.633512Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:53.633543Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request 2025-04-10T11:24:53.634016Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:53.634067Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2025-04-10T11:24:53.634423Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 
'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-04-10T11:24:53.634611Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 1 blobs, size 0 count 45 last offset 6, current partition end offset: 50 2025-04-10T11:24:53.634662Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. Send blob request. Got KV request Got KV request Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-04-10T11:24:53.634897Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TPartitionTests::OldPlanStep >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestToHex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-04-10T11:24:40.273398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:40.279458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:40.279801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:40.279860Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:40.279936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:40.279984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:40.280028Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.280124Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:40.313705Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-04-10T11:24:40.313867Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.332267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 
Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.335201Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.335333Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.337071Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-10T11:24:40.337244Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.337652Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.338114Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-04-10T11:24:40.339108Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:40.339179Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-04-10T11:24:40.339251Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.340126Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:24:40.340318Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:40.340370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:40.340420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-10T11:24:40.340447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-10T11:24:40.340624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.340671Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.340716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.340766Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.340807Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:40.340843Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:40.340875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-04-10T11:24:40.340905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-04-10T11:24:40.340941Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.340987Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.341102Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.341145Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.341348Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:40.344497Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.344968Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-04-10T11:24:40.345670Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-04-10T11:24:40.346725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-10T11:24:40.346787Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:40.346972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-10T11:24:40.347036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:40.347076Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-10T11:24:40.347165Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:40.347214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-10T11:24:40.347375Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-10T11:24:40.347495Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:40.356493Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:40.356572Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-10T11:24:40.356617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-10T11:24:40.356666Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-10T11:24:40.357084Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-10T11:24:40.357138Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:40.357213Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-10T11:24:40.357261Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:40.357322Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-10T11:24:40.357370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:40.357413Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-10T11:24:40.357574Z 
node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-10T11:24:40.357669Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:40.361389Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:40.361465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPA ... aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2496" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2497" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2498" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2499" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-10T11:24:52.392747Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:52.418345Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:52.418415Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATED 2025-04-10T11:24:52.418450Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-10T11:24:52.418498Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-10T11:24:52.418544Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-10T11:24:52.418604Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-10T11:24:52.418704Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-10T11:24:52.418757Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPartitionTests::ReserveSubDomainOutOfSpace |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestToHex [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQRBDescribes::PartitionLocations [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPartitionTests::ShadowPartitionCounters >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPartitionTests::FailedTxsDontBlock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-04-10T11:24:42.543580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643946307110870:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:42.546460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:42.591541Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643946890175326:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:42.591596Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:42.799284Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:42.802756Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc8/r3tmp/tmpJxf9ps/pdisk_1.dat 2025-04-10T11:24:43.013533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:43.013651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:43.016914Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:43.019031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:43.025507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:43.053712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:43.053805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27608, node 1 2025-04-10T11:24:43.067513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:43.134184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000cc8/r3tmp/yandex6GzVOU.tmp 2025-04-10T11:24:43.134212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000cc8/r3tmp/yandex6GzVOU.tmp 2025-04-10T11:24:43.134400Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000cc8/r3tmp/yandex6GzVOU.tmp 2025-04-10T11:24:43.134546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:43.194132Z INFO: TTestServer started on Port 26738 GrpcPort 27608 TClient is connected to server localhost:26738 PQClient connected to localhost:27608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:43.531911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:43.605810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:24:45.897777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643959192013860:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.897780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643959775077525:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.897790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643959775077534:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.897969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.898084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.898514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643959192013886:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.899022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643959192013891:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.899095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:45.902256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:24:45.906300Z node 2 :TX_PROXY ERROR: Actor# [2:7491643959775077542:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:24:45.918322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643959192013892:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:45.918512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491643959775077541:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:45.979861Z node 1 :TX_PROXY ERROR: Actor# [1:7491643959192013978:2818] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:45.999138Z node 2 :TX_PROXY ERROR: Actor# [2:7491643959775077569:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:46.168109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:46.169329Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643959192013996:2353], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:46.169578Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI0NjI2ZDctYjIxZTI1YjMtMWI3ZWJkZWItOWQ1NTQwMg==, ActorId: [1:7491643959192013857:2341], ActorState: ExecuteState, TraceId: 01jrfq3yxd7ym9na9z77dm016b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:46.171296Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643964070044880:2317], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:46.171482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzUwNTIzODQtNjkwNjliZjItNjk4ZDNhOGQtYjU3ZTYyNjQ=, ActorId: [2:7491643959775077510:2308], ActorState: ExecuteState, TraceId: 01jrfq3yy14mrhjxashk026f5r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:46.171651Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:46.171845Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR Consumed ...
1 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:24:53.036096Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710681] PredicateAcks: 0/0 2025-04-10T11:24:53.036104Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:53.036116Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710681] PredicateAcks: 0/0 2025-04-10T11:24:53.036134Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710681 to the list for deletion 2025-04-10T11:24:53.036153Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710681, NewState DELETING 2025-04-10T11:24:53.036179Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710681 2025-04-10T11:24:53.036231Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:53.038175Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:53.038217Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-10T11:24:53.038232Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710681, State DELETING 2025-04-10T11:24:53.038254Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710681 ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710681 CreateStep: 1744284293010 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710681 CreateStep: 1744284293010 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 3 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 4 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 5 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic" name rt3.dc1--topic version1 CallPersQueueGRPC request to localhost:27608 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-10T11:24:53.058125Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:27608 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-10T11:24:53.561434Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:27608 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-10T11:24:54.064887Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic" YdbDatabasePath: "/Root" Consumers 
{ Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710681 CreateStep: 1744284293010 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... (TRUNCATED) 2025-04-10T11:24:54.068490Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7491643997846720859:3569] connected; active server actors: 1 2025-04-10T11:24:54.068720Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-04-10T11:24:54.068750Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-04-10T11:24:54.068763Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-04-10T11:24:54.068778Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-04-10T11:24:54.068789Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 2025-04-10T11:24:54.069403Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7491643997846720860:3570] connected; active server actors: 1 2025-04-10T11:24:54.069544Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-04-10T11:24:54.069564Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-04-10T11:24:54.069578Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-04-10T11:24:54.069592Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-04-10T11:24:54.069602Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 0 NodeId: 1 Generation: 1 } Locations { PartitionId: 1 NodeId: 1 Generation: 1 } Locations { PartitionId: 2 NodeId: 1 Generation: 1 } Locations { PartitionId: 3 NodeId: 1 Generation: 1 } 
Locations { PartitionId: 4 NodeId: 1 Generation: 1 } 2025-04-10T11:24:54.070092Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7491643997846720861:3571] connected; active server actors: 1 2025-04-10T11:24:54.070331Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 3 NodeId: 1 Generation: 1 } 2025-04-10T11:24:54.070801Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7491643997846720862:3572] connected; active server actors: 1 response: Status: false 2025-04-10T11:24:54.784269Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [1:7491643997846720910:2533]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-10T11:24:54.817455Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [1:7491643997846720910:2533]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:24:54.865578Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [1:7491643997846720910:2533]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPartitionTests::GetUsedStorage >> CdcStreamChangeCollector::OldImage [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] Test command err: 2025-04-10T11:24:54.320720Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:54.324004Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:54.324330Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:54.324397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:54.324429Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:54.324501Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:54.324558Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:54.324635Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:54.353095Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:54.353205Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:54.366862Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:54.370007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:54.370199Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:54.371827Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:54.371989Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:54.372082Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:54.372536Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:54.372914Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:54.373883Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:54.373945Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:54.374006Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:54.374582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:54.374750Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:54.374818Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:54.374961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:54.375006Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:54.375046Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:54.375090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:54.375123Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:54.375147Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:54.375181Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:54.375221Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:54.375303Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:54.375459Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:54.375767Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:54.376032Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-04-10T11:24:54.376696Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-10T11:24:54.376742Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-04-10T11:24:54.376784Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:54.377155Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:54.377218Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-10T11:24:54.377251Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-10T11:24:54.377363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:54.377419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:54.377452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:54.377481Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:54.377516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-10T11:24:54.377537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-10T11:24:54.377562Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:54.377591Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:54.377652Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:54.377823Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:54.391093Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:54.391610Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:54.392029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2210], now have 1 active actors on pipe 2025-04-10T11:24:54.392779Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2212], now have 1 active actors on pipe 2025-04-10T11:24:54.393622Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-04-10T11:24:54.393692Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:54.393807Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-10T11:24:54.393853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:54.393887Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-10T11:24:54.393943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:54.393997Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-10T11:24:54.394171Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-04-10T11:24:54.394249Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:54.400772Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10 ... 
ve 1 active actors on pipe 2025-04-10T11:24:56.664183Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:222:2223], now have 1 active actors on pipe 2025-04-10T11:24:56.664404Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 25769805966 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-10T11:24:56.664456Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-10T11:24:56.664547Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-10T11:24:56.664581Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:56.664610Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-10T11:24:56.664645Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:56.664674Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-10T11:24:56.664783Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:56.664850Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:56.668278Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:56.668332Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-10T11:24:56.668373Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-10T11:24:56.668404Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-10T11:24:56.668590Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-10T11:24:56.668623Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-10T11:24:56.668652Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-10T11:24:56.668768Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 25769805966 } } Step: 100 2025-04-10T11:24:56.668815Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-04-10T11:24:56.668853Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-10T11:24:56.668909Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-10T11:24:56.668942Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-10T11:24:56.669063Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 
25769805966 } Partitions { } 2025-04-10T11:24:56.669135Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:56.671955Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:56.672018Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-04-10T11:24:56.672060Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-04-10T11:24:56.672100Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-04-10T11:24:56.672140Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-04-10T11:24:56.672176Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-10T11:24:56.672211Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-10T11:24:56.672275Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-10T11:24:56.672320Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-10T11:24:56.672415Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-10T11:24:56.672492Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 1 2025-04-10T11:24:56.672680Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2025-04-10T11:24:56.672715Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-10T11:24:56.672749Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-10T11:24:56.672788Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-10T11:24:56.672829Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-10T11:24:56.672869Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-10T11:24:56.672909Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-10T11:24:56.672953Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-10T11:24:56.672990Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-10T11:24:56.673151Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:56.673243Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:56.679424Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:56.679475Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-10T11:24:56.679508Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-10T11:24:56.679542Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 
67890 2025-04-10T11:24:56.679585Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-10T11:24:56.679632Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-10T11:24:56.679681Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-10T11:24:56.679725Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-10T11:24:56.679852Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-10T11:24:56.679901Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-10T11:24:56.679943Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-10T11:24:56.679978Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 0 2025-04-10T11:24:56.680022Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-10T11:24:56.680065Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-10T11:24:56.680107Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-10T11:24:56.680145Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-10T11:24:56.680193Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-10T11:24:56.680366Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 25769805966 } Partitions { } 2025-04-10T11:24:56.680458Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:24:56.680619Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:56.680662Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:56.680709Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:56.680755Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:56.680790Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:56.680824Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:56.680864Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:56.681104Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
2025-04-10T11:24:56.685766Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-10T11:24:56.685824Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-04-10T11:24:56.687108Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:56.688722Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:24:56.688772Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-10T11:24:56.688804Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-10T11:24:56.688836Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-10T11:24:56.688877Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:24:56.688938Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-10T11:24:56.688986Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-10T11:24:56.689024Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:24:56.689077Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-10T11:24:56.689103Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:24:56.689153Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> TPartitionTests::GetUsedStorage [GOOD] >> YdbProxy::CopyTable >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> YdbProxy::CreateTable >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> YdbProxy::CreateTopic >> YdbProxy::ReadTopic >> YdbProxy::ListDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-04-10T11:24:49.400771Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:49.400875Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:49.419492Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-10T11:24:49.421418Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed.
Value 2025-04-10T11:24:49.000000Z 2025-04-10T11:24:49.421488Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\217\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\217\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\217\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-04-10T11:24:50.187009Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:50.187076Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:50.198279Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:50.198439Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:50.198601Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:178:2193] 2025-04-10T11:24:50.199161Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-04-10T11:24:50.199204Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:178:2193] 2025-04-10T11:24:50.199241Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:50.199268Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:50.199381Z node 2 :PERSQUEUE INFO: new Cookie owner1|33f1058e-9ab20f79-d8f272bf-92adc2b2_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-10T11:24:50.199451Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2025-04-10T11:24:50.199684Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-04-10T11:24:50.200347Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-10T11:24:50.200780Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-04-10T11:24:50.200937Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:50.200996Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:50.201030Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-10T11:24:50.201064Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-10T11:24:50.201101Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-10T11:24:50.201132Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000100_00000_0000000001_00000| 2025-04-10T11:24:50.201156Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-10T11:24:50.201181Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:50.201217Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-04-10T11:24:50.242675Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:50.242804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-10T11:24:50.242890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-04-10T11:24:50.519839Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-10T11:24:50.520727Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-04-10T11:24:50.521183Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 2025-04-10T11:24:50.521328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:50.521373Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:50.521411Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-10T11:24:50.521439Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-10T11:24:50.521484Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-10T11:24:50.521514Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000101_00000_0000000001_00000| 2025-04-10T11:24:50.521539Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-10T11:24:50.521565Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:50.521595Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-04-10T11:24:50.552509Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:50.552614Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001}
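Each write cycle in this test ends with a "=== DumpKeyValueRequest ===" block listing the batched KV operations: a delete of the half-open key range [X0000100001, X0000100002), then writes of a source-id marker (M0000100001pSourceId), the data blob itself (D0000100001_<offset>_...), and a J0000100001 key, with an empty rename section. A sketch that folds such a dump back into a structured batch — the section markers are verbatim from this log, while the meaning of individual keys is left to the classifier sketched later in this section:

def parse_kv_dump(lines):
    """Group the keys printed between '--- delete/write/rename' markers."""
    batch = {"delete": [], "write": [], "rename": []}
    section = None
    for line in lines:
        line = line.strip()
        if line.startswith("--- delete"):
            section = "delete"
        elif line.startswith("--- write"):
            section = "write"
        elif line.startswith("--- rename"):
            section = "rename"
        elif line.startswith("==="):
            section = None  # header or trailing '===...' bar
        elif section is not None and line:
            batch[section].append(line)
    return batch

dump = [
    "=== DumpKeyValueRequest ===",
    "--- delete ----------------",
    "[X0000100001, X0000100002)",
    "--- write -----------------",
    "M0000100001pSourceId",
    "D0000100001_00000000000000000101_00000_0000000001_00000|",
    "J0000100001",
    "--- rename ----------------",
    "===========================",
]
print(parse_kv_dump(dump))
# {'delete': ['[X0000100001, X0000100002)'], 'write': [...3 keys...], 'rename': []}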
2025-04-10T11:24:50.552679Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk 2025-04-10T11:24:50.762123Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-04-10T11:24:50.765242Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-04-10T11:24:50.765731Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 2025-04-10T11:24:50.765863Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:50.765897Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:50.765927Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-04-10T11:24:50.765957Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-04-10T11:24:50.765985Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-04-10T11:24:50.766011Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000102_00000_0000000001_00000| 2025-04-10T11:24:50.766035Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-04-10T11:24:50.766063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ... 
rc2' seqNo 10 partNo 0 2025-04-10T11:24:55.440205Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src2' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 552 count 10 nextOffset 80 batches 1 2025-04-10T11:24:55.440771Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,10 HeadOffset 20 endOffset 25 curOffset 80 d0000000000_00000000000000000070_00000_0000000010_00000| size 299 WTime 11240 2025-04-10T11:24:55.440913Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:55.440949Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:55.440994Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:55.441026Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:55.441057Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000020_00000_0000000005_00000 2025-04-10T11:24:55.441078Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-04-10T11:24:55.441093Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000010_00000| 2025-04-10T11:24:55.441115Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:55.441145Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:55.441178Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 10 Got KV request Got KV request Send disk status response with cookie: 0 2025-04-10T11:24:55.461926Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 170 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:55.462044Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462124Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk 2025-04-10T11:24:55.462193Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462235Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk 2025-04-10T11:24:55.462267Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:24:55.462306Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 72 is stored on disk 2025-04-10T11:24:55.462337Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462372Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 73 is stored on disk 2025-04-10T11:24:55.462404Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462440Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 74 is stored on disk 2025-04-10T11:24:55.462473Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462510Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 75 is stored on disk 2025-04-10T11:24:55.462538Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462571Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 76 is stored on disk 2025-04-10T11:24:55.462599Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462636Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 77 is stored on disk 2025-04-10T11:24:55.462664Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:55.462701Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 78 is stored on disk 2025-04-10T11:24:55.462731Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
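The acknowledgement run above (SeqNo 3..9 here; SeqNo 10 at Offset 79 follows below) shows one batched write landing at consecutive offsets 70..79: offsets grow in lockstep with the producer's sequence numbers. A quick consistency check one can run over such "Answering for message sourceid" lines — the regex mirrors only this log's wording and is not a YDB API:

import re

ACK = re.compile(r"sourceid: '([^']+)'.*?SeqNo: (\d+), partNo: \d+, Offset: (\d+)")

def check_monotonic(log_text):
    # Per source id, offsets must grow by exactly as much as the SeqNo does.
    acks = {}
    for src, seqno, offset in ACK.findall(log_text):
        acks.setdefault(src, []).append((int(seqno), int(offset)))
    for src, pairs in acks.items():
        pairs.sort()
        for (s1, o1), (s2, o2) in zip(pairs, pairs[1:]):
            assert s2 - s1 == o2 - o1, f"gap for {src}: {pairs}"
    return acks

print(check_monotonic(
    "sourceid: 'src2', Topic: 'T', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk "
    "sourceid: 'src2', Topic: 'T', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk"))
# {'src2': [(1, 70), (2, 71)]}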
2025-04-10T11:24:55.462769Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 79 is stored on disk 2025-04-10T11:24:55.463051Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:55.463102Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:55.463146Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000020_00000_0000000005_00000|, d0000000000_00000000000000000020_00000_0000000005_00000|] 2025-04-10T11:24:55.463185Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:55.463225Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:55.463261Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:55.463304Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-04-10T11:24:55.463571Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-10T11:24:55.463659Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-04-10T11:24:56.399694Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:56.399809Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-04-10T11:24:56.399981Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:56.400029Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:56.400128Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=MinSeqNo violation failure on src2 Got batch complete: 3 2025-04-10T11:24:56.616680Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 12 2025-04-10T11:24:56.616758Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-04-10T11:24:56.616841Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:56.616890Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 299 count 10 nextOffset 80 batches 1, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-04-10T11:24:56.617072Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:56.617117Z
node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:56.617156Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:56.617199Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrcId2 2025-04-10T11:24:56.617234Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:56.617257Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:56.617293Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:56.617322Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:56.617357Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 10 2025-04-10T11:24:56.648963Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-04-10T11:24:57.172488Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:57.172564Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:57.189146Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:177:2192] 2025-04-10T11:24:57.190961Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:57.191034Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:177:2192] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-04-10T11:24:33.678289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.678580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.678824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f99/r3tmp/tmpFZ0yGg/pdisk_1.dat 2025-04-10T11:24:34.185680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.242187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.249510Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-10T11:24:34.287992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.288521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.301063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.394097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:24:34.448306Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:24:34.448578Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:24:34.493106Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:24:34.493201Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:24:34.495420Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:24:34.495493Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:24:34.495557Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:24:34.496870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:24:34.497023Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:24:34.497088Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:24:34.507821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:24:34.554838Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:24:34.556208Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:24:34.556366Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:24:34.556408Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.556443Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:24:34.556498Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-10T11:24:34.557950Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:24:34.558065Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:24:34.558179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.558230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.558321Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:24:34.558367Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.558470Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:24:34.559746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.560093Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:24:34.560203Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:24:34.562120Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.572837Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:34.572934Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:24:34.722582Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:24:34.744241Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:34.744326Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.745418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.745478Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:34.745544Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:34.745796Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:34.745986Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:34.746847Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:34.746924Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:34.749877Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:34.751228Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:34.753348Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:34.753414Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.754640Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:34.754726Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.755483Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:34.755523Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:34.755575Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:34.755631Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:34.755678Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:34.755749Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:34.759784Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.762013Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:34.762098Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:34.762711Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:34.778398Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:34.778533Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:34.778581Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:34.778614Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:34.779561Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:34.804253Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:35.090319Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:35.090378Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:35.090682Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.090714Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-10T11:24:35.090748Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:35.090895Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:35.091013Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:35.091241Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:35.092824Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:35.186810Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:35.186900Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.186936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:35.186978Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:55.904675Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:55.904737Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:55.905092Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:24:55.905265Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:55.906171Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:55.906263Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:24:55.906868Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:24:55.907389Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:55.909335Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:24:55.909406Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:55.910025Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:24:55.910138Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:55.911088Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:55.911155Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:24:55.911222Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:24:55.911310Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:55.911380Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:24:55.911491Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:55.913255Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:55.915951Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:24:55.916032Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:55.916502Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:24:55.930122Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:24:55.930398Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-10T11:24:55.930485Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-10T11:24:55.930540Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-10T11:24:55.930884Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:55.956294Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:24:56.163603Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:24:56.163691Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:56.164103Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:56.164172Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:24:56.164237Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-10T11:24:56.164509Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-10T11:24:56.164680Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:24:56.165366Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:24:56.166412Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:24:56.211111Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-10T11:24:56.211252Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:56.211321Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:24:56.211380Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:56.211478Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:24:56.211554Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-10T11:24:56.211702Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:56.214466Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-10T11:24:56.214563Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:24:56.224598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:56.224748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:56.224849Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:56.231896Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:24:56.239220Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:56.398901Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:24:56.402397Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:24:56.429920Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:56.502393Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq490y8pdwfcynqxw50dxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDEzNzgzNjEtZTZjMDk0YS1mNzYyNzkxOC1hZWY1MWU4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:24:56.505414Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:987:2786], serverId# [4:988:2787], sessionId# [0:0:0] 2025-04-10T11:24:56.505872Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-04-10T11:24:56.506183Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1744284296506043 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:56.506403Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-04-10T11:24:56.517842Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:56.517952Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:56.612159Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq49abch2bdjcmhcz8g849, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OGM2ZmI3YzYtZGQ2YjgxYzktYjUzNjVlZmMtZTdhOGRhNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
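In this datashard test every committed upsert is mirrored into the CDC stream as a pair of events: PersistChangeRecord writes the record (Order, Step, Kind: CdcDataChange, body size) into the shard's local storage, and EnqueueChangeRecords later hands the same Order to the change sender; the second pair — Order: 2 with a 40b OldImage body — follows below. A sketch that pairs the two by Order to spot records persisted but never enqueued; the regexes assume only this log's wording:

import re

PERSIST = re.compile(r"PersistChangeRecord: record: \{ Order: (\d+) .*? Body: (\d+)b")
ENQUEUE = re.compile(r"EnqueueChangeRecords:.*? Order: (\d+) ")

def pair_change_records(log_text):
    persisted = {order: int(size) for order, size in PERSIST.findall(log_text)}
    enqueued = set(ENQUEUE.findall(log_text))
    # A record persisted but never handed to the change sender would be stuck.
    return {order: (size, order in enqueued) for order, size in persisted.items()}

sample = ("PersistChangeRecord: record: { Order: 1 Group: 1 Step: 2000 "
          "Kind: CdcDataChange Body: 18b TableId: x } "
          "EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: p }")
print(pair_change_records(sample))
# {'1': (18, True)}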
2025-04-10T11:24:56.614631Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-04-10T11:24:56.614970Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1744284296614843 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-10T11:24:56.615180Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-04-10T11:24:56.627633Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-10T11:24:56.627736Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:24:56.629969Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2805], serverId# [4:1016:2806], sessionId# [0:0:0] 2025-04-10T11:24:56.637317Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2807], serverId# [4:1018:2808], sessionId# [0:0:0] >> YdbProxy::DescribePath >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingCommitFails |92.6%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: 2025-04-10T11:24:51.263647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:51.268728Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:51.269101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:51.269176Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:51.269216Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:51.269254Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:51.269320Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:51.269426Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:51.289189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:51.289398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:51.308639Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:51.315442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:51.315631Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:51.317182Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:51.317304Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:51.317375Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:51.317827Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:51.318204Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:51.318914Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:24:51.318965Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:51.319009Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:51.319454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:51.319592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:51.319630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:51.319760Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:51.319793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:51.319840Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:51.319882Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:51.319970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:51.319990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:51.320015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:51.320052Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:51.320110Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:51.320221Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:51.320499Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:51.320967Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-04-10T11:24:51.321638Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-04-10T11:24:51.321677Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-04-10T11:24:51.321719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:51.322377Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:51.322456Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-10T11:24:51.322495Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-10T11:24:51.322593Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:51.322631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:51.322666Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:51.322697Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:51.322729Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-10T11:24:51.322755Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-10T11:24:51.322796Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:51.322859Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:51.322920Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:51.323052Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:24:51.326643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:51.326943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:51.327223Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2210], now have 1 active actors on pipe 2025-04-10T11:24:51.327861Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2212], now have 1 active actors on pipe 2025-04-10T11:24:51.328914Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-04-10T11:24:51.329085Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-10T11:24:51.329146Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-10T11:24:51.329185Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-10T11:24:51.329251Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-10T11:24:51.329315Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-10T11:24:51.329619Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" Ydb ... 
rmAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.542654Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.543786Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.544896Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.546201Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.547502Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.548802Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.549875Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.551056Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.551532Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-04-10T11:24:57.553174Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.554306Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.555900Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.556918Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.557974Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.559051Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.560131Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.561201Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.562320Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.563402Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.564466Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.565567Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.566645Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.567758Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-10T11:24:57.568240Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 
count 1 size 172682 from pos 0 cbcount 1 2025-04-10T11:24:57.571021Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-04-10T11:24:57.610947Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:444:2420], now have 1 active actors on pipe 2025-04-10T11:24:57.611116Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:57.611185Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:57.611262Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-04-10T11:24:57.611394Z node 10 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-10T11:24:57.611547Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:57.611602Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:57.611658Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000002_00000_0000000001_00014, d0000000000_00000000000000000002_00000_0000000001_00014] 2025-04-10T11:24:57.611722Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000003_00000_0000000001_00014, d0000000000_00000000000000000003_00000_0000000001_00014] 2025-04-10T11:24:57.611764Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:57.611811Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:57.611864Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:57.611917Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:57.612005Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-04-10T11:24:57.612056Z node 10 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-04-10T11:24:57.612140Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:57.612193Z node 10 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-04-10T11:24:57.612229Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-04-10T11:24:57.615520Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:57.619722Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:456:2431], now have 1 active actors on pipe 2025-04-10T11:24:57.619897Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:57.619954Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:57.620085Z node 10 :PERSQUEUE INFO: new Cookie default|5759ec4-5adaa61c-dbb065e6-825f2ecd_14 generated for partition 0 topic 'topic' owner default 2025-04-10T11:24:57.620213Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T11:24:57.620304Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:57.620753Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:458:2433], now have 1 active actors on pipe 2025-04-10T11:24:57.620856Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:24:57.620897Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:24:57.620953Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-04-10T11:24:57.621059Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 2025-04-10T11:24:57.881095Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 15 2025-04-10T11:24:57.881355Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-04-10T11:24:57.882310Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-04-10T11:24:57.883331Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2103 2025-04-10T11:24:57.883640Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:57.883701Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:57.883748Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:57.883801Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:57.883844Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-04-10T11:24:57.883882Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000014_00000_0000000001_00000| 2025-04-10T11:24:57.883907Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:57.883943Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:57.883991Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:57.884598Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:57.884738Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 2025-04-10T11:24:57.889294Z node 10 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [10:134:2160] 2025-04-10T11:24:57.889457Z node 10 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-04-10T11:24:57.889558Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:57.889627Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:24:57.889711Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-04-10T11:24:57.890087Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:57.890666Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:469:2442], now have 1 active actors on pipe |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> Yq_1::DeleteQuery [GOOD] >> YdbProxy::MakeDirectory >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions >> YdbProxy::RemoveDirectory >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TPartitionTests::ConflictingSrcIdForTxWithHead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-04-10T11:24:29.905320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643890592640030:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:29.906136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 11:24:30.272471743 1648812 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:30.273190848 1648812 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:30.274877Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2025-04-10T11:24:30.909104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:31.240913Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24424 2025-04-10T11:24:31.251633Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2025-04-10T11:24:31.918603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.919153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:33.154309Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a6d/r3tmp/tmporkGnm/pdisk_1.dat 2025-04-10T11:24:33.487684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:24:33.518672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643907772509728:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.598308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643907772509728:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.681169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:33.681292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24424, node 1 2025-04-10T11:24:33.686257Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:33.687339Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:33.690567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.000276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:34.000306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:34.000390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:34.000561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:34.000696Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:34.256561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:34.897982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643890592640030:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.898063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 11:24:35.274911426 1649038 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:35.275108771 1649038 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:35.982633Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:24:35.982914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". 
Create session OK 2025-04-10T11:24:35.982954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.982963Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.987307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T11:24:35.987371Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.987379Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.988384Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T11:24:35.988407Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.988412Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.989641Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T11:24:35.989653Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.989657Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.990553Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T11:24:35.990575Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T11:24:35.990607Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T11:24:35.990622Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.990628Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.994980Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T11:24:35.995017Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.995024Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.996971Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-10T11:24:35.996995Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.997000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.997987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T11:24:35.998009Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.998014Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.999441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T11:24:35.999468Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.999474Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:36.000394Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-04-10T11:24:36.000406Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:36.000411Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:36.001464Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T11:24:36.001476Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:36.001481Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:36.003468Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-10T11:24:36.003487Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:36.003492Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:36.004342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T11:24:36.004368Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:36.004372Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:36.017724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:36.019498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: ... 1, peer: [4:7491644010183537422:2984] 2025-04-10T11:24:56.449306Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:56.449329Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:56.449363Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-10T11:24:56.449423Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491644010183537421 RawX2: 4503616807242663 } } DstEndpoint { ActorId { RawX1: 7491644010183537422 RawX2: 4503616807242664 } } InMemory: true DstStageId: 1 } 2025-04-10T11:24:56.449438Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:56.449451Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. 
returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:56.449472Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:56.449486Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-10T11:24:56.449501Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-10T11:24:56.449744Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-10T11:24:56.449765Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. Taken 0 locks 2025-04-10T11:24:56.449779Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. new data for read #0 seqno = 1 finished = 1 2025-04-10T11:24:56.449798Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-10T11:24:56.449819Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:56.449834Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-10T11:24:56.449851Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-10T11:24:56.449877Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-10T11:24:56.449892Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. returned 0 rows; processed 0 rows 2025-04-10T11:24:56.449926Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. dropping batch for read #0 2025-04-10T11:24:56.449942Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. effective maxinflight 1024 sorted 0 2025-04-10T11:24:56.449954Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. 
Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-10T11:24:56.449968Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1, CA Id [4:7491644010183537421:2983]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-10T11:24:56.450027Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:24:56.450056Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537422:2984], TxId: 281474976715799, task: 2. Ctx: { TraceId : 01jrfq48yq4agwqpg6db5e05e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-10T11:24:56.450092Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 2. Finish input channelId: 1, from: [4:7491644010183537421:2983] 2025-04-10T11:24:56.450136Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537422:2984], TxId: 281474976715799, task: 2. Ctx: { TraceId : 01jrfq48yq4agwqpg6db5e05e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:56.450137Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:24:56.450174Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:56.450196Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1. Tasks execution finished 2025-04-10T11:24:56.450209Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537421:2983], TxId: 281474976715799, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. TraceId : 01jrfq48yq4agwqpg6db5e05e4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:24:56.450284Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537422:2984], TxId: 281474976715799, task: 2. Ctx: { TraceId : 01jrfq48yq4agwqpg6db5e05e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:24:56.450315Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 1. pass away 2025-04-10T11:24:56.450427Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715799;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:24:56.450499Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537422:2984], TxId: 281474976715799, task: 2. Ctx: { TraceId : 01jrfq48yq4agwqpg6db5e05e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:24:56.450545Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T11:24:56.450556Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 2. Tasks execution finished 2025-04-10T11:24:56.450569Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644010183537422:2984], TxId: 281474976715799, task: 2. Ctx: { TraceId : 01jrfq48yq4agwqpg6db5e05e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjNlNmM2YmItY2I0MzcxNjAtZGJjNzQ5YjQtNmJkMDdhMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:24:56.450649Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715799, task: 2. pass away 2025-04-10T11:24:56.450743Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715799;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:24:56.473360Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utquedikl9vb9puuaqji" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:662: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } E0410 11:24:56.700967379 1653349 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:56.701126770 1653349 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:56.786692Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:9307: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:9307 2025-04-10T11:24:57.179766Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:9307: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:9307 >> Yq_1::Basic_TaggedLiteral [GOOD] >> YdbProxy::DropTable >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::AlterTable [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TSyncNeighborsTests::SerDes3 [GOOD] >> TPartitionTests::NonConflictingCommitsBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-04-10T11:24:29.912396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643893427790787:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:29.913358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 11:24:30.212299606 1648820 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:30.212422054 1648820 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:30.915431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:31.243122Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22811: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22811 } ] 2025-04-10T11:24:31.249263Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22811: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22811 2025-04-10T11:24:31.915918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.933723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.978705Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22811: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22811 } ] 2025-04-10T11:24:33.115169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643910607660338:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.115281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:24:33.173966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643910607660338:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a58/r3tmp/tmp9v8cty/pdisk_1.dat 2025-04-10T11:24:33.343735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643910607660338:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 22811, node 1 2025-04-10T11:24:33.802997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:33.806716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:33.806745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:33.806767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:33.806919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-10T11:24:34.134892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.135052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.148247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:34.158848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:34.913312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643893427790787:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.913394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:35.202583Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T11:24:35.202630Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.202639Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.203373Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-10T11:24:35.203403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.203416Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.211225Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T11:24:35.211257Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T11:24:35.211592Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T11:24:35.211623Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.211630Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.212508Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T11:24:35.212557Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.212566Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" E0410 11:24:35.218970656 1649027 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:35.219116540 1649027 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:35.212836Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T11:24:35.212871Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.212877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.226740Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T11:24:35.226780Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.226788Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.232860Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T11:24:35.232895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.232904Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.238862Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-10T11:24:35.238895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.238902Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.238941Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T11:24:35.238955Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.238961Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.239976Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T11:24:35.241655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.241675Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.244974Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-10T11:24:35.245003Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.245011Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.245295Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T11:24:35.245318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.245324Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.259538Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T11:24:35.259597Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.259607Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.268648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.270778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.272197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.275109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at ... ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.517651Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.517729Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.517813Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.517923Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.517962Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.518016Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.518052Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.518824Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519054Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519112Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519318Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519390Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519549Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519629Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.519996Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.520057Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.520214Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.520561Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.520733Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:24:59.520963Z node 4 
>> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err:
2025-04-10T11:24:58.366965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644017726112488:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.367038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013f4/r3tmp/tmpMDd7Ix/pdisk_1.dat 2025-04-10T11:24:58.773740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.830592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.830716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.833054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18637 TServer::EnableGrpc on GrpcPort 29587, node 1 2025-04-10T11:24:59.106558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.106588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.106599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.106723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.620259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:01.179826Z node 1 :TX_PROXY ERROR: Actor# [1:7491644030611015041:2305] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:25:01.192020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:01.309050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:01.329124Z node 1 :TX_PROXY ERROR: Actor# [1:7491644030611015157:2384] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2025-04-10T11:24:53.852872Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:53.852973Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:53.870774Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:179:2194] 2025-04-10T11:24:53.872214Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:53.000000Z 2025-04-10T11:24:53.872256Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:54.260454Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 
2025-04-10T11:24:54.698479Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:54.698561Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:54.718814Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-04-10T11:24:54.720717Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:54.000000Z 2025-04-10T11:24:54.720791Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] 2025-04-10T11:24:55.487145Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:55.487214Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:55.501345Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] 2025-04-10T11:24:55.502103Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] 2025-04-10T11:24:55.502659Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-10T11:24:55.502729Z node 3 :PERSQUEUE INFO: new Cookie owner1|e5875fd3-71de5bd4-93cc2dd4-11531828_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-04-10T11:24:55.813034Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-10T11:24:56.292759Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:56.292826Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:56.306806Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:179:2194] 2025-04-10T11:24:56.310048Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:56.310102Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:179:2194] 2025-04-10T11:24:56.616998Z node 4 :PERSQUEUE INFO: new Cookie owner1|e30a770a-74bd3a96-e79ea4ca-d2cc7194_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. 
Body keys: 0, head: 10, src id info: 1 2025-04-10T11:24:59.825195Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:59.825298Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:59.842899Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:59.843276Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:59.843657Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:176:2191] 2025-04-10T11:24:59.844746Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:59.844971Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:59.845171Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:59.846524Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:59.847079Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:59.847192Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:59.847259Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:59.847321Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:59.000000Z 2025-04-10T11:24:59.847368Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:59.847426Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:176:2191] 2025-04-10T11:24:59.847507Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:59.847581Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:24:59.847675Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:59.847732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:59.847783Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:59.847819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:24:59.848236Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-10T11:24:59.848446Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-10T11:24:59.848506Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 2025-04-10T11:25:00.154906Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 session is set to 0 (startOffset 0) session session-client-0 2025-04-10T11:25:00.155118Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:00.155178Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:00.155230Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:00.155302Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:00.155342Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:25:00.155376Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:25:00.155413Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:00.155461Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\370\335\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } 2025-04-10T11:25:00.188014Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 2025-04-10T11:25:01.129575Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:25:01.129729Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 3 2025-04-10T11:25:01.129776Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-10T11:25:02.392359Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 6 Wait batch completion Wait kv request 2025-04-10T11:25:02.392796Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-04-10T11:25:02.392854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-04-10T11:25:02.392910Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-04-10T11:25:02.392958Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-04-10T11:25:02.392986Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-04-10T11:25:02.393032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-04-10T11:25:02.413072Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-04-10T11:25:02.413405Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:02.413453Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:02.413494Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:02.413534Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:02.413566Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:02.413607Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-2 2025-04-10T11:25:02.413644Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-2 2025-04-10T11:25:02.413669Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-10T11:25:02.413688Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-10T11:25:02.413707Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:25:02.413726Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:25:02.413753Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:02.413804Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 3 2025-04-10T11:25:02.434615Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> Yq_1::DescribeQuery [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogPagingAfter >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2025-04-10T11:24:53.017226Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:53.017314Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:53.041645Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-04-10T11:24:53.043517Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-10T11:24:53.000000Z 2025-04-10T11:24:53.043581Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\257\310\373\3412" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-10T11:24:53.791712Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:53.791801Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:54.180627Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:54.180752Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:54.194809Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:54.194996Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:54.195190Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:176:2191] 2025-04-10T11:24:54.195991Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:54.196178Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:54.196361Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:54.196566Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:54.196975Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:54.197054Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:54.197086Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:54.197122Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:54.000000Z 2025-04-10T11:24:54.197153Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-04-10T11:24:54.197190Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:176:2191] 2025-04-10T11:24:54.197233Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:54.197290Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 2025-04-10T11:24:55.465727Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:24:55.465839Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-10T11:24:56.699876Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:56.699966Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:56.699999Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Got batch complete: 2 Wait batch completion Wait kv request 2025-04-10T11:24:56.927029Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-10T11:24:56.927109Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-10T11:24:56.927199Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:56.927247Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:24:56.927309Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:24:56.927355Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:56.927398Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:24:56.947444Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:24:56.947728Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:56.947779Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:56.947817Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- 
write ----------------- 2025-04-10T11:24:56.947858Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-04-10T11:24:56.947903Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:24:56.947927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:56.947948Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:56.947969Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:56.947999Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:56.948040Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 2 2025-04-10T11:24:56.968824Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2025-04-10T11:24:57.407844Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:57.407926Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:57.423273Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:57.423459Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:57.423695Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-04-10T11:24:57.424425Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:57.424600Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:57.424775Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:24:57.424954Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:57.425378Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-10T11:24:57.425470Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:57.425504Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:57.425551Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:57.000000Z 2025-04-10T11:24:57.425603Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-04-10T11:24:57.425638Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-04-10T11:24:57.425695Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-10T11:24:57.425741Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:57.726403Z node 4 :PERSQUEUE INFO: new Cookie src1|81485f39-ae42da39-e77e2f73-15739a9_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-10T11:24:57.726548Z node 4 :PERSQU ... PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:25:00.865115Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:25:00.865306Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:25:00.865629Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-10T11:25:00.865714Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:00.865753Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:00.865799Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:25:00.000000Z 2025-04-10T11:25:00.865835Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:25:00.865880Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:176:2191] 2025-04-10T11:25:00.865938Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-10T11:25:00.865981Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:01.172748Z node 5 :PERSQUEUE INFO: new Cookie src1|27e160c2-1da194d5-2cbf15e3-57734b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-10T11:25:01.172861Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 2025-04-10T11:25:02.158828Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:25:03.452197Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Wait 1st KV request Wait kv request 2025-04-10T11:25:03.452503Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-10T11:25:03.452564Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-10T11:25:03.452635Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2025-04-10T11:25:03.452699Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-04-10T11:25:03.452834Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-04-10T11:25:03.453686Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 85 count 1 nextOffset 2 batches 1 2025-04-10T11:25:03.454387Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 71 WTime 10138 2025-04-10T11:25:03.454562Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:03.454598Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:03.454631Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:25:03.454664Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:03.454700Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:25:03.454733Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-04-10T11:25:03.454760Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:03.454779Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:03.454795Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:03.454827Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:03.454865Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 0 
2025-04-10T11:25:03.475809Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-04-10T11:25:03.476124Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 20 2025-04-10T11:25:03.476194Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 30 2025-04-10T11:25:03.476311Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-04-10T11:25:03.476774Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 11 partNo 0 result is x0000000000_00000000000000000001_00000_0000000001_00000 size 71 2025-04-10T11:25:03.476855Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000001_00000_0000000001_00000 new key d0000000000_00000000000000000001_00000_0000000001_00000 size 71 WTime 10238 2025-04-10T11:25:03.481727Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 1 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2025-04-10T11:25:03.482468Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 2 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000| size 70 WTime 10238 2025-04-10T11:25:03.482658Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:03.482707Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:03.482754Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:25:03.482798Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:03.482841Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000 2025-04-10T11:25:03.482880Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:25:03.482909Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000040_00000_0000000001_00000| 2025-04-10T11:25:03.482936Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:03.482972Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:03.483019Z node 5 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 3 Got KV request Got KV request Wait batch completion Wait 2nd KV request Wait kv request 2025-04-10T11:25:03.504027Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:25:03.504121Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:03.504206Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 2 is already written 2025-04-10T11:25:03.504261Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:03.504302Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 2 is already written 2025-04-10T11:25:03.504344Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:03.504391Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2025-04-10T11:25:03.504707Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:03.504787Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:03.504841Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000001_00000_0000000001_00000|, d0000000000_00000000000000000001_00000_0000000001_00000|] 2025-04-10T11:25:03.504895Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:03.504953Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:03.505003Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:03.505068Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> YdbProxy::DescribeConsumer [GOOD] >> TPartitionTests::ConflictingCommitFails [GOOD] >> YdbProxy::DropTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-04-10T11:24:29.879203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643890310598704:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:29.880880Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 11:24:30.243122894 1648794 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:30.243270281 1648794 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:30.300412Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26569: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26569 } ] 2025-04-10T11:24:30.883622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:31.281552Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26569: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26569 } ] 2025-04-10T11:24:31.311116Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26569: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26569 2025-04-10T11:24:31.884317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.884842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.973345Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26569: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26569 } ] 2025-04-10T11:24:33.525329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:24:33.535534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643907490468242:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a7e/r3tmp/tmpYQXPL0/pdisk_1.dat 2025-04-10T11:24:33.630130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643907490468242:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.812461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643907490468242:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 26569, node 1 2025-04-10T11:24:33.826138Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:33.826296Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:33.906480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:33.906572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:33.909328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:34.234697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:34.369410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:34.371030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:34.371053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:34.371061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:34.371188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:34.879305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643890310598704:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.879409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:35.200645Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:24:35.200724Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T11:24:35.200744Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.200752Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.205980Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-04-10T11:24:35.206011Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.206018Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.206720Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T11:24:35.206751Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.206757Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.207038Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T11:24:35.207060Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.207074Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.207650Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T11:24:35.207680Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.207686Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.208165Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T11:24:35.208175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.208180Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.208625Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T11:24:35.208637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.208642Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.209293Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T11:24:35.209314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.209325Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.209465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T11:24:35.209483Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.209487Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.210230Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T11:24:35.210251Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.210262Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.210301Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-10T11:24:35.210311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.210316Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.222192Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-10T11:24:35.222230Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.222236Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.225174Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T11:24:35.225208Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.225214Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.239119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T11:24:35.239161Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": E0410 11:24:35.243892742 1649036 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:35.244038914 1649036 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:35.250583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28 ... 4029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491644029937177069 RawX2: 4503616807242614 } } DstEndpoint { ActorId { RawX1: 7491644029937177070 RawX2: 4503616807242615 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491644029937177070 RawX2: 4503616807242615 } } DstEndpoint { ActorId { RawX1: 7491644029937177065 RawX2: 4503616807242068 } } InMemory: true } 2025-04-10T11:25:01.154879Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7491644029937177069:2934] 2025-04-10T11:25:01.154943Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-10T11:25:01.155060Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491644029937177069 RawX2: 4503616807242614 } } DstEndpoint { ActorId { RawX1: 7491644029937177070 RawX2: 4503616807242615 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7491644029937177070 RawX2: 4503616807242615 } } DstEndpoint { ActorId { RawX1: 7491644029937177065 RawX2: 4503616807242068 } } InMemory: true } 2025-04-10T11:25:01.155098Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.155158Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-10T11:25:01.155178Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. Taken 0 locks 2025-04-10T11:25:01.155193Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. new data for read #0 seqno = 1 finished = 1 2025-04-10T11:25:01.155206Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-10T11:25:01.155219Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.155238Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-10T11:25:01.155256Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-10T11:25:01.155278Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 1 freeSpace: 8386364 2025-04-10T11:25:01.155291Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. returned 1 rows; processed 1 rows 2025-04-10T11:25:01.155328Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. dropping batch for read #0 2025-04-10T11:25:01.155355Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. effective maxinflight 1024 sorted 0 2025-04-10T11:25:01.155365Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. Scheduled table scans, in flight: 0 shards. 
pending shards to read: 0, 2025-04-10T11:25:01.155379Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7491644029937177069:2934]. returned async data processed rows 1 left freeSpace 8386364 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-10T11:25:01.155579Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:25:01.155605Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.155621Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-10T11:25:01.155642Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-10T11:25:01.155665Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Finish input channelId: 1, from: [4:7491644029937177069:2934] 2025-04-10T11:25:01.155716Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-10T11:25:01.155727Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.155743Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.155761Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. Tasks execution finished 2025-04-10T11:25:01.155773Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177069:2934], TxId: 281474976715783, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-04-10T11:25:01.155867Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. pass away 2025-04-10T11:25:01.155966Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715783;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:25:01.155975Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-10T11:25:01.156222Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-10T11:25:01.156257Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-10T11:25:01.156267Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Tasks execution finished 2025-04-10T11:25:01.156278Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7491644029937177070:2935], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jrfq4dg62zjtd6p29pf3mpfd. SessionId : ydb://session/3?node_id=4&id=M2MxNmNhZGEtYTc1ODJkZDctMWU4M2RlNWMtYzgyZWJlZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-10T11:25:01.156341Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. pass away 2025-04-10T11:25:01.156394Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715783;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:25:01.213355Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:19873: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:19873 2025-04-10T11:25:02.209293Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:19873: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:19873 >> TPartitionTests::ConflictingCommitProccesAfterRollback >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-04-10T11:25:05.163356Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-04-10T11:25:05.163525Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:6:2053], token sent, active: 1, waiting: 0 >> YdbProxy::CreateCdcStream [GOOD] >> YdbProxy::DescribeTable [GOOD] >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> YdbProxy::OAuthToken [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] |92.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-04-10T11:24:58.377829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644015113529947:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.377888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001406/r3tmp/tmpsEtzw8/pdisk_1.dat 2025-04-10T11:24:58.845997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.850373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.850501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.853326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17068 TServer::EnableGrpc on GrpcPort 30546, node 1 2025-04-10T11:24:59.178163Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.178192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.178198Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.178296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.635576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:59.653470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:02.029283Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644032043474540:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:02.029385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001406/r3tmp/tmpIZoDg0/pdisk_1.dat 2025-04-10T11:25:02.171195Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:02.181577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:02.181632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:02.182822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14366 TServer::EnableGrpc on GrpcPort 25796, node 2 2025-04-10T11:25:02.405308Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:02.405328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:02.405334Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:02.405426Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14366 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.714172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:02.870988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:02.876825Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-10T11:25:02.876849Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-10T11:25:02.878013Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-10T11:25:02.878064Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-10T11:25:02.896539Z node 2 :TX_PROXY ERROR: Actor# [2:7491644032043475320:2396] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-04-10T11:24:58.367258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644017257818559:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.367303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001414/r3tmp/tmpZskVsS/pdisk_1.dat 2025-04-10T11:24:58.775815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.820178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.820325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.826973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29489 TServer::EnableGrpc on GrpcPort 13256, node 1 2025-04-10T11:24:59.106585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-10T11:24:59.106611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.106618Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.106754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.655540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:59.704471Z node 1 :TX_PROXY ERROR: Actor# [1:7491644021552786485:2297] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-04-10T11:25:01.842779Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644027913803217:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:01.842903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001414/r3tmp/tmpWWUEl9/pdisk_1.dat 2025-04-10T11:25:01.945933Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:01.989876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:01.989970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:01.991780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29156 TServer::EnableGrpc on GrpcPort 16491, node 2 2025-04-10T11:25:02.211151Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:02.211181Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:02.211188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:02.211310Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.501217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-04-10T11:24:58.378716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644015881570608:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.379030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ea/r3tmp/tmpVVUslv/pdisk_1.dat 2025-04-10T11:24:58.792129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.799594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.799735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.804086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65465 TServer::EnableGrpc on GrpcPort 30738, node 1 2025-04-10T11:24:59.110862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.110892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.110902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.111038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65465 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.632557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:01.304869Z node 1 :TX_PROXY ERROR: Actor# [1:7491644028766473170:2305] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-04-10T11:25:01.316428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:01.451412Z node 1 :TX_PROXY ERROR: Actor# [1:7491644028766473260:2366] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:02.013725Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644035447723675:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:02.013771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ea/r3tmp/tmpQu6Vkr/pdisk_1.dat 2025-04-10T11:25:02.101151Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:02.153597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:02.153682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:02.158534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2140 TServer::EnableGrpc on GrpcPort 6746, node 2 2025-04-10T11:25:02.318941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:02.318965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:02.318974Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:02.319103Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.608424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:04.874358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:04.992342Z node 2 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][2:7491644044037659115:2344] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T11:25:05.052095Z node 2 :TX_PROXY ERROR: Actor# [2:7491644048332626470:2448] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-04-10T11:24:59.323504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644022419587527:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:59.323552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013de/r3tmp/tmp9aARke/pdisk_1.dat 2025-04-10T11:24:59.763872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:59.796459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:59.796581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:59.798616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10153 TServer::EnableGrpc on GrpcPort 2636, node 1 2025-04-10T11:25:00.014765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:00.014797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-10T11:25:00.014822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:00.014956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:00.377875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:02.709297Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644034146674760:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:02.709643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013de/r3tmp/tmpVpFn6b/pdisk_1.dat 2025-04-10T11:25:02.852580Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:02.870846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:02.870932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:02.872380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6351 TServer::EnableGrpc on GrpcPort 27459, node 2 2025-04-10T11:25:03.126534Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:03.126592Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:03.126602Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:03.126709Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:03.404867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> YdbProxy::StaticCreds [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-04-10T11:24:58.567014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644015908881877:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.567084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013e7/r3tmp/tmprOEfBt/pdisk_1.dat 2025-04-10T11:24:58.996902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:59.030136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:59.030231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:59.032041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23547 TServer::EnableGrpc on GrpcPort 7546, node 1 2025-04-10T11:24:59.220618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.220642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.220654Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.220837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.617348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:02.067836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644035931995278:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:02.068098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013e7/r3tmp/tmp6MLvPy/pdisk_1.dat 2025-04-10T11:25:02.198655Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:02.228177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:02.228257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:02.234992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16919 TServer::EnableGrpc on GrpcPort 31939, node 2 2025-04-10T11:25:02.445963Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:02.445993Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:02.446001Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:02.446127Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.748498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:05.002822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> YdbProxy::DescribeTopic [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> ResultFormatter::Pg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-04-10T11:25:00.285870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644023445093081:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:00.285993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013d0/r3tmp/tmppHtPMV/pdisk_1.dat 2025-04-10T11:25:00.606329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:00.674714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:00.674860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:00.676954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8789 TServer::EnableGrpc on GrpcPort 2422, node 1 2025-04-10T11:25:00.863648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:00.863679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:00.863690Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-10T11:25:00.863853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:01.208508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:01.273871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:01.287480Z node 1 :TX_PROXY ERROR: Actor# [1:7491644027740061030:2321] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:25:03.515259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644036888636637:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:03.515327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013d0/r3tmp/tmpPfpeWs/pdisk_1.dat 2025-04-10T11:25:03.599648Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:03.645180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:03.645303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:03.647228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62593 TServer::EnableGrpc on GrpcPort 5502, node 2 2025-04-10T11:25:03.815557Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:03.815587Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:03.815620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:03.815759Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62593 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:04.059583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:04.108168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284304105 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284304105 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-04-10T11:24:47.470866Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:47.470961Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:47.495030Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:47.495310Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:47.495809Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-04-10T11:24:47.496653Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:47.496809Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:47.496961Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request Got KV request 2025-04-10T11:24:47.497216Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:47.497594Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-10T11:24:47.497695Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:47.497730Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:47.497780Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:47.000000Z 2025-04-10T11:24:47.497829Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:47.497866Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-04-10T11:24:47.497933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-10T11:24:47.497990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:47.824883Z node 1 :PERSQUEUE INFO: new Cookie src1|691b1df5-4c45cb3d-c9db1c12-210ca224_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-10T11:24:47.825068Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 2025-04-10T11:24:47.825345Z node 1 :PERSQUEUE INFO: new Cookie src4|e01526e1-ecdfc62b-8c55ef45-79a10111_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-04-10T11:24:47.825434Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 2025-04-10T11:24:48.791825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:24:48.791911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-10T11:24:48.791948Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-04-10T11:24:48.792082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-10T11:24:50.043873Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:50.044031Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:50.044090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:50.044136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:24:50.044176Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-04-10T11:24:50.044354Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-10T11:24:50.044414Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-10T11:24:50.044479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:50.044529Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 Got batch complete: 1 Wait batch completion Wait batch completion 2025-04-10T11:24:50.273194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-04-10T11:24:50.273285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-04-10T11:24:50.273345Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:50.273406Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-04-10T11:24:50.273623Z node 1 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 7 partNo 0 2025-04-10T11:24:50.274636Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 7 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 84 count 1 nextOffset 61 batches 1 2025-04-10T11:24:50.274717Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 8 partNo 0 2025-04-10T11:24:50.274765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 136 count 2 nextOffset 62 batches 1 2025-04-10T11:24:50.274803Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 9 partNo 0 2025-04-10T11:24:50.274838Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 188 count 3 nextOffset 63 batches 1 2025-04-10T11:24:50.274873Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-04-10T11:24:50.274916Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 240 count 4 nextOffset 64 batches 1 2025-04-10T11:24:50.274974Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-04-10T11:24:50.275010Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 292 count 5 nextOffset 65 batches 1 2025-04-10T11:24:50.275066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 12 partNo 0 2025-04-10T11:24:50.275135Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 344 count 6 nextOffset 66 batches 1 2025-04-10T11:24:50.275175Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 1 partNo 0 2025-04-10T11:24:50.276463Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src4' seqNo 1 partNo 0 result is 
x0000000000_00000000000000000060_00000_0000000006_00000 size 211 2025-04-10T11:24:50.276551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000060_00000_0000000006_00000 new key d0000000000_00000000000000000060_00000_0000000006_00000 size 211 WTime 11140 2025-04-10T11:24:50.277464Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 70 PartNo 0 PackedSize 84 count 1 nextOffset 71 batches 1 2025-04-10T11:24:50.277551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 2 partNo 0 2025-04-10T11:24:50.277611Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 136 count 2 nextOffset 72 batches 1 2025-04-10T11:24:50.278245Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,2 HeadOffset 1 endOffset 1 curOffset 72 d0000000000_00000000000000000070_00000_0000000002_00000| size 121 WTime 11140 2025-04-10T11:24:50.278468Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:50.278515Z node 1 :PERSQUEUE DEBUG: [PQ: 7205759403792 ... c' partition 0 user client-1 offset is set to 3 (startOffset 0) session session-client-1 2025-04-10T11:25:04.102606Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:04.102668Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:04.102719Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:04.102773Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:04.102814Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:04.102831Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-10T11:25:04.102850Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-10T11:25:04.102878Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:04.102919Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait batch completion Wait kv request 2025-04-10T11:25:04.330358Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:25:04.330460Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'client-1' Bad request (gap) Offset 3 Begin 0 Got batch complete: 1 Wait batch completion Wait kv request 2025-04-10T11:25:04.330776Z node 4 :PERSQUEUE 
DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:04.330822Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:04.330859Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:04.330909Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:04.330936Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:04.330957Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:04.330993Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 2025-04-10T11:25:04.331189Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-04-10T11:25:04.351822Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:25:05.305396Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 3 Wait kv request 2025-04-10T11:25:05.305589Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:25:05.305638Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:25:05.305698Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-04-10T11:25:05.305848Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:05.305885Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:05.305920Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:05.305955Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:05.305981Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:05.305997Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-04-10T11:25:05.306011Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-04-10T11:25:05.306034Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:05.306064Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 10 2025-04-10T11:25:05.316962Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 
11 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-04-10T11:25:05.853460Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:05.853565Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:05.876904Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:25:05.877299Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:05.877651Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:176:2191] 2025-04-10T11:25:05.878838Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:25:05.879060Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:25:05.879241Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:25:05.880065Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:25:05.880609Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:25:05.880720Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:05.880768Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:05.880827Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:25:05.000000Z 2025-04-10T11:25:05.880873Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-04-10T11:25:05.880928Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:176:2191] 2025-04-10T11:25:05.881003Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:25:05.881068Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:25:05.881158Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:25:05.881212Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:25:05.881258Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-10T11:25:05.881639Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-04-10T11:25:05.881849Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-04-10T11:25:05.881909Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 2025-04-10T11:25:07.218410Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:25:07.218560Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Wait batch completion Wait kv request 2025-04-10T11:25:07.437564Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2025-04-10T11:25:07.437658Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-04-10T11:25:07.437917Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:07.437970Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:07.438028Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:07.438076Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:07.438115Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:07.438143Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-04-10T11:25:07.438192Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-04-10T11:25:07.438236Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:07.438288Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 1 2025-04-10T11:25:07.469551Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait for no tx committed |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-04-10T11:25:01.223849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644031070089733:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:01.223932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013c1/r3tmp/tmpumsW5M/pdisk_1.dat 2025-04-10T11:25:01.537672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:01.599325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:01.599453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:01.601719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30610 TServer::EnableGrpc on GrpcPort 31000, node 1 2025-04-10T11:25:01.770030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:01.770054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:01.770059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:01.770211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.156444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:02.178399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:03.820632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:03.935797Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:25:03.937922Z node 1 :TX_PROXY ERROR: Actor# [1:7491644039660025114:2395] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:25:04.528813Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644044319979382:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:04.528926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013c1/r3tmp/tmpeHEbN0/pdisk_1.dat 2025-04-10T11:25:04.638758Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:04.678205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:04.678294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:04.682989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11325 TServer::EnableGrpc on GrpcPort 61141, node 2 2025-04-10T11:25:04.863025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:04.863049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:04.863059Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:04.863194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:05.161752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> YdbProxy::AlterTopic [GOOD] |92.7%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-04-10T11:24:58.383645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644016482029154:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.384957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013f8/r3tmp/tmpab1Y11/pdisk_1.dat 2025-04-10T11:24:58.764718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.787511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.787603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.812903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1689 TServer::EnableGrpc on GrpcPort 18044, node 1 2025-04-10T11:24:59.106072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.106097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.106103Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.106226Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.615203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:01.026510Z node 1 :TX_PROXY ERROR: Actor# [1:7491644029366931710:2305] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:25:01.041574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:01.783727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644030122525278:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:01.783787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013f8/r3tmp/tmpSy3Lns/pdisk_1.dat 2025-04-10T11:25:01.895125Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:01.923493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:01.923585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:01.926412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6478 TServer::EnableGrpc on GrpcPort 10291, node 2 2025-04-10T11:25:02.130955Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:02.130983Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:02.130991Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:02.131132Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:02.485468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:02.491975Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:04.902015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:04.975346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:05.665518Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491644045463219385:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:05.665642Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013f8/r3tmp/tmp6Yk2Rf/pdisk_1.dat 2025-04-10T11:25:05.827492Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:05.846109Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:05.846228Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:05.847873Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15353 TServer::EnableGrpc on GrpcPort 17986, node 3 2025-04-10T11:25:06.094416Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:06.094438Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:06.094447Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:06.094698Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:06.416955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:06.424934Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:06.629220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:06.697382Z node 3 :TX_PROXY ERROR: Actor# [3:7491644049758187476:2396] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> TPartitionTests::UserActCount [GOOD] >> KqpYql::InsertCVList+useSink >> KqpScripting::StreamExecuteYqlScriptData >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> KqpPragma::OrderedColumns >> KqpYql::TableConcat >> KqpScripting::StreamExecuteYqlScriptScan >> KqpYql::UuidPrimaryKeyDisabled >> KqpYql::BinaryJsonOffsetBound >> TPartitionTests::TooManyImmediateTxs >> KqpYql::EvaluateIf >> KqpYql::EvaluateExpr2 >> KqpScripting::LimitOnShard >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpScripting::ScriptExplainCreatedTable >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> KqpScripting::ScanQueryInvalid >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-04-10T11:24:03.065547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643782042444939:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:03.065611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmp0jDnOb/pdisk_1.dat 2025-04-10T11:24:03.444989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:03.445078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3739, node 1 2025-04-10T11:24:03.448188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:03.451651Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:03.458533Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:03.489391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:03.501873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-04-10T11:24:03.501902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:03.501912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:03.502069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:03.780292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:03.857044Z node 1 :TICKET_PARSER DEBUG: Ticket F07EB0232EEE43AAD0B07A71E431B04367F77510073E30EF91CA254D82FF41AA (ipv6:[::1]:49000) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:24:03.962478Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:49006) has now valid token of root@builtin 2025-04-10T11:24:04.031112Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:04.031150Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:04.031173Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:04.031218Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:07.542880Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643795843333684:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:07.542949Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmpZYl3LZ/pdisk_1.dat 2025-04-10T11:24:07.923425Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:07.953840Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:07.954047Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:07.959130Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23594, node 4 2025-04-10T11:24:08.157693Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T11:24:08.157716Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:08.157722Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:08.157840Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:08.464930Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:08.576744Z node 4 :TICKET_PARSER DEBUG: Ticket F07EB0232EEE43AAD0B07A71E431B04367F77510073E30EF91CA254D82FF41AA (ipv6:[::1]:54572) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:24:08.703176Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54578) has now valid token of root@builtin 2025-04-10T11:24:08.794616Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:08.794656Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:08.794666Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:08.794710Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:12.742336Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643820420455624:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:12.742548Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmp4Elt11/pdisk_1.dat 2025-04-10T11:24:12.964148Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:13.013621Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:13.013755Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:13.019019Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5044, node 7 2025-04-10T11:24:13.112833Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-10T11:24:13.112868Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:13.112878Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:13.113036Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:13.456323Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:13.563780Z node 7 :TICKET_PARSER DEBUG: Ticket 0574DF2196518261760E4B20DD813E15FABE72CFB0B8A56BE9F2C0C569844F06 (ipv6:[::1]:38502) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:24:13.642001Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38524) has now valid token of root@builtin 2025-04-10T11:24:13.740543Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-10T11:24:13.740573Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:24:13.740583Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:24:13.740623Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-10T11:24:18.165321Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491643847192920962:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:18.165608Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmpjjTCFq/pdisk_1.dat 2025-04-10T11:24:18.419330Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:18.464807Z node 10 :HIVE WARN: HIVE#72057594037968897 Node ... nt_certificate:certificate verify failed. E0410 11:24:41.454353912 1652193 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:41.475357365 1652541 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-04-10T11:24:45.949838Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7491643962765431100:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.949930Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmpqmstYz/pdisk_1.dat 2025-04-10T11:24:46.090011Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:46.136255Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:46.136376Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:46.139526Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18411, node 25 2025-04-10T11:24:46.205828Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:46.205852Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:46.205863Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:46.206035Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:46.586127Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:50.950665Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7491643962765431100:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:50.950786Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 11:24:56.673744075 1655773 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.692684933 1655773 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0410 11:24:56.723189481 1655773 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.741956810 1655772 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.775475039 1661369 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.792319184 1661369 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.819192731 1655935 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.835980262 1655772 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.866819063 1655935 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.884047333 1655935 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.915480351 1661374 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:24:56.935392857 1661374 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-04-10T11:24:58.538178Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7491644017374814933:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.538243Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001be8/r3tmp/tmpTfqYEg/pdisk_1.dat 2025-04-10T11:24:58.797685Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30181, node 28 2025-04-10T11:24:58.877191Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.877341Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.905016Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:58.934771Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:58.934800Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:58.934811Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:58.935004Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.424271Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:03.538451Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7491644017374814933:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:03.538616Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 11:25:09.531686000 1662285 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.550496881 1662285 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0410 11:25:09.578582448 1662285 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.595218449 1662285 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.626771458 1666784 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.642712684 1661898 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.668628307 1661898 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.684584741 1661898 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.710575210 1666820 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.727373388 1666820 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.751532688 1666820 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0410 11:25:09.767768715 1666819 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
>> KqpYql::TestUuidDefaultColumn >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD] Test command err: 2025-04-10T11:24:40.071297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643939116496324:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.071445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.245453Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d1f/r3tmp/tmp2KdGBL/pdisk_1.dat 2025-04-10T11:24:40.470246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2787, node 1 2025-04-10T11:24:40.486598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.486746Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.486765Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.489872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.491860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.677511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000d1f/r3tmp/yandex6gsAJg.tmp 2025-04-10T11:24:40.677540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000d1f/r3tmp/yandex6gsAJg.tmp 2025-04-10T11:24:40.678840Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000d1f/r3tmp/yandex6gsAJg.tmp 2025-04-10T11:24:40.679012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:40.937284Z INFO: TTestServer started on Port 3186 GrpcPort 2787 TClient is connected to server localhost:3186 PQClient connected to localhost:2787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:41.228274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.243013Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.259431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.270821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.458221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.470203Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T11:24:43.059812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643952001399017:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.061288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643952001399030:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.061639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.078981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:24:43.097335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643952001399032:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:43.184607Z node 1 :TX_PROXY ERROR: Actor# [1:7491643952001399098:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:43.601302Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643952001399106:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:43.606910Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTEwOTQ0Y2MtNGNjNjEwOS1iNmFkYmQ3MS1iMmMxYjZiMQ==, ActorId: [1:7491643952001399015:2335], ActorState: ExecuteState, TraceId: 01jrfq3w5709yqd59wewaj9vdp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:43.610757Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:43.679089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:43.712051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:43.851383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:24:44.218451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfq3x0pbxhcnd19fp9esm47, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdkMDZjNWItYWNiNGQwNDgtMWI3MmMzN2QtOTBmYmVmYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491643956296366711:2640] 2025-04-10T11:24:45.071300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643939116496324:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.071385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:24:50.209622Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-10T11:24:50.211343Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-10T11:24:50.242801Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][] pipe [1:7491643982066170855:2853] connected; active server actors: 1 2025-04-10T11:24:50.243114Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-10T11:24:50.243848Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2025-04-10T11:24:50.243948Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] BALANCER INIT DONE for topic1: (0, 72075186224037894) 2025-04-10T11:24:50.244088Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7491643982066170849:2847] connected; active server actors: 1 2025-04-10T11:24:50.245169Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-10T11:24:50.245559Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] BALANCER INIT DONE for topic2: (0, 72075186224037892) 2025-04-10T11:24:50.245703Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-10T11:24:50.246212Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-10T11:24:50.246337Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:50.246612Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:50.246622Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-10T11:24:50.246762Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-10T11:24:50.246912Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-10T11:24:50.247398Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:50.247421Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:50.247631Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-10T11:24:50.247633Z node 1 :PERSQUEUE DEBUG: [PQ: 72 ... 
281474976715677, Partition 0 2025-04-10T11:25:11.427565Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Handle TEvProposePartitionConfigResult 2025-04-10T11:25:11.427585Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Partition responses 1/1 2025-04-10T11:25:11.427606Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state CALCULATING 2025-04-10T11:25:11.427624Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATING 2025-04-10T11:25:11.427639Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATING FrontTxId 281474976715677 2025-04-10T11:25:11.427654Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-04-10T11:25:11.427677Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState CALCULATED 2025-04-10T11:25:11.427697Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATING to CALCULATED 2025-04-10T11:25:11.428056Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: CALCULATED MinStep: 1744284311441 MaxStep: 18446744073709551615 Step: 1744284311462 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491644029269937573 RawX2: 12884904034 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:25:11.428142Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:25:11.430898Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:25:11.430923Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state CALCULATED 2025-04-10T11:25:11.430939Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATED 2025-04-10T11:25:11.430967Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATED FrontTxId 281474976715677 2025-04-10T11:25:11.430984Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS 2025-04-10T11:25:11.431002Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATED to WAIT_RS 2025-04-10T11:25:11.431046Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvTxProcessing::TEvReadSet to 0 receivers. 
Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-10T11:25:11.431068Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveParticipantsDecision 1 2025-04-10T11:25:11.431126Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTING 2025-04-10T11:25:11.431158Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from WAIT_RS to EXECUTING 2025-04-10T11:25:11.431171Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 0, Expected 1 2025-04-10T11:25:11.431211Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1744284311462, TxId 281474976715677 2025-04-10T11:25:11.431435Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:11.431456Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:11.431475Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:11.431491Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:11.431504Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:11.431514Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] _config_0 2025-04-10T11:25:11.431532Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:11.431551Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:25:11.431571Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-10T11:25:11.432469Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:25:11.432545Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvTxCommitDone Step 1744284311462, TxId 281474976715677, Partition 0 2025-04-10T11:25:11.432561Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTING 2025-04-10T11:25:11.432571Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTING 2025-04-10T11:25:11.432584Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTING FrontTxId 281474976715677 2025-04-10T11:25:11.432592Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-04-10T11:25:11.432606Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId: 281474976715677 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-10T11:25:11.432629Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] complete TxId 281474976715677 2025-04-10T11:25:11.432893Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-10T11:25:11.432952Z node 3 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:11.433027Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete partitions for TxId 281474976715677 2025-04-10T11:25:11.433040Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTED 2025-04-10T11:25:11.433055Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTING to EXECUTED 2025-04-10T11:25:11.433333Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: EXECUTED MinStep: 1744284311441 MaxStep: 18446744073709551615 Step: 1744284311462 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7491644029269937573 RawX2: 12884904034 } Partitions { Partition { PartitionId: 0 } } 2025-04-10T11:25:11.433496Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:25:11.435982Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:25:11.436002Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTED 2025-04-10T11:25:11.436018Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTED 2025-04-10T11:25:11.436033Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTED FrontTxId 281474976715677 2025-04-10T11:25:11.436051Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TPersQueue::SendEvReadSetAckToSenders 2025-04-10T11:25:11.436063Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS_ACKS 2025-04-10T11:25:11.436079Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTED to WAIT_RS_ACKS 2025-04-10T11:25:11.436100Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-04-10T11:25:11.436109Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-10T11:25:11.436121Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-04-10T11:25:11.436136Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] add an TxId 281474976715677 to the list for deletion 2025-04-10T11:25:11.436155Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState DELETING 2025-04-10T11:25:11.436176Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete key for TxId 281474976715677 2025-04-10T11:25:11.436222Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-10T11:25:11.437391Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-10T11:25:11.437415Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state DELETING 2025-04-10T11:25:11.437431Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State DELETING 2025-04-10T11:25:11.437448Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete TxId 281474976715677 >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TPartitionTests::TestNonConflictingActsBatchOk >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta >> 
TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys >> KqpYql::InsertCV+useSink >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpSystemView::QueryStatsSimple [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::InsertCVList-useSink >> KqpYql::TableConcat [GOOD] >> KqpYql::TableNameConflict >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> KqpYql::EvaluateExpr2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-04-10T11:24:55.793852Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:55.793976Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:55.811512Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:24:55.811789Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:55.812216Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-04-10T11:24:55.813153Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:24:55.813359Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-04-10T11:24:55.813534Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request Got KV request 2025-04-10T11:24:55.813778Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:24:55.814224Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-04-10T11:24:55.814322Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:24:55.814364Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:24:55.814424Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:24:55.000000Z 2025-04-10T11:24:55.814473Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-04-10T11:24:55.814521Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-04-10T11:24:55.814599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-04-10T11:24:55.814669Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 2025-04-10T11:24:57.084587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:24:58.345883Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-04-10T11:24:58.346229Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-04-10T11:24:58.346323Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-10T11:24:58.346379Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-10T11:24:58.346444Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:58.346483Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:24:59.588855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-04-10T11:24:59.589083Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-04-10T11:24:59.589154Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-04-10T11:24:59.589209Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:24:59.589260Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:24:59.589487Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:59.589547Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:59.589602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:59.589645Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:24:59.589745Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 
2025-04-10T11:24:59.589771Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:59.589789Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:24:59.589830Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:59.589906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 0 2025-04-10T11:24:59.603522Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-04-10T11:25:00.045921Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:00.045990Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:00.059836Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-04-10T11:25:00.060087Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:00.060392Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:178:2193] 2025-04-10T11:25:00.061369Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-04-10T11:25:00.061578Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request Got KV request 2025-04-10T11:25:00.061793Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-04-10T11:25:00.062042Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-04-10T11:25:00.062535Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-04-10T11:25:00.062642Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:00.062683Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:00.062733Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:25:00.000000Z 2025-04-10T11:25:00.062772Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-04-10T11:25:00.062821Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:178:2193] 2025-04-10T11:25:00.062877Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-04-10T11:25:00.062927Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:00.375340Z node 2 :PERSQUEUE INFO: new Cookie src1|a8d449b7-50acc64d-fe8ac714-c57dfd43_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-04-10T11:25:00.375512Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-04-10T11:25:01.350704Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-04-10T11:25:02.688978Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-04-10T11:25:02.689234Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:02.689312Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:02.689494Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 1 partNo 0 2025-04-10T11:25:02.690728Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-04-10T11:25:02.690845Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:25:02.690935Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:02.691000Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-04-10T11:25:02.708491Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:25:02.708662Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 3 partNo 0 2025-04- ... 
-account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-04-10T11:25:10.500954Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 10 partNo 0 result is x0000000000_00000000000000000160_00000_0000000001_00000 size 71 2025-04-10T11:25:10.501065Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000160_00000_0000000001_00000 new key d0000000000_00000000000000000160_00000_0000000001_00000 size 71 WTime 41173 2025-04-10T11:25:10.503147Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 1 NewHead: Offset 230 PartNo 0 PackedSize 84 count 1 nextOffset 231 batches 1 2025-04-10T11:25:10.503806Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 230,1 HeadOffset 160 endOffset 161 curOffset 231 d0000000000_00000000000000000230_00000_0000000001_00000| size 71 WTime 41173 2025-04-10T11:25:10.504025Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:10.504073Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:10.504122Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:25:10.504163Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:10.504209Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000160_00000_0000000001_00000 2025-04-10T11:25:10.504247Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:25:10.504270Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000230_00000_0000000001_00000| 2025-04-10T11:25:10.504293Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:10.504316Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:10.504338Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:10.504391Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:10.504442Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 21 2025-04-10T11:25:10.536127Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 1 2025-04-10T11:25:10.536235Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:25:10.536303Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 230 is stored on disk 2025-04-10T11:25:10.536565Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:10.536604Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:10.536637Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000160_00000_0000000001_00000|, d0000000000_00000000000000000160_00000_0000000001_00000|] 2025-04-10T11:25:10.536674Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:10.536704Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:10.536736Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:10.536765Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 24 and act no: 25 Create distr tx with id = 26 and act no: 27 Create immediate tx with id = 28 and act no: 29 2025-04-10T11:25:11.594295Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 24 2025-04-10T11:25:11.594413Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 26 2025-04-10T11:25:13.062699Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:13.062818Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-04-10T11:25:13.063024Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:13.063110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-04-10T11:25:13.063259Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2025-04-10T11:25:13.063312Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 26 2025-04-10T11:25:13.063369Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:13.063426Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-04-10T11:25:13.063550Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:25:13.063600Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 
2025-04-10T11:25:13.063652Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-04-10T11:25:13.063699Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:25:13.063879Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:13.063920Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:13.063968Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:13.064014Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:25:13.064046Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:13.064068Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:13.064089Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:13.064119Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:13.064160Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Wait for no tx committed Send disk status response with cookie: 0 Wait tx committed for tx 26 2025-04-10T11:25:13.351185Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 28 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 2025-04-10T11:25:13.782944Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:13.783022Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:13.798358Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:177:2192] 2025-04-10T11:25:13.800166Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:25:13.000000Z 2025-04-10T11:25:13.800231Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:177:2192] 2025-04-10T11:25:14.700490Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:14.700557Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:14.719954Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:179:2194] 2025-04-10T11:25:14.721634Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:25:14.721696Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:179:2194] 2025-04-10T11:25:15.526576Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:15.526654Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:15.544019Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:177:2192] >>>> ADD BLOB 0 writeTimestamp=2025-04-10T11:25:15.536092Z >>>> ADD BLOB 1 writeTimestamp=2025-04-10T11:25:15.536118Z >>>> ADD BLOB 2 writeTimestamp=2025-04-10T11:25:15.536135Z >>>> ADD BLOB 3 writeTimestamp=2025-04-10T11:25:15.536175Z >>>> ADD BLOB 4 writeTimestamp=2025-04-10T11:25:15.536188Z >>>> ADD BLOB 5 writeTimestamp=2025-04-10T11:25:15.536204Z >>>> ADD BLOB 6 writeTimestamp=2025-04-10T11:25:15.536218Z >>>> ADD BLOB 7 writeTimestamp=2025-04-10T11:25:15.536232Z >>>> ADD BLOB 8 writeTimestamp=2025-04-10T11:25:15.536244Z >>>> ADD BLOB 9 writeTimestamp=2025-04-10T11:25:15.536260Z 2025-04-10T11:25:15.547376Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-10T11:25:15.000000Z 2025-04-10T11:25:15.547450Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:177:2192] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpYql::EvaluateExpr3 Test command err: Trying to start YDB, gRPC: 4244, MsgBus: 7281 2025-04-10T11:25:11.035614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644073550966879:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.036456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f8d/r3tmp/tmph9MFrK/pdisk_1.dat 2025-04-10T11:25:11.512127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.528266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.528344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.531405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4244, node 1 2025-04-10T11:25:11.691698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.691718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.691724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.691809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7281 TClient is connected to server localhost:7281 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.354269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.393603Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:14.098202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086435869279:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.098327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.693329Z node 1 :TX_PROXY ERROR: Actor# [1:7491644086435869300:2309] txid# 281474976710658, issues: { message: "Uuid as primary key is forbidden by configuration: key" severity: 1 } 2025-04-10T11:25:14.727657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086435869308:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.727739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.742885Z node 1 :TX_PROXY ERROR: Actor# [1:7491644086435869315:2317] txid# 281474976710659, issues: { message: "Uuid as primary key is forbidden by configuration: key" severity: 1 } 2025-04-10T11:25:14.754583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086435869323:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.754683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.772528Z node 1 :TX_PROXY ERROR: Actor# [1:7491644086435869330:2325] txid# 281474976710660, issues: { message: "Uuid as primary key is forbidden by configuration: val" severity: 1 } 2025-04-10T11:25:14.788648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086435869338:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.788714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.803319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.935543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086435869426:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.935604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] Test command err: 2025-04-10T11:22:42.494196Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:42.584787Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:22:42.589345Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:22:42.589769Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:42.614599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:42.614909Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:42.623536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:42.623774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:42.624013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:42.624146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:42.624270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:42.624407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:42.624533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:42.624656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:42.624780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
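The "Uuid as primary key is forbidden by configuration" rejections above (txid# 281474976710658 through 281474976710660) are the behaviour under test in KqpYql::UuidPrimaryKeyDisabled: with Uuid key support switched off in the configuration, the scheme shard refuses each proposal before any table is created. A minimal YQL sketch of the kind of DDL that draws this error; the table path and column names are illustrative assumptions, not lines from the test source:

    CREATE TABLE `/Root/UuidTable` (
        key Uuid NOT NULL,  -- a Uuid key column is rejected while the feature is disabled
        val Int32,
        PRIMARY KEY (key)
    );

The "Resource pool default not found" warnings interleaved with the errors are unrelated startup noise: the workload service polls for the default pool before it has been created, and the "path exist, request accepts it" messages in later test blocks show that creation settling.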
2025-04-10T11:22:42.624896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.625016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:42.625137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:42.652140Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:22:42.657091Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:42.657544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:42.657608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:42.657814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.658031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:42.658125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:42.658198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:42.658309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:42.658387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:42.658443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:42.658506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:42.658714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:42.658794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:42.658841Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:42.658888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:42.659015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:42.659079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:42.659129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:42.659162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:42.659236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:42.659295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:42.659330Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:42.659382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:42.659433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:42.659464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:42.659937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-10T11:22:42.660044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-10T11:22:42.660150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-04-10T11:22:42.660272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=63; 2025-04-10T11:22:42.660460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:42.660519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:42.660559Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:42.660852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:42.660910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.660946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:42.661113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:42.661163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:42.661202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:42.661440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:22:42.661495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:22:42.661551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... 
umn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.243723Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.243763Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:25:13.243802Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:25:13.243915Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:25:13.243996Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244031Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:25:13.244100Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-10T11:25:13.244152Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-10T11:25:13.244253Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-10T11:25:13.244333Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244430Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244593Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244685Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:25:13.244771Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244860Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.244898Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:858:2850] finished for tablet 9437184 2025-04-10T11:25:13.245544Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:851:2843];stats={"p":[{"events":["f_bootstrap"],"t":0.058},{"events":["f_ProduceResults"],"t":0.488},{"events":["l_bootstrap"],"t":0.772},{"events":["f_processing","f_task_result"],"t":0.8},{"events":["l_task_result"],"t":8.293},{"events":["f_ack"],"t":8.346},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":9.347}],"full":{"a":1744284303897366,"name":"_full_task","f":1744284303897366,"d_finished":0,"c":0,"l":1744284313244964,"d":9347598},"events":[{"name":"bootstrap","f":1744284303956167,"d_finished":713836,"c":1,"l":1744284304670003,"d":713836},{"a":1744284313244672,"name":"ack","f":1744284312244095,"d_finished":930377,"c":904,"l":1744284313244620,"d":930669},{"a":1744284313244663,"name":"processing","f":1744284304698094,"d_finished":4037268,"c":4520,"l":1744284313244622,"d":4037569},{"name":"ProduceResults","f":1744284304386327,"d_finished":1674334,"c":5426,"l":1744284313244877,"d":1674334},{"a":1744284313244881,"name":"Finish","f":1744284313244881,"d_finished":0,"c":0,"l":1744284313244964,"d":83},{"name":"task_result","f":1744284304698135,"d_finished":3016954,"c":3616,"l":1744284312190458,"d":3016954}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.245672Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:851:2843];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:25:13.246428Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:851:2843];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.058},{"events":["f_ProduceResults"],"t":0.488},{"events":["l_bootstrap"],"t":0.772},{"events":["f_processing","f_task_result"],"t":0.8},{"events":["l_task_result"],"t":8.293},{"events":["f_ack"],"t":8.346},{"events":["l_ProduceResults","f_Finish"],"t":9.347},{"events":["l_ack","l_processing","l_Finish"],"t":9.348}],"full":{"a":1744284303897366,"name":"_full_task","f":1744284303897366,"d_finished":0,"c":0,"l":1744284313245752,"d":9348386},"events":[{"name":"bootstrap","f":1744284303956167,"d_finished":713836,"c":1,"l":1744284304670003,"d":713836},{"a":1744284313244672,"name":"ack","f":1744284312244095,"d_finished":930377,"c":904,"l":1744284313244620,"d":931457},{"a":1744284313244663,"name":"processing","f":1744284304698094,"d_finished":4037268,"c":4520,"l":1744284313244622,"d":4038357},{"name":"ProduceResults","f":1744284304386327,"d_finished":1674334,"c":5426,"l":1744284313244877,"d":1674334},{"a":1744284313244881,"name":"Finish","f":1744284313244881,"d_finished":0,"c":0,"l":1744284313245752,"d":871},{"name":"task_result","f":1744284304698135,"d_finished":3016954,"c":3616,"l":1744284312190458,"d":3016954}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:13.246538Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:25:03.832617Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-10T11:25:13.246607Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:25:13.246921Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:858:2850];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] 
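TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn above exercises TTL eviction on a column shard whose scans fetch the timestamp column of a four-column primary key (pk=resource_id,resource_type,timestamp,uid in the scan context). A hedged YQL sketch of a column table with TTL keyed on such a timestamp column; the store path, types, and one-hour interval are assumptions for illustration, not values taken from the test:

    CREATE TABLE `/Root/olapStore/olapTable` (
        timestamp Timestamp NOT NULL,
        resource_type Utf8,
        resource_id Utf8,
        uid Utf8,
        PRIMARY KEY (timestamp, resource_type, resource_id, uid)
    )
    PARTITION BY HASH(timestamp)
    WITH (STORE = COLUMN, TTL = Interval("PT1H") ON timestamp);

The trailing AbortEmergency warnings ("TTxWriteIndex destructor with no CompleteReady flag") appear to be shutdown-path noise from stopping the tablet with a write still in flight; the test itself finishes [GOOD].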
>> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 29081, MsgBus: 62041 2025-04-10T11:24:16.846628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643835646894483:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.846678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:16.924415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643835251057758:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.924457Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:16.941097Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643838046744396:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:16.941149Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9f/r3tmp/tmpnDyHrQ/pdisk_1.dat 2025-04-10T11:24:17.818884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.818980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.899928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.899998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.900142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:17.900177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:17.929707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:17.933895Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:24:17.933927Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:17.934129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.934837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.935174Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:17.937624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:17.961851Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:18.010993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29081, node 1 2025-04-10T11:24:18.048154Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:18.048179Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:18.279143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:18.279167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:18.279174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:18.279281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62041 TClient is connected to server localhost:62041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:19.388768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.457612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:19.821381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.076800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:20.397090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
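KqpSystemView::QueryStatsSimple brings up a three-node cluster, runs sample queries, and then reads the statistics back through the .sys system views (the .sys child is visible in the Ls response above). A sketch of the kind of sys-view query such a test issues; the view and column names follow YDB's documented query-stats views and are assumptions here, not lines from the test source:

    SELECT IntervalEnd, Rank, QueryText, Duration, ReadBytes
    FROM `/Root/.sys/top_queries_by_duration_one_minute`
    ORDER BY Rank
    LIMIT 5;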
2025-04-10T11:24:21.846794Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643835646894483:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.846925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.924737Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643835251057758:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.924790Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:21.941514Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491643838046744396:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:21.941585Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:22.273810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643861416700312:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.273941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:22.614878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.675278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.750240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.805267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.860274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:24:22.951686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:24:23.108875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643865711668282:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.108947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.109150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643865711668287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:23.112951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:24:23.142598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643865711668289:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:24:23.223592Z node 1 :TX_PROXY ERROR: Actor# [1:7491643865711668374:4112] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:24.856689Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284264846, txId: 281474976710673] shutting down 2025-04-10T11:24:25.067034Z node 2 :BS_PROXY_PUT ERROR: [c509fd437c701c3d] Result# TEvPutResult {Id# [72075186224037912:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037912:1:20:0:0:41:0] R ... s: 20203 2025-04-10T11:25:05.138737Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7491644048030004498:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:05.138836Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:25:05.143857Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7491644047743799907:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:05.143956Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:25:05.146754Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7491644046944128398:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:05.146902Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9f/r3tmp/tmpXosxAX/pdisk_1.dat 2025-04-10T11:25:05.327573Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:05.372201Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:05.372335Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:05.374028Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:05.374133Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:05.375580Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:05.375672Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:05.378481Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:05.379050Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-04-10T11:25:05.380055Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-10T11:25:05.380208Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-04-10T11:25:05.380872Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17048, node 16 2025-04-10T11:25:05.436164Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:05.436201Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:05.436212Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:05.436394Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20203 TClient is connected to server localhost:20203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:06.208906Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:06.247516Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:06.384520Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:06.569550Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:06.679269Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:09.868513Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7491644065209875699:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:09.868611Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:09.922684Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:09.975133Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:10.059332Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:10.121883Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:10.139099Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7491644048030004498:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:10.139159Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:10.143797Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7491644047743799907:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:10.143932Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:10.146831Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7491644046944128398:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:10.146949Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:10.176420Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:10.240774Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:10.467933Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7491644069504843695:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:10.468056Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7491644069504843700:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:10.468112Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:10.473513Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:10.495800Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7491644069504843702:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:10.582546Z node 16 :TX_PROXY ERROR: Actor# [16:7491644069504843779:4161] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:12.779708Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284312766, txId: 281474976710673] shutting down 2025-04-10T11:25:13.215588Z node 18 :BS_PROXY_PUT ERROR: [d2d17b4d96aa5189] Result# TEvPutResult {Id# [72075186224037888:1:23:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:23:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:25:13.225014Z node 17 :BS_PROXY_PUT ERROR: [2d7794ca5def0e82] Result# TEvPutResult {Id# [72075186224037895:1:23:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:23:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 7795, MsgBus: 19041 2025-04-10T11:25:12.382620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644076620854429:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:12.383717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ef2/r3tmp/tmpo5892h/pdisk_1.dat 2025-04-10T11:25:12.792341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:12.793025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:12.793347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:12.797401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7795, node 1 2025-04-10T11:25:12.949883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:12.949913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:12.949926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:12.950028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19041 TClient is connected to server localhost:19041 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:13.430307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:13.454948Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:15.428245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644089505756877:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.428370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.776385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.899044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644089505756980:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.899143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.899244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644089505756985:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.903870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:15.915363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644089505756987:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:25:15.993211Z node 1 :TX_PROXY ERROR: Actor# [1:7491644089505757038:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> PQCountersSimple::Partition [GOOD] |92.8%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 28705, MsgBus: 9472 2025-04-10T11:25:11.025406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644074648184074:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.026521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f12/r3tmp/tmpEKj7lM/pdisk_1.dat 2025-04-10T11:25:11.543967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.557791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.557873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.561516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28705, node 1 2025-04-10T11:25:11.689989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.690044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.690055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.690219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9472 TClient is connected to server localhost:9472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.489685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:12.542317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:14.409587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087533086631:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.409748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.700845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.830179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087533086734:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.830300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.830582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087533086739:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.835060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.847107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644087533086741:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:25:14.916960Z node 1 :TX_PROXY ERROR: Actor# [1:7491644087533086792:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.025232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644074648184074:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.025320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpScripting::UnsafeTimestampCast ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:131:2057] recipient: [1:124:2158] 2025-04-10T11:24:45.991686Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.991785Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927938 is [1:176:2197] sender: [1:177:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:202:2057] recipient: [1:14:2061] 2025-04-10T11:24:46.011714Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:46.035609Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:46.036846Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:208:2221] 2025-04-10T11:24:46.039656Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:208:2221] 2025-04-10T11:24:46.041475Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:209:2222] 
2025-04-10T11:24:46.043461Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:209:2222] 2025-04-10T11:24:46.051017Z node 1 :PERSQUEUE INFO: new Cookie default|d0870c19-b68ddf2a-1bf6c5ce-1c999127_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:46.056538Z node 1 :PERSQUEUE INFO: new Cookie default|3c653414-fc8dabe0-3f4ad24e-29f313b2_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:46.062130Z node 1 :PERSQUEUE INFO: new Cookie default|11431a64-bb67ef38-113f58a-6ed2ec40_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { 
"kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalMessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalSizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalTimeLagMsByLastRead" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/UserPartitionsAnswered" }, "value": 2 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastRead" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastReadOld" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": 
"rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": ... itions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-04-10T11:24:56.476341Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:56.481644Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/0/total ANS GROUP user/total/total ANS GROUP user/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP total CHECKING GROUP user/0/rt3.dc1--asdfgs--topic 2025-04-10T11:24:58.505066Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:58.513179Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 6 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 6 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user2/1/total ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/1/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/1/rt3.dc1--asdfgs--topic 2025-04-10T11:25:00.500685Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:00.505461Z 
node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 7 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 7 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user2/0/total ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/0/rt3.dc1--asdfgs--topic 2025-04-10T11:25:02.588636Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:02.595973Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 8 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 8 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-04-10T11:25:05.066590Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:05.066678Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:05.086226Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:05.087213Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 3600 ImportantClientId: "client" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: 
"somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } Consumers { Name: "client" Generation: 9 Important: true } 2025-04-10T11:25:05.087981Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:208:2221] 2025-04-10T11:25:05.088916Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [4:208:2221] 2025-04-10T11:25:05.090104Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:209:2222] 2025-04-10T11:25:05.090824Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [4:209:2222] 2025-04-10T11:25:05.108490Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:253:2252] connected; active server actors: 1 2025-04-10T11:25:05.112248Z node 4 :PERSQUEUE INFO: new Cookie default|cb0b06f8-5f8d3067-2e6135ef-2b379259_0 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:05.118097Z node 4 :PERSQUEUE INFO: new Cookie default|f5a10481-df80b5a3-812a557c-6c69ed10_1 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:05.126694Z node 4 :PERSQUEUE INFO: new Cookie default|9b419d5f-dbb77618-741eac76-a19d17b7_2 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:05.132321Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:299:2292] connected; active server actors: 1 2025-04-10T11:25:10.940901Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:400:2380] connected; active server actors: 1 2025-04-10T11:25:17.866965Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:17.867064Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:17.888721Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:17.889702Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-04-10T11:25:17.890461Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:206:2219] 2025-04-10T11:25:17.893203Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:206:2219] 
2025-04-10T11:25:17.895510Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:207:2220] 2025-04-10T11:25:17.897511Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:207:2220] 2025-04-10T11:25:17.905318Z node 5 :PERSQUEUE INFO: new Cookie default|206498ea-3fe5cbe8-ee183730-5f45c91b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:17.912607Z node 5 :PERSQUEUE INFO: new Cookie default|c278627e-787fd347-e5dbffd1-4c0fbe1b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:17.920874Z node 5 :PERSQUEUE INFO: new Cookie default|619803ec-728c8da3-8694ff46-77990dd3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:17.929090Z node 5 :PERSQUEUE INFO: new Cookie default|df604916-33f3df6b-ca63f022-ec2ddd2e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:17.930907Z node 5 :PERSQUEUE INFO: new Cookie default|800f6281-39466d5f-9ff7290f-d4917936_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> KqpYql::UpdatePk >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> KqpScripting::SelectNullType >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> KqpPragma::ResetPerQuery >> TPartitionTests::TestTxBatchInFederation >> KqpScripting::StreamScanQuery >> KqpScripting::ScanQuery >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::InsertCV-useSink >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpYql::EvaluateFor [GOOD] >> KqpYql::TableNameConflict [GOOD] >> KqpYql::EvaluateExpr3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 15508, MsgBus: 64166 2025-04-10T11:25:11.101046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644072708552067:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.101162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fdd/r3tmp/tmpHuGFO0/pdisk_1.dat 2025-04-10T11:25:11.676000Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.687185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.687268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.691307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15508, node 1 2025-04-10T11:25:11.751534Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T11:25:11.751559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.751571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.751699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64166 TClient is connected to server localhost:64166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.384369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.418996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.434523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.589658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.763359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.836903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.352263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085593455564:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.352355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.689686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.730503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.759599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.790131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.831403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.869736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.908675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085593456073:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.908773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.908830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085593456078:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.912802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.921989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644085593456080:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:15.014465Z node 1 :TX_PROXY ERROR: Actor# [1:7491644089888423430:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.100320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644072708552067:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.100404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.182960Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284316215, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 1906, MsgBus: 11983 2025-04-10T11:25:16.982383Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644094127986310:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.984939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fdd/r3tmp/tmpw4VzQc/pdisk_1.dat 2025-04-10T11:25:17.121554Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:17.142065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 1906, node 2 2025-04-10T11:25:17.142274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:17.145359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:17.187093Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:17.187117Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:17.187129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:17.187260Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11983 TClient is connected to server localhost:11983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:17.671432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.078392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111307856065:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.078464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.103103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.178304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111307856167:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.178409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.200556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111307856178:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.200633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.201014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111307856183:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.204510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.216632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644111307856185:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:25:20.317170Z node 2 :TX_PROXY ERROR: Actor# [2:7491644111307856237:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::AnsiIn [GOOD] >> KqpYql::InsertCVList-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 17032, MsgBus: 15341 2025-04-10T11:25:11.087316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644070680712219:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.087763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f7b/r3tmp/tmpq4ey84/pdisk_1.dat 2025-04-10T11:25:11.514461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.539704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.539801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.546316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17032, node 1 2025-04-10T11:25:11.699182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.699201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.699206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.699280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15341 TClient is connected to server localhost:15341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.482320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:12.510416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.526022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.675185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.845534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.929343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.420386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644083565615733:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.420513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.693409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.725869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.758702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.794491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.829978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.872226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.922723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644083565616245:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.922810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.922881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644083565616250:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.925918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.933925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644083565616252:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.009769Z node 1 :TX_PROXY ERROR: Actor# [1:7491644087860583601:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.061747Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644092155551229:2504], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-10T11:25:16.061927Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q4NmE1ZTctNGE2ZjRmMzQtYWNhYmM4MWYtNDhmNzUyNTU=, ActorId: [1:7491644092155551227:2503], ActorState: ExecuteState, TraceId: 01jrfq4wcfcpa6gv1f2vwcbcx9, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-10T11:25:16.086340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644070680712219:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.086419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.153469Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644092155551277:2518], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2025-04-10T11:25:16.153662Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTliZTZkMDgtMzBlMGU2MGYtNWJkYTVhMDItNDFkYzJjNTg=, ActorId: [1:7491644092155551275:2517], ActorState: ExecuteState, TraceId: 01jrfq4wfj3d36kmy1a3vcwtep, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
Trying to start YDB, gRPC: 15462, MsgBus: 28931
2025-04-10T11:25:16.869900Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644094039494763:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.869988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f7b/r3tmp/tmpEhucS6/pdisk_1.dat
2025-04-10T11:25:16.951912Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 15462, node 2
2025-04-10T11:25:17.000155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:17.000235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:17.002042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:17.030675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:17.030700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:17.030706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:17.030809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28931
TClient is connected to server localhost:28931
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:17.387861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.398800Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:25:17.409990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.452951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.622817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.697651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:19.657957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644106924398417:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.658033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.699447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.738551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.773494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.812463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.849048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.900126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.952043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644106924398929:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.952125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.952312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644106924398934:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.956184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:19.968536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644106924398936:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:25:20.022432Z node 2 :TX_PROXY ERROR: Actor# [2:7491644111219366285:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:21.125886Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7491644115514333979:2056], tablet: [2:7491644098334462858:2324], scanId: 3, table: /Root/EightShard
2025-04-10T11:25:21.125945Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7491644115514333981:2054], tablet: [2:7491644098334462861:2326], scanId: 2, table: /Root/EightShard
2025-04-10T11:25:21.125989Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7491644115514333983:2055], tablet: [2:7491644098334462869:2328], scanId: 1, table: /Root/EightShard
2025-04-10T11:25:21.131857Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284321157, txId: 281474976715671] shutting down
>> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD]
Test command err:
Trying to start YDB, gRPC: 9917, MsgBus: 19267
2025-04-10T11:25:10.989381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644067657316187:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:10.989494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fae/r3tmp/tmpVU6yby/pdisk_1.dat
2025-04-10T11:25:11.555919Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:11.565076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:11.565160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:11.568554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9917, node 1
2025-04-10T11:25:11.690185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:11.690234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:11.690249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:11.690411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19267
TClient is connected to server localhost:19267
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:12.589709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.612995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.782038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:25:12.955251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:25:13.044203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:14.313729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084837187150:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.313871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.689793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.722277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.788953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.816762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.853177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.884103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.930862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084837187661:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.930952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084837187666:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.930953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.934028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:14.943131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644084837187668:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:15.007787Z node 1 :TX_PROXY ERROR: Actor# [1:7491644089132155017:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:15.989317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644067657316187:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:15.989387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 19739, MsgBus: 26944
2025-04-10T11:25:16.772100Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644095570732752:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.772177Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fae/r3tmp/tmpr3nqMf/pdisk_1.dat
2025-04-10T11:25:16.855134Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19739, node 2
2025-04-10T11:25:16.899763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:16.899842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:16.903420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:16.928897Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:16.928918Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:16.928927Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:16.929025Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26944
TClient is connected to server localhost:26944
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:17.367949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.385090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.476700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.615842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.685552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:19.723034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644108455636406:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.723124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.760553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.796253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.827316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.861214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.894179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.930458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:19.984204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644108455636918:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.984291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.984519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644108455636923:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.988578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:19.998143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644108455636925:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:25:20.073465Z node 2 :TX_PROXY ERROR: Actor# [2:7491644112750604275:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD]
>> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD]
>> YdbOlapStore::BulkUpsert [GOOD]
>> YdbOlapStore::DuplicateRows
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD]
Test command err:
Trying to start YDB, gRPC: 11431, MsgBus: 63019
2025-04-10T11:25:10.996663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644070533740519:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:10.997635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f9c/r3tmp/tmp9ose4o/pdisk_1.dat
2025-04-10T11:25:11.540373Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:11.544326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:11.544427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:11.548645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11431, node 1
2025-04-10T11:25:11.690700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:11.690723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:11.690731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:11.690842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:63019
TClient is connected to server localhost:63019
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:12.513459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.540777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:25:12.564477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.721770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:25:12.902471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:25:13.044203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:14.459939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087713611475:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.460026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.724539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.749733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.776806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.805742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.832852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.904189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.981076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087713611994:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.981139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087713611999:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.981154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.984521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:14.993294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644087713612001:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:15.094020Z node 1 :TX_PROXY ERROR: Actor# [1:7491644092008579352:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:15.996786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644070533740519:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:15.996873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters.
Trying to start YDB, gRPC: 12701, MsgBus: 12111
2025-04-10T11:25:16.819681Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644094052877654:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.819740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f9c/r3tmp/tmpfI6alG/pdisk_1.dat
2025-04-10T11:25:16.954439Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 12701, node 2
2025-04-10T11:25:16.976039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:16.976116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:16.979296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:17.019688Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:17.019708Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:17.019714Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:17.019822Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12111
TClient is connected to server localhost:12111
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-10T11:25:17.446539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:25:17.508226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.582681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.753321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.843806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:19.949682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644106937781302:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:19.949760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.000039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.038641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.072810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.146346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.177718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.246982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.326379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111232749118:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.326471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.326641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111232749123:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.330101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:20.341512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644111232749125:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:25:20.440997Z node 2 :TX_PROXY ERROR: Actor# [2:7491644111232749181:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables.
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD]
Test command err:
Trying to start YDB, gRPC: 23860, MsgBus: 17501
2025-04-10T11:25:11.013877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644069523474667:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:11.017571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f15/r3tmp/tmpWeDsWj/pdisk_1.dat
2025-04-10T11:25:11.486489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:11.486578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:11.491627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:11.525606Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23860, node 1
2025-04-10T11:25:11.690550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:11.690570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:11.690581Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:11.690705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17501
TClient is connected to server localhost:17501
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:12.358999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.395986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:25:12.415030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.590991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.793410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.873430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:14.391846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086703345621:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.392013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.689143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.718096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.742128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.766807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.802018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.852130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.905729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086703346132:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.905829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.906073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086703346138:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.909690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:14.922118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644086703346140:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:14.982529Z node 1 :TX_PROXY ERROR: Actor# [1:7491644086703346193:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:16.014109Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644069523474667:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.014220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:25:16.065745Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=MWNjNTdjODctNmM4ZGM0MjYtZjA3NDNiMTktYjhkZTk1Yzk=, ActorId: [1:7491644090998313785:2496], ActorState: ExecuteState, TraceId: 01jrfq4w9j0y3hfzm2n5v0tns6, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer
2025-04-10T11:25:16.065798Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNjNTdjODctNmM4ZGM0MjYtZjA3NDNiMTktYjhkZTk1Yzk=, ActorId: [1:7491644090998313785:2496], ActorState: ExecuteState, TraceId: 01jrfq4w9j0y3hfzm2n5v0tns6, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer
Trying to start YDB, gRPC: 65053, MsgBus: 2910
2025-04-10T11:25:16.856276Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644096369995696:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.856322Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f15/r3tmp/tmpTlOILc/pdisk_1.dat
2025-04-10T11:25:16.963242Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 65053, node 2
2025-04-10T11:25:17.004346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:17.004454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:17.007643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:17.026025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:17.026046Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:17.026053Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:17.026194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2910
TClient is connected to server localhost:2910
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:17.427088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.440842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.520842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:25:17.667215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:17.733112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.195761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113549866636:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.195831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.245086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.279040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.307553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.334871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.364103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.419382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:20.468256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113549867145:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.468335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.468538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113549867150:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:20.471390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:20.480067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644113549867152:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:25:20.549723Z node 2 :TX_PROXY ERROR: Actor# [2:7491644113549867206:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 29168, MsgBus: 5955
2025-04-10T11:25:11.044907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644074728805444:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:11.046210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f62/r3tmp/tmpoOnrVi/pdisk_1.dat
2025-04-10T11:25:11.491661Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:11.524362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:11.524446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:11.529187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29168, node 1
2025-04-10T11:25:11.726653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:11.726673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:11.726682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:11.726752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5955
TClient is connected to server localhost:5955
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:12.417015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.442672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:25:12.460866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.645494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.788310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:12.871698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:14.333223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087613709086:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.333336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:14.748685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.787189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.822103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.856287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.894380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:14.964865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:15.008959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644091908676897:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:15.009076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:15.009659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644091908676902:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:15.015999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:15.030094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644091908676904:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:15.110376Z node 1 :TX_PROXY ERROR: Actor# [1:7491644091908676959:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:16.026493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644074728805444:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.026575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 24046, MsgBus: 17638
2025-04-10T11:25:16.922852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644093425958361:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:16.923237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f62/r3tmp/tmpP20Ztx/pdisk_1.dat
2025-04-10T11:25:17.096930Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:17.101589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:17.101683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:17.103441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24046, node 2
2025-04-10T11:25:17.166746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:17.166778Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:17.166788Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:17.166894Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17638
TClient is connected to server localhost:17638
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:17.595376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.602940Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:17.607084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.683142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.840854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.939844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.983429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644106310862017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:19.983527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.020232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.052537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.127779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.166661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.235718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.279999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.326647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110605829832:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.326747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.326918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110605829837:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.329833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.343208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644110605829840:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:20.400908Z node 2 :TX_PROXY ERROR: Actor# [2:7491644110605829893:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::JoinIndexLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28698, MsgBus: 24230 2025-04-10T11:25:10.994335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644066953501325:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:10.995150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f3e/r3tmp/tmpHHtDt2/pdisk_1.dat 2025-04-10T11:25:11.502641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.505499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.505682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.526285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28698, node 1 2025-04-10T11:25:11.690569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.690600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.690610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.690731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24230 TClient is connected to server localhost:24230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.465911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:12.499725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.662729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.844775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.924360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.391976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084133372219:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.392145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.706674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.740135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.770917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.801663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.850417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.885491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.930461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084133372731:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.930539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084133372736:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.930556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.935354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.942959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644084133372738:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.042918Z node 1 :TX_PROXY ERROR: Actor# [1:7491644088428340088:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:15.984921Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:25:15.992014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644066953501325:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:15.992072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:15.993320Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:25:15.993526Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:25:15.993771Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644088428340409:2501], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7491644088428340390:2501]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7491644088428340409:2501].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:25:15.994475Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644088428340402:2501], SessionActorId: [1:7491644088428340390:2501], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491644088428340390:2501]. isRollback=0 2025-04-10T11:25:15.994748Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcwZGYzYTUtYTc2MWJhZDYtZWMwN2JmYjMtYmY5NjljMQ==, ActorId: [1:7491644088428340390:2501], ActorState: ExecuteState, TraceId: 01jrfq4w6m07epm6a8nj9jt7v5, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491644088428340403:2501] from: [1:7491644088428340402:2501] 2025-04-10T11:25:15.994838Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491644088428340403:2501] TxId: 281474976710671. Ctx: { TraceId: 01jrfq4w6m07epm6a8nj9jt7v5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcwZGYzYTUtYTc2MWJhZDYtZWMwN2JmYjMtYmY5NjljMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:25:15.995096Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcwZGYzYTUtYTc2MWJhZDYtZWMwN2JmYjMtYmY5NjljMQ==, ActorId: [1:7491644088428340390:2501], ActorState: ExecuteState, TraceId: 01jrfq4w6m07epm6a8nj9jt7v5, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 11140, MsgBus: 17104 2025-04-10T11:25:16.798613Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644096099188037:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.798670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f3e/r3tmp/tmpPBLRfD/pdisk_1.dat 2025-04-10T11:25:16.897541Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11140, node 2 2025-04-10T11:25:16.937723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:16.937825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:16.939836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:16.977960Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:16.977985Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:16.977992Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:16.978103Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17104 TClient is connected to server localhost:17104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:17.399402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.416796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.496421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:17.660422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.749196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.034169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113279059010:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.034269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.081660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.114594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.184855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.220453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.264453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.301803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.385297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113279059529:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.385390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.385800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644113279059534:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.389582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.404607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644113279059536:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:20.469349Z node 2 :TX_PROXY ERROR: Actor# [2:7491644113279059591:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:21.799191Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644096099188037:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:21.799252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:21.821541Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644117574027221:2502], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq51p50a3h2ztwwjykgty8. SessionId : ydb://session/3?node_id=2&id=ZGE5YTJiNGQtY2ZhMjMxYjAtYzNkZDBlMC04ZmIzMDlmNg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-10T11:25:21.822019Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644117574027222:2503], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGE5YTJiNGQtY2ZhMjMxYjAtYzNkZDBlMC04ZmIzMDlmNg==. CustomerSuppliedId : . TraceId : 01jrfq51p50a3h2ztwwjykgty8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491644117574027218:2493], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:21.822699Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGE5YTJiNGQtY2ZhMjMxYjAtYzNkZDBlMC04ZmIzMDlmNg==, ActorId: [2:7491644117574027155:2493], ActorState: ExecuteState, TraceId: 01jrfq51p50a3h2ztwwjykgty8, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TPQTest::TestTimeRetention >> KqpYql::BinaryJsonOffsetNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 18689, MsgBus: 4452 2025-04-10T11:25:11.008862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644067265480002:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.009036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f4f/r3tmp/tmppUwEuI/pdisk_1.dat 2025-04-10T11:25:11.569492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.571434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.571554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.575478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18689, node 1 2025-04-10T11:25:11.696214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.696231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.696239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.696367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4452 TClient is connected to server localhost:4452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.363260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.394516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.414990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:12.580978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.741452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.831322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.290851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084445350813:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.291016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.689394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.726389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.756991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.783689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.817803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.849658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.926248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084445351326:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.926304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084445351331:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.926307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.929922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.939228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644084445351333:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.010037Z node 1 :TX_PROXY ERROR: Actor# [1:7491644088740318682:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.002296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644067265480002:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.002375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.429077Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284316432, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 62679, MsgBus: 7729 2025-04-10T11:25:17.194864Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644096680955470:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:17.223930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f4f/r3tmp/tmpuO4PPD/pdisk_1.dat 2025-04-10T11:25:17.294141Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62679, node 2 2025-04-10T11:25:17.333293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:17.333375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:17.336778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:17.363681Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:17.363712Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:17.363724Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:17.363817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7729 TClient is connected to server localhost:7729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:17.787414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.793953Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:17.804864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.888555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.039176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.110337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.397385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644109565859116:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.397479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.448595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.483645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.517419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.556169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.593580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.634690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.722796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644109565859631:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.722908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.723177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644109565859636:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.727284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.757531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644109565859638:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:20.834741Z node 2 :TX_PROXY ERROR: Actor# [2:7491644109565859693:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:22.087434Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322116, txId: 281474976715671] shutting down 2025-04-10T11:25:22.195399Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644096680955470:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:22.195479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:22.245179Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322277, txId: 281474976715673] shutting down >> KqpYql::UpdatePk [GOOD] >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 5987, MsgBus: 16754 2025-04-10T11:25:11.000747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644067670111060:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.001462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fbc/r3tmp/tmpO00thk/pdisk_1.dat 2025-04-10T11:25:11.496404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.496511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.498234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:11.508341Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5987, node 1 2025-04-10T11:25:11.694759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.694789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.694816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.694931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16754 TClient is connected to server localhost:16754 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.486130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.498991Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.516229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.671469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.860432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.920118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.348380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084849981996:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.348490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.688955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.721899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.751055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.778842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.809679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.878809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.926913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084849982513:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.926979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.927080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644084849982518:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.930457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.940032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644084849982520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.041499Z node 1 :TX_PROXY ERROR: Actor# [1:7491644089144949869:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.000991Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644067670111060:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.001078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.063336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:16.258528Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284316278, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 30140, MsgBus: 7351 2025-04-10T11:25:17.056378Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644097761870520:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:17.056543Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fbc/r3tmp/tmpw1N4z1/pdisk_1.dat 2025-04-10T11:25:17.176323Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:17.200620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:17.200688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30140, node 2 2025-04-10T11:25:17.202206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:17.243253Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:17.243286Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:17.243295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:17.243400Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7351 TClient is connected to server localhost:7351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:17.653717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.666486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:17.688664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.760243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.926241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.003507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.309688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110646774190:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.309759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.356633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.395095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.467704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.498397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.536651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.581431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.640810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110646774704:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.640894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.640938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110646774709:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.649252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.663231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644110646774711:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:20.746882Z node 2 :TX_PROXY ERROR: Actor# [2:7491644110646774767:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:21.948503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.056944Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644097761870520:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:22.057024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:22.737924Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322760, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 26112, MsgBus: 3643 2025-04-10T11:25:10.992117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644066504633610:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:10.993266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f6b/r3tmp/tmpJljyJy/pdisk_1.dat 2025-04-10T11:25:11.619619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.619746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.620997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:11.660680Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26112, node 1 2025-04-10T11:25:11.757373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.757400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.757423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.757541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3643 TClient is connected to server localhost:3643 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.411459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.439753Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.451377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.601477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.791826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.866702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.608078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644083684504556:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.608189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.991710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.025310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.056384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.089766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.119210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.164644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.231549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087979472366:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.231635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.238548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644087979472371:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.242329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:15.254055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644087979472373:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.357220Z node 1 :TX_PROXY ERROR: Actor# [1:7491644087979472428:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:15.992103Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644066504633610:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:15.992180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.633326Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284316656, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 9291, MsgBus: 8537 2025-04-10T11:25:17.445956Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644097329690753:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:17.446032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f6b/r3tmp/tmpi4vUCl/pdisk_1.dat 2025-04-10T11:25:17.600552Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:17.616087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:17.616174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:17.618337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9291, node 2 2025-04-10T11:25:17.717247Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:17.717287Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:17.717295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:17.717416Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8537 TClient is connected to server localhost:8537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:18.208574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.223090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.277352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.408813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.476795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.481619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110214594417:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.481726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.522120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.558039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.596670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.671193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.765649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.828069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.905373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110214594933:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.905472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644110214594938:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.905490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.910005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:20.928848Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:25:20.929177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644110214594940:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:21.031269Z node 2 :TX_PROXY ERROR: Actor# [2:7491644114509562292:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:22.183225Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322207, txId: 281474976715671] shutting down 2025-04-10T11:25:22.436814Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322459, txId: 281474976715673] shutting down 2025-04-10T11:25:22.446425Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644097329690753:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:22.446504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:22.677864Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284322704, txId: 281474976715675] shutting down >> KqpYql::ColumnNameConflict >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 29746, MsgBus: 27776 2025-04-10T11:25:11.062058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644073883276677:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.062619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f35/r3tmp/tmp9mGIzr/pdisk_1.dat 2025-04-10T11:25:11.553160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.575976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.576058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.579735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29746, node 1 2025-04-10T11:25:11.726707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.726732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.726749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.726865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27776 TClient is connected to server localhost:27776 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.438468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.463087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:12.484592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.665058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.852230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.921139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.471709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086768180201:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.471847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.825511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.851065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.874795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.899392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.927175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.957660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.996444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086768180710:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.996528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.996684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644086768180715:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.999843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:15.010277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644086768180717:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.102225Z node 1 :TX_PROXY ERROR: Actor# [1:7491644091063148068:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:16.062340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644073883276677:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.062427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.827454Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284316838, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 62077, MsgBus: 24577 2025-04-10T11:25:17.632920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644099062024571:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:17.632978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f35/r3tmp/tmpRWuxv2/pdisk_1.dat 2025-04-10T11:25:17.785474Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:17.805422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:17.805782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:17.807622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62077, node 2 2025-04-10T11:25:17.908821Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:17.908846Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:17.908855Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:17.908994Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24577 TClient is connected to server localhost:24577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:18.384386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.398756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.475392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.628789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:18.706505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.828232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644111946928231:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.828302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:20.891357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.935776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.983319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:21.022367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:21.072949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:21.134257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:21.216351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644116241896043:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:21.216452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:21.216976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644116241896048:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:21.221844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:21.234809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644116241896050:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:21.329334Z node 2 :TX_PROXY ERROR: Actor# [2:7491644116241896105:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:22.633483Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644099062024571:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:22.633594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::ScriptExplain [GOOD] >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> KqpYql::UuidPrimaryKeyBulkUpsert >> TPartitionTests::TestTxBatchInFederation [GOOD] >> KqpPragma::Auth ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 13708, MsgBus: 22348 2025-04-10T11:25:18.850821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644103797392372:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:18.850892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb8/r3tmp/tmpXzkSwj/pdisk_1.dat 2025-04-10T11:25:19.214881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:19.233832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:19.233933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:19.235573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13708, node 1 2025-04-10T11:25:19.336393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:19.336417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:19.336427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:19.336571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22348 TClient is connected to server localhost:22348 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:19.905035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.932077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.097973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.280412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.356547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.134773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120977263330:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.134922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.455057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.490584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.530689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.566362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.605680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.678820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.727640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120977263849:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.727713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.727910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120977263854:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.731907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:22.742694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644120977263856:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:22.826559Z node 1 :TX_PROXY ERROR: Actor# [1:7491644120977263909:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group 2025-04-10T11:25:23.852460Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644103797392372:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:23.852527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpYql::ScriptUdf >> KqpYql::UuidPrimaryKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2025-04-10T11:24:43.920263Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.920364Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:43.940543Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-04-10T11:24:43.941527Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { 
Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } 
Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: 
"m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageCh ... .931152Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 10 2025-04-10T11:25:22.931206Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:22.931243Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:23.656563Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:24.902499Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-10T11:25:24.902710Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 17 Wait batch completion 2025-04-10T11:25:24.902889Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 10 Wait kv request 2025-04-10T11:25:25.162550Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-04-10T11:25:25.162631Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-04-10T11:25:25.162700Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:25.162751Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:25:25.162834Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::DropOwner. 2025-04-10T11:25:25.162871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-10T11:25:25.162987Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:25:25.163031Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:25.163073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-04-10T11:25:25.163119Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:25:25.163195Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. Offset: 50 2025-04-10T11:25:25.163299Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-04-10T11:25:25.164153Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-04-10T11:25:25.164230Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-04-10T11:25:25.164283Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-04-10T11:25:25.164323Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-04-10T11:25:25.164361Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-04-10T11:25:25.164402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-04-10T11:25:25.164438Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-04-10T11:25:25.164478Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-04-10T11:25:25.164522Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-10T11:25:25.164559Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-10T11:25:25.164596Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:25.164637Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-10T11:25:25.164677Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-10T11:25:25.164728Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 10 2025-04-10T11:25:25.164764Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-04-10T11:25:25.164818Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-04-10T11:25:25.165366Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 51,5 HeadOffset 50 endOffset 50 curOffset 56 d0000000000_00000000000000000051_00000_0000000005_00000| size 189 WTime 21150 2025-04-10T11:25:25.165537Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:25.165592Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:25.165645Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:25:25.165692Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:25.165741Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-04-10T11:25:25.165785Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-04-10T11:25:25.165829Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-04-10T11:25:25.165871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000005_00000| 2025-04-10T11:25:25.165899Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:25.165936Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:25.165974Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-04-10T11:25:25.166003Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:25.166036Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] =========================== Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 0 2025-04-10T11:25:25.202634Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 85 WriteNewSizeFromSupportivePartitions# 4 2025-04-10T11:25:25.202714Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:25.202815Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 50 is already written 2025-04-10T11:25:25.202857Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:25.202894Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 50 is already written 2025-04-10T11:25:25.202921Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:25.202953Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 50 is already written 2025-04-10T11:25:25.202981Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:25.203018Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 50 is already written 2025-04-10T11:25:25.203046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:25:25.203096Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 50 is already written 2025-04-10T11:25:25.203126Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-10T11:25:25.203161Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 50 is already written Wait immediate tx complete 3 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 15404, MsgBus: 12774 2025-04-10T11:25:11.078475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644072336070524:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.078930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fc8/r3tmp/tmpto5d4L/pdisk_1.dat 2025-04-10T11:25:11.527120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.555996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 15404, node 1 2025-04-10T11:25:11.556082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.566694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:11.697233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.697281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.697296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.697420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12774 TClient is connected to server localhost:12774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
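The repeated "is already written" answers above show PersQueue's write deduplication: each message carries a (SourceId, SeqNo) pair, and a write whose SeqNo does not exceed the highest one already stored for that source is acknowledged at the existing offset instead of being appended again, which is why the retried src4 writes (SeqNo 7 through 12, all answered at offset 50) are harmless. A minimal C++ sketch of that bookkeeping, with illustrative names rather than YDB's actual TPartition types:

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

// Minimal sketch (not YDB's actual TPartition code): per-SourceId sequence
// tracking that makes retried writes idempotent.
struct TDedupState {
    std::unordered_map<std::string, uint64_t> MaxSeqNoBySource;

    // Returns true if the message must be appended, false if it is a
    // duplicate that should be answered with the already-assigned offset.
    bool Accept(const std::string& sourceId, uint64_t seqNo) {
        auto [it, inserted] = MaxSeqNoBySource.try_emplace(sourceId, seqNo);
        if (!inserted) {
            if (seqNo <= it->second) {
                return false;  // "Already written message ... seqNo: N"
            }
            it->second = seqNo;
        }
        return true;
    }
};

int main() {
    TDedupState state;
    state.Accept("src4", 7);                       // first write: appended
    std::cout << state.Accept("src4", 7) << "\n";  // retry: 0, already written
    std::cout << state.Accept("src4", 8) << "\n";  // new seqNo: 1, appended
}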
2025-04-10T11:25:12.503109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:12.527058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:12.701529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.861828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.938699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.319035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085220974062:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.319138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.691978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.720660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.787751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.820628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.855540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.927130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.964877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085220974583:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.964940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.965013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085220974588:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.968105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:14.976465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644085220974590:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.068225Z node 1 :TX_PROXY ERROR: Actor# [1:7491644089515941940:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:15.983259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:16.067898Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644072336070524:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.067971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13716, MsgBus: 18585 2025-04-10T11:25:18.485252Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644102787672749:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:18.485337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000fc8/r3tmp/tmpfpmpAm/pdisk_1.dat 2025-04-10T11:25:18.612476Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13716, node 2 2025-04-10T11:25:18.629671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:18.629757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:18.634001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:18.662652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:18.662675Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:18.662684Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:18.662792Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18585 TClient is connected to server localhost:18585 WaitRootIsUp 'Root'... 
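The WORKLOAD_SERVICE warnings above trace a create-on-first-use pattern: fetching the default resource pool fails with NOT_FOUND, creation is kicked off, a retry is scheduled ("Transaction ... completed, doublechecking"), and a concurrent attempt that hits "path exist, request accepts it" is treated as success rather than an error. A hedged sketch of that idempotent-create loop; EStatus and createPool are hypothetical stand-ins, not YDB APIs:

```cpp
#include <chrono>
#include <functional>
#include <thread>

// Hypothetical outcome codes for a "create if missing" call.
enum class EStatus { Ok, AlreadyExists, Retryable };

// Ensure the pool exists; a racing creator is fine, transient errors retry
// with exponential backoff. This is a sketch of the pattern in the log, not
// YDB's TPoolCreatorActor.
bool EnsureDefaultPool(const std::function<EStatus()>& createPool,
                       int maxAttempts = 5) {
    auto backoff = std::chrono::milliseconds(50);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (createPool()) {
            case EStatus::Ok:
            case EStatus::AlreadyExists:  // "path exist, request accepts it"
                return true;
            case EStatus::Retryable:      // "Scheduled retry for error"
                std::this_thread::sleep_for(backoff);
                backoff *= 2;
                break;
        }
    }
    return false;
}
```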
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:19.077460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.085865Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:19.102868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.189909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.358670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.437882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.986307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644115672576398:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:21.986411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.032310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.074017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.154282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.194309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.235198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.309188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.382345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644119967544214:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.382454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.382774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644119967544219:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.387804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:22.398979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644119967544221:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:22.490454Z node 2 :TX_PROXY ERROR: Actor# [2:7491644119967544276:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:23.485364Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644102787672749:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:23.485441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:24.053256Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491644128557479248:2503], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:24.054383Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzE2N2FlZjMtZDllNzU5OGYtZmU3ZmQ4ZGUtYzc1MDZjOGI=, ActorId: [2:7491644128557479243:2502], ActorState: ExecuteState, TraceId: 01jrfq5461c5rk9ce9ad5tnehg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpYql::InsertCV-useSink [GOOD] >> KqpScripting::ScriptValidate >> KqpYql::RefSelect >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.279775Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279854Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.298078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-10T11:24:40.298213Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:40.316886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-10T11:24:40.320370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 
LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-10T11:24:40.320554Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.323091Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-10T11:24:40.323477Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.323556Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.323618Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.323654Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:40.324276Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.324644Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.327499Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:40.327569Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.327638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.330163Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 2025-04-10T11:24:40.330313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:40.330369Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:40.330425Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-04-10T11:24:40.330452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-04-10T11:24:40.330931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.331136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.331205Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.331252Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.331314Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:40.331356Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:40.331380Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000ctest 2025-04-10T11:24:40.331403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000utest 2025-04-10T11:24:40.331439Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.331477Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:40.331562Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.331603Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.331763Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.332201Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:40.332507Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.334484Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
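The DumpKeyValueRequest sections above expose the flat key layout a partition writes into its key-value tablet: "i" plus a zero-padded partition id for partition info (i0000000000), "m" plus the partition id, a 'c' or 'u' tag and the consumer name for the two per-consumer records (m0000000000cuser, m0000000000uuser), and "d" plus partition, offset, part number and message count for data blobs (d0000000000_00000000000000000051_00000_0000000005_00000|, seen earlier in the write dumps). The field widths below are read off the logged keys themselves; the roles of the 'c'/'u' tags and of the trailing 00000 field are assumptions, not taken from YDB sources:

```cpp
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>

// Zero-pad an integer to a fixed width, as the logged keys do.
static std::string Pad(uint64_t v, int width) {
    std::ostringstream out;
    out << std::setw(width) << std::setfill('0') << v;
    return out.str();
}

// "i" + 10-digit partition: per-partition info record.
std::string InfoKey(uint32_t partition) {
    return "i" + Pad(partition, 10);
}

// "m" + 10-digit partition + kind tag ('c' or 'u') + consumer name.
std::string ConsumerMetaKey(uint32_t partition, char kind,
                            const std::string& consumer) {
    return "m" + Pad(partition, 10) + kind + consumer;
}

// "d" + partition + 20-digit offset + 5-digit partNo + 10-digit count;
// the final "_00000|" is kept constant here (its meaning is unclear from
// the log alone).
std::string DataBlobKey(uint32_t partition, uint64_t offset,
                        uint32_t partNo, uint32_t count) {
    return "d" + Pad(partition, 10) + "_" + Pad(offset, 20) + "_" +
           Pad(partNo, 5) + "_" + Pad(count, 10) + "_00000|";
}

int main() {
    std::cout << InfoKey(0) << "\n";                       // i0000000000
    std::cout << ConsumerMetaKey(0, 'c', "user") << "\n";  // m0000000000cuser
    std::cout << DataBlobKey(0, 51, 0, 5) << "\n";
    // d0000000000_00000000000000000051_00000_0000000005_00000|
}
```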
2025-04-10T11:24:40.334539Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.334583Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:40.336357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:40.336558Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-04-10T11:24:40.336619Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-04-10T11:24:40.336663Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-04-10T11:24:40.336692Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-04-10T11:24:40.336799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.336834Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.336866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.336910Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-04-10T11:24:40.336939Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-04-10T11:24:40.336975Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-04-10T11:24:40.336998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001ctest 2025-04-10T11:24:40.337018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001utest 2025-04-10T11:24:40.337061Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.337098Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-04-10T11:24:40.337158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:40.337191Z node 1 :PERSQUEUE DEBUG: [P ...
etID 72057594037927938 is [33:153:2174] sender: [33:154:2057] recipient: [33:147:2170] Leader for TabletID 72057594037927937 is [33:107:2139] sender: [33:179:2057] recipient: [33:14:2061] 2025-04-10T11:25:25.014575Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:25.015696Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 32 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 32 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-04-10T11:25:25.016573Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [33:185:2198] 2025-04-10T11:25:25.020020Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [33:185:2198] 2025-04-10T11:25:25.022436Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [33:186:2199] 2025-04-10T11:25:25.024813Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [33:186:2199] 2025-04-10T11:25:25.034317Z node 33 :PERSQUEUE INFO: new Cookie default|35cbeefb-555e937f-b8b1b23d-a19aa82b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.044227Z node 33 :PERSQUEUE INFO: new Cookie default|36dc2b44-fe1f607c-dfcda2c8-878afe84_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.052544Z node 33 :PERSQUEUE INFO: new Cookie default|a8ca762a-365540e1-c76f0399-a7f08a7f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.067168Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:25.073456Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 33 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 33 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-04-10T11:25:25.081921Z node 33 :PERSQUEUE INFO: new Cookie default|4b98c476-c0cf2b0e-af1997c6-11550c45_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.096353Z node 33 :PERSQUEUE INFO: new Cookie default|6edd2c26-ce5f8457-a0e3fa71-1b5af27d_4 generated for 
partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.112891Z node 33 :PERSQUEUE INFO: new Cookie default|35b0dd90-28206740-e8629717-1a94b6ab_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:108:2057] recipient: [34:101:2135] 2025-04-10T11:25:25.696159Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:25.696243Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927938 is [34:153:2174] sender: [34:154:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:179:2057] recipient: [34:14:2061] 2025-04-10T11:25:25.723403Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:25.724387Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 34 actor [34:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 34 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-04-10T11:25:25.725092Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [34:185:2198] 2025-04-10T11:25:25.728122Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [34:185:2198] 2025-04-10T11:25:25.730112Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [34:186:2199] 2025-04-10T11:25:25.732449Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [34:186:2199] 2025-04-10T11:25:25.740930Z node 34 :PERSQUEUE INFO: new Cookie default|c205c681-5d4d4f41-4c8120f1-adcefd32_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.753117Z node 34 :PERSQUEUE INFO: new Cookie default|e65858e4-82108b4a-81a6148a-d7bb24b6_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.760148Z node 34 :PERSQUEUE INFO: new Cookie default|7dbaa572-5ba9c6a4-4510b8ef-6acde995_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.775176Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:25.782993Z node 34 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 35 actor [34:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-04-10T11:25:25.791005Z node 34 :PERSQUEUE INFO: new Cookie default|afdc148a-8f36817c-b1da06bc-a95f61d_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.805400Z node 34 :PERSQUEUE INFO: new Cookie default|38b56d0c-ea221e3d-967eccc-9be748a4_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:25.818520Z node 34 :PERSQUEUE INFO: new Cookie default|44f8029b-70176531-4bf9f995-6fbdd266_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:26.458788Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:26.458887Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:26.511010Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:26.511107Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:26.514562Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:26.516119Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [35:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2025-04-10T11:25:26.517045Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:245:2245] 2025-04-10T11:25:26.518481Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [35:245:2245] 2025-04-10T11:25:26.520147Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:247:2247] 2025-04-10T11:25:26.521145Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [35:247:2247] 
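The "new Cookie default|...-..._N" lines above show how a partition hands out write-owner cookies: each cookie is the owner name, a separator, a random identifier, and a per-owner counter that increments on every re-registration (0 through 5 in this run). A sketch of that scheme, assumed rather than taken from YDB sources (the log's four dash-separated hex groups are simplified to a single random value here):

```cpp
#include <cstdint>
#include <iostream>
#include <random>
#include <sstream>
#include <string>
#include <unordered_map>

// Illustrative cookie generator: owner + '|' + random id + '_' + counter.
struct TOwnerCookies {
    std::unordered_map<std::string, uint64_t> NextByOwner;
    std::mt19937_64 Rng{std::random_device{}()};

    std::string NewCookie(const std::string& owner) {
        std::ostringstream out;
        out << owner << '|' << std::hex << Rng()
            << '_' << std::dec << NextByOwner[owner]++;  // 0, 1, 2, ...
        return out.str();
    }
};

int main() {
    TOwnerCookies cookies;
    std::cout << cookies.NewCookie("default") << "\n";  // default|<id>_0
    std::cout << cookies.NewCookie("default") << "\n";  // default|<id>_1
}
```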
2025-04-10T11:25:26.567252Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:26.567345Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:26.568368Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:324:2307] 2025-04-10T11:25:26.569944Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:326:2309] 2025-04-10T11:25:26.575871Z node 35 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:25:26.575957Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [35:324:2307] 2025-04-10T11:25:26.576761Z node 35 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:25:26.576816Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [35:326:2309] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23379, MsgBus: 63991 2025-04-10T11:25:15.406843Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644088803273284:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:15.408069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ec5/r3tmp/tmpRNPgrK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23379, node 1 2025-04-10T11:25:15.787789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:15.787826Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:15.788675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:15.811749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:15.811894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:15.813917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:15.844024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:15.844047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:15.844060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:15.844223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63991 TClient is connected to server localhost:63991 WaitRootIsUp 'Root'... 
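Across the restarts in the TPQTest log above, the partitions come back as generation 2, then 3, then 4: each bootstrap runs in StateInit, bumps the generation, and only then serves traffic from StateIdle, with already-done steps such as EndWriteTimestamp initialization skipped on re-init. A toy sketch of that lifecycle; the enum and struct are illustrative, not YDB types:

```cpp
#include <cstdint>
#include <iostream>

enum class EPartitionState { StateInit, StateIdle };

struct TPartitionSketch {
    uint32_t Generation = 1;
    EPartitionState State = EPartitionState::StateInit;

    void Bootstrap() {  // runs on every tablet (re)start
        State = EPartitionState::StateInit;
        ++Generation;   // "init complete ... generation N"
        State = EPartitionState::StateIdle;
    }
};

int main() {
    TPartitionSketch p;
    for (int restart = 0; restart < 3; ++restart) {
        p.Bootstrap();
        std::cout << "init complete, generation " << p.Generation << "\n";
    }
    // Prints generations 2, 3, 4 -- matching the restarts in the log.
}
```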
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:16.311045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:16.351228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:16.493724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:16.648854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:16.714381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.360694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644101688176923:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:18.360820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:18.636040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.666478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.733721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.766952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.798780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.830480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:18.882478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644101688177437:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:18.882540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:18.882703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644101688177442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:18.886607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:18.897535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644101688177444:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:18.993188Z node 1 :TX_PROXY ERROR: Actor# [1:7491644101688177498:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:20.013801Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-10T11:25:20.021957Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:25:20.022209Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:25:20.022477Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644110278112410:2501], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7491644105983145098:2501]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7491644110278112410:2501].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:25:20.022956Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644110278112403:2501], SessionActorId: [1:7491644105983145098:2501], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7491644105983145098:2501]. isRollback=0 2025-04-10T11:25:20.023181Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmM2MDZhYTEtNTA4OTgzMzItMTEwYTQ2YzgtMWQwNTRkYmQ=, ActorId: [1:7491644105983145098:2501], ActorState: ExecuteState, TraceId: 01jrfq505y6rq8a49pgwcfhzkx, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491644110278112404:2501] from: [1:7491644110278112403:2501] 2025-04-10T11:25:20.023268Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491644110278112404:2501] TxId: 281474976710671. Ctx: { TraceId: 01jrfq505y6rq8a49pgwcfhzkx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2MDZhYTEtNTA4OTgzMzItMTEwYTQ2YzgtMWQwNTRkYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:25:20.023439Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmM2MDZhYTEtNTA4OTgzMzItMTEwYTQ2YzgtMWQwNTRkYmQ=, ActorId: [1:7491644105983145098:2501], ActorState: ExecuteState, TraceId: 01jrfq505y6rq8a49pgwcfhzkx, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 31984, MsgBus: 5614 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ec5/r3tmp/tmpYrqxoZ/pdisk_1.dat 2025-04-10T11:25:20.963236Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:20.964139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:20.993215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:20.993310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:20.994826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31984, node 2 2025-04-10T11:25:21.057150Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:21.057173Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:21.057183Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:21.057311Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5614 TClient is connected to server localhost:5614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:21.555852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.562118Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:21.576102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.665504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.824307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
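
The PRECONDITION_FAILED sequence above is the test exercising key-uniqueness enforcement: the datashard rejects the write with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", issue code 2012), and KQP surfaces it as "Constraint violated. Table: `/Root/Test`." nested under an "Execution" issue (code 1060). A minimal YQL sketch of the two ways this class of error typically appears follows; the `/Root/Test` schema shown is an assumption for illustration, not taken from this log.

    -- Hypothetical schema; the real /Root/Test layout comes from the test helpers.
    CREATE TABLE `/Root/Test` (
        GroupId Uint32,
        Name String,
        Amount Uint64,
        PRIMARY KEY (GroupId, Name)
    );

    -- Two rows with the same key in a single INSERT: the shard reports
    -- "Duplicate keys have been found." (code 2012), as in the node 1 run above.
    INSERT INTO `/Root/Test` (GroupId, Name, Amount) VALUES
        (1u, "Anna", 3500ul),
        (1u, "Anna", 300ul);

    -- INSERT over a key that is already stored: "Conflict with existing key."
    -- (also code 2012), as in the node 2 run below. UPSERT or REPLACE would
    -- overwrite instead of failing.
    INSERT INTO `/Root/Test` (GroupId, Name, Amount) VALUES
        (1u, "Anna", 300ul);
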
2025-04-10T11:25:21.922608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:23.980788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644122225695262:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.980892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.033520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.069421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.100503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.128648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.158440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.228699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.270825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644126520663074:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.270912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644126520663079:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.270925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.273914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:24.283233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644126520663081:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:24.385017Z node 2 :TX_PROXY ERROR: Actor# [2:7491644126520663134:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:25.595473Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644130815630744:2502], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq55et72b9fjz7qwfy88r0. SessionId : ydb://session/3?node_id=2&id=NWRhNWVhOS1lMGEwMGMzOS0yOGQ1YTJjZC00ZDExYmVhNg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:25:25.596296Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644130815630745:2503], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWRhNWVhOS1lMGEwMGMzOS0yOGQ1YTJjZC00ZDExYmVhNg==. TraceId : 01jrfq55et72b9fjz7qwfy88r0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491644130815630741:2493], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:25.596622Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWRhNWVhOS1lMGEwMGMzOS0yOGQ1YTJjZC00ZDExYmVhNg==, ActorId: [2:7491644130815630699:2493], ActorState: ExecuteState, TraceId: 01jrfq55et72b9fjz7qwfy88r0, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> KqpScripting::ScriptingCreateAndAlterTableTest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> KqpYql::FlexibleTypes >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] >> Yq_1::Basic_EmptyDict [GOOD] >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> KqpYql::InsertIgnore >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpYql::UpdateBadType >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-04-10T11:24:29.880310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643894225287619:2107];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:29.880389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0410 11:24:30.177150466 1648786 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:30.177335428 1648786 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:30.238559Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8317: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8317 } ] 2025-04-10T11:24:30.882514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:31.248684Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8317: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8317 2025-04-10T11:24:31.251910Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8317: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8317 } ] 2025-04-10T11:24:31.883893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.884621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:32.946417Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8317: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8317 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a53/r3tmp/tmpLshAvZ/pdisk_1.dat 2025-04-10T11:24:33.420858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:24:33.428910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643911405157210:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:33.522320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491643911405157210:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 8317, node 1 TClient is connected to server localhost:16788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-04-10T11:24:34.113232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:34.113257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:34.113264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:34.113443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:34.137701Z node 1 :IMPORT WARN: Table profiles were not loaded Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:34.158627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:34.198114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:34.198294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:34.252312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:34.882298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643894225287619:2107];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:34.882405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0410 11:24:35.178265207 1649013 dns_resolver.cc:162] no server name supplied in dns URI E0410 11:24:35.178423452 1649013 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-10T11:24:35.294389Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-10T11:24:35.294432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.294440Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-10T11:24:35.296014Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-10T11:24:35.296027Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.296033Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-10T11:24:35.296249Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-10T11:24:35.296272Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.296279Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-10T11:24:35.297433Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-10T11:24:35.297470Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.297480Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-10T11:24:35.297566Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-10T11:24:35.297578Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.297584Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-10T11:24:35.299191Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:24:35.300599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-10T11:24:35.300612Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.300617Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-10T11:24:35.301499Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-10T11:24:35.301519Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.301527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-10T11:24:35.311063Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-10T11:24:35.311097Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.311104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-10T11:24:35.311726Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-10T11:24:35.311760Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.311766Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-10T11:24:35.312442Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-10T11:24:35.312465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.312470Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-10T11:24:35.330356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-10T11:24:35.330391Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.330399Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-10T11:24:35.341995Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-10T11:24:35.342039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-10T11:24:35.344201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.345515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.346381Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-10T11:24:35.346407Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.346414Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-10T11:24:35.346891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:24:35.347318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-10T11:24:35.347343Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-10T11:24:35.347350Z node 1 :YQ_CONTROL_ ... ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808107Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808153Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808208Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808255Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808345Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808381Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808450Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808491Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808555Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808591Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808650Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808696Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808746Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808799Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808851Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808905Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.808955Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809009Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809053Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809111Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-04-10T11:25:26.809156Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809218Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809261Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809342Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809387Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809440Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809492Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809537Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809596Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809633Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809702Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809731Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809800Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809844Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809899Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.809999Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.810104Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.811485Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.811673Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.811786Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.811893Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812023Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812132Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812253Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812288Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812381Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812431Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812512Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812550Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812622Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812667Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812734Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812780Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812843Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812892Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.812955Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813012Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813076Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813121Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-04-10T11:25:26.813191Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813240Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813313Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813361Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813420Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813474Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813535Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813630Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813682Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813775Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813827Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813893Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.813932Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814006Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814042Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814121Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814260Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814362Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814473Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814568Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814630Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814864Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.814896Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815076Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815105Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815210Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815332Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815436Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815549Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815597Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815669Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815721Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815778Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815832Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815890Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815936Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.815998Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816042Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816105Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-04-10T11:25:26.816153Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816239Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816285Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816360Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816403Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816475Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816511Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816591Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816634Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816705Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816741Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816815Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816849Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816931Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.816988Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817124Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817158Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817252Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817314Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817388Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817429Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817505Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817552Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817609Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817664Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817720Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817775Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817845Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817910Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.817975Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.818037Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-10T11:25:26.818091Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T11:22:34.610338Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:34.706010Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:22:34.710330Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: 
NKikimr::TEvTablet::TEvRestored 2025-04-10T11:22:34.710657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:34.727783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:34.728099Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:34.735557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:34.735711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:34.735881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:34.735978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:34.736068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:34.736161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:34.736231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:34.736301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:34.736399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:34.736531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:34.736618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:34.736692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:34.754556Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:22:34.758079Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 
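
For context, TColumnShardTestSchema::TTL drives row eviction by TTL on a column shard; the normalizer chatter around this point is startup housekeeping on tablet 9437184, not part of the TTL logic itself. The user-facing feature under test corresponds roughly to the following YQL, assuming current column-store syntax (the table and column names here are invented for illustration and do not appear in this log):

    -- A column-store table whose rows become eligible for eviction
    -- one hour after their ts value.
    CREATE TABLE `/Root/olap_events` (
        ts Timestamp NOT NULL,
        payload Utf8,
        PRIMARY KEY (ts)
    )
    PARTITION BY HASH(ts)
    WITH (
        STORE = COLUMN,
        TTL = Interval("PT1H") ON ts
    );
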
2025-04-10T11:22:34.758402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:34.758462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:34.758673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:34.758862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:34.758947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:34.759000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:34.759071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:34.759133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:34.759173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:34.759201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:34.759346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:34.759395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:22:34.759422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:34.759454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:34.759536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:34.759578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:34.759608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:34.759627Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:34.759675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:34.759722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:34.759750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:34.759788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:34.759812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:34.759830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:34.760202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-10T11:22:34.760280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-10T11:22:34.760350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-10T11:22:34.760432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-10T11:22:34.760573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:34.760623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:34.760654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:34.760850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:34.760897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:34.760941Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:34.761147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-10T11:22:34.761202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:34.761243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:34.761501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:22:34.761563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:22:34.761600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... ctors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:143:16 #31 0xff9c125 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9 #32 0xff98779 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7f6e8d880d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0xf4fb11d in 
NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20 #2 0x1c9aded9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38 #3 0x1c9aded9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23 #4 0x1c9ac26f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40 #5 0x1c9b292b in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5 #6 0x1c9bf54b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17 #7 0x25b38dab in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29 #8 0x25b3486e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5 #9 0x2a213c7a in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #10 0x2a213c7a in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24 #11 0x2a215507 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9 #12 0x2a0e6a09 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19 #13 0x2a0e5370 in NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9 #14 0x2a0e4ae1 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13 #15 0xff0a29e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19 #16 0x25cccf3d 
in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13 #17 0x1849b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #18 0x184609f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9 #19 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #20 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #21 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #22 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #23 0x3148df63 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x3146cf82 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x3146cf82 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x3146be97 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5 #27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13 #28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5 #29 0xff9878c in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #30 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #31 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> 
const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 SUMMARY: AddressSanitizer: 2643002 byte(s) leaked in 62809 allocation(s). >> KqpYql::TableRange [GOOD] >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 23158, MsgBus: 4463 2025-04-10T11:25:11.193471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644072861972885:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:11.193946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000f02/r3tmp/tmpPCM0GB/pdisk_1.dat 2025-04-10T11:25:11.727805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:11.736508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:11.736607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:11.738535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23158, node 1 2025-04-10T11:25:11.814886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:11.814908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:11.814921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:11.815052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4463 TClient is connected to server localhost:4463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:12.464272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.498745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:12.700997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.863140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:12.951179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:14.522667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644085746876409:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.522815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:14.852287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.919501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:14.949210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.014927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.047546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.076335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:15.114635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644090041844220:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.114695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644090041844225:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.114723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:15.119086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:15.128749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644090041844227:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:15.203996Z node 1 :TX_PROXY ERROR: Actor# [1:7491644090041844281:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:15.951369Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644090041844571:2495] 2025-04-10T11:25:16.019232Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811881:2500] 2025-04-10T11:25:16.038603Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811893:2504] 2025-04-10T11:25:16.057664Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811904:2508] 2025-04-10T11:25:16.090504Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811915:2512] 2025-04-10T11:25:16.106559Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811941:2517] 2025-04-10T11:25:16.141707Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811960:2524] 2025-04-10T11:25:16.182692Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811976:2530] 2025-04-10T11:25:16.183052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644072861972885:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:16.183197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:16.216194Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336811995:2537] 2025-04-10T11:25:16.252408Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812011:2543] 2025-04-10T11:25:16.292275Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812039:2549] 2025-04-10T11:25:16.335835Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812058:2555] 2025-04-10T11:25:16.385518Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812073:2561] 2025-04-10T11:25:16.434693Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812090:2567] 2025-04-10T11:25:16.489363Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812121:2573] 2025-04-10T11:25:16.542400Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812139:2580] 2025-04-10T11:25:16.600307Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812161:2588] 2025-04-10T11:25:16.665518Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812198:2597] 2025-04-10T11:25:16.726500Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812213:2602] 2025-04-10T11:25:16.796165Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812233:2609] 2025-04-10T11:25:16.882529Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812272:2618] 2025-04-10T11:25:16.941996Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812283:2623] 2025-04-10T11:25:17.020256Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644094336812324:2633] 2025-04-10T11:25:17.103474Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779635:2639] 2025-04-10T11:25:17.191718Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779657:2648] 2025-04-10T11:25:17.274312Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: 
[1:7491644098631779675:2656] 2025-04-10T11:25:17.360958Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284317391, txId: 281474976710671] shutting down 2025-04-10T11:25:17.361373Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779698:2665] 2025-04-10T11:25:17.456667Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779807:2683] 2025-04-10T11:25:17.558422Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779831:2693] 2025-04-10T11:25:17.662320Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779853:2702] 2025-04-10T11:25:17.764463Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779870:2708] 2025-04-10T11:25:17.872483Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779892:2717] 2025-04-10T11:25:17.979976Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644098631779922:2726] 2025-04-10T11:25:17.980777Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491644098631779963:2731] TxId: 281474976710674. Ctx: { TraceId: 01jrfq4y5naarhyhe416ctkbqz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U1NDFhMzEtMzA4YzhjOTQtZmVmYjg1YTEtNWQ5ZGE5ZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
... 2075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.229525Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.230465Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.230539Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.230584Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1744284327233 2025-04-10T11:25:27.230630Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.230677Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.230736Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.230807Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805792:2715]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.230877Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.231621Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.231679Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.231722Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1744284327233 2025-04-10T11:25:27.231765Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.231814Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.231878Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.231919Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805792:2715]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.231985Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.232431Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.232482Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.232530Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.232591Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1744284327233 2025-04-10T11:25:27.232636Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.232695Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.232762Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805792:2715]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.232813Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.233167Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.233221Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.233331Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1744284327233 2025-04-10T11:25:27.233386Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.233429Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.233487Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.233532Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805792:2715]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.233574Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.234715Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.234775Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.234816Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1744284327233 2025-04-10T11:25:27.234861Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.234905Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.234961Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.235023Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805792:2715]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037893, actor_id: [2:7491644117909999519:2329] 2025-04-10T11:25:27.235039Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644143679805792:2715]. TKqpScanFetcherActor: broken tablet for this request 72075186224037893, retries limit exceeded (0/20) 2025-04-10T11:25:27.235162Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.235866Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037892, step: 1744284327233 2025-04-10T11:25:27.235940Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037891, step: 1744284327233 2025-04-10T11:25:27.235994Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805791:2714]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7491644117909999517:2327] 2025-04-10T11:25:27.236009Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644143679805791:2714]. TKqpScanFetcherActor: broken tablet for this request 72075186224037892, retries limit exceeded (0/20) 2025-04-10T11:25:27.236097Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805790:2713]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7491644117909999507:2322] 2025-04-10T11:25:27.236107Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644143679805790:2713]. TKqpScanFetcherActor: broken tablet for this request 72075186224037891, retries limit exceeded (0/20) 2025-04-10T11:25:27.236292Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037890, step: 1744284327233 2025-04-10T11:25:27.236364Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7491644143679805789:2712]. Got EvScanError scan state: , status: ABORTED, reason:
<main>: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7491644117909999518:2328] 2025-04-10T11:25:27.236375Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491644143679805789:2712]. TKqpScanFetcherActor: broken tablet for this request 72075186224037890, retries limit exceeded (0/20) 2025-04-10T11:25:27.517065Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284327548, txId: 281474976715674] shutting down 2025-04-10T11:25:27.878729Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2JmZWEzNTEtZjIxYjI4MzUtYTk0NTEyZDMtZGI0ZDFiZjM=, ActorId: [2:7491644143679806083:2770], ActorState: ExecuteState, TraceId: 01jrfq57tk24zh46t1dggkvvb3, Create QueryResponse for error on request, msg: 2025-04-10T11:25:28.113088Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284328143, txId: 281474976715676] shutting down 2025-04-10T11:25:28.486713Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284328514, txId: 281474976715678] shutting down >> KqpScripting::StreamDdlAndDml [GOOD] >> KqpPragma::Warning [GOOD] >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 5860, MsgBus: 24811 2025-04-10T11:25:25.425955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644133799726201:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.426012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8f/r3tmp/tmpaDVp8u/pdisk_1.dat 2025-04-10T11:25:25.851217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.851344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.855660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5860, node 1 2025-04-10T11:25:25.895381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:26.012176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:26.012199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:26.012208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:26.012314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24811 TClient is connected to server localhost:24811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.627553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.650679Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:28.677242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644146684628763:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.677376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.975389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.133021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150979596170:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.133127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.133240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150979596175:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.138673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.153212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644150979596177:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:25:29.235050Z node 1 :TX_PROXY ERROR: Actor# [1:7491644150979596229:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.9%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: 2025-04-10T11:22:38.912273Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:22:38.981510Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:22:38.985090Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:22:38.985377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:22:39.002237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:22:39.002474Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:22:39.008509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:22:39.008652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:22:39.008818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:22:39.008902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:22:39.008978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:22:39.009068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:22:39.009134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:22:39.009199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:22:39.009279Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:22:39.009368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.009442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:22:39.009511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:22:39.027915Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:22:39.031134Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:22:39.031406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:22:39.031466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:22:39.031615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.031740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:22:39.031795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:22:39.031822Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:22:39.031894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:22:39.031957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:22:39.031998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:22:39.032021Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:22:39.032183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:22:39.032253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 
2025-04-10T11:22:39.032291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:22:39.032314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:22:39.032421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:22:39.032458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:22:39.032487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:22:39.032504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:22:39.032564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:22:39.032626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:22:39.032658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:22:39.032704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:22:39.032748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:22:39.032773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:22:39.033171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:22:39.033246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-10T11:22:39.033376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-10T11:22:39.033474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-10T11:22:39.033635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:22:39.033694Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:22:39.033723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:22:39.033922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:22:39.033954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.034000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:22:39.034174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:22:39.034220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:22:39.034254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:22:39.034423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:22:39.034470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:22:39.034501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... 
umn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.900173Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.900234Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:25:26.900285Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:25:26.900413Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:25:26.900519Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.900549Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:25:26.900640Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-10T11:25:26.900685Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-10T11:25:26.900810Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-04-10T11:25:26.900895Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.901002Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.901210Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.901391Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:25:26.901533Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.901645Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.901696Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:581:2597] finished for tablet 9437184 2025-04-10T11:25:26.902538Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:574:2590];stats={"p":[{"events":["f_bootstrap"],"t":0.083},{"events":["f_ProduceResults"],"t":0.618},{"events":["l_bootstrap"],"t":0.932},{"events":["f_processing","f_task_result"],"t":0.96},{"events":["l_task_result"],"t":8.834},{"events":["f_ack"],"t":8.93},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":9.98}],"full":{"a":1744284316921571,"name":"_full_task","f":1744284316921571,"d_finished":0,"c":0,"l":1744284326901823,"d":9980252},"events":[{"name":"bootstrap","f":1744284317004826,"d_finished":848983,"c":1,"l":1744284317853809,"d":848983},{"a":1744284326901368,"name":"ack","f":1744284325852283,"d_finished":967069,"c":904,"l":1744284326901254,"d":967524},{"a":1744284326901348,"name":"processing","f":1744284317882483,"d_finished":4292544,"c":4520,"l":1744284326901257,"d":4293019},{"name":"ProduceResults","f":1744284317540206,"d_finished":1773908,"c":5426,"l":1744284326901671,"d":1773908},{"a":1744284326901675,"name":"Finish","f":1744284326901675,"d_finished":0,"c":0,"l":1744284326901823,"d":148},{"name":"task_result","f":1744284317882531,"d_finished":3228857,"c":3616,"l":1744284325756529,"d":3228857}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.902632Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:25:26.903259Z node 5 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:574:2590];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.083},{"events":["f_ProduceResults"],"t":0.618},{"events":["l_bootstrap"],"t":0.932},{"events":["f_processing","f_task_result"],"t":0.96},{"events":["l_task_result"],"t":8.834},{"events":["f_ack"],"t":8.93},{"events":["l_ProduceResults","f_Finish"],"t":9.98},{"events":["l_ack","l_processing","l_Finish"],"t":9.981}],"full":{"a":1744284316921571,"name":"_full_task","f":1744284316921571,"d_finished":0,"c":0,"l":1744284326902683,"d":9981112},"events":[{"name":"bootstrap","f":1744284317004826,"d_finished":848983,"c":1,"l":1744284317853809,"d":848983},{"a":1744284326901368,"name":"ack","f":1744284325852283,"d_finished":967069,"c":904,"l":1744284326901254,"d":968384},{"a":1744284326901348,"name":"processing","f":1744284317882483,"d_finished":4292544,"c":4520,"l":1744284326901257,"d":4293879},{"name":"ProduceResults","f":1744284317540206,"d_finished":1773908,"c":5426,"l":1744284326901671,"d":1773908},{"a":1744284326901675,"name":"Finish","f":1744284326901675,"d_finished":0,"c":0,"l":1744284326902683,"d":1008},{"name":"task_result","f":1744284317882531,"d_finished":3228857,"c":3616,"l":1744284325756529,"d":3228857}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:25:26.903373Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:25:16.840976Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-10T11:25:26.903458Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:25:26.903742Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 16176, MsgBus: 11768 2025-04-10T11:25:25.063025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644134409801018:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.064071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e96/r3tmp/tmpgGrvsI/pdisk_1.dat 2025-04-10T11:25:25.533669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.533739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.535166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16176, node 1 2025-04-10T11:25:25.570089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:25.694670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.694691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.694698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.694802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11768 TClient is connected to server localhost:11768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.270514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.298940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:26.304787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.464825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:26.627453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.701366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.299022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644147294704598:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.299155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.632415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.672281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.703808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.736587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.779914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.820560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.874750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644147294705108:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.874832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.874918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644147294705113:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.878513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.892481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644147294705115:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:28.983968Z node 1 :TX_PROXY ERROR: Actor# [1:7491644147294705168:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. 2025-04-10T11:25:30.061626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644134409801018:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.061691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::UuidPrimaryKey [GOOD] |92.9%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 26817, MsgBus: 27704 2025-04-10T11:25:19.192053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644108021748323:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:19.192734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb0/r3tmp/tmpCtz8u1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26817, node 1 2025-04-10T11:25:19.622797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:19.622886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:19.623173Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:19.633763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:19.689739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:19.689766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:19.689774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:19.689907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27704 TClient is connected to server localhost:27704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:20.210778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:20.226212Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:20.236934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:20.382969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.550261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.631490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.381356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120906651845:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.381484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.702622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.729799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.756943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.782931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.813043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.852164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.929390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120906652360:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.929460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.929621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644120906652365:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.934439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:22.944235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644120906652367:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:23.015541Z node 1 :TX_PROXY ERROR: Actor# [1:7491644125201619717:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:23.830402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.152829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284324188, txId: 281474976710673] shutting down 2025-04-10T11:25:24.189414Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644108021748323:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:24.189500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17920, MsgBus: 23717 2025-04-10T11:25:25.039201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644134942434334:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.039311Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb0/r3tmp/tmpkkWcOr/pdisk_1.dat 2025-04-10T11:25:25.141529Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17920, node 2 2025-04-10T11:25:25.183129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.183278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.184846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:25.216675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.216702Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.216709Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.216820Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23717 TClient is connected to server localhost:23717 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:25.668483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:25.686678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.766094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.934014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.014677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.384797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644147827337997:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.384851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.436171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.465569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.498574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.537930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.566954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.599031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.650710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644147827338505:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.650896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.651481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644147827338510:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.654921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.664789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644147827338512:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:28.753262Z node 2 :TX_PROXY ERROR: Actor# [2:7491644147827338566:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:29.649949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.039569Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644134942434334:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.039648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:30.120974Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330145, txId: 281474976715673] shutting down >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> KqpYql::TableUseBeforeCreate >> TPQTest::TestManyConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 2313, MsgBus: 19966 2025-04-10T11:25:19.722115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644108513667838:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:19.726097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea7/r3tmp/tmpT4kVt9/pdisk_1.dat 2025-04-10T11:25:20.109404Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:20.138887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:20.138969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:20.140855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2313, node 1 2025-04-10T11:25:20.188529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:20.188560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:20.188578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:20.190249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19966 TClient is connected to server localhost:19966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:20.706909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.732607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:20.737872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.906046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.097122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:21.182363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.745357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644121398571411:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.745484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.079953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.107142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.136839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.164621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.195737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.231451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.309537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644125693539222:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.309623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.310165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644125693539227:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.313808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:23.323341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644125693539229:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:23.406601Z node 1 :TX_PROXY ERROR: Actor# [1:7491644125693539284:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:24.721898Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644108513667838:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:24.721985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:24.778390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284324797, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11553, MsgBus: 63584 2025-04-10T11:25:25.617488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644132642998666:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.617576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea7/r3tmp/tmp8auFQT/pdisk_1.dat 2025-04-10T11:25:25.735385Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11553, node 2 2025-04-10T11:25:25.772870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.772959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.778091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:25.822738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.822762Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.822769Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.822881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63584 TClient is connected to server localhost:63584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.268272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.282593Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:26.301506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.379669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:26.550292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:26.664750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.757635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145527902313:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.757748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.801844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.847026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.887050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.922690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.958070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.008552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.102617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644149822870125:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.102726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.103162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644149822870130:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.107581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.120409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644149822870132:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:29.213292Z node 2 :TX_PROXY ERROR: Actor# [2:7491644149822870187:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::RefSelect [GOOD] >> KqpYql::PgIntPrimaryKey >> KqpScripting::ScanQueryDisable [GOOD] >> KqpScripting::QueryStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 25750, MsgBus: 4711 2025-04-10T11:25:19.477390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644105744435690:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:19.478088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eab/r3tmp/tmpLqQFCt/pdisk_1.dat 2025-04-10T11:25:19.838914Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25750, node 1 2025-04-10T11:25:19.884261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:19.884329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:19.886169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:19.936367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:19.936385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:19.936420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:19.936551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4711 TClient is connected to server localhost:4711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:20.448250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:20.506614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:20.638627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.802663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:20.902705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.440713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644118629339182:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.440849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.831339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.866571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.898781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.928552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.963184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.032902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.107546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122924307000:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.107644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.107711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122924307005:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.110702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:23.119354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644122924307007:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:23.210591Z node 1 :TX_PROXY ERROR: Actor# [1:7491644122924307059:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:24.474251Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644105744435690:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:24.474314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:24.563433Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644127219274723:2509], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits /lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-04-10T11:25:24.564929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgxMGJkYjMtNmQyZjMwYzUtOGJlNzY1ZDMtNTQ2ZjZjYmQ=, ActorId: [1:7491644127219274645:2496], ActorState: ExecuteState, TraceId: 01jrfq54n63dwjd5pk1xhfhs72, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 21715, MsgBus: 3442 2025-04-10T11:25:25.330359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644132412346762:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.331186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eab/r3tmp/tmpK0Wvdk/pdisk_1.dat 2025-04-10T11:25:25.487731Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:25.498519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.498587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.499865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21715, node 2 2025-04-10T11:25:25.582631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.582651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.582658Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.582749Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3442 TClient is connected to server localhost:3442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.031986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:26.039380Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:26.052241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.134089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.269906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.369672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.609457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145297250413:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.609546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.653855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.690215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.717009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.744761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.816012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.847473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.945249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145297250929:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.945359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.945500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145297250934:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.949505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.963021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644145297250936:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:29.063372Z node 2 :TX_PROXY ERROR: Actor# [2:7491644149592218288:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:30.316202Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644132412346762:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.316275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 32070, MsgBus: 20586 2025-04-10T11:25:18.835436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644104848399514:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:18.835488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb4/r3tmp/tmpLsZjw4/pdisk_1.dat 2025-04-10T11:25:19.250664Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:19.283249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:19.283389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 32070, node 1 2025-04-10T11:25:19.287746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:19.344447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:19.344480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:19.344490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:19.344638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20586 TClient is connected to server localhost:20586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:19.936915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:19.962695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:19.973266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.133166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.302299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:20.380653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.118222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122028270461:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.118355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.492942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.530681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.564269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.631995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.660070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.690595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:22.741037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122028270976:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.741119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.741196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122028270981:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:22.744899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:22.754254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644122028270983:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:22.844881Z node 1 :TX_PROXY ERROR: Actor# [1:7491644122028271036:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:23.753892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.835408Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644104848399514:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:23.835474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22856, MsgBus: 16372 2025-04-10T11:25:24.819235Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644128238668564:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:24.819283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000eb4/r3tmp/tmpduLXim/pdisk_1.dat 2025-04-10T11:25:24.926359Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:24.953855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:24.953934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:24.954708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22856, node 2 2025-04-10T11:25:25.008225Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.008247Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.008253Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.008349Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16372 TClient is connected to server localhost:16372 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:25.435691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.458305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.536550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.726175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.798040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.063195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145418539530:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.063273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.103861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.137585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.167712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.201158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.231193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.262489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.349554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145418540044:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.349647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.349888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644145418540049:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.353518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.363216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644145418540051:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:28.432689Z node 2 :TX_PROXY ERROR: Actor# [2:7491644145418540106:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:29.400650Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284329390, txId: 281474976715671] shutting down 2025-04-10T11:25:29.552040Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284329538, txId: 281474976715673] shutting down 2025-04-10T11:25:29.820479Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644128238668564:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:29.820568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:30.510127Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330530, txId: 281474976715675] shutting down >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> KqpYql::EvaluateExpr1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 9264, MsgBus: 5714 2025-04-10T11:25:26.268950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644135370203924:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:26.269012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e83/r3tmp/tmpwz31P7/pdisk_1.dat 2025-04-10T11:25:26.684553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:26.684649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:26.686337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9264, node 1 2025-04-10T11:25:26.719333Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:26.719360Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:26.758675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:26.800346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:26.800368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:26.800376Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:26.800497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5714 TClient is connected to server 
localhost:5714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:27.385424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.438894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148255106471:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.439017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.691897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.857845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148255106574:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.857933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.858165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148255106579:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.863394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.873684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644148255106581:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:25:29.952454Z node 1 :TX_PROXY ERROR: Actor# [1:7491644148255106633:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:30.318494Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644152550074029:2368], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-04-10T11:25:30.319851Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdiNmY0NC05OGFmYjViOC0yYjM1ZjYwNC1mYmJjMWRmNA==, ActorId: [1:7491644148255106453:2329], ActorState: ExecuteState, TraceId: 01jrfq5a9y408bm4jas3m33skt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:25:31.268561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644135370203924:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.268631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.279737Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279815Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.302783Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.322537Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.324414Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.327141Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.329435Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.331325Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.339812Z node 1 :PERSQUEUE INFO: new Cookie 
default|afc8638f-f80a8a98-3f4d0632-fce9994b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:24:40.855543Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.855664Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T11:24:40.899344Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.900466Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:40.901184Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T11:24:40.903944Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T11:24:40.905935Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-04-10T11:24:40.907944Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-04-10T11:24:40.916705Z node 2 :PERSQUEUE INFO: new Cookie default|6ee50e00-42000ff3-a7088211-9a798085_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:41.509288Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.509370Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] 
recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-10T11:24:41.526062Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.526894Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:24:41.527457Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-10T11:24:41.530011Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-10T11:24:41.531736Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-10T11:24:41.533625Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-10T11:24:41.539731Z node 3 :PERSQUEUE INFO: new Cookie default|359315fe-a43b3a2d-2caf4d8d-e3391900_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-10T11:24:41.962381Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.962469Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-10T11:24:41.982103Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.983042Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: 
"/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-04-10T11:24:41.983663Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-04-10T11:24:41.986548Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-04-10T11:24:41.988474Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-04-10T11:24:41.990572Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-04-10T11:24:41.996845Z node 4 :PERSQUEUE INFO: new Cookie default|e763f05f-c5942bf2-54c3b525-2cbf8e39_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:41.997452Z node 4 :PERSQUEUE INFO: new Cookie default|c451b5d9-46cd6fcd-d33b9678-4ab4d4c8_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] Leader for TabletID 72057594037927937 is [5:107:2139] sender: [5:108:2057] recipient: [5:101:2135] 2025-04-10T11:24:42.418416Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:42.418513Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927938 is [5:153:2174] sender: [5:154:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927 ... 
6 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.431679Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.467897Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:999:2995], now have 1 active actors on pipe 2025-04-10T11:25:30.469504Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.484098Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.519544Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1002:2998], now have 1 active actors on pipe 2025-04-10T11:25:30.521035Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.535044Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.599196Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1005:3001], now have 1 active actors on pipe 2025-04-10T11:25:30.601068Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.614975Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.645975Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1008:3004], now have 1 active actors on pipe 2025-04-10T11:25:30.647580Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.664735Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.707181Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1011:3007], now have 1 active actors on pipe 2025-04-10T11:25:30.708980Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.727360Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.766961Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1014:3010], now have 1 active actors on pipe 2025-04-10T11:25:30.768848Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.786852Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.851049Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1017:3013], now have 1 active actors on pipe 2025-04-10T11:25:30.852879Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.873336Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.915922Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1020:3016], now have 1 active actors on pipe 2025-04-10T11:25:30.917806Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
2025-04-10T11:25:30.935130Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.974084Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1023:3019], now have 1 active actors on pipe 2025-04-10T11:25:30.975970Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:30.993252Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.033073Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1026:3022], now have 1 active actors on pipe 2025-04-10T11:25:31.035101Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.052802Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.091214Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1029:3025], now have 1 active actors on pipe 2025-04-10T11:25:31.092950Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.110965Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.178791Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1032:3028], now have 1 active actors on pipe 2025-04-10T11:25:31.180628Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.199164Z node 40 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.239940Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1035:3031], now have 1 active actors on pipe 2025-04-10T11:25:31.241904Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.259026Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:31.300344Z node 40 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [40:1038:3034] connected; active server actors: 1 >> KqpYql::DdlDmlMix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 15698, MsgBus: 26000 2025-04-10T11:25:20.078692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644113405645961:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:20.080163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea2/r3tmp/tmpFSH33F/pdisk_1.dat 2025-04-10T11:25:20.439058Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15698, node 1 2025-04-10T11:25:20.448998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:20.449126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:20.450824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:20.501556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:20.501598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:20.501608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:20.501754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26000 TClient is connected to server localhost:26000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:21.270013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.287111Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:21.301481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.452253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.611982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.713207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:23.342577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644126290549572:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.342709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.734231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.772019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.844938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.885373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.926417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.004009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.054512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644130585517387:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.054590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.054817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644130585517392:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.058413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:24.070270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644130585517394:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:24.147991Z node 1 :TX_PROXY ERROR: Actor# [1:7491644130585517449:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:25.081265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644113405645961:2118];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.081373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:25.432236Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284325441, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11567, MsgBus: 25096 2025-04-10T11:25:26.288392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644135553416752:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:26.288443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ea2/r3tmp/tmpIP22BS/pdisk_1.dat 2025-04-10T11:25:26.398946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11567, node 2 2025-04-10T11:25:26.459817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:26.459954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:26.477173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:26.511103Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:26.511126Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:26.511134Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:26.511261Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25096 TClient is connected to server localhost:25096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:26.954991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:26.965833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:27.026341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.175148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.277800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.429112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644148438320399:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.429202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.473466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.504401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.532640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.566389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.592850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.627133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.715525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644148438320912:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.715620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.715808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644148438320917:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.719201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.729542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644148438320919:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:29.798822Z node 2 :TX_PROXY ERROR: Actor# [2:7491644148438320974:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:31.207406Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331237, txId: 281474976715671] shutting down 2025-04-10T11:25:31.288942Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644135553416752:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.289014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TPQTest::TestReadSubscription [GOOD] >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> TargetDiscoverer::InvalidCredentials >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.279788Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279863Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.302226Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.325302Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 
} ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-04-10T11:24:40.326408Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.328411Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.331669Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.333578Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.337067Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:187:2200] 2025-04-10T11:24:40.338931Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:187:2200] 2025-04-10T11:24:40.348344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:207:2213], now have 1 active actors on pipe 2025-04-10T11:24:40.348475Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-10T11:24:40.348532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-10T11:24:40.348796Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-10T11:24:40.348853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-10T11:24:40.348934Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-10T11:24:40.349024Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-10T11:24:40.349078Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-10T11:24:40.349441Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:209:2215], now have 1 active actors on pipe 2025-04-10T11:24:40.349535Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-10T11:24:40.349577Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-10T11:24:40.349707Z node 1 :PERSQUEUE INFO: new Cookie default|12435009-adba268f-66255712-f2f2e7e7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.349819Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-10T11:24:40.349937Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.350294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:211:2217], now have 1 active actors on pipe 2025-04-10T11:24:40.350410Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-10T11:24:40.350475Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-10T11:24:40.350530Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-10T11:24:40.350573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-04-10T11:24:40.350752Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-04-10T11:24:40.351697Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-04-10T11:24:40.351797Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-04-10T11:24:40.351868Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-04-10T11:24:40.352420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 331 2025-04-10T11:24:40.352601Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:40.352654Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:40.352694Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:24:40.352739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:40.352802Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid 2025-04-10T11:24:40.352844Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000002_00000| 2025-04-10T11:24:40.352867Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:40.352901Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:40.352974Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 
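The DumpKeyValueRequest block above shows the three keys a single PersQueue write turns into: a source-id record (m0000000000psourceid), the data blob (d0000000000_00000000000000000000_00000_0000000002_00000|), and a partition-info key (i0000000000). The data key looks like a fixed-width encoding of partition, offset, part number, message count, and internal-part count; decoded that way, it lines up with "partition 0 compactOffset 0,2" from the same record. A decoding sketch under that assumption (field names and widths are inferred from this one key, not taken from the PersQueue sources; the trailing '|' printed in the log is ignored):

#include <cstdio>
#include <cstdlib>
#include <string>

struct PqBlobKey {
    char type;                   // 'd' observed for data blobs; 'm'/'i' keys look metadata-like
    unsigned long partition;     // 10 digits
    unsigned long long offset;   // 20 digits
    unsigned long partNo;        // 5 digits
    unsigned long count;         // 10 digits
    unsigned long internalParts; // 5 digits
};

bool ParseDataKey(const std::string& key, PqBlobKey* out) {
    // Expected shape: d0000000000_00000000000000000000_00000_0000000002_00000
    if (key.size() < 55 || key[0] != 'd') return false;
    out->type          = key[0];
    out->partition     = std::strtoul(key.substr(1, 10).c_str(), nullptr, 10);
    out->offset        = std::strtoull(key.substr(12, 20).c_str(), nullptr, 10);
    out->partNo        = std::strtoul(key.substr(33, 5).c_str(), nullptr, 10);
    out->count         = std::strtoul(key.substr(39, 10).c_str(), nullptr, 10);
    out->internalParts = std::strtoul(key.substr(50, 5).c_str(), nullptr, 10);
    return true;
}

int main() {
    PqBlobKey k{};
    if (ParseDataKey("d0000000000_00000000000000000000_00000_0000000002_00000", &k)) {
        // Prints: partition=0 offset=0 partNo=0 count=2 internalParts=0,
        // matching "compactOffset 0,2" in the log record above.
        std::printf("partition=%lu offset=%llu partNo=%lu count=%lu internalParts=%lu\n",
                    k.partition, k.offset, k.partNo, k.count, k.internalParts);
    }
    return 0;
}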
2025-04-10T11:24:40.353062Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:40.353179Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 2 size 94 2025-04-10T11:24:40.355450Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 2 size 94 actorID [1:134:2160] 2025-04-10T11:24:40.355620Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:40.355685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:40.355766Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-10T11:24:40.355814Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-10T11:24:40.355868Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-10T11:24:40.356015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-10T11:24:40.356066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T11:24:40.356117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T11:24:40.356152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-10T11:24:40.356314Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 2 parts 0 size 94 2025-04-10T11:24:40.356491Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:40.356582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-04-10T11:24:40.356640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-10T11:24:40.357003Z node 1 :PERSQUEUE DEBUG: [PQ: 72 ... 
s: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "user1" Generation: 55 Important: true } 2025-04-10T11:25:29.156528Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [52:185:2198] 2025-04-10T11:25:29.160008Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [52:185:2198] 2025-04-10T11:25:29.163971Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [52:186:2199] 2025-04-10T11:25:29.166402Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [52:186:2199] 2025-04-10T11:25:29.169704Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [52:187:2200] 2025-04-10T11:25:29.172016Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [52:187:2200] 2025-04-10T11:25:29.174912Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [52:188:2201] 2025-04-10T11:25:29.177250Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [52:188:2201] 2025-04-10T11:25:29.180163Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [52:189:2202] 2025-04-10T11:25:29.182415Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [52:189:2202] 2025-04-10T11:25:29.195896Z node 52 :PERSQUEUE INFO: new Cookie default|cda70fc0-fd9ef88b-a7437ea6-7f6c1d88_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:30.172632Z node 52 :PERSQUEUE INFO: new Cookie default|1c0c2847-bc125a46-c97cd6e2-68baf48f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:30.188909Z node 52 :PERSQUEUE INFO: new Cookie default|f3576e59-ae43d8c9-671e132f-ef6b16dc_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:108:2057] recipient: [53:101:2135] 2025-04-10T11:25:30.866452Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:30.866551Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [53:149:2057] recipient: [53:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [53:149:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927938 is [53:153:2174] sender: [53:154:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:177:2057] recipient: [53:14:2061] 2025-04-10T11:25:30.889226Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:30.891249Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 56 actor [53:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 56 ReadRuleGenerations: 56 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 Important: false } Consumers { Name: "user1" Generation: 56 Important: true } 2025-04-10T11:25:30.892128Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:183:2196] 2025-04-10T11:25:30.894922Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [53:183:2196] 2025-04-10T11:25:30.897511Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:184:2197] 2025-04-10T11:25:30.899673Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [53:184:2197] 2025-04-10T11:25:30.902726Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [53:185:2198] 2025-04-10T11:25:30.904854Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [53:185:2198] 2025-04-10T11:25:30.907276Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [53:186:2199] 2025-04-10T11:25:30.909647Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [53:186:2199] 2025-04-10T11:25:30.912630Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [53:187:2200] 2025-04-10T11:25:30.915087Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [53:187:2200] 2025-04-10T11:25:30.936136Z node 53 :PERSQUEUE INFO: new Cookie default|116fef8a-e2358725-f34e3303-5e61b3a5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:31.911680Z node 53 :PERSQUEUE INFO: 
new Cookie default|d7f5b6c8-d5a08984-d9c8fbfb-dd0a5166_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:31.927894Z node 53 :PERSQUEUE INFO: new Cookie default|b7364bb-7a1c7531-ad0e3c37-6820a6d3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-04-10T11:25:32.597823Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:32.597915Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-04-10T11:25:32.619807Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:32.622002Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 57 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 57 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 57 ReadRuleGenerations: 57 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 57 Important: false } Consumers { Name: "user1" Generation: 57 Important: true } 2025-04-10T11:25:32.623182Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-04-10T11:25:32.626194Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-04-10T11:25:32.629463Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-04-10T11:25:32.631680Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-04-10T11:25:32.634709Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [54:187:2200] 2025-04-10T11:25:32.636755Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [54:187:2200] 
2025-04-10T11:25:32.639093Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [54:188:2201] 2025-04-10T11:25:32.641161Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [54:188:2201] 2025-04-10T11:25:32.643918Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [54:189:2202] 2025-04-10T11:25:32.646084Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [54:189:2202] 2025-04-10T11:25:32.659381Z node 54 :PERSQUEUE INFO: new Cookie default|5aeab6b7-6ffd3b61-c6103768-4a12b651_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:33.669848Z node 54 :PERSQUEUE INFO: new Cookie default|c25fd19b-8e7706ea-bd5ebc45-53822dd2_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:33.683583Z node 54 :PERSQUEUE INFO: new Cookie default|d06ca17d-708601c7-18583464-5a209b13_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> KqpYql::Closure [GOOD] >> KqpYql::EvaluateExprPgNull >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> KqpYql::UpdateBadType [GOOD] >> KqpQueryPerf::Delete-QueryService-UseSink >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpYql::ColumnTypeMismatch [GOOD] >> TPQTest::TestChangeConfig [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 13004, MsgBus: 24260 2025-04-10T11:25:24.338323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644127963517113:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:24.338476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9b/r3tmp/tmpBrwvbZ/pdisk_1.dat 2025-04-10T11:25:24.693372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:24.733739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:24.733894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:24.735447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13004, node 1 2025-04-10T11:25:24.781313Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:24.781339Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:24.781353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:24.781493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24260 TClient is connected to server localhost:24260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:25.470177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.499733Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:25.515030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.682335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:25.844667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:25.923677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.446829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644140848420781:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:27.446978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:27.727748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.760157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.793887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.826466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.862187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.942327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.011875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644145143388594:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.011963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.012192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644145143388599:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.015515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.026320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644145143388601:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:28.089085Z node 1 :TX_PROXY ERROR: Actor# [1:7491644145143388653:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19623, MsgBus: 7811 2025-04-10T11:25:29.722011Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644150993354106:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:29.722080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9b/r3tmp/tmpGuOTlg/pdisk_1.dat 2025-04-10T11:25:29.835991Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19623, node 2 2025-04-10T11:25:29.875161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:29.875238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:29.876928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:29.894682Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:29.894706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:29.894713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:29.894811Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7811 TClient is connected to server localhost:7811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:30.350646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.371761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
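A pattern that recurs throughout this run, and has just completed for node 1 above: the first query against a fresh database finds no default resource pool (the NOT_FOUND warnings), a TPoolCreatorActor creates it, retries once while the create transaction settles ("Transaction ... completed, doublechecking"), and any racing creator gets "path exist, request accepts it", which is treated as success rather than an error. A minimal sketch of that idempotent create-if-missing loop, with hypothetical names (this is not the YDB workload-manager source):

#include <iostream>

// ECreateStatus and TryCreateDefaultPool are hypothetical stand-ins, not YDB API.
enum class ECreateStatus { Ok, AlreadyExists, Retry };

ECreateStatus TryCreateDefaultPool(int attempt) {
    // First attempt: the creating transaction has just completed, so the actor
    // re-checks ("doublechecking"); second attempt: a concurrent creator won.
    return attempt == 0 ? ECreateStatus::Retry : ECreateStatus::AlreadyExists;
}

bool EnsureDefaultPool() {
    for (int attempt = 0; attempt < 5; ++attempt) {
        switch (TryCreateDefaultPool(attempt)) {
            case ECreateStatus::Ok:            return true;  // we created the pool
            case ECreateStatus::AlreadyExists: return true;  // "path exist, request accepts it"
            case ECreateStatus::Retry:
                std::cerr << "Scheduled retry, attempt " << attempt << "\n";
                break;
        }
    }
    return false;
}

int main() {
    std::cout << (EnsureDefaultPool() ? "default pool ready\n" : "gave up\n");
    return 0;
}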
2025-04-10T11:25:30.447882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:30.607007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.681028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.668909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644163878257755:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:32.669022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:32.747256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:32.827583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:32.869223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:32.907155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:32.943204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:32.986637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.041705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644168173225566:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.041813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.042093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644168173225571:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.045953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:33.057148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644168173225573:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:33.145648Z node 2 :TX_PROXY ERROR: Actor# [2:7491644168173225626:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> KqpQueryPerf::UpdateOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 10954, MsgBus: 15758 2025-04-10T11:25:29.826339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644148508606316:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:29.826595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dd7/r3tmp/tmpE8LvdH/pdisk_1.dat 2025-04-10T11:25:30.176857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10954, node 1 2025-04-10T11:25:30.212845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:30.212954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:30.219659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:30.262757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:30.262784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:30.262793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:30.262942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15758 TClient is connected to server localhost:15758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
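The HIVE warnings in the startup block above ("VolatileState: Unknown -> Disconnected -> Connecting -> Connected") appear for every test server in this run; they trace a node's registration lifecycle at the Hive tablet. A toy model of that transition order, using the state names from the log (the enum and checker are illustrative, not Hive's implementation):

#include <cassert>
#include <initializer_list>
#include <iostream>

enum class VolatileState { Unknown, Disconnected, Connecting, Connected };

// Allow only the forward steps the log shows.
bool CanTransition(VolatileState from, VolatileState to) {
    switch (from) {
        case VolatileState::Unknown:      return to == VolatileState::Disconnected;
        case VolatileState::Disconnected: return to == VolatileState::Connecting;
        case VolatileState::Connecting:   return to == VolatileState::Connected;
        case VolatileState::Connected:    return false; // terminal for this sketch
    }
    return false;
}

int main() {
    VolatileState s = VolatileState::Unknown;
    for (VolatileState next : {VolatileState::Disconnected,
                               VolatileState::Connecting,
                               VolatileState::Connected}) {
        assert(CanTransition(s, next));
        s = next;
    }
    std::cout << "node registered\n";
    return 0;
}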
2025-04-10T11:25:30.820075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.859292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.024486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:31.193166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.270951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.003278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644161393509848:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.003373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.355241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.397066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.424878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.459784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.493746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.537467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.619187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644165688477661:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.619256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.619421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644165688477666:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.623353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:33.638662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644165688477668:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:33.732357Z node 1 :TX_PROXY ERROR: Actor# [1:7491644165688477724:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
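The type errors printed just below are the point of KqpYql::UpdateBadType: KiUpdateTable rejects an UPDATE whose value type (Struct<'Amount':String?>) cannot convert to the table's row type (Struct<'Amount':Uint64?>), so the statement fails at type annotation before execution. The query text itself is not in the log; an illustrative statement of the same shape, assuming /Root/Test has an Amount Uint64? column (other names are guesses):

#include <string>

// Hypothetical reconstruction; the test's real query is not in the log.
const std::string kBadUpdate = R"sql(
    UPDATE `/Root/Test`
    SET Amount = Just("not-a-number")  -- String? value for a Uint64? column
    WHERE Group = 1u;
)sql";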
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] 2025-04-10T11:25:34.821536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644148508606316:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:34.822686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 2906, MsgBus: 61589 2025-04-10T11:25:25.068169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644131146738348:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.068220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e97/r3tmp/tmpgwtdMJ/pdisk_1.dat 2025-04-10T11:25:25.542750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:25.543012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:25.543105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:25.546569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2906, node 1 2025-04-10T11:25:25.652378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:25.652401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:25.652411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:25.652575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61589 TClient is connected to server localhost:61589 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.273929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.293219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.453041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:26.633955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:26.715881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.288614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644144031642004:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.288734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.576141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.607734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.635678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.668561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.735043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.797134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.877891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644144031642522:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.877983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.878165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644144031642527:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.881819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.892684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644144031642529:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:28.973632Z node 1 :TX_PROXY ERROR: Actor# [1:7491644144031642585:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. 2025-04-10T11:25:30.070243Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644131146738348:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.070328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26286, MsgBus: 4487 2025-04-10T11:25:30.762481Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644155822032310:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.762637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e97/r3tmp/tmpcfdD8L/pdisk_1.dat 2025-04-10T11:25:30.900693Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26286, node 2 2025-04-10T11:25:30.933984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:30.934075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:30.937198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:30.978935Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:30.978958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:30.978965Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:30.979083Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4487 TClient is connected to server localhost:4487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:31.438014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.455581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
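The KqpYql::ColumnTypeMismatch output around this point reports two compile-time rejections: 'Duplicate column: Value' from KiCreateTable just above, and (further down in this test's output) a failure to convert Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>. A minimal YQL sketch of statements that would produce these diagnostics; the table names and literals are illustrative assumptions, not taken from the test source:

    -- Rejected at type annotation (code 1030) with 'Duplicate column: Value':
    CREATE TABLE `/Root/DupColumn` (
        Key Uint64,
        Value String,
        Value String,  -- the same column name declared twice
        PRIMARY KEY (Key)
    );

    -- Rejected with 'Failed to convert input columns types to scheme types'
    -- (code 2031): 2ul is a Uint64 literal, but the Value column stores String?,
    -- so the input row Struct<'Key':Uint64,'Value':Uint64> cannot be converted.
    UPSERT INTO `/Root/Test` (Key, Value) VALUES (1ul, 2ul);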
2025-04-10T11:25:31.527763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.658665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:31.735429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.959287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644168706935966:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.959365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.999318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.031566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.076772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.146002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.228621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.275650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.327634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173001903778:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.327736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.327814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173001903783:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.331143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:34.344317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644173001903785:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:34.402624Z node 2 :TX_PROXY ERROR: Actor# [2:7491644173001903838:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:35.258281Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491644177296871433:2501], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:25:35.260016Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUxY2QyNjMtYmU5ZmFiZDQtMmVjZjZiNWMtYmRiNzJhYjQ=, ActorId: [2:7491644177296871425:2496], ActorState: ExecuteState, TraceId: 01jrfq5f46e5qs0bs9jqb6gcvq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031
>> KqpYql::SelectNoAsciiValue [GOOD]
>> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD]
Test command err:
2025-04-10T11:24:40.308345Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T11:24:40.308437Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-04-10T11:24:40.327932Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T11:24:40.345174Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true }
2025-04-10T11:24:40.346077Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199]
2025-04-10T11:24:40.347038Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199]
Run 1 CmdWrite
2025-04-10T11:24:40.352680Z node 1 :PERSQUEUE INFO: new Cookie default|cdaa460b-b4da0d9e-cf8e1b24-fbbdab2_0 generated for partition 0 topic 'topic' owner default
Captured kesus quota request event from [1:207:2216]
Captured kesus quota request event from [1:208:2217]
CmdRead
Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192]
Captured kesus quota request event from [1:207:2216]
Currently have 3 quoter requests
Run 2 CmdWrite
2025-04-10T11:24:41.375296Z node 1 :PERSQUEUE INFO: new Cookie default|3a3e79d6-f183dd2d-7208104d-db3090d8_1 generated for partition 0 topic 'topic' owner default
CmdRead
Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192]
Captured kesus quota request event from [1:207:2216]
Currently have 4 quoter requests
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135]
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135]
2025-04-10T11:24:44.171250Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-10T11:24:44.171322Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't
have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T11:24:44.189626Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.190738Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:44.191382Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T11:24:44.193907Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T11:24:44.195783Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-04-10T11:24:44.197579Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-04-10T11:24:44.203324Z node 2 :PERSQUEUE INFO: new Cookie default|bb8b9fc9-43f7f5f2-e206bc1a-1073353_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:44.209985Z node 2 :PERSQUEUE INFO: new Cookie default|f922bd29-7c79fd0e-79f26dfb-29bb632_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:44.215423Z node 2 :PERSQUEUE INFO: new Cookie default|740ab724-6c9cb7e0-5ec6be94-9453b983_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:44.614695Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.614771Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-10T11:24:44.632726Z node 3 :PERSQUEUE NOTICE: [PQ: 
72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.633588Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:24:44.634231Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-10T11:24:44.636791Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-10T11:24:44.638578Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-10T11:24:44.640444Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:204:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:207:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:208:2057] recipient: [3:206:2212] Leader for TabletID 72057594037927937 is [3:209:2213] sender: [3:210:2057] recipient: [3:206:2212] 2025-04-10T11:24:44.675346Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.675407Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:44.676106Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:262:2258] 2025-04-10T11:24:44.678493Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:263:2259] 2025-04-10T11:24:44.685016Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:44.685082Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:262:2258] 2025-04-10T11:24:44.686718Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:44.686772Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:263:2259] !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! 
new actor is[3:209:2213] Leader for TabletID 72057594037927937 is [3:209:2213] sender: [3:307:2057] recipient: [3:14:2061] 2025-04-10T11:24:45.916071Z node 3 :PERSQUEUE INFO: new Cookie default|9fba0ec9-7a64ae24-ed0eef22-eb0a6f33_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:45.924928Z node 3 :PERSQUEUE INFO: new Cookie default|66aa66a7-4dae98bb-5e13845c-fd0b74dc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:45.942211Z node 3 :PERSQUEUE INFO: new Cookie default|f2abac9c-efc8c55f-79df8516-80c9286e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-10T11:24:46.430000Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:46.430091Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-10T11:24:46.451153Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927 ... s.size=0 2025-04-10T11:25:36.491284Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:36.491370Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:36.491497Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:36.491874Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:36.492202Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [36:278:2272] 2025-04-10T11:25:36.493238Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:25:36.494613Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:25:36.494942Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:25:36.495726Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:25:36.496515Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataRangeStep] Got data offset 0 count 2 size 1049870 so 0 eo 2 d0000000000_00000000000000000000_00000_0000000002_00002| 2025-04-10T11:25:36.496697Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:36.503750Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataStep] read res partition offset 0 endOffset 2 key 0,2 valuesize 1049870 expected 1049870 2025-04-10T11:25:36.506187Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:36.506276Z node 36 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:25:36.506322Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-10T11:25:36.506402Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [36:278:2272] 2025-04-10T11:25:36.506497Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 2 Head Offset 0 PartNo 0 PackedSize 1049870 count 2 nextOffset 2 batches 4 SYNC INIT sourceId sourceid1 seqNo 2 offset 1 SYNC INIT HEAD KEY: d0000000000_00000000000000000000_00000_0000000002_00002| size 1049870 2025-04-10T11:25:36.506574Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:36.506736Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:36.506793Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:36.506850Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000000_00000_0000000001_00000|, d0000000000_00000000000000000000_00000_0000000001_00000|] 2025-04-10T11:25:36.506925Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:36.506977Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:36.507053Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:36.507120Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:25:36.507227Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Init complete for topic 'topic' Partition: 0 SourceId: sourceid1 SeqNo: 2 offset: 1 MaxOffset: 2 2025-04-10T11:25:36.507302Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 39 2025-04-10T11:25:36.507366Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 39 2025-04-10T11:25:36.507673Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:25:36.507733Z node 36 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00000|(+) to d0000000000_00000000000000000000_00000_0000000001_00000|(+) 2025-04-10T11:25:36.508233Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-04-10T11:25:36.508309Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-10T11:25:36.509394Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-10T11:25:36.509459Z node 36 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T11:25:36.509624Z node 36 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2025-04-10T11:25:36.515550Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 >>> write #3 2025-04-10T11:25:36.519663Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:303:2290], now have 1 active actors on pipe 2025-04-10T11:25:36.519804Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:25:36.519865Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:25:36.519941Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 1 size 1024 offset: 2 2025-04-10T11:25:36.520026Z node 36 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-10T11:25:36.520141Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-10T11:25:36.520195Z node 36 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-04-10T11:25:36.520541Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:306:2292], now have 1 active actors on pipe 2025-04-10T11:25:36.520641Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:25:36.520695Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:25:36.520811Z node 36 :PERSQUEUE INFO: new Cookie default|5c579ab7-aa5e7379-5aece0c-5cf3004c_0 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:36.520947Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T11:25:36.521045Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:25:36.521423Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:308:2294], now have 1 active actors on pipe 2025-04-10T11:25:36.521507Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-10T11:25:36.521541Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-10T11:25:36.521586Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 0 size 1024 offset: 2 2025-04-10T11:25:36.521687Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 1033. Cookie: 1 2025-04-10T11:25:36.521791Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 1 2025-04-10T11:25:36.521972Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 3 partNo 0 2025-04-10T11:25:36.524088Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 1096 count 1 nextOffset 3 batches 1 2025-04-10T11:25:36.524882Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 1084 WTime 253 2025-04-10T11:25:36.525089Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:25:36.525150Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:25:36.525200Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-04-10T11:25:36.525252Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:25:36.525316Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-04-10T11:25:36.525355Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-04-10T11:25:36.525385Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:25:36.525423Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:25:36.525474Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:25:36.525567Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:25:36.525683Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 1084 2025-04-10T11:25:36.530195Z node 36 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 1084 actorID [36:269:2265] 2025-04-10T11:25:36.530381Z node 36 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 1 parts 0 size 1084 2025-04-10T11:25:36.530480Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1033 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:25:36.530564Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0
2025-04-10T11:25:36.530644Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk
2025-04-10T11:25:36.530918Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
>>> write #1
2025-04-10T11:25:36.531425Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:315:2300], now have 1 active actors on pipe
>> KqpYql::PgIntPrimaryKey [GOOD]
>> PQCountersSimple::PartitionFirstClass [GOOD]
>> PQCountersSimple::SupportivePartitionCountersPersist
>> KqpYql::TableUseBeforeCreate [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD]
Test command err:
Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC }
>> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall
>> KqpQueryPerf::Upsert+QueryService+UseSink
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk
>> TargetDiscoverer::InvalidCredentials [GOOD]
>> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD]
>> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD]
Test command err:
Trying to start YDB, gRPC: 24985, MsgBus: 15206
2025-04-10T11:25:26.003841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644135851858663:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:26.004201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e87/r3tmp/tmpFDPom6/pdisk_1.dat
2025-04-10T11:25:26.418559Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:26.421680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:26.421929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:26.424307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24985, node 1
2025-04-10T11:25:26.562742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:26.562763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:26.562776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:26.562902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15206
TClient is connected to server localhost:15206
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:27.173377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.191057Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:27.201691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.349934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.514817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.586203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.209740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148736762325:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.209892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.512669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.539553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.565635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.590378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.620909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.689551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.744973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148736762842:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.745048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.745250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644148736762847:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.748647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.758278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644148736762849:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:29.849186Z node 1 :TX_PROXY ERROR: Actor# [1:7491644148736762902:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
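A note on the recurring WorkloadService warnings in this run: queries are admitted through a resource pool, and on a freshly created database the default pool does not exist until TPoolCreatorActor creates it, so earlier fetches report NOT_FOUND; the TX_PROXY 'path exist, request accepts it' message above is the benign race when the pool appears between the retry and the re-check. As a hedged sketch only (assuming a YDB build with workload-manager DDL; the pool name and limits are invented), a pool can also be declared explicitly:

    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run at once
        QUEUE_SIZE = 100              -- queries allowed to wait for admission
    );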
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 2025-04-10T11:25:31.006421Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644135851858663:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.006506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23286, MsgBus: 9634 2025-04-10T11:25:31.643305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644156585025593:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.643428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e87/r3tmp/tmp7muYBD/pdisk_1.dat 2025-04-10T11:25:31.762595Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23286, node 2 2025-04-10T11:25:31.778057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:31.778163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:31.783018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:31.851946Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:31.851980Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:31.851990Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:31.852099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9634 TClient is connected to server localhost:9634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:32.271549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.292493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
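The ScriptUdf diagnostic above ('Module not loaded for script type: Python3') is what type annotation emits when a query embeds a Python UDF but the server binary carries no Python runtime. A hedged sketch of the query shape involved, with an assumed function name and body rather than the test's actual source:

    $script = @@
    def multiply(value):
        return value * 2
    @@;
    -- Declares a script UDF: Python3 module, callable signature, source text.
    $udf = Python3::multiply(Callable<(Int64)->Int64>, $script);
    SELECT $udf(21);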
2025-04-10T11:25:32.370756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.543177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.624469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.932206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644169469929267:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.932306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.981021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.009510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.041230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.077939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.112416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.157899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.219284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173764897073:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.219385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.219566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173764897078:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.224187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:35.239325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644173764897080:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:25:35.309261Z node 2 :TX_PROXY ERROR: Actor# [2:7491644173764897133:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:36.204563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-10T11:25:36.524982Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336564, txId: 281474976715673] shutting down
2025-04-10T11:25:36.645590Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644156585025593:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:36.645679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD]
>> KqpYql::EvaluateExpr1 [GOOD]
>> KqpYql::Discard
>> KqpYql::FromBytes [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
Test command err:
Trying to start YDB, gRPC: 1774, MsgBus: 28192
2025-04-10T11:25:25.579681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644133317190083:2204];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:25.579804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8c/r3tmp/tmplHU7DJ/pdisk_1.dat
2025-04-10T11:25:26.009045Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:26.015321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:26.015434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:26.018960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1774, node 1
2025-04-10T11:25:26.146825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:26.146852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:26.146858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:26.146967Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28192
TClient is connected to server localhost:28192
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.813723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.834112Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:26.839912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.987706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.150374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.219473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.795542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644146202093586:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.795675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.133803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.164774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.193052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.232452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.263781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.296448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.350520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150497061392:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.350608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.350816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150497061397:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.354670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.366915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644150497061399:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:29.437959Z node 1 :TX_PROXY ERROR: Actor# [1:7491644150497061453:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:30.433431Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644154792029051:2501], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-04-10T11:25:30.433705Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjkxY2YwZjEtN2JlZGFhMjgtOTRkNTc2Yy00NTM0Y2ViYQ==, ActorId: [1:7491644154792029043:2496], ActorState: ExecuteState, TraceId: 01jrfq5ac11q8xxrk8wgdp9d34, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 9189, MsgBus: 3529 2025-04-10T11:25:31.212627Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644160840444074:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.212678Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e8c/r3tmp/tmpW1KkVc/pdisk_1.dat 2025-04-10T11:25:31.325616Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9189, node 2 2025-04-10T11:25:31.360687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:31.360761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:31.364376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:31.385200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:31.385216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:31.385221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:31.385338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3529 TClient is connected to server localhost:3529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:31.765119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:31.776424Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:31.783272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.853616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.985782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.051518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.314068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173725347720:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.314135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.358909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.390890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.426377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.467958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.497410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.564326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.613050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173725348235:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.613154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.613484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644173725348240:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:34.617517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:34.628266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644173725348242:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:34.716774Z node 2 :TX_PROXY ERROR: Actor# [2:7491644173725348295:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:35.855183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.213122Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644160840444074:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:36.213208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:36.697761Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336725, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 27785, MsgBus: 14194 2025-04-10T11:25:32.250269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644164150334416:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.250520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000db4/r3tmp/tmpNGw9JE/pdisk_1.dat 2025-04-10T11:25:32.614639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:32.619846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:32.619956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:32.623541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27785, node 1 2025-04-10T11:25:32.743386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:32.743403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:32.743408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:32.743511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14194 TClient is connected to server localhost:14194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:33.273973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.293634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:33.308349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.452336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.628997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.706541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.417577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644177035238082:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.417688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.782315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.818728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.851291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.887302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.921555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.954962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.037486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644181330205892:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.037597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.037701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644181330205897:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.041723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.051144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644181330205899:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:36.114811Z node 1 :TX_PROXY ERROR: Actor# [1:7491644181330205952:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:37.250254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644164150334416:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:37.250352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 23982, MsgBus: 18290 2025-04-10T11:25:26.878102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644139122764801:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:26.881996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e7e/r3tmp/tmphJvTPs/pdisk_1.dat 2025-04-10T11:25:27.229682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23982, node 1 2025-04-10T11:25:27.292570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:27.292680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:27.295872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:27.355810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:27.355835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:27.355842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:27.355958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18290 TClient is connected to server localhost:18290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:27.879855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:27.909490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.920465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.133294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.311006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.389324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.010192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644156302635613:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.010307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.308229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.345649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.377627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.451637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.487781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.525116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.572880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644156302636126:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.572955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.573014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644156302636131:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.576841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:30.588712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644156302636133:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:30.684027Z node 1 :TX_PROXY ERROR: Actor# [1:7491644156302636187:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 1453, MsgBus: 28700 2025-04-10T11:25:32.450426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644163386432200:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.450593Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e7e/r3tmp/tmpsUpzBs/pdisk_1.dat 2025-04-10T11:25:32.580196Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:32.597482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:32.597558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:32.599490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1453, node 2 2025-04-10T11:25:32.640998Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:32.641022Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:32.641028Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:32.641158Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28700 TClient is connected to server localhost:28700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:33.110321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.618791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644176271334749:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.630551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.644934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.724398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644176271334852:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.724509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.724673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644176271334857:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.728543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:25:35.739069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644176271334859:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:25:35.818100Z node 2 :TX_PROXY ERROR: Actor# [2:7491644176271334910:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-04-10T11:25:34.658863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644171731440817:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:34.658910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8b/r3tmp/tmpg9N5K1/pdisk_1.dat 2025-04-10T11:25:35.098751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:35.158491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.159125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.165614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26173 TServer::EnableGrpc on GrpcPort 13366, node 1 2025-04-10T11:25:35.535896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:35.535920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:35.535934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:35.536074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:36.140209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:36.168678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.470025Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-04-10T11:25:36.470083Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> YdbOlapStore::LogPagingAfter [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 24441, MsgBus: 15319 2025-04-10T11:25:14.190754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644084141144861:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:14.190864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000edc/r3tmp/tmpvCktZc/pdisk_1.dat 2025-04-10T11:25:14.580441Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24441, node 1 2025-04-10T11:25:14.625215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:14.625305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:14.628052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:14.674500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:14.674518Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:14.674526Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:14.674646Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15319 TClient is connected to server localhost:15319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:15.197651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:15.224192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:15.361410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:15.495248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:15.563344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:17.201806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644097026048532:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:17.201925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:17.493331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.526300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.566234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.596708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.638784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.717513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:17.775918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644097026049048:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:17.775992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:17.776083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644097026049053:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:17.779738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:17.795178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644097026049055:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:17.870877Z node 1 :TX_PROXY ERROR: Actor# [1:7491644097026049108:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:19.123882Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319050, txId: 281474976710672] shutting down 2025-04-10T11:25:19.124535Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319050, txId: 281474976710671] shutting down 2025-04-10T11:25:19.127089Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319050, txId: 281474976710673] shutting down 2025-04-10T11:25:19.191774Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644084141144861:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:19.201616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:19.274987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319176, txId: 281474976710678] shutting down 2025-04-10T11:25:19.276227Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319176, txId: 281474976710677] shutting down 2025-04-10T11:25:19.429222Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319372, txId: 281474976710682] shutting down 2025-04-10T11:25:19.430928Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319372, txId: 281474976710681] shutting down 2025-04-10T11:25:19.464946Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319477, txId: 281474976710685] shutting down 2025-04-10T11:25:19.465677Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319477, txId: 281474976710686] shutting down 2025-04-10T11:25:19.624052Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319638, txId: 281474976710690] shutting down 2025-04-10T11:25:19.624729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319638, txId: 281474976710689] shutting down 2025-04-10T11:25:19.826929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319841, txId: 281474976710694] shutting down 2025-04-10T11:25:19.828541Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319841, txId: 281474976710693] shutting down 2025-04-10T11:25:19.996197Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319974, txId: 281474976710698] shutting down 2025-04-10T11:25:19.999626Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284319974, txId: 281474976710697] shutting down 2025-04-10T11:25:20.128290Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320149, txId: 281474976710702] shutting down 2025-04-10T11:25:20.128620Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320149, txId: 281474976710701] shutting down 2025-04-10T11:25:20.311615Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320331, txId: 281474976710705] shutting down 2025-04-10T11:25:20.312455Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320331, txId: 281474976710706] shutting down 2025-04-10T11:25:20.467418Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320492, txId: 281474976710710] shutting down 2025-04-10T11:25:20.471107Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320492, txId: 281474976710709] shutting down 2025-04-10T11:25:20.725504Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284320730, txId: 281474976710713] shutting down 2025-04-10T11:25:20.729619Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our sna ... 11:25:28.965141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:28.974065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644146015376719:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:29.069249Z node 2 :TX_PROXY ERROR: Actor# [2:7491644150310344068:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:30.303497Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644133130472582:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.305018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:30.340549Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330292, txId: 281474976715671] shutting down 2025-04-10T11:25:30.350358Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330292, txId: 281474976715672] shutting down 2025-04-10T11:25:30.352249Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330292, txId: 281474976715673] shutting down 2025-04-10T11:25:30.554960Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330502, txId: 281474976715677] shutting down 2025-04-10T11:25:30.556256Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330502, txId: 281474976715678] shutting down 2025-04-10T11:25:30.767157Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330705, txId: 281474976715681] shutting down 2025-04-10T11:25:30.767838Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330705, txId: 281474976715683] shutting down 2025-04-10T11:25:30.768514Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330705, txId: 281474976715682] shutting down 2025-04-10T11:25:30.981896Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330880, txId: 281474976715687] shutting down 2025-04-10T11:25:30.996487Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330880, txId: 281474976715688] shutting down 2025-04-10T11:25:31.126467Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331020, txId: 281474976715691] shutting down 2025-04-10T11:25:31.131435Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331034, txId: 281474976715692] shutting down 2025-04-10T11:25:31.169906Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331181, txId: 281474976715696] shutting down 2025-04-10T11:25:31.171018Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331174, txId: 281474976715695] shutting down 2025-04-10T11:25:31.348875Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331363, txId: 281474976715700] shutting down 2025-04-10T11:25:31.349291Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331363, txId: 281474976715699] shutting down 2025-04-10T11:25:31.510494Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331496, txId: 281474976715703] shutting down 2025-04-10T11:25:31.511245Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331496, txId: 281474976715704] shutting down 2025-04-10T11:25:31.659133Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331678, txId: 281474976715708] shutting down 2025-04-10T11:25:31.660143Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331678, txId: 281474976715707] shutting down 2025-04-10T11:25:31.850499Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331874, txId: 281474976715711] shutting down 2025-04-10T11:25:31.979631Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331993, txId: 281474976715713] shutting down 2025-04-10T11:25:31.980426Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331993, txId: 281474976715714] shutting down 2025-04-10T11:25:32.163559Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332189, txId: 281474976715718] shutting down 2025-04-10T11:25:32.163876Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332189, txId: 281474976715717] shutting down 2025-04-10T11:25:32.368960Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332399, txId: 281474976715721] shutting down 2025-04-10T11:25:32.551490Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332574, txId: 281474976715723] shutting down 2025-04-10T11:25:32.589538Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332574, txId: 281474976715724] shutting down 2025-04-10T11:25:32.799627Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332819, txId: 281474976715728] shutting down 2025-04-10T11:25:32.802100Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332819, txId: 281474976715727] shutting down 2025-04-10T11:25:33.026350Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333036, txId: 281474976715731] shutting down 2025-04-10T11:25:33.157455Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333176, txId: 281474976715733] shutting down 2025-04-10T11:25:33.303914Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333316, txId: 281474976715735] shutting down 2025-04-10T11:25:33.420897Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333449, txId: 281474976715737] shutting down 2025-04-10T11:25:33.548180Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333575, txId: 281474976715739] shutting down 2025-04-10T11:25:33.736926Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333764, txId: 281474976715741] shutting down 2025-04-10T11:25:33.737521Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333764, txId: 281474976715742] shutting down 2025-04-10T11:25:33.944898Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333967, txId: 281474976715745] shutting down 2025-04-10T11:25:34.101732Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334128, txId: 281474976715747] shutting down 2025-04-10T11:25:34.213765Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjg0NWNkMmItN2M0ZWViOTktODMzMTdjOGEtZTIwZmQ2MmQ=, ActorId: [2:7491644171785184757:3198], ActorState: ExecuteState, TraceId: 01jrfq5dzb1e5yg0ts3chykrpa, Create QueryResponse for error on request, msg: 2025-04-10T11:25:34.272457Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334296, txId: 281474976715749] shutting down 2025-04-10T11:25:34.427974Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334450, txId: 281474976715751] shutting down 2025-04-10T11:25:34.648177Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334611, txId: 281474976715753] shutting down 2025-04-10T11:25:34.765412Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334793, txId: 281474976715755] shutting down 2025-04-10T11:25:34.907654Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334926, txId: 281474976715757] shutting down 2025-04-10T11:25:35.088709Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335115, txId: 281474976715759] shutting down 2025-04-10T11:25:35.289429Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335311, txId: 281474976715761] shutting down 2025-04-10T11:25:35.451226Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335479, txId: 281474976715763] shutting down 2025-04-10T11:25:35.609930Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335640, txId: 281474976715765] shutting down 2025-04-10T11:25:35.814438Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335836, txId: 281474976715767] shutting down 2025-04-10T11:25:35.991748Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336018, txId: 281474976715769] shutting down 2025-04-10T11:25:36.221231Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336242, txId: 281474976715771] shutting down 2025-04-10T11:25:36.337462Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336368, txId: 281474976715773] shutting down 2025-04-10T11:25:36.587244Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336613, txId: 281474976715775] shutting down 2025-04-10T11:25:36.733142Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744284336767, txId: 281474976715777] shutting down 2025-04-10T11:25:36.958432Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336963, txId: 281474976715779] shutting down 2025-04-10T11:25:37.152731Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284337187, txId: 281474976715781] shutting down 2025-04-10T11:25:37.396534Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284337390, txId: 281474976715783] shutting down 2025-04-10T11:25:37.612594Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284337642, txId: 281474976715785] shutting down 2025-04-10T11:25:37.746220Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284337782, txId: 281474976715787] shutting down
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD]
Test command err:
Trying to start YDB, gRPC: 20355, MsgBus: 26520 2025-04-10T11:25:28.233929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644144366532106:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:28.234055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e07/r3tmp/tmpVB7tTN/pdisk_1.dat 2025-04-10T11:25:28.571744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20355, node 1 2025-04-10T11:25:28.643306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:28.645987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:28.651437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:28.710427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:28.710450Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:28.710463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:28.710596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26520 TClient is connected to server localhost:26520 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:29.264641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.296455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:29.414752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.569707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.636782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.219466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644157251435764:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.219649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.560011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.588131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.616755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.655448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.691597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.736503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.795767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644157251436276:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.795853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.796036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644157251436281:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.800060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:31.815165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644157251436283:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:31.875419Z node 1 :TX_PROXY ERROR: Actor# [1:7491644157251436336:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24839, MsgBus: 2206 2025-04-10T11:25:33.584894Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644167027961655:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:33.584953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e07/r3tmp/tmpH0dYQV/pdisk_1.dat 2025-04-10T11:25:33.689355Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:33.715667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:33.715739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24839, node 2 2025-04-10T11:25:33.717294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:33.781779Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:33.781801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:33.781808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:33.781900Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2206 TClient is connected to server localhost:2206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:34.184937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.205474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:34.286879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.469663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.545291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.582270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644179912865300:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.582347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.627010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.660902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.695524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.760463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.795779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.834917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.917316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644179912865820:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.917411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.917620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644179912865825:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.921093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.934603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644179912865827:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:37.021784Z node 2 :TX_PROXY ERROR: Actor# [2:7491644184207833179:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpScripting::ScriptStats [GOOD]
>> KqpYql::JsonCast [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD]
Test command err:
Trying to start YDB, gRPC: 64712, MsgBus: 15850 2025-04-10T11:25:20.713599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644109910577627:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:20.713661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e9e/r3tmp/tmpFZAWcw/pdisk_1.dat 2025-04-10T11:25:21.146566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:21.190016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:21.190126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:21.191614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64712, node 1 2025-04-10T11:25:21.248983Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:21.249013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:21.249037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:21.249164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15850 TClient is connected to server localhost:15850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:21.736340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:21.759789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:21.778938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:21.932215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.093172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:22.177727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:23.609566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644122795481289:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.609654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:23.898776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.932346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.968836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:23.997894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.034107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.110629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:24.188258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644127090449105:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.188332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.188411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644127090449110:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:24.192214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:24.202452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644127090449112:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:24.273183Z node 1 :TX_PROXY ERROR: Actor# [1:7491644127090449165:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:25.713903Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644109910577627:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.713978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:26.145632Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7491644131385416755:2495] 2025-04-10T11:25:26.146378Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491644131385416825:2501] TxId: 281474976710672. Ctx: { TraceId: 01jrfq558q0t4mhy1s43gm8q7z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T11:25:26.146698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==, ActorId: [1:7491644131385416771:2501], ActorState: ExecuteState, TraceId: 01jrfq558q0t4mhy1s43gm8q7z, Create QueryResponse for error on request, msg: 2025-04-10T11:25:26.177545Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416829:2505], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq558q0t4mhy1s43gm8q7z. SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.177545Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416832:2508], TxId: 281474976710672, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. TraceId : 01jrfq558q0t4mhy1s43gm8q7z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.178103Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416830:2506], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jrfq558q0t4mhy1s43gm8q7z. SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.178566Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416833:2509], TxId: 281474976710672, task: 5. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. TraceId : 01jrfq558q0t4mhy1s43gm8q7z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.178743Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416831:2507], TxId: 281474976710672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. TraceId : 01jrfq558q0t4mhy1s43gm8q7z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.178766Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644131385416834:2510], TxId: 281474976710672, task: 6. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDRmN2RmZWItMmE3OThlNC1kNmI3MmYwOS1jNzg1NGIwOQ==. TraceId : 01jrfq558q0t4mhy1s43gm8q7z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491644131385416825:2501], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:25:26.178895Z node 1 :KQP_COMPUTE ER ... hed: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:28.673070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.693503Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:28.700967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.798294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.961649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.050432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.233824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644160428022496:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.233912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.281147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.311411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.347900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.383058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.415925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.476638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.536503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644160428023008:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.536590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.536844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644160428023013:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.540995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:31.552273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644160428023015:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:31.605950Z node 2 :TX_PROXY ERROR: Actor# [2:7491644160428023068:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:32.457460Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990654:2495] 2025-04-10T11:25:32.479564Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990660:2498] 2025-04-10T11:25:32.481953Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990668:2502] 2025-04-10T11:25:32.494894Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990680:2506] 2025-04-10T11:25:32.519159Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990687:2509] 2025-04-10T11:25:32.548054Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990707:2516] 2025-04-10T11:25:32.574220Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990717:2520] 2025-04-10T11:25:32.602572Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990740:2526] 2025-04-10T11:25:32.635464Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990755:2531] 2025-04-10T11:25:32.698720Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990768:2536] 2025-04-10T11:25:32.710517Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990780:2541] 2025-04-10T11:25:32.750266Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990799:2548] 2025-04-10T11:25:32.794092Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990810:2552] 2025-04-10T11:25:32.842118Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990837:2558] 2025-04-10T11:25:32.890045Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990853:2564] 2025-04-10T11:25:32.943755Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990869:2570] 2025-04-10T11:25:32.998129Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644143248151540:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.998207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:32.999014Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644164722990902:2578] 2025-04-10T11:25:33.056131Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958218:2585] 2025-04-10T11:25:33.116556Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958228:2589] 2025-04-10T11:25:33.182311Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958256:2595] 2025-04-10T11:25:33.248761Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958277:2604] 2025-04-10T11:25:33.323842Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958298:2611] 2025-04-10T11:25:33.398752Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958348:2617] 2025-04-10T11:25:33.479207Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958390:2626] 2025-04-10T11:25:33.560408Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958416:2632] 2025-04-10T11:25:33.649550Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: 
[2:7491644169017958437:2641] 2025-04-10T11:25:33.738344Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958451:2647] 2025-04-10T11:25:33.824813Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958476:2659] 2025-04-10T11:25:33.914893Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644169017958492:2665] 2025-04-10T11:25:34.112594Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925838:2682] 2025-04-10T11:25:34.214355Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925860:2691] 2025-04-10T11:25:34.322246Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925884:2697] 2025-04-10T11:25:34.427050Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925907:2707] 2025-04-10T11:25:34.536952Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925928:2716] 2025-04-10T11:25:34.652997Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925942:2722] 2025-04-10T11:25:34.767388Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925963:2731] 2025-04-10T11:25:34.884010Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312925984:2740] 2025-04-10T11:25:35.014604Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644173312926005:2749] 2025-04-10T11:25:35.115079Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335143, txId: 281474976715672] shutting down 2025-04-10T11:25:35.254964Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893429:2775] 2025-04-10T11:25:35.386770Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893452:2785] 2025-04-10T11:25:35.519579Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893473:2794] 2025-04-10T11:25:35.657668Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893494:2803] 2025-04-10T11:25:35.796272Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893515:2812] 2025-04-10T11:25:35.946345Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893536:2821] 2025-04-10T11:25:36.088186Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644177607893557:2830] 2025-04-10T11:25:36.159581Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336193, txId: 281474976715674] shutting down 2025-04-10T11:25:36.318359Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644181902860981:2856] 2025-04-10T11:25:36.469565Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644181902861005:2866] 2025-04-10T11:25:36.626839Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644181902861031:2875] 2025-04-10T11:25:36.953989Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644181902861075:2893] 2025-04-10T11:25:37.122239Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644181902861097:2902] 2025-04-10T11:25:37.291660Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644186197828418:2913] 2025-04-10T11:25:37.471983Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644186197828479:2923] 2025-04-10T11:25:37.643185Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644186197828516:2932] 2025-04-10T11:25:37.820317Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644186197828560:2941] 2025-04-10T11:25:38.001102Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7491644186197828580:2950] 2025-04-10T11:25:38.174694Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1744284338209, txId: 281474976715679] shutting down
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TKeyValueTest::TestConcatWorks
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC }
>> KqpYql::EvaluateExprPgNull [GOOD]
>> KqpYql::EvaluateExprYsonAndType
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD]
Test command err:
Trying to start YDB, gRPC: 7695, MsgBus: 27394 2025-04-10T11:25:26.978245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644135416661224:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:26.978297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e6d/r3tmp/tmp9nn5tV/pdisk_1.dat 2025-04-10T11:25:27.376642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:27.395552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:27.395648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:27.398765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7695, node 1 2025-04-10T11:25:27.495784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:27.495811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:27.495827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:27.495959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27394 TClient is connected to server localhost:27394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:28.101391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.116101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.120956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:28.268096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.400339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.482304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.185384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644152596532167:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.185500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.547955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.578366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.606494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.635337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.662717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.705835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.765644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644152596532679:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.765720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644152596532684:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.766048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.769368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:30.781004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644152596532686:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:30.882793Z node 1 :TX_PROXY ERROR: Actor# [1:7491644152596532742:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:31.978356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644135416661224:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:31.978441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:32.424629Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332434, txId: 281474976710671] shutting down 2025-04-10T11:25:32.787613Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332805, txId: 281474976710673] shutting down 2025-04-10T11:25:33.430259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333456, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 18369, MsgBus: 19792 2025-04-10T11:25:34.186496Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644171371016395:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:34.186546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e6d/r3tmp/tmpiHR0IF/pdisk_1.dat 2025-04-10T11:25:34.294027Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18369, node 2 2025-04-10T11:25:34.329475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:34.329567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:34.331045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:34.370679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:34.370706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:34.370714Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:34.370827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19792 TClient is connected to server localhost:19792 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:34.775085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.788394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.868955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.057928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.126639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.140668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644184255920056:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.140863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.192895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.268708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.339940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.371792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.440127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.470792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.519179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644184255920578:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.519311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.519412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644184255920583:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.523419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:37.532722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644184255920585:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:37.598976Z node 2 :TX_PROXY ERROR: Actor# [2:7491644184255920637:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:38.752389Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284338783, txId: 281474976715671] shutting down 2025-04-10T11:25:39.072326Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284339105, txId: 281474976715673] shutting down 2025-04-10T11:25:39.186758Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644171371016395:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.186822Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::ComputeLength+QueryService >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-04-10T11:24:39.883601Z :HappyWay INFO: Random seed for debugging is 1744284279883567 2025-04-10T11:24:40.186930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643938363460688:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.187171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.259770Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643940295882255:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.259845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.431725Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d11/r3tmp/tmp75TfQZ/pdisk_1.dat 2025-04-10T11:24:40.447818Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:40.698941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.699218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.700329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.700410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.704528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.712065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:40.714362Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16926, node 1 2025-04-10T11:24:40.772532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:40.811133Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.817144Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.915437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000d11/r3tmp/yandexWh0jYw.tmp 2025-04-10T11:24:40.915474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000d11/r3tmp/yandexWh0jYw.tmp 2025-04-10T11:24:40.915627Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000d11/r3tmp/yandexWh0jYw.tmp 2025-04-10T11:24:40.915765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:40.970371Z INFO: TTestServer started on Port 19103 GrpcPort 16926 TClient is connected to server localhost:19103 PQClient connected to localhost:16926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:41.228398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-10T11:24:43.674723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643953180784460:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.674861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643953180784467:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.674952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.696828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:24:43.723626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491643953180784474:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:24:43.838620Z node 2 :TX_PROXY ERROR: Actor# [2:7491643953180784502:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:44.177014Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643953180784509:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:44.177589Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTA0ZTlmMWEtMTM1MzYxMTMtYTk1ZmFlZjgtNTM3ZDY3ZDM=, ActorId: [2:7491643953180784458:2308], ActorState: ExecuteState, TraceId: 01jrfq3wrk8v3jkb6624nyryqs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:44.179987Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.179743Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643951248363669:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:44.179960Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjNiNTRhYTYtMTk1NTVlOTMtZTZhZGQxZDMtNDg4ZGRkZDg=, ActorId: [1:7491643951248363628:2336], ActorState: ExecuteState, TraceId: 01jrfq3wvb8xr1sybxt6sfehnz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:44.180363Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.181969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.358822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.488521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16926", true, true, 1000); 2025-04-10T11:24:44.688807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfq3xmr4y0z9pt9artgm65t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUwYTI1YzAtODMxOTI1NWMtZjE2ZTI5MzktYTRiMDY3ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491643955543331385:2984] 2025-04-10T11:24:45.187572Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643938363460688:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.189958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:45.259872Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643940295882255:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.259975Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:24:50.606983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16926 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-10T11:24:50.682319Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16926 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1-- ... 075186224037894] server disconnected, pipe [5:7491644118880930811:3695] destroyed 2025-04-10T11:25:32.673942Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7491644161830604385:2646] TxId: 281474976715709. Ctx: { TraceId: 01jrfq5c83758sm8qce942x3s0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTMxMzI4ODUtM2Q3ODIzNjQtMjUwZTZkMDUtMzE2NzM1OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2025-04-10T11:25:32.674170Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7491644161830604395:2653], TxId: 281474976715709, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq5c83758sm8qce942x3s0. SessionId : ydb://session/3?node_id=5&id=ZTMxMzI4ODUtM2Q3ODIzNjQtMjUwZTZkMDUtMzE2NzM1OTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7491644161830604385:2646], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T11:25:32.674498Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7491644161830604396:2654], TxId: 281474976715709, task: 4. Ctx: { TraceId : 01jrfq5c83758sm8qce942x3s0. SessionId : ydb://session/3?node_id=5&id=ZTMxMzI4ODUtM2Q3ODIzNjQtMjUwZTZkMDUtMzE2NzM1OTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7491644161830604385:2646], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T11:25:33.135125Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715710. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:25:33.135278Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7491644161830604409:2656] TxId: 281474976715710. Ctx: { TraceId: 01jrfq5cx23n5ymf4jchfy4186, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTI1MzBiN2UtNTQyZDM2NTYtOWY0MmVlOS0zMmFiNTgzMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-10T11:25:33.135542Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MTI1MzBiN2UtNTQyZDM2NTYtOWY0MmVlOS0zMmFiNTgzMw==, ActorId: [5:7491644161830604406:2656], ActorState: ExecuteState, TraceId: 01jrfq5cx23n5ymf4jchfy4186, Create QueryResponse for error on request, msg: 2025-04-10T11:25:33.137072Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jrfq5cx33xh0mf820ygbr01c" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-10T11:25:34.747840Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:34.747987Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:34.796140Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:34.797377Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [7:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:25:34.798252Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [7:207:2220] 2025-04-10T11:25:34.803143Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [7:207:2220] 2025-04-10T11:25:34.811251Z node 7 :PERSQUEUE INFO: new Cookie default|e7fbcffb-e76a3e9c-9663fccb-751cdeed_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-10T11:25:34.822137Z node 7 :PERSQUEUE INFO: new Cookie default|d6275c02-70201a45-70b1b01a-f80ee62_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-10T11:25:35.148822Z node 7 :PERSQUEUE INFO: new Cookie default|b6267979-1d8840f6-f751595f-7c420a69_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-10T11:25:35.428795Z node 7 :PERSQUEUE INFO: new Cookie default|212f2fb1-af247186-33f734e4-1f612413_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-10T11:25:35.685977Z node 7 :PERSQUEUE INFO: new Cookie default|1bda48e-765539cc-251483fb-494720cd_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-04-10T11:25:35.937299Z node 7 :PERSQUEUE INFO: new Cookie default|843d958e-ece8f653-b72b8921-fc121777_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] **** 
Total histogram: ****
Interval=0ms: 1
Interval=1ms: 0
Interval=5ms: 0
Interval=10ms: 0
Interval=20ms: 0
Interval=50ms: 0
Interval=100ms: 0
Interval=500ms: 0
Interval=1000ms: 3
Interval=2500ms: 2
Interval=5000ms: 0
Interval=10000ms: 0
Interval=999999ms: 0
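The interval histogram above buckets observed request durations into fixed upper bounds in milliseconds. Below is a minimal C++ sketch of that bucketing; the bounds are read off the printed "Interval=...ms" labels and the sample durations are invented purely to reproduce the printed counts — neither is taken from the test sources.

#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Bucket upper bounds in milliseconds, assumed from the labels printed above;
// the 999999ms entry acts as the catch-all for anything slower than 10000ms.
constexpr std::array<uint64_t, 13> kBucketBoundsMs{
    0, 1, 5, 10, 20, 50, 100, 500, 1000, 2500, 5000, 10000, 999999};

// Return the first bucket whose bound covers the duration.
size_t BucketIndex(uint64_t durationMs) {
    for (size_t i = 0; i + 1 < kBucketBoundsMs.size(); ++i) {
        if (durationMs <= kBucketBoundsMs[i]) {
            return i;
        }
    }
    return kBucketBoundsMs.size() - 1;
}

int main() {
    std::array<uint64_t, kBucketBoundsMs.size()> counters{};
    // Invented sample durations chosen only to reproduce the counts above:
    // one instant write, three near one second, two near two seconds.
    for (uint64_t sampleMs : {0, 700, 800, 900, 1500, 2000}) {
        ++counters[BucketIndex(sampleMs)];
    }
    for (size_t i = 0; i < counters.size(); ++i) {
        std::printf("Interval=%llums: %llu\n",
                    static_cast<unsigned long long>(kBucketBoundsMs[i]),
                    static_cast<unsigned long long>(counters[i]));
    }
    return 0;
}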
**** **** **** **** 2025-04-10T11:25:37.008295Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:37.008405Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:37.031907Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:37.033230Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:25:37.034285Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:206:2219] 2025-04-10T11:25:37.035784Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:206:2219] 2025-04-10T11:25:37.037141Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:207:2220] 2025-04-10T11:25:37.038141Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [8:207:2220] 2025-04-10T11:25:37.054676Z node 8 :PERSQUEUE INFO: new Cookie default|b6af3885-70699ea5-d6f06017-c820abdf_0 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:37.065126Z node 8 :PERSQUEUE INFO: new Cookie default|c5a33922-8790c317-2988938f-b9ae40b6_1 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:37.076460Z node 8 :PERSQUEUE INFO: new Cookie default|2fd95484-2129f035-de87ed70-f60aa8ed_2 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:37.084898Z node 8 :PERSQUEUE INFO: new Cookie default|6e16b8a1-c5a513b5-f6bbcdb9-6b97a8dd_3 generated for partition 0 topic 'topic' owner default 2025-04-10T11:25:37.864255Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:37.864394Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:25:37.894600Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:37.895703Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [9:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:25:37.896584Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:207:2220] 2025-04-10T11:25:37.901652Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:207:2220] 2025-04-10T11:25:37.910548Z node 9 :PERSQUEUE INFO: new Cookie default|de99db47-19b78a49-eb79c8c9-3ca57ea2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-10T11:25:37.922141Z node 9 :PERSQUEUE INFO: new Cookie default|262d4ed-fc7971fc-8abd101c-5f68a428_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-10T11:25:38.252763Z node 9 :PERSQUEUE INFO: new Cookie default|2d8acd6a-c1fe6238-e98c39c3-ffa78ec8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-10T11:25:38.539045Z node 9 :PERSQUEUE INFO: new Cookie default|3fbc12e5-143331b3-5283ef6a-fc39d4af_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 1 Captured TEvRequest, cmd write size: 1 2025-04-10T11:25:38.817728Z node 9 :PERSQUEUE INFO: new Cookie default|7107af61-17d390d3-60caae10-2b3d658_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-04-10T11:25:39.058623Z node 9 :PERSQUEUE INFO: new Cookie default|be7bdc29-1981906d-c164bfaf-6d1cdf06_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 6288, MsgBus: 25825 2025-04-10T11:25:26.830323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644139304413385:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:26.831069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e76/r3tmp/tmplUxcWT/pdisk_1.dat 2025-04-10T11:25:27.170912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:27.187231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:27.187305Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:27.189144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6288, node 1 2025-04-10T11:25:27.300190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:27.300226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:27.300235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:27.300366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25825 TClient is connected to server localhost:25825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:27.809037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.833214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:27.847996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.979898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.148792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.226287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.848526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644152189317044:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.848741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.132665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.205502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.275073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.312756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.346645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.417758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.465602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644156484284865:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.465693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.465740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644156484284870:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.470948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:30.480495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644156484284872:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:30.564217Z node 1 :TX_PROXY ERROR: Actor# [1:7491644156484284926:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1504, MsgBus: 1861 2025-04-10T11:25:32.189982Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644164042615057:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.190307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e76/r3tmp/tmpqP51Os/pdisk_1.dat 2025-04-10T11:25:32.290940Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1504, node 2 2025-04-10T11:25:32.330458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:32.330566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:32.340771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:32.376638Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:32.376659Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:32.376667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:32.376786Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1861 TClient is connected to server localhost:1861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:32.827890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:32.838935Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:32.847571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.928527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.113703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.182499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.823441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644176927518717:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.823534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.877170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.914877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.985621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.014707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.050212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.084130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.138508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644181222486525:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.138627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.138682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644181222486530:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.142405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.153483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644181222486532:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:36.206258Z node 2 :TX_PROXY ERROR: Actor# [2:7491644181222486585:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:37.190423Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644164042615057:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:37.190520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:37.923589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.387647Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284338391, txId: 281474976715674] shutting down 2025-04-10T11:25:38.937098Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284338951, txId: 281474976715678] shutting down 2025-04-10T11:25:39.305101Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284339298, txId: 281474976715682] shutting down >> YdbLogStore::LogTable [GOOD] >> YdbMonitoring::SelfCheck >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 10328, MsgBus: 24284 2025-04-10T11:25:29.513392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644149214868794:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:29.513454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dec/r3tmp/tmpyJl0hq/pdisk_1.dat 2025-04-10T11:25:29.914919Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:29.961826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:29.961906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10328, node 1 2025-04-10T11:25:29.968242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:30.034541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:30.034567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:30.034582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:30.034741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24284 TClient is connected to 
server localhost:24284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:30.614646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.645137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.798991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.955543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.034036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:32.652353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644162099772458:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:32.652475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.000526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.035788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.072987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.107634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.145214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.174191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.235226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644166394740264:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.235293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.235587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644166394740269:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:33.239444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:33.249133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644166394740271:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:33.325421Z node 1 :TX_PROXY ERROR: Actor# [1:7491644166394740324:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. 2025-04-10T11:25:34.512522Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644149214868794:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:34.512602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11975, MsgBus: 4308 2025-04-10T11:25:35.214817Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644174432862012:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:35.215007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000dec/r3tmp/tmpao5N3l/pdisk_1.dat 2025-04-10T11:25:35.327958Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11975, node 2 2025-04-10T11:25:35.361031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.361111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.365904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:35.419981Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:35.420003Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:35.420010Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:35.420130Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4308 TClient is connected to server localhost:4308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:25:35.868659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.887681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:35.964476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.103424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.165519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:38.231465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644187317765643:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.231540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.276206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.303642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.333001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.364388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.395321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.430335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.513097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644187317766158:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.513211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.513405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644187317766163:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.518079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:38.528221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644187317766165:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:38.617193Z node 2 :TX_PROXY ERROR: Actor# [2:7491644187317766220:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-10T11:25:40.121318Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:40.125885Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:40.126307Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:25:40.126370Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:40.126405Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:25:40.126458Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:40.126510Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.126574Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:25:40.127267Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-10T11:25:40.127344Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:40.145490Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.148536Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.148706Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.149646Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.149801Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:40.150327Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:40.150757Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-10T11:25:40.153061Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-10T11:25:40.153149Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-10T11:25:40.153207Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:40.153255Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:40.154135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-10T11:25:40.202492Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:40.206467Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:40.206813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-10T11:25:40.206862Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:40.206900Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-10T11:25:40.206937Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:40.206986Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.207078Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-10T11:25:40.207701Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-04-10T11:25:40.208190Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:40.208386Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.211115Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.211234Z node 2 :PERSQUEUE NOTICE: [PQ: 
72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.212021Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:40.212130Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:40.212497Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:40.212716Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:414:2367] 2025-04-10T11:25:40.218741Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-10T11:25:40.218847Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:414:2367] 2025-04-10T11:25:40.218907Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:40.218955Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:40.219657Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:417:2369], now have 1 active actors on pipe 2025-04-10T11:25:40.236911Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:40.240831Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:40.241157Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-10T11:25:40.241222Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:40.241260Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:25:40.241311Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:40.241355Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.241420Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:25:40.242103Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:466:2406], now have 1 active actors on pipe 2025-04-10T11:25:40.242238Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:40.242418Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:40.244920Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:40.245041Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.245733Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:40.245859Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:40.246257Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:40.246489Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:474:2412] 2025-04-10T11:25:40.247951Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-04-10T11:25:40.248017Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:474:2412] 2025-04-10T11:25:40.248061Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:40.248098Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... edTxs.size=0 2025-04-10T11:25:40.935951Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.936009Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:40.936791Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:528:2453], now have 1 active actors on pipe 2025-04-10T11:25:40.936853Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:40.937060Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:40.939244Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:40.939392Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.940168Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:40.940293Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:40.940588Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:40.940793Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:536:2459] 2025-04-10T11:25:40.942507Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-10T11:25:40.942569Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:536:2459] 2025-04-10T11:25:40.942617Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:40.942658Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:40.943407Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:539:2461], now have 1 active actors on pipe 2025-04-10T11:25:40.944427Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:545:2464], now have 1 active actors on pipe 2025-04-10T11:25:40.944666Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:40.944777Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:546:2465], now have 1 active actors on pipe 2025-04-10T11:25:40.944972Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:40.945046Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:547:2465], now have 1 active actors on pipe 2025-04-10T11:25:40.945194Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:40.956182Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:555:2472], now have 1 active actors on pipe 2025-04-10T11:25:40.976294Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:40.978555Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:40.978860Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:25:40.978907Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:40.979074Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:40.979820Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:40.979865Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:40.979980Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:40.980200Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:40.980390Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:612:2517] 2025-04-10T11:25:40.981773Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:25:40.982743Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:25:40.982923Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:25:40.983173Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:25:40.983342Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:40.983372Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:40.983406Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:25:40.983448Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:25:40.983492Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:612:2517] 2025-04-10T11:25:40.983537Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:40.983575Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:25:40.984263Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:546:2465] destroyed 2025-04-10T11:25:40.984322Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:545:2464] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> BSCRestartPDisk::RestartOneByOne >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> KqpScripting::SecondaryIndexes [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-04-10T11:23:27.389788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643627438190724:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:27.389923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cf9/r3tmp/tmpfx7m5g/pdisk_1.dat 2025-04-10T11:23:27.677300Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8319, node 1 2025-04-10T11:23:27.714352Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:27.714432Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:27.738723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:27.738744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:27.738753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:27.738869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:23:27.764417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:27.764524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:27.767741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:28.006649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:11193 2025-04-10T11:23:28.235515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:28.362679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:23:28.363002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:23:28.363359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:23:28.363654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:23:28.363836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:23:28.364013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:23:28.364199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:23:28.364359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:23:28.364513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:23:28.364662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:23:28.364837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:23:28.364967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491643631733158863:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:23:28.408811Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:23:28.408901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:23:28.409145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:23:28.409260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:23:28.409402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:23:28.409783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:23:28.409921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:23:28.410049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:23:28.410185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:23:28.410321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:23:28.410471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:23:28.410586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491643631733158872:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:23:28.445175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:23:28.445275Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:23:28.445584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:23:28.445710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:23:28.445816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:23:28.445941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:23:28.446045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:23:28.446183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491643631733158865:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:23:28.446346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=7207518622403 ... aseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7491644184445445009:3206], task: 62, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1501 Tasks { TaskId: 62 CpuTimeUs: 613 FinishTimeMs: 1744284337402 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 38 BuildCpuTimeUs: 575 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744284337295 } MaxMemoryUsage: 1048576 } 2025-04-10T11:25:37.434403Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7491644184445445009:3206] 2025-04-10T11:25:37.434443Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7491644184445445015:3210], 2025-04-10T11:25:37.434596Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. 
Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7491644184445444987:3190], task: 46, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1600 Tasks { TaskId: 46 CpuTimeUs: 692 FinishTimeMs: 1744284337401 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 37 BuildCpuTimeUs: 655 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744284337282 } MaxMemoryUsage: 1048576 } 2025-04-10T11:25:37.434636Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7491644184445445015:3210], 2025-04-10T11:25:37.434777Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7491644184445445009:3206], task: 62, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1501 Tasks { TaskId: 62 CpuTimeUs: 613 FinishTimeMs: 1744284337402 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 38 BuildCpuTimeUs: 575 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744284337295 } MaxMemoryUsage: 1048576 } 2025-04-10T11:25:37.434814Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7491644184445445015:3210], 2025-04-10T11:25:37.434905Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Forwarded TEvStreamData to [28:7491644175855510266:3133] 2025-04-10T11:25:37.435555Z node 28 :KQP_EXECUTER DEBUG: TxId: 281474976715670, send ack to channelId: 66, seqNo: 1, enough: 0, freeSpace: 8388490, to: [28:7491644184445445055:3210] 2025-04-10T11:25:37.435647Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7491644184445445015:3210], TxId: 281474976715670, task: 66. Ctx: { TraceId : 01jrfq5fqd4826nv7tve1mhtg6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-04-10T11:25:37.435741Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 66. Tasks execution finished, don't wait for ack delivery in input channelId: 65, seqNo: [1] 2025-04-10T11:25:37.435765Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 66. 
Tasks execution finished 2025-04-10T11:25:37.435796Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7491644184445445015:3210], TxId: 281474976715670, task: 66. Ctx: { TraceId : 01jrfq5fqd4826nv7tve1mhtg6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-04-10T11:25:37.435916Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 66. pass away 2025-04-10T11:25:37.436045Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7491644184445445015:3210], task: 66, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1637 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 197 FinishTimeMs: 1744284337435 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 145 HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744284337300 } MaxMemoryUsage: 1048576 } 2025-04-10T11:25:37.436059Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-10T11:25:37.436109Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7491644184445445015:3210] 2025-04-10T11:25:37.436217Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7491644184445445015:3210], TxId: 281474976715670, task: 66. Ctx: { TraceId : 01jrfq5fqd4826nv7tve1mhtg6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [28:7491644184445444921:3134] TaskId: 66 Stats: CpuTimeUs: 1637 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 197 FinishTimeMs: 1744284337435 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 145 HostName: "ghrun-x5vqckr7qy" NodeId: 28 CreateTimeMs: 1744284337300 } MaxMemoryUsage: 1048576 2025-04-10T11:25:37.436287Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:25:37.436378Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7491644184445444921:3134] TxId: 281474976715670. Ctx: { TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.115339s ReadRows: 0 ReadBytes: 0 ru: 76 rate limiter was not found force flag: 1 2025-04-10T11:25:37.436491Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T11:25:37.436975Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 236.696 QueriesCount: 1 2025-04-10T11:25:37.437082Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T11:25:37.437242Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T11:25:37.437310Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, EndCleanup, isFinal: 1 2025-04-10T11:25:37.437407Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: ExecuteState, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7491644120020931545:2280] 2025-04-10T11:25:37.437458Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: unknown state, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Cleanup temp tables: 0 2025-04-10T11:25:37.440895Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284337000, txId: 18446744073709551615] shutting down 2025-04-10T11:25:37.441088Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTU2OWJiMmYtYjc0MmY3NGQtNGM5YzMxMDMtNTQ4MGEzMWI=, ActorId: [28:7491644175855510268:3134], ActorState: unknown state, TraceId: 01jrfq5fqd4826nv7tve1mhtg6, Session actor destroyed RESULT: [] --------------------- STATS: total CPU: 3341 duration: 1750 usec cpu: 1750 usec duration: 217274 usec cpu: 232881 usec { name: "/Root/OlapStore/log1" } 2025-04-10T11:25:37.548982Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[28:7491644128610867020:2326];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-04-10T11:25:37.575620Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[28:7491644128610867030:2327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-04-10T11:25:37.611968Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[28:7491644124315899714:2325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15933, MsgBus: 6396 2025-04-10T11:25:36.600582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644181902961333:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:36.600703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001616/r3tmp/tmptWF7Bi/pdisk_1.dat 2025-04-10T11:25:36.937756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:36.937855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:36.949515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:36.951893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15933, node 1 2025-04-10T11:25:37.022995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:37.023017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:37.023024Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:37.023139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6396 TClient is connected to server localhost:6396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:37.484582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.500255Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:37.512954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.633348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.806451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.883995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.642287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644194787865004:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.642434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.006645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.041826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.071409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.098820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.125438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.194004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.277728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644199082832818:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.277816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.277908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644199082832823:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.282831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:40.292013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644199082832825:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:40.352428Z node 1 :TX_PROXY ERROR: Actor# [1:7491644199082832877:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 3095179022746701451 2025-04-10T11:25:42.521212Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521401Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521492Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521586Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521659Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521725Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521788Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.521864Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.522856Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.522956Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523018Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523084Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523145Z 5 
00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523202Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523275Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523350Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.523430Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523487Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523523Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523569Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523612Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523675Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523718Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.523765Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:42.525973Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526056Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526112Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526195Z 8 
00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526264Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526319Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526377Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:42.526431Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 3935, MsgBus: 25799 2025-04-10T11:25:28.170070Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644146642467358:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:28.170129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e35/r3tmp/tmpjJMkLu/pdisk_1.dat 2025-04-10T11:25:28.547918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:28.552030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:28.552105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:28.554775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3935, node 1 2025-04-10T11:25:28.650708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:28.650744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:28.650752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:28.650876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25799 TClient is connected to server localhost:25799 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:29.261145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.290369Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:29.301611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.480652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.636692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:29.707068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:31.259871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644159527370806:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.260010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.522020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.548637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.573415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.638860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.673062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.752627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.797179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644159527371323:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.797232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.797441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644159527371328:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.800534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:31.813708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644159527371330:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:31.909048Z node 1 :TX_PROXY ERROR: Actor# [1:7491644159527371384:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:32.801186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.178414Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644146642467358:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:33.178488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:33.271188Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333309, txId: 281474976710673] shutting down 2025-04-10T11:25:33.314885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.405661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.418412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.427396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.493609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.506315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.866493Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333897, txId: 281474976710682] shutting down 2025-04-10T11:25:33.911353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:25:33.943118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.293500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334331, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 8987, MsgBus: 9136 2025-04-10T11:25:35.119709Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644177007553035:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:35.119773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e35/r3tmp/tmp35XgKw/pdisk_1.dat 2025-04-10T11:25:35.241999Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8987, node 2 2025-04-10T11:25:35.255255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.255337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.256604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:35.350846Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:35.350875Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:35.350885Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:35.351017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9136 TClient is connected to server localhost:9136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:35.788630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.812006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.889588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.072846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:25:36.151425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.260758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644189892456693:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.260860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.285775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.312630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.339357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.368217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.397021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.428389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.479723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644189892457202:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.479822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644189892457207:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.479827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.483224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:38.493864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644189892457209:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:38.584545Z node 2 :TX_PROXY ERROR: Actor# [2:7491644189892457263:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:39.517625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.574749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.614843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.119851Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644177007553035:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:40.119920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> KqpYql::Discard [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpScripting::Pure [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 1249, MsgBus: 19134 2025-04-10T11:25:27.583921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644140741977053:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:27.584572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e4a/r3tmp/tmpskgD4V/pdisk_1.dat 2025-04-10T11:25:27.933958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1249, node 1 2025-04-10T11:25:27.979486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:27.979570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:27.987279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:28.025492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:28.025521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:28.025534Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-10T11:25:28.025659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19134 TClient is connected to server localhost:19134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:28.530246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.556934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.668589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.832293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.914992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:30.546320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644153626880721:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.546485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:30.902478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.936050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:30.982777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.054354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.085731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.157922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:31.217398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644157921848535:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.217482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.218056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644157921848540:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:31.225476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:31.237671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644157921848543:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:31.334914Z node 1 :TX_PROXY ERROR: Actor# [1:7491644157921848599:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:32.341718Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU4MGE5YWItYzdhNDk1ODAtOTkwYjkxZDAtMzA0NmJlNA==, ActorId: [1:7491644162216816189:2496], ActorState: ExecuteState, TraceId: 01jrfq5c9gewgp2m1gkszqnn21, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.399698Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 5 2025-04-10T11:25:32.415920Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM4MGNmYTQtNjVmMmViMzUtM2UxYzBmZjctYjc0ZGMxMTQ=, ActorId: [1:7491644162216816200:2501], ActorState: ExecuteState, TraceId: 01jrfq5cbj1m0v0zca4dfzr6s5, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.437866Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTY1YTVkYmMtYTQ3MDVlODEtZjNlMjQ0NzQtYWViMzE0MmY=, ActorId: [1:7491644162216816209:2505], ActorState: ExecuteState, TraceId: 01jrfq5ccb36frnqfer248xz0d, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.495767Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q5ZjQ3ZDQtNjNlNmJkNWUtZTg3MWUyNmUtZDlmZGQ0ZTU=, ActorId: [1:7491644162216816218:2509], ActorState: ExecuteState, TraceId: 01jrfq5cd1635k7cev3bvr6r4k, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.524310Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWUwZjg0N2ItN2Y1ZDJkMDEtOTgxNzI3OC05NDU1NjRlYw==, ActorId: [1:7491644162216816256:2518], ActorState: ExecuteState, TraceId: 01jrfq5cf0c4ar4fvbepdbmgs9, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.581425Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmI1ZTk5MTMtY2I3OTk2Y2MtMjk1ODQ5NTgtZWQ2ZDYyMDI=, ActorId: [1:7491644162216816265:2522], ActorState: ExecuteState, TraceId: 01jrfq5cfm3t8b92grv3jq2qkt, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.585048Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644140741977053:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.585135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:32.670769Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODY2YzUwNjAtYzk5MjU1ZjUtNTQ5YTNjOWYtYzdlYzEyYWI=, ActorId: [1:7491644162216816279:2529], ActorState: ExecuteState, TraceId: 01jrfq5cht3fcpw8bz5c2myk2m, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.714712Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332714, txId: 281474976710671] shutting down 2025-04-10T11:25:32.716136Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332714, txId: 281474976710672] shutting down 2025-04-10T11:25:32.717924Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjM1MjY0NjUtZWQ3NmI4ZmEtMmY5M2QxYzktMzg3NDdhMGE=, ActorId: [1:7491644162216816436:2557], ActorState: ExecuteState, 
TraceId: 01jrfq5cmca7askfc0mt9v4gsf, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.789503Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzAxY2UyYjktMzdkMzdmOTAtZTkxYzIyMGQtNzE2NTk3MDI=, ActorId: [1:7491644162216816490:2561], ActorState: ExecuteState, TraceId: 01jrfq5cnt2py4swx6zexeebzx, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.859852Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRkODYyMjMtYzZiZTQyMWYtZjE2ZDk3MjMtMjAyNmRjNzg=, ActorId: [1:7491644162216816550:2578], ActorState: ExecuteState, TraceId: 01jrfq5cr00zmqdq0thbv57yjp, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.867646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332833, txId: 281474976710675] shutting down 2025-04-10T11:25:32.914987Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY4MzZkZmUtNDA5OGQ2NDMtNDdmYmI3MTEtYWUyOTY1MzM=, ActorId: [1:7491644162216816623:2585], ActorState: ExecuteState, TraceId: 01jrfq5ct77c7kpw1sjp6n7csh, Create QueryResponse for error on request, msg: 2025-04-10T11:25:32.980550Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM2ODI4ZjAtY2JkMDFhMjgtNGFkMDEyYmUt ... MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334681, txId: 281474976710720] shutting down 2025-04-10T11:25:34.663391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334681, txId: 281474976710719] shutting down 2025-04-10T11:25:34.746335Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGZiMGEyNzMtYmZhOTYzZTEtMzQxOTQ2NGYtODU3MDNmOTE=, ActorId: [1:7491644170806753408:2956], ActorState: ExecuteState, TraceId: 01jrfq5ehm5f9qxws7dcg38vt6, Create QueryResponse for error on request, msg: 2025-04-10T11:25:34.842943Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334870, txId: 281474976710724] shutting down 2025-04-10T11:25:34.843224Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334870, txId: 281474976710723] shutting down 2025-04-10T11:25:34.958387Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjM3NzVkMjgtZjRhMzI5N2YtNmJlNmEzNjItNzgwN2Q5OTA=, ActorId: [1:7491644170806753649:2990], ActorState: ExecuteState, TraceId: 01jrfq5er85acd03vtzfhg8a8f, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.023962Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335052, txId: 281474976710727] shutting down 2025-04-10T11:25:35.078386Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQwZTljNmYtZThiYTdlZS02NzQ5N2JiZS04NDgxZmM2OQ==, ActorId: [1:7491644170806753669:2999], ActorState: ExecuteState, TraceId: 01jrfq5evvdbkq72cph36r4727, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.144151Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335164, txId: 281474976710729] shutting down 2025-04-10T11:25:35.197511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQ3NTAwZDMtNmQ0ODE1MjItMWJhZjFlMzQtMzhhYmIzYzc=, ActorId: [1:7491644175101721073:3017], ActorState: ExecuteState, TraceId: 01jrfq5ezg7q4256gssy6w8cww, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.315999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: 
discarding snapshot; our snapshot: [step: 1744284335346, txId: 281474976710731] shutting down 2025-04-10T11:25:35.316815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335346, txId: 281474976710732] shutting down 2025-04-10T11:25:35.442689Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2U0YTMwZDUtM2MyN2U3NTEtNzg5ZjA2NDMtMTAyNDA3NmI=, ActorId: [1:7491644175101721345:3059], ActorState: ExecuteState, TraceId: 01jrfq5f70epdc826z4vefrjsj, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.571837Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZlNGRjYTctZDA5Y2Y3MGEtZmRiMWQyMDMtMWUyNTk2N2E=, ActorId: [1:7491644175101721364:3068], ActorState: ExecuteState, TraceId: 01jrfq5fay1faeac2j3t9w30dz, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.579663Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335605, txId: 281474976710735] shutting down 2025-04-10T11:25:35.580128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335605, txId: 281474976710736] shutting down 2025-04-10T11:25:35.706375Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWFjMWUzZC03MDRlZDZlYS04YWM1NzgwLWJlZjUzZmRl, ActorId: [1:7491644175101721528:3092], ActorState: ExecuteState, TraceId: 01jrfq5fezbwryvab1q46q2ney, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.801437Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335822, txId: 281474976710739] shutting down 2025-04-10T11:25:35.844125Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ZkODQwZTQtZjI2NTlkMjgtYmQ2OWU3ZmQtZjM3OTdhZWY=, ActorId: [1:7491644175101721553:3104], ActorState: ExecuteState, TraceId: 01jrfq5fk848rw54b4yb0cjnzt, Create QueryResponse for error on request, msg: 2025-04-10T11:25:35.944197Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335969, txId: 281474976710742] shutting down 2025-04-10T11:25:35.944926Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335969, txId: 281474976710741] shutting down Trying to start YDB, gRPC: 18361, MsgBus: 24221 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e4a/r3tmp/tmploz2QE/pdisk_1.dat 2025-04-10T11:25:36.904460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:36.952119Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18361, node 2 2025-04-10T11:25:36.992303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:36.992383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:36.994094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:37.015293Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:37.015321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:37.015330Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-10T11:25:37.015476Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24221 TClient is connected to server localhost:24221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:37.554525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.568123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.641946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.777814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.889425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.259653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644196515156496:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.259752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.305437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.333758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.357718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.391839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.418474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.454803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.501418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644196515157003:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.501499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644196515157008:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.501516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:40.505067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:40.516872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644196515157010:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:40.589263Z node 2 :TX_PROXY ERROR: Actor# [2:7491644196515157064:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> KqpYql::CreateUseTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13279, MsgBus: 63655 2025-04-10T11:25:38.090496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644186928133478:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:38.097525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001614/r3tmp/tmpVYwsiK/pdisk_1.dat 2025-04-10T11:25:38.431768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13279, node 1 2025-04-10T11:25:38.500521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:38.500855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:38.504663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:38.524861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:38.524890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:38.524905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:38.525073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63655 TClient is connected to server localhost:63655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:39.022025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:39.043706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.212928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.434945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.505552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.217294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644199813037068:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.217456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.468724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.496420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.530097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.560610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.589477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.664734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.714249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644199813037581:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.714355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.714398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644199813037586:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.721216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:41.736108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644199813037588:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:41.815108Z node 1 :TX_PROXY ERROR: Actor# [1:7491644199813037641:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TVectorIndexTests::CreateTableMultiColumn >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::ManySubscribersManyPublisher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 14756, MsgBus: 1192 2025-04-10T11:25:33.248889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644168496068976:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:33.248952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d72/r3tmp/tmp2jKBKi/pdisk_1.dat 2025-04-10T11:25:33.657830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:33.673859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:33.673951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14756, node 1 2025-04-10T11:25:33.677885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:33.726771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:33.726797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:33.726809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:33.726944Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1192 TClient is connected to server localhost:1192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:34.285378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.311105Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.322784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:34.493254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.645722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.713194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.319545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644181380972416:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.319700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.585347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.613884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.649002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.681307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.710699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.748441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.793798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644181380972925:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.793855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.794826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644181380972930:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.798293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.810513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644181380972932:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:36.869840Z node 1 :TX_PROXY ERROR: Actor# [1:7491644181380972985:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17084, MsgBus: 61060 2025-04-10T11:25:38.633415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644188405292263:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:38.633485Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d72/r3tmp/tmplcL3F4/pdisk_1.dat 2025-04-10T11:25:38.737870Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17084, node 2 2025-04-10T11:25:38.765643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:38.765750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:38.768027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:38.791897Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:38.791918Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:38.791924Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:38.792037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61060 TClient is connected to server localhost:61060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:39.259683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.271909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:39.364387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.533241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.602016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.586781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644201290195932:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.586883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.638291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.673607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.702469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.730224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.763270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.832197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.908310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644201290196450:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.908394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.908522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644201290196455:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.912164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:41.923749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644201290196457:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:42.017426Z node 2 :TX_PROXY ERROR: Actor# [2:7491644205585163808:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:42.918121Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491644205585164105:2500], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-04-10T11:25:42.918470Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIzYmU0YjAtN2QyN2YyMDgtZGIzYzA4NzgtOTdlM2ZjMjg=, ActorId: [2:7491644205585164098:2496], ActorState: ExecuteState, TraceId: 01jrfq5pjb52pt8v67j2pdpbk5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-04-10T11:25:42.341159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:42.349027Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:42.349461Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:25:42.349526Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:42.349579Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:25:42.349642Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:42.349698Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.349769Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:25:42.350534Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:260:2252], now have 1 active actors on pipe 2025-04-10T11:25:42.350665Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:42.374541Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.377539Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.377705Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.378709Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.378882Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:42.379287Z node 1 :PERSQUEUE DEBUG: 
[rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:42.379590Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:268:2258] 2025-04-10T11:25:42.382142Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-10T11:25:42.382244Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:268:2258] 2025-04-10T11:25:42.382303Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:42.382358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:42.383221Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:271:2260], now have 1 active actors on pipe 2025-04-10T11:25:42.440162Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:42.447790Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:42.448140Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-10T11:25:42.448194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:42.448231Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-10T11:25:42.448293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:42.448348Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.448407Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-10T11:25:42.449194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:405:2360], now have 1 active actors on pipe 2025-04-10T11:25:42.449259Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:42.449494Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.451990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.452128Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.453078Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:42.453247Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:42.453620Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:42.453863Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:413:2366] 2025-04-10T11:25:42.456124Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-10T11:25:42.456190Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:413:2366] 2025-04-10T11:25:42.456242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:42.456298Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:42.457200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:416:2368], now have 1 active actors on pipe 2025-04-10T11:25:42.474138Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:42.478535Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:42.478852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-10T11:25:42.478913Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:42.478953Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-10T11:25:42.479086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:42.479138Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.479205Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-10T11:25:42.479907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:465:2405], now have 1 active actors on pipe 2025-04-10T11:25:42.480020Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:42.480207Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:42.482509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" 
Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:42.482636Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:42.483476Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:42.483599Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:42.483939Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:42.484186Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [1:473:2411] 2025-04-10T11:25:42.486351Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:25:42.486424Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:473:2411] 2025-04-10T11:25:42.486537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:42.486599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:42.487514Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:476:2413], ... 
SQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:43.923390Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:43.925580Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:43.925705Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:43.926453Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-10T11:25:43.926577Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:43.926920Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:43.927142Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:476:2414] 2025-04-10T11:25:43.929591Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-10T11:25:43.929664Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:476:2414] 2025-04-10T11:25:43.929727Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:43.929779Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:43.930684Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:479:2416], now have 1 active actors on pipe 2025-04-10T11:25:43.946140Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:43.949733Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:43.950041Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:25:43.950094Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:43.950173Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:25:43.950223Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:43.950277Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:43.950342Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:43.951066Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:528:2453], now have 1 active actors on pipe 2025-04-10T11:25:43.951126Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:43.951317Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:43.954045Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:43.954188Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:43.954769Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:43.954891Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:43.955244Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:43.955445Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:536:2459] 2025-04-10T11:25:43.957860Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-10T11:25:43.957927Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:536:2459] 2025-04-10T11:25:43.957985Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:43.958038Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:43.958832Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:539:2461], now have 1 active actors on pipe 2025-04-10T11:25:43.960600Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:548:2464], now have 1 active actors on pipe 2025-04-10T11:25:43.961084Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:550:2465], now have 1 active actors on pipe 2025-04-10T11:25:43.961197Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:551:2465], now have 1 active actors on pipe 2025-04-10T11:25:43.961501Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:552:2465], now have 1 active actors on pipe 2025-04-10T11:25:43.962072Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:565:2476], now have 1 active actors on pipe 2025-04-10T11:25:43.985857Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:43.988951Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:43.989354Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:25:43.989420Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:43.989582Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:43.990134Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:43.990209Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:43.990331Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:43.990675Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:43.990890Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:622:2521] 2025-04-10T11:25:43.993128Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:25:43.994450Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:25:43.994704Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:25:43.995006Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:25:43.995254Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:43.995306Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:43.995348Z node 3 :PERSQUEUE INFO: 
[rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:25:43.995388Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:25:43.995454Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:622:2521] 2025-04-10T11:25:43.995518Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:43.995569Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:43.996652Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:548:2464] destroyed 2025-04-10T11:25:43.996716Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:550:2465] destroyed 2025-04-10T11:25:43.996973Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:551:2465] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } } } >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 26848, MsgBus: 11051 2025-04-10T11:25:32.815889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644161321038195:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.816234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d81/r3tmp/tmpaFInjJ/pdisk_1.dat 2025-04-10T11:25:33.193984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:33.194116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:33.195826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:33.197022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26848, node 1 2025-04-10T11:25:33.314875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:33.314908Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-10T11:25:33.314916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:33.315102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11051 TClient is connected to server localhost:11051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:33.913447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.931809Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:33.945532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.114492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.279380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.356251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.050342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644178500909136:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.050542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.327378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.360077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.387882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.413948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.443306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.472813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.548440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644178500909651:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.548495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644178500909656:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.548517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.551903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.562245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644178500909658:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:36.634481Z node 1 :TX_PROXY ERROR: Actor# [1:7491644178500909712:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:37.816047Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644161321038195:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:37.816139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20497, MsgBus: 13359 2025-04-10T11:25:38.364358Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644189348649528:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:38.364442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d81/r3tmp/tmpGR5WlJ/pdisk_1.dat 2025-04-10T11:25:38.476872Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20497, node 2 2025-04-10T11:25:38.496934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:38.497026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:38.500658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:38.537689Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:38.537711Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:38.537718Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:38.537830Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13359 TClient is connected to server localhost:13359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:38.946228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:38.955117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.003425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.131884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.205694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.261998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644202233553180:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.262088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.307367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.337243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.364676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.397380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.428219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.465330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.510537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644202233553691:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.510613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.510681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644202233553696:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.514591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:41.526896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644202233553698:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:41.601462Z node 2 :TX_PROXY ERROR: Actor# [2:7491644202233553752:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> TargetDiscoverer::SystemObjects >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 5876, MsgBus: 24286 2025-04-10T11:25:32.712165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644162694754576:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:32.712273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d83/r3tmp/tmpLiRIo1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5876, node 1 2025-04-10T11:25:33.120361Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:33.120393Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:33.125627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:33.127992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:33.128630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:33.133797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:33.221824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:33.221847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:33.221854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:33.221957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24286 TClient is connected to server localhost:24286 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:33.737620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.749219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:33.769396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:33.890636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:34.030198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:34.103220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.847960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644175579658211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:35.848118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.162940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.190342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.220276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.254533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.286056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.358777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.400721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644179874626019:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.400804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.401114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644179874626024:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:36.405061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.415027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644179874626026:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:36.508140Z node 1 :TX_PROXY ERROR: Actor# [1:7491644179874626079:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:37.712294Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644162694754576:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:37.712367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:38.453576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284338496, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 14640, MsgBus: 17106 2025-04-10T11:25:39.250796Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644191741653586:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.250884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d83/r3tmp/tmpNIpBgC/pdisk_1.dat 2025-04-10T11:25:39.374261Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:39.387780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:39.387851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:39.394309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14640, node 2 2025-04-10T11:25:39.450653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:39.450676Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:39.450683Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:39.450788Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17106 TClient is connected to server localhost:17106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:39.863314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:39.880357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:39.939517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.085898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.159854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.990797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644200331589956:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:41.990905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.030338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.096251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.119592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.145891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.177315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.224719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.299375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644204626557772:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.299448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.299573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644204626557778:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.303144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:42.311982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644204626557781:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:42.388821Z node 2 :TX_PROXY ERROR: Actor# [2:7491644204626557834:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 16017, MsgBus: 18177 2025-04-10T11:25:33.964045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644166782747160:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:33.964087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d68/r3tmp/tmpSpQOTk/pdisk_1.dat 2025-04-10T11:25:34.413402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:34.424746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:34.424853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:34.426716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16017, node 1 2025-04-10T11:25:34.526729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:34.526759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:34.526768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:34.526885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18177 TClient is connected to server localhost:18177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:35.078015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:25:35.127112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:35.261801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.414884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:35.494483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.002833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644183962618109:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.002940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.266494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.338636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.374635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.404341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.431869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.503904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:37.554242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644183962618627:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.554322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.554555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644183962618632:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:37.558425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:37.568352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644183962618634:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:37.637489Z node 1 :TX_PROXY ERROR: Actor# [1:7491644183962618687:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 27838, MsgBus: 2218 2025-04-10T11:25:39.361632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644192470122754:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.361691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d68/r3tmp/tmp2Rswcy/pdisk_1.dat 2025-04-10T11:25:39.479086Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27838, node 2 2025-04-10T11:25:39.512826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:39.512900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:39.519014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:39.553836Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:39.553858Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:39.553865Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:39.553980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2218 TClient is connected to server localhost:2218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:40.028665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.046733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.121799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
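The two issues above (code 1070 from the optimizer, code 2009 underneath) are YDB rejecting a single YQL script that mixes scheme and data operations, and the fix is the one the message states: submit them as separate queries. A minimal sketch of the split (the table path /Root/Example is hypothetical, not from this test):

    -- Query 1: scheme operation only
    CREATE TABLE `/Root/Example` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );

    -- Query 2: data operation, submitted as a separate query
    UPSERT INTO `/Root/Example` (Key, Value) VALUES (1u, "one");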
2025-04-10T11:25:40.266385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.332522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.254686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644205355026399:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.254768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.299866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.333048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.366778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.402511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.433046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.468052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.511406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644205355026909:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.511485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644205355026914:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.511527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.515103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:42.525369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644205355026916:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:42.598770Z node 2 :TX_PROXY ERROR: Actor# [2:7491644205355026970:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:43.420827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.610948Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284343648, txId: 281474976715673] shutting down >> KqpYql::EvaluateExprYsonAndType [GOOD] >> TBoardSubscriberTest::DropByDisconnect >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TVectorIndexTests::CreateTableMultiColumn [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TSchemeShardViewTest::ReadOnlyMode >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::AsyncDropSameView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-10T11:25:44.624035Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:44.628523Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:44.628836Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:25:44.628897Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:44.628930Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:25:44.628970Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:44.629009Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:44.629060Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:25:44.629726Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-10T11:25:44.629793Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:44.646776Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:44.649494Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:44.649688Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:44.650565Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:44.650722Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:44.651030Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:44.651394Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-10T11:25:44.653289Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-10T11:25:44.653350Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-10T11:25:44.653402Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:44.653447Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:44.654273Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-10T11:25:44.697930Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:44.701188Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:44.701444Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:25:44.701495Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:44.701526Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-10T11:25:44.701559Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:44.701607Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:44.701669Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:44.702272Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-10T11:25:44.702400Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:44.702580Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:44.704488Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:44.704628Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:44.705284Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:44.705385Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:44.705653Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:44.705826Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:413:2366] 2025-04-10T11:25:44.707304Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:25:44.707355Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:413:2366] 2025-04-10T11:25:44.707396Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:44.707433Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:44.708032Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-10T11:25:44.709141Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:422:2371], now have 1 active actors on pipe 2025-04-10T11:25:44.709400Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:424:2372], now have 1 active actors on pipe 2025-04-10T11:25:44.709554Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:44.709818Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:44.710113Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:422:2371] destroyed 2025-04-10T11:25:44.710449Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:424:2372] destroyed 2025-04-10T11:25:45.263437Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:45.267538Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:45.267970Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-10T11:25:45.268034Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:45.268083Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-10T11:25:45.268127Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:45.268192Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:45.268277Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-10T11:25:45.269048Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:262:2254], now have 1 active actors on pipe 2025-04-10T11:25:45.269183Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:25:45.269436Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:45.272415Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-10T11:25:45.272602Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:45.273468Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 L ... txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-10T11:25:45.400827Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:45.401210Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:45.401518Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:536:2459] 2025-04-10T11:25:45.403790Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-10T11:25:45.403879Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:536:2459] 2025-04-10T11:25:45.403952Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:45.404009Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-10T11:25:45.404782Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:539:2461], now have 1 active actors on pipe 2025-04-10T11:25:45.405997Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:545:2464], now have 1 active actors on pipe 2025-04-10T11:25:45.406302Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:45.406506Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:546:2465], now have 1 active actors on pipe 2025-04-10T11:25:45.406754Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:45.406832Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:547:2465], now have 1 active actors on pipe 2025-04-10T11:25:45.407010Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-10T11:25:45.418193Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:555:2472], now have 1 active actors on pipe 2025-04-10T11:25:45.443241Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:25:45.446451Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:25:45.446831Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-10T11:25:45.446887Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:25:45.447051Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:25:45.447897Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:45.447971Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-10T11:25:45.448097Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:25:45.448444Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:25:45.448712Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:612:2517] 2025-04-10T11:25:45.450989Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-10T11:25:45.452426Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-10T11:25:45.452724Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-10T11:25:45.453093Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-10T11:25:45.453355Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-10T11:25:45.453401Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-10T11:25:45.453444Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:25:45.453489Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-10T11:25:45.453541Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:612:2517] 2025-04-10T11:25:45.453617Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:25:45.453682Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-10T11:25:45.454661Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:546:2465] destroyed 2025-04-10T11:25:45.454741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:545:2464] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 
0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:44.768009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:44.768095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:44.768127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:44.768160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:44.768194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:44.768213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:44.768253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:44.768304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:44.768557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:44.829969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:44.830018Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:44.844006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:44.844355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:44.844536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:44.852188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:44.852424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:44.853063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:44.853508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:44.855491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:44.856762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:44.856828Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:44.857010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:44.857053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:44.857086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:44.857215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.864005Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:44.976927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:44.977132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.977330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:44.977523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:44.977565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.979759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:44.979882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:44.980088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.980139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:44.980163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:44.980201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:44.981936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.981980Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:44.982007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:44.983532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.983571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
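(The schemeshard transaction flow above and the scheme description that follows belong to TVectorIndexTests::CreateTableMultiColumn. The DDL the test actually issues is not captured in this log; the YQL below is a hypothetical reconstruction, with the table and column names taken from the describe output further down (vectors, id1/id2, embedding, covered1/covered2) and the index settings matching the reported vector_kmeans_tree description: cosine distance, float vectors, dimension 1024. The levels/clusters values are assumptions, since they do not appear in the logged settings.)

    -- Hypothetical sketch; the test's real DDL is not shown in this log.
    CREATE TABLE vectors (
        id1 String,
        id2 String,
        embedding String,
        covered1 String,
        covered2 String,
        PRIMARY KEY (id1, id2)
    );

    ALTER TABLE vectors ADD INDEX idx_vector
        GLOBAL USING vector_kmeans_tree
        ON (embedding) COVER (covered1, covered2)
        WITH (
            distance = cosine,
            vector_type = "float",
            vector_dimension = 1024,
            levels = 2,      -- assumed: tree depth not present in the log
            clusters = 128   -- assumed: cluster count not present in the log
        );

(An index of this shape would materialize the two private tables the describe output below reports: indexImplLevelTable with __ydb_parent, __ydb_id, __ydb_centroid, and indexImplPostingTable with __ydb_parent plus the key and covered columns.)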
2025-04-10T11:25:44.983626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:44.983678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:44.986451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:44.988054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:44.988186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:44.989114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:44.989234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:44.989297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:44.989547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:44.989594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:44.989747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:44.989806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:44.991828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:44.991884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:44.992055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:44.992101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:44.992411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:44.992454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:44.992563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:44.992594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-10T11:25:44.992626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:44.992654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:44.992686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:44.992728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:44.992764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:44.992795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:44.992854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:44.992890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:44.992922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:44.995223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:44.995342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:44.995382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:45.372486Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:25:45.372734Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 276us result status StatusSuccess 2025-04-10T11:25:45.373168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:45.374785Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:25:45.375035Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 241us result status StatusSuccess 2025-04-10T11:25:45.375436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> TSchemeShardViewTest::EmptyName >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService |93.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |93.1%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Initializer::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 21758, MsgBus: 62466 2025-04-10T11:25:35.097306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644177493998768:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:35.097354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d5a/r3tmp/tmpA49VsY/pdisk_1.dat 2025-04-10T11:25:35.535569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:35.543004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.543124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.545346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21758, node 1 2025-04-10T11:25:35.659104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:35.659134Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:35.659154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:35.659269Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62466 TClient is connected to server localhost:62466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:36.233632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
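(The KQP_WORKLOAD_SERVICE warnings that follow, "Resource pool default not found or you don't have access permissions", are routine during test bootstrap: the first query arrives before the default workload-manager pool exists, after which TPoolCreatorActor creates /Root/.metadata/workload_manager/pools/default itself, retries on "Transaction ... completed, doublechecking", and the later TX_PROXY "path exist, request accepts it" message confirms the pool is already in place. For reference, a resource pool can also be created explicitly; a minimal YQL sketch, where the pool name and limit values are illustrative assumptions rather than anything taken from this log:)

    -- Illustrative only; name and limits are assumptions, not from this log.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );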
2025-04-10T11:25:36.252195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:36.265975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.395061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.542787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.621967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:38.319384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644190378902433:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.319497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.670480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.699140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.727598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.797221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.828995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.897949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.974066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644190378902955:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.974178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.974193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644190378902960:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.977413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:38.986175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644190378902962:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:39.045455Z node 1 :TX_PROXY ERROR: Actor# [1:7491644194673870311:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:40.097449Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644177493998768:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:40.097532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24567, MsgBus: 25097 2025-04-10T11:25:40.857115Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644199339059046:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:40.857175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d5a/r3tmp/tmp5ODrzw/pdisk_1.dat 2025-04-10T11:25:40.965202Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24567, node 2 2025-04-10T11:25:40.991456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:40.991567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:40.993463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:41.018091Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:41.018112Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:41.018120Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:41.018262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25097 TClient is connected to server localhost:25097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:41.426950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.437694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.509601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.662036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:41.743587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:43.766608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644212223962689:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:43.766695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:43.805121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.833354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.858084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.886542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.914221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.943302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.982790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644212223963198:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:43.982875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644212223963203:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:43.982903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:43.986239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:43.995589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644212223963205:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:44.080584Z node 2 :TX_PROXY ERROR: Actor# [2:7491644216518930556:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 870322609611582342 2025-04-10T11:25:46.026495Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.026680Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.026753Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.026829Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.026914Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.027009Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.027086Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028127Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028236Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028288Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028341Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028387Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028443Z 6 00h00m30.010512s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028488Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.028565Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028618Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028648Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028716Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028749Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028811Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.028844Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-10T11:25:46.030713Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.030784Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.030828Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.030919Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.030964Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.031006Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-10T11:25:46.031078Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 
1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25087, MsgBus: 25981 2025-04-10T11:25:35.798517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644174630689707:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:35.798627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001627/r3tmp/tmpUsip9i/pdisk_1.dat 2025-04-10T11:25:36.165625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:36.186018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:36.186122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:36.189917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25087, node 1 2025-04-10T11:25:36.276507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:36.276535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:36.276544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:36.276682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25981 TClient is connected to server localhost:25981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:36.824897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.843694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.982342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:37.115802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.179621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.887065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644187515593373:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.887183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.207172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.246858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.281841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.317938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.358054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.411796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.460011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644191810561180:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.460121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.460350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644191810561185:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.464440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:39.474427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644191810561187:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:39.577474Z node 1 :TX_PROXY ERROR: Actor# [1:7491644191810561241:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:40.791001Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644174630689707:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:40.791160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12928, MsgBus: 22787 2025-04-10T11:25:41.497999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644202798252844:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:41.498107Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001627/r3tmp/tmpI2pbxA/pdisk_1.dat 2025-04-10T11:25:41.583706Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12928, node 2 2025-04-10T11:25:41.636384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:41.636462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:41.637613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:41.651713Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:41.651733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:41.651739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:41.651830Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22787 TClient is connected to server localhost:22787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:42.043773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.055550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.129432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.275444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.354497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:44.343718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644215683156523:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.343787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.358038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.386374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.412551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.441582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.470534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.498357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.538790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644215683157031:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.538864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.538922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644215683157036:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.541742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:44.549849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644215683157038:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:44.644429Z node 2 :TX_PROXY ERROR: Actor# [2:7491644215683157093:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> YdbMonitoring::SelfCheck [GOOD] >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:46.589018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:46.589187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.589238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:46.589279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:46.590178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:46.590209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:46.590283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.590335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:46.591371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:46.675038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:46.675095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:46.688551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:46.688825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:46.689001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:46.698344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:46.698560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:46.703017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.704161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:46.708775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.717205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:46.717292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.717395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:46.717585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.724137Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:46.845584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:46.845798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.845972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:46.846553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:46.846613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.849999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.850141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:46.850368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.850436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:46.850475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:46.850508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:46.852670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.852733Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-10T11:25:46.852768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:46.854592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.854636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.854681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.854739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.859292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:46.861101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:46.863863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:46.864928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.865068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.865134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.866239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:46.866305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.866463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.866571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:46.868800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.868850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.869029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.869073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:46.869342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.869390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:46.869482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.869522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.869613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:46.869695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:46.869774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:46.869860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.869898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:46.869930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:46.872171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.872288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.872329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
View Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-04-10T11:25:46.886126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:25:46.886216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:25:46.886260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:46.886604Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:25:46.905647Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:25:46.906437Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:25:46.908440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-10T11:25:46.908592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-10T11:25:46.908864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.908910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-04-10T11:25:46.908957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-10T11:25:46.909065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:46.911005Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:25:46.911381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-10T11:25:46.911500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-10T11:25:46.911788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.912006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.912055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-10T11:25:46.912160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-10T11:25:46.912413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.912495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-04-10T11:25:46.914543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.914586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.914730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:25:46.914819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.914853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-10T11:25:46.914895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-10T11:25:46.915252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.915306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-10T11:25:46.915403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:25:46.915466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:25:46.915559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-10T11:25:46.915593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:25:46.915632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-10T11:25:46.915667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-10T11:25:46.915699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-10T11:25:46.915730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-10T11:25:46.915790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:25:46.915825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-10T11:25:46.915856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:25:46.915884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:25:46.916413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:25:46.916524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:25:46.916575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:25:46.916614Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:25:46.916648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.917155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:25:46.917224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-10T11:25:46.917248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-10T11:25:46.917292Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:25:46.917319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:46.917387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-10T11:25:46.920691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-10T11:25:46.920796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-04-10T11:25:46.921066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:25:46.921112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:25:46.921529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:25:46.921613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:25:46.921648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:297:2288] TestWaitNotification: OK eventTxId 101 2025-04-10T11:25:46.922084Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:25:46.922280Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 204us result status StatusSuccess 2025-04-10T11:25:46.922616Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-04-10T11:24:37.059480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:24:37.059679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:24:37.059853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0018b0/r3tmp/tmpUsK3wA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27389, node 1 TClient is connected to server localhost:62302 2025-04-10T11:24:38.027570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:24:38.093550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:38.102337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:38.102405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:38.102433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:38.102767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:38.139424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:38.140274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:38.152812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:48.306312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.306999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:693:2579], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.307078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:48.318107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:24:48.411309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2582], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:24:48.489686Z node 1 :TX_PROXY ERROR: Actor# [1:769:2623] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:48.826469Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:779:2632], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:48.834187Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjczNmMxYi0yYWM1NjQ1OC1kOWZiNjZhZC1mODM5ZmZkNw==, ActorId: [1:679:2571], ActorState: ExecuteState, TraceId: 01jrfq419c1mr99wy4z60xz47t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-04-10T11:24:48.925218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T11:24:50.351133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:50.818330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:51.661691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-04-10T11:25:02.696604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfq4f716tkbdg7krak3323w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2YTYwMDMtMmQ3OTFjNjAtNzJkZjA4NGEtZjA5M2Q1Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-10T11:25:13.894091Z node 1 :TX_PROXY ERROR: Actor# [1:1366:3060] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-04-10T11:25:13.894405Z node 1 :TX_PROXY ERROR: Actor# [1:1366:3060] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-10T11:25:24.476666Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfq54gac0y09nbft84p7mq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYzYTE1NDctYWZmOTg5ODctYThjNjYxMTYtMzkxODZjODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-04-10T11:25:45.815324Z node 1 :TX_PROXY ERROR: Actor# [1:1568:3212] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-04-10T11:25:45.815508Z node 1 :TX_PROXY ERROR: Actor# [1:1568:3212] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:46.589057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:46.589179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.589239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:46.589304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:46.590229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:46.590290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:46.590391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.590474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:46.591508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:46.683689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:46.683757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:46.700306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:46.700609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:46.700760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:46.708668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:46.708953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:46.709721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.710122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:46.712291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714796Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.717250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:46.717341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.717408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:46.717641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.724616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:46.858608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:46.858821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.858992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:46.859210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:46.859271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.861134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.861244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:46.861417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.861471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:46.861500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:46.861532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:46.863149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.863199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:46.863226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:46.864523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.864558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:25:46.864588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.864634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.867496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:46.868983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:46.869150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:46.869979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.870085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.870126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.870397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:46.870443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.870568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.870642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:46.872331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.872380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.872531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.872570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:46.872796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.872832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:46.872939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.872973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-10T11:25:46.873014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.873075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.873109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:46.873140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.873185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:46.873214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:46.873281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.873319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:46.873354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:46.875082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.875188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.875220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Id: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-10T11:25:46.937608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-10T11:25:46.937755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-10T11:25:46.937855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-10T11:25:46.939402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:25:46.939538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 
2025-04-10T11:25:46.939937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.940059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.940114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-10T11:25:46.940248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:25:46.940442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.940525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:25:46.942757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.942806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.942954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:25:46.943105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.943320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:25:46.943391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:25:46.943740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.943790Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:25:46.943908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:25:46.943946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:46.943990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:25:46.944030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:46.944070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:25:46.944116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:46.944162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:25:46.944218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:25:46.944291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:25:46.944332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:25:46.944389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:25:46.944425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:25:46.945113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:46.945231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:46.945290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:25:46.945349Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:25:46.945394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.946476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:46.946592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:46.946621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:25:46.946647Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:25:46.946687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:46.946763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:25:46.947563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:25:46.947616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:25:46.947696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.949898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:25:46.951743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:25:46.951826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-10T11:25:46.952059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:25:46.952093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-04-10T11:25:46.952205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:25:46.952234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:25:46.952705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:25:46.952781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:25:46.952823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:327:2318] 2025-04-10T11:25:46.952981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:25:46.953033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:25:46.953050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:327:2318] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-04-10T11:25:46.953587Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:25:46.953761Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 167us result status StatusPathDoesNotExist 2025-04-10T11:25:46.953939Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:46.589042Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:46.589168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.589236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:46.589297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:46.590195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:46.590272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:46.590372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.590464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:46.591484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:46.683421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:46.683495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:46.697845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:46.698098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:46.698270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:46.705832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:46.706045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:46.706584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.706879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:46.708733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.717242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:46.717338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.717401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:46.717651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.724638Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:46.842239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:46.843950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.845127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:46.846559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:46.846626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.849998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.850140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:46.850366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.850486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:46.850525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:46.850563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:46.852668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.852730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:46.852770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:46.854546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.854598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.854640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.854706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.859090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:46.861107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:46.863885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:46.864945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.865078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.865131Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.866202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:46.866269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.866454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.866544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:46.868749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.868796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.868983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.869024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:46.869343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.869391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:46.869494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.869529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.869627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:46.869730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.869786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:46.869818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-04-10T11:25:46.869881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.869936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:46.869976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:46.872106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.872243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.872285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-10T11:25:46.917884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-10T11:25:46.919952Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:25:46.920897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:25:46.921042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-10T11:25:46.921464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.921595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.921662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-10T11:25:46.921793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:25:46.921988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.922066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:25:46.924190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-10T11:25:46.924238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.924391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:25:46.924719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.924784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:25:46.924838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:25:46.925187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.925235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:25:46.925346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:25:46.925384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:46.925430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:25:46.925469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:46.925506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:25:46.925549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:46.925585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:25:46.925623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:25:46.925700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:25:46.925751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:25:46.925808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:25:46.925841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:25:46.926691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:46.926787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:46.926824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:25:46.926880Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:25:46.926923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.927661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:46.927741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:46.927782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:25:46.927813Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:25:46.927851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:46.927921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:25:46.931156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:46.931325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-10T11:25:46.931662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:25:46.931911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-10T11:25:46.932086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:25:46.932124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-10T11:25:46.932196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:25:46.932220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:25:46.932760Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:25:46.933081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:25:46.933137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:303:2294] 2025-04-10T11:25:46.933382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:25:46.933447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:25:46.933491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got 
EvNotifyTxCompletionResult 2025-04-10T11:25:46.933528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:303:2294] 2025-04-10T11:25:46.933640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:25:46.933662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-10T11:25:46.934289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:25:46.934508Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 247us result status StatusSuccess 2025-04-10T11:25:46.934853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD]
>> DataShardReadIteratorSysTables::ShouldRead
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive
>> DataShardReadIteratorBatchMode::SelectingColumns
>> DataShardReadIterator::ShouldReverseReadMultipleKeys
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:46.985551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1,
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:46.985660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.985722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:46.985777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:46.985838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:46.985877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:46.985961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.986045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:46.986444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:47.095115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:47.095162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:47.106881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:47.107187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:47.107355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:47.113510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:47.113744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:47.114358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:47.114682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:47.116222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:47.117202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:47.117242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:47.117390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:47.117423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:47.117456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:47.117628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.122649Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:47.232756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:47.233025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.233227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:47.233508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:47.233586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.235764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:47.235879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:47.236023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.236075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:47.236107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:47.236131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:47.237678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.237726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:47.237760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:47.239081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.239111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.239141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:47.239183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:47.241814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:47.243485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:47.243629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:47.244349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:47.244446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:47.244492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:47.244694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:47.244729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:47.244871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:47.244945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:47.246985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:47.247028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:47.247186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:47.247219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:47.247437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.247474Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:47.247567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:47.247596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:47.247637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:47.247693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:47.247731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:47.247764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:47.247796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:47.247825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:47.247896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:47.247933Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:47.247967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:47.250181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:47.250293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:47.250326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:25:47.250364Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:25:47.250400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:47.250580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:25:47.253360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:25:47.253829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:25:47.256473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:47.256622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-04-10T11:25:47.256672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-04-10T11:25:47.256743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-10T11:25:47.257092Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:25:47.274532Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:25:47.275246Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:25:47.277443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:47.277590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-04-10T11:25:47.278780Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 
281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec
>> TargetDiscoverer::SystemObjects [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:46.589042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:46.589139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.589203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:46.589244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:46.590201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:46.590259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:46.590340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:46.590407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:46.591457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:46.675012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:46.675075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:46.690029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:46.690278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:46.690396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:46.698213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:46.698564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:46.703018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.704149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:46.709112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.714733Z node 1
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.717207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:46.717292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.717355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:46.717600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.724354Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:46.876575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:46.876825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.877045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:46.877297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:46.877363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.879639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.879771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:46.879939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.880001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:46.880037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:46.880068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:46.881803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.881861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:46.881897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:46.883582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.883629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:25:46.883668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.883745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.887447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:46.889379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:46.889599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:46.890374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:46.890478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:46.890530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.890780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:46.890821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:46.890955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:46.891032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:46.892902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:46.892938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:46.893068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:46.893095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:46.893258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:46.893311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:46.893405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.893439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.893489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:46.893532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.893560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:46.893589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:46.893615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:46.893632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:46.893691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:46.893717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:46.893738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:46.895368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.895469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:46.895509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.176533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.176685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.176774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.176855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.177738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.184972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:47.185046Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:47.185558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:47.185615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:47.185675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:47.187086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:378:2347] sender: [1:435:2058] recipient: [1:15:2062] 2025-04-10T11:25:47.221794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:47.222064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-04-10T11:25:47.222175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-04-10T11:25:47.222391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:25:47.222490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T11:25:47.222540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:47.225107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-10T11:25:47.225244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-04-10T11:25:47.225454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.225501Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-04-10T11:25:47.225560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-10T11:25:47.225689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:47.227653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-10T11:25:47.227839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, 
partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-04-10T11:25:47.228558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:47.228669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:47.228720Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-04-10T11:25:47.228877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-10T11:25:47.229037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:47.229115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-10T11:25:47.231330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:47.231373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:47.231541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:25:47.231625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:47.231662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:426:2384], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-10T11:25:47.231700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:426:2384], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-10T11:25:47.232084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:25:47.232148Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:25:47.232250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:25:47.232286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:25:47.232326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:25:47.232353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:25:47.232392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-10T11:25:47.232444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:25:47.232478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:25:47.232505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 103:0 2025-04-10T11:25:47.232567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:25:47.232606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-10T11:25:47.232641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:25:47.232668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:25:47.233295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:25:47.233388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:25:47.233423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:25:47.233457Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:25:47.233508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:25:47.234233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:25:47.234324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:25:47.234350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:25:47.234374Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:25:47.234400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:25:47.234535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-10T11:25:47.237803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:25:47.238961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionGraphTest::BuildGraph [GOOD] Test command err: 2025-04-10T11:24:40.163758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643941499888994:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.163821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.193995Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643939953195016:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.201537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.381706Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:40.402939Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ccf/r3tmp/tmpKvZXUU/pdisk_1.dat 2025-04-10T11:24:40.659436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.659562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.660032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.660074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.663153Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:40.664103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.665020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.676333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7925, node 1 2025-04-10T11:24:40.779911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000ccf/r3tmp/yandexTL6rtV.tmp 2025-04-10T11:24:40.779946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000ccf/r3tmp/yandexTL6rtV.tmp 2025-04-10T11:24:40.780132Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000ccf/r3tmp/yandexTL6rtV.tmp 2025-04-10T11:24:40.780305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:40.937516Z INFO: TTestServer started on Port 2126 GrpcPort 7925 TClient is connected to server localhost:2126 PQClient connected to localhost:7925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:24:41.329037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.369910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:41.414270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:24:43.677792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643954384791973:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.678064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643954384791952:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.678185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.681729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:24:43.722522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643954384791989:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:43.956878Z node 1 :TX_PROXY ERROR: Actor# [1:7491643954384792082:2763] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:43.981983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:43.984792Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643952838097317:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:43.985158Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643954384792102:2349], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:43.985505Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDZlM2EzZTUtZTBkNzNmYzctZTM1MDg5NzktNjFhYTQwZjI=, ActorId: [1:7491643954384791948:2336], ActorState: ExecuteState, TraceId: 01jrfq3wrg246wxz2r94czzjn7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:43.985955Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQ3YWQyM2EtZGMyMjgzODgtODk1YjZmN2UtM2FjNjE3N2I=, ActorId: [2:7491643952838097278:2312], ActorState: ExecuteState, TraceId: 01jrfq3wx67nks3s0x2sm9wgqz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:43.987773Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:43.987749Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.048796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.183081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:24:44.438689Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfq3xb5dtw912at0n5z08s9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA1ZjdhODYtNzhmYWMxM2YtNmQ5ZjMwYmUtZGE0NTkzMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList.
Subcribe to ClusterTracker from [1:7491643958679759835:3092] 2025-04-10T11:24:45.164382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643941499888994:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.164457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:45.193933Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643939953195016:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.194012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:24:50.498556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-10T11:24:51.156157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:24:51.603576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... se: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmZkYWVmODUtN2NlYTY1NWMtZTZmNDQ5YjUtY2MzY2RkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [7:7491644171287998809:3094] === CheckClustersList. Ok 2025-04-10T11:25:39.942582Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 2025-04-10T11:25:40.753826Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.487101Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.284122Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.107359Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.777728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710699:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1744284344455, 1744284344455, 0, 13); 2025-04-10T11:25:44.574677Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710705. 
Ctx: { TraceId: 01jrfq5r4y2f4qhx2knq8z2w96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzY1YTc0ZjktM2U2MDczODMtMmNiN2NkYmItN2Q1NTUwNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:25:44.589873Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T11:25:44.589904Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:25:44.589935Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T11:25:44.589976Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-10T11:25:44.590183Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7491644214237673325:3991], Recipient [7:7491644192762835728:3346]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7491644214237673324:3991] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-10T11:25:44.590331Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7491644214237673324:3991], Recipient [7:7491644192762835728:3346]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-04-10T11:25:44.590421Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [7:7491644192762835728:3346], Recipient [7:7491644214237673324:3991]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-10T11:25:44.590473Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-10T11:25:44.590554Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7491644214237673324:3991], Recipient [7:7491644192762835728:3346]: NActors::TEvents::TEvPoison 2025-04-10T11:25:44.590669Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [7:7491644149813160686:2070], Recipient [7:7491644214237673324:3991]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-10T11:25:44.590707Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-04-10T11:25:44.594223Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, 
received event# 271646728, Sender [7:7491644149813160911:2280], Recipient [7:7491644214237673324:3991]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=7&id=MWQ0ZGU1YjAtZDk2YmUzYjUtMTRhMDk4NGMtZDRjMzAxZDg=" NodeId: 7 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-10T11:25:44.594275Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-04-10T11:25:44.746365Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [7:7491644149813160911:2280], Recipient [7:7491644214237673324:3991]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=MWQ0ZGU1YjAtZDk2YmUzYjUtMTRhMDk4NGMtZDRjMzAxZDg=" PreparedQuery: "d1648c2d-c4f01d46-4ab53b7e-264a3ba0" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jrfq5rd47498bczpzjqnrx7q" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744284344455 } items { uint64_value: 1744284344455 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 95 2025-04-10T11:25:44.746580Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-10T11:25:44.746628Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-10T11:25:44.746777Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7491644214237673362:3991], Recipient [7:7491644192762835728:3346]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7491644214237673324:3991] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-10T11:25:44.746855Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7491644214237673324:3991], Recipient [7:7491644192762835728:3346]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-10T11:25:44.746938Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [7:7491644192762835728:3346], Recipient [7:7491644214237673324:3991]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-10T11:25:44.746977Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-04-10T11:25:44.747230Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7491644214237673324:3991], Recipient [7:7491644192762835728:3346]: NActors::TEvents::TEvPoison 2025-04-10T11:25:44.791172Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:25:44.791209Z node 7 :IMPORT WARN: Table profiles were not loaded Received TEvChooseResult: 1 2025-04-10T11:25:44.834720Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, 
Sender [7:7491644149813160911:2280], Recipient [7:7491644214237673324:3991]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=MWQ0ZGU1YjAtZDk2YmUzYjUtMTRhMDk4NGMtZDRjMzAxZDg=" PreparedQuery: "35d7cf56-29e5bf7f-7fc3d6b5-a441b54a" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 51 2025-04-10T11:25:44.834760Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-10T11:25:44.834790Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-04-10T11:25:44.834815Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644214237673324:3991] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-04-10T11:25:45.066407Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710708. Ctx: { TraceId: 01jrfq5rgrdnnxb7rmtjqvjnfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmMwOWQxYTQtOWI5NWI0ZDUtYTNiZTQ2ZS05MWFmYzQwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:25:45.557691Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7491644218532640771:2711] TxId: 281474976710709. Ctx: { TraceId: 01jrfq5s23brez1jb8abj3ys74, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTFkNjc0MDgtOTE5ZTkyNTMtZGYzOGEzNmItMmE4M2NmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2025-04-10T11:25:45.557874Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491644218532640778:2711], TxId: 281474976710709, task: 2. Ctx: { TraceId : 01jrfq5s23brez1jb8abj3ys74. SessionId : ydb://session/3?node_id=7&id=YTFkNjc0MDgtOTE5ZTkyNTMtZGYzOGEzNmItMmE4M2NmNjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7491644218532640771:2711], status: UNAVAILABLE, reason: {
<main>: Error: Terminate execution } >> DataShardReadIterator::ShouldReadKeyCellVec >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2025-04-10T11:24:40.315446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643937905335681:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.315512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.370626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643939152856760:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.371398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.507420Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:40.520041Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cff/r3tmp/tmpvoVjhq/pdisk_1.dat 2025-04-10T11:24:40.829410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:40.837249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.837354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.839095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.839184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.846718Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:40.846881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.848172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1145, node 1 2025-04-10T11:24:40.962035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000cff/r3tmp/yandexp2oNqC.tmp 2025-04-10T11:24:40.962067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000cff/r3tmp/yandexp2oNqC.tmp 2025-04-10T11:24:40.962266Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000cff/r3tmp/yandexp2oNqC.tmp 2025-04-10T11:24:40.962416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:41.029694Z INFO: TTestServer started on Port 17159 GrpcPort 1145 TClient is connected to server localhost:17159 PQClient connected to localhost:1145 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:41.314484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:41.377251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:24:43.780830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643950790238669:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.780944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.782403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643950790238681:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.786677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:24:43.810361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643950790238683:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:44.054286Z node 1 :TX_PROXY ERROR: Actor# [1:7491643950790238770:2766] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:44.080422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.091839Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643955085206089:2349], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:44.093567Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE3N2U1Ny02NjhmZTU2Zi00ZTBmOTFjLTRjY2RjMDE1, ActorId: [1:7491643950790238666:2337], ActorState: ExecuteState, TraceId: 01jrfq3wv6cpmr9k8zzgberzzz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:44.095668Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.097944Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643952037759060:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:44.098229Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTM4MzBmYzgtZGQyNjA0YmMtNDE3OWUwOTctNGQ2ZmRlYWE=, ActorId: [2:7491643952037759022:2312], ActorState: ExecuteState, TraceId: 01jrfq3wxq4d7z1m2yhc66pqn6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:44.098671Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.136507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.283171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:24:44.544186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfq3xf0azm1ncx4xr6ad6fr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q4ODg0ZjUtNGFkMDA5MjEtMThiNzRiYTEtNzU3ZWVkN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491643955085206530:3097] 2025-04-10T11:24:45.315564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643937905335681:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.315648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:45.364660Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643939152856760:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.364728Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok 2025-04-10T11:24:50.574803Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T11:24:50.574867Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTim ... 1:25:33.835494Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:25:33.923551Z INFO: TTestServer started on Port 19189 GrpcPort 7502 TClient is connected to server localhost:19189 PQClient connected to localhost:7502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:34.252208Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:34.312931Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:25:38.058169Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491644188303620491:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.058263Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7491644188303620496:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.058351Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.063533Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:25:38.095107Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-10T11:25:38.095413Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7491644188303620505:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:25:38.114429Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.131012Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491644187595883499:2315], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:38.131343Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YzlkZjllMWQtMjMxYzNhZWUtZTMxNDBlYzQtNWVkMTFjYjI=, ActorId: [10:7491644187595883461:2309], ActorState: ExecuteState, TraceId: 01jrfq5hxcbqrcgn4jcbat895a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:25:38.132084Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:25:38.191286Z node 9 :TX_PROXY ERROR: Actor# [9:7491644188303620664:2857] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:38.212964Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.230030Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7491644188303620680:2359], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:38.230358Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MTQwY2Y0YzQtZDIzZmRkOTMtOTZlNDY0ZWUtMzdmNzVmZDA=, ActorId: [9:7491644188303620488:2342], ActorState: ExecuteState, TraceId: 01jrfq5hw7cjptr0sdeng4d6wk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:25:38.231048Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:25:38.334258Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491644166828782832:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:38.334319Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:38.343494Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:38.344940Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491644166121046653:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:38.345077Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:25:38.619509Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfq5j93f0jntrqgxp00k7cs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NTYyMTY5ZjAtMWIyMTgyZDUtMWQwMjEyMTItY2FiN2JkYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7491644188303620984:3101] === CheckClustersList. Ok Received TEvChooseError: Bad SourceId 2025-04-10T11:25:44.089247Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7491644214073425135:3299] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-04-10T11:25:44.089306Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7491644214073425135:3299] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId 2025-04-10T11:25:45.048583Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-10T11:25:45.084316Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465].
Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.131406Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.195362Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.291016Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.470781Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.663394Z node 9 :KQP_COMPUTE WARN: TxId: 281474976710678, task: 1, CA Id [9:7491644218368392474:2465]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-10T11:25:45.695180Z node 9 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710679. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T11:25:45.695361Z node 9 :KQP_EXECUTER WARN: ActorId: [9:7491644218368392533:2467] TxId: 281474976710679. Ctx: { TraceId: 01jrfq5rqt6bda1axqe8nvz6xn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MmYwZWFlMDUtZjQ3NzY3ZTMtYjE3MDg1MC1mM2UzYTYzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
<main>: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-10T11:25:45.695708Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MmYwZWFlMDUtZjQ3NzY3ZTMtYjE3MDg1MC1mM2UzYTYzOA==, ActorId: [9:7491644218368392483:2467], ActorState: ExecuteState, TraceId: 01jrfq5rqt6bda1axqe8nvz6xn, Create QueryResponse for error on request, msg: 2025-04-10T11:25:45.697020Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jrfq5s5sde20fb1pw3bn9rc2" } } YdbStatus: UNAVAILABLE ConsumedRu: 295 } >> TSchemeShardViewTest::DropView >> DataShardReadIterator::ShouldHandleReadAck |93.2%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TBoardSubscriberTest::ReconnectReplica ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] Test command err: 2025-04-10T11:23:35.238529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643661732856238:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:35.238751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cec/r3tmp/tmptKyfb6/pdisk_1.dat 2025-04-10T11:23:35.552738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:35.596550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:35.596742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9783, node 1 2025-04-10T11:23:35.615682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:35.640199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:35.640226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:35.640232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:35.640350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31239 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:35.913651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:36.074088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:38606" , at schemeshard: 72057594046644480 2025-04-10T11:23:36.074490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:36.074955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:23:36.075002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T11:23:36.075069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:23:36.075133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:23:36.075174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:23:36.075246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 4 2025-04-10T11:23:36.075497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-10T11:23:36.077176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-10T11:23:36.077468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:23:36.077489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:36.077635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:23:36.077689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-10T11:23:36.080010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T11:23:36.080237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-10T11:23:36.080484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:23:36.080525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:23:36.080667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:23:36.080768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:23:36.080794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491643661732856838:2386], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-10T11:23:36.080812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491643661732856838:2386], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-10T11:23:36.080870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:23:36.080902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-10T11:23:36.081710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } ... (remaining BindedChannels entries identical) ... AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T11:23:36.082272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } Binded ... 
etionResult> complete, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.310437Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:36.310472Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 281474976710667:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:25:36.310519Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-10T11:25:36.310722Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-10T11:25:36.310748Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-10T11:25:36.310773Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-10T11:25:36.310788Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-10T11:25:36.310808Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710667, ready parts: 1/1, is published: true 2025-04-10T11:25:36.310869Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [64:7491644181121282403:2370] message: TxId: 281474976710667 2025-04-10T11:25:36.310894Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-10T11:25:36.310919Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710667:0 2025-04-10T11:25:36.310933Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710667:0 2025-04-10T11:25:36.311111Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-10T11:25:36.316382Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:25:36.316419Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:25:36.316436Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:25:36.316455Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:25:36.318182Z node 64 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[64:7491644176826314429:2325];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:25:36.318246Z node 64 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[64:7491644176826314436:2326];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-10T11:25:36.321562Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7491644176826314458:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:25:36.322486Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-04-10T11:25:36.322523Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-04-10T11:25:36.322549Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not 
found 2025-04-10T11:25:36.330228Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:25:36.331585Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-10T11:25:36.332638Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:25:36.333698Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:25:36.333882Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T11:25:36.334763Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:25:36.335034Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:25:36.336021Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:25:36.336349Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:25:36.336823Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:25:36.336897Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:25:36.339074Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7491644176826314429:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:25:36.345917Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7491644176826314436:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:25:36.347006Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-04-10T11:25:36.351715Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7491644176826314425:2324];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T11:25:36.362069Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:25:36.362112Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:25:36.362204Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:25:36.362216Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:25:36.362242Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:25:36.362255Z node 64 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:25:36.362279Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:25:36.362298Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:25:36.362337Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:25:41.708689Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7491644202497067861:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:41.709239Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cec/r3tmp/tmpfLhUgP/pdisk_1.dat 2025-04-10T11:25:41.860285Z node 67 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:41.910764Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:41.910851Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:41.913989Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10386, node 67 2025-04-10T11:25:42.029410Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:42.029437Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:42.029451Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:42.029635Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:42.419715Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-67" reason: "YELLOW-e9e2-1231c6b1-68" reason: "YELLOW-e9e2-1231c6b1-69" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-67" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 67 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-68" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 68 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-69" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 69 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 67 host: "::1" port: 12001 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-04-10T11:24:46.092404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643967045511220:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:46.092480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:46.114829Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643966206094058:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:46.114870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:46.269978Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:46.272821Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/tmpHXCL2u/pdisk_1.dat 2025-04-10T11:24:46.452245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:46.452337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:46.457730Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:46.458664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:46.484376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:46.484496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6659, node 1 2025-04-10T11:24:46.490124Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:46.492221Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:46.492380Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root 
Strong=0 2025-04-10T11:24:46.507120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:46.521411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexPD7tqe.tmp 2025-04-10T11:24:46.521438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexPD7tqe.tmp 2025-04-10T11:24:46.521585Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexPD7tqe.tmp 2025-04-10T11:24:46.521684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:46.568613Z INFO: TTestServer started on Port 26800 GrpcPort 6659 TClient is connected to server localhost:26800 PQClient connected to localhost:6659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:46.810428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:46.850946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:24:49.034389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643979930414248:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.034539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643979930414271:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.034611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.038615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:24:49.044314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643979930414311:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.044424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.044754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643979090996236:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.044800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643979090996265:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.044862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:49.054069Z node 2 :TX_PROXY ERROR: Actor# [2:7491643979090996274:2123] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:24:49.055949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-10T11:24:49.057030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491643979930414277:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:49.057114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491643979090996273:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:24:49.124637Z node 1 :TX_PROXY ERROR: Actor# [1:7491643979930414365:2809] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:49.128196Z node 2 :TX_PROXY ERROR: Actor# [2:7491643979090996303:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:49.285910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:24:49.289097Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643979930414384:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:49.290700Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643979090996318:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:49.290941Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWMwOGFmOWQtMWRlNzNmNWUtMmY0NjEyMmEtNzkzYzBjODc=, ActorId: [2:7491643979090996234:2307], ActorState: ExecuteState, TraceId: 01jrfq420g01sy3vp9ac8e38g8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:49.291113Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhhZjY0ZTAtMjgzNWVjNGYtZGQ5ZmUzODctZjIzMTA4ZA==, ActorId: [1:7491643979930414245:2340], ActorState: ExecuteState, TraceId: 01jrfq41zp50arrsv9ca8m2se3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:49.292864Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:49.292894Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness ... 
d cache: : Created 2025-04-10T11:25:35.047020Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:35.086512Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.086636Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.090850Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-10T11:25:35.091390Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16119, node 7 2025-04-10T11:25:35.154825Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:35.154933Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:35.157075Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:35.164658Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexyfSnDh.tmp 2025-04-10T11:25:35.164678Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexyfSnDh.tmp 2025-04-10T11:25:35.164798Z node 7 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000c8c/r3tmp/yandexyfSnDh.tmp 2025-04-10T11:25:35.164935Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:25:35.231155Z INFO: TTestServer started on Port 17100 GrpcPort 16119 TClient is connected to server localhost:17100 PQClient connected to localhost:16119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:35.519760Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:35.571384Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:25:39.324862Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491644194943251897:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.324949Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7491644194943251884:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.325020Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.332951Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-04-10T11:25:39.368548Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7491644194943251913:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-04-10T11:25:39.431795Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7491644191145822253:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:39.434584Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzZmYzkzOTctNDhiNjFmZTEtNGFmOTBiY2ItOTBiMWU2OTA=, ActorId: [7:7491644191145822220:2342], ActorState: ExecuteState, TraceId: 01jrfq5k5xf33jb6kfc6yy9sn4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:25:39.435148Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:25:39.447572Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.469389Z node 8 :TX_PROXY ERROR: Actor# [8:7491644194943251941:2131] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:39.526632Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7491644194943251972:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:39.526938Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=N2E0YzAzYjctN2Y1YzE0ZDYtYTEwN2E4YWYtMzMzNTRhNWY=, ActorId: [8:7491644194943251881:2309], ActorState: ExecuteState, TraceId: 01jrfq5k3r434fvh4k4tvhbb2w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:25:39.527532Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:25:39.584494Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.732102Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.826837Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7491644169670984528:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.826961Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:25:39.854249Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7491644173468415092:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.854345Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:40.028734Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq5kn4ce0ak1fqkkte7njv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NWVlNzcwMGMtZjk3Njg4LTkwNGJjOGRkLThmZDQ2NjE1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [7:7491644195440789937:3075] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-04-10T11:25:45.199208Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7491644216915626758:3257] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-04-10T11:25:45.199251Z node 7 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [7:7491644216915626758:3257] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId 2025-04-10T11:25:46.580685Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7491644221210594144:2466] TxId: 281474976710676. Ctx: { TraceId: 01jrfq5sts594kt3hcynvtmga1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTQ5YjgzMzMtYzllNDgxMzYtM2U1YjE5ZDgtYTlmYWFhYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2025-04-10T11:25:46.580864Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491644221210594155:2474], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OTQ5YjgzMzMtYzllNDgxMzYtM2U1YjE5ZDgtYTlmYWFhYzg=. TraceId : 01jrfq5sts594kt3hcynvtmga1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7491644221210594144:2466], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-10T11:25:46.580910Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7491644221210594157:2475], TxId: 281474976710676, task: 4. Ctx: { TraceId : 01jrfq5sts594kt3hcynvtmga1. SessionId : ydb://session/3?node_id=7&id=OTQ5YjgzMzMtYzllNDgxMzYtM2U1YjE5ZDgtYTlmYWFhYzg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [7:7491644221210594144:2466], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-04-10T11:25:45.021986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644220035285133:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:45.022110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e86/r3tmp/tmpwXhhK6/pdisk_1.dat 2025-04-10T11:25:45.324461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:45.404435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:45.404549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:45.406249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17438 TServer::EnableGrpc on GrpcPort 6167, node 1 2025-04-10T11:25:45.544988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:45.545008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:45.545032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:45.545125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:45.838067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:45.854991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:45.955720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:46.001550Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744284345895, tx_id: 1 } } } 2025-04-10T11:25:46.001586Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-10T11:25:46.008649Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284345958, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1744284345993, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-10T11:25:46.008695Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-10T11:25:47.453477Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284345958, tx_id: 281474976710658 } } } 2025-04-10T11:25:47.453517Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-10T11:25:47.453571Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TSchemeShardViewTest::DropView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:48.099457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:48.099564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:48.099620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:48.099664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:48.099708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:48.099733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:48.099795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:48.099855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:48.100162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:48.182589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:48.182643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:48.196793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:48.197068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:48.197221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:48.204423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:48.204692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:48.205339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.205717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:48.207639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.208889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.208946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.209157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:48.209199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.209249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:48.209427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.215856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:48.310671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:48.310859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.311037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:48.311223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:48.311279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.313140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.313255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:48.313412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.313459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:48.313485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:48.313509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:48.314947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.314990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:48.315012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:48.316155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.316189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.316230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.316266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.319048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:48.320751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:48.320942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:48.321878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.322003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:48.322075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.322364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:48.322419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.322599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.322700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:48.324957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.324998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.325157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.325195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:48.325422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.325476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:48.325562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:48.325623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.325667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:48.325727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.325779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:48.325828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.325871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:48.325901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:48.325990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:48.326025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:48.326054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:48.328273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:48.328383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:48.328418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:25:48.328452Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:25:48.328490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.328644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:25:48.331439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:25:48.331963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:25:48.334914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:48.335136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-04-10T11:25:48.335205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-04-10T11:25:48.335318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:25:48.335395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:25:48.335440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:48.335824Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:25:48.352427Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:25:48.353301Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:25:48.355509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-10T11:25:48.355666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-10T11:25:48.355941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.355994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-04-10T11:25:48.356042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-10T11:25:48.356181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:48.358213Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 
2025-04-10T11:25:48.358601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-10T11:25:48.358704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-10T11:25:48.358968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.359085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:48.359131Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-10T11:25:48.359254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:25:48.359388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.359441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:25:48.360897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.360944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.361106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:25:48.361197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.361233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:25:48.361281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:25:48.361747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.361819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:25:48.361918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:25:48.361954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:48.361995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:25:48.362023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:48.362058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is 
published: false 2025-04-10T11:25:48.362093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:48.362126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:25:48.362189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:25:48.362253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:25:48.362289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:25:48.362318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:25:48.362345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:25:48.362859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:48.362951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:48.362982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:25:48.363007Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:25:48.363038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:48.363357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:48.363400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:48.363415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:25:48.363431Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:25:48.363449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:48.363486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:25:48.365650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:48.365709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TBoardSubscriberTest::ReconnectReplica [GOOD] |93.2%| [TA] 
$(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:48.622619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:48.622821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:48.622878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:48.622927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:48.622999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:48.623026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:48.623102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:48.623163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:48.623446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:48.704855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:48.704915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:48.720256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:48.720572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:48.720751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:48.728016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:48.728236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:48.728865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.729226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:48.731377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.732718Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.732777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.732971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:48.733018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.733057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:48.733207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.740038Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:48.861556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:48.861804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.862043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:48.862287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:48.862338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.864744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.864882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:48.865060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.865122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:48.865160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:48.865192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:48.867134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.867186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:48.867218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:48.868723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.868768Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.868805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.868843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.872212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:48.873930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:48.874083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:48.875002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.875119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:48.875183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.875451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:48.875502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:48.875659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.875753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:48.877708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.877748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.877907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.877945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:48.878142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.878206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:48.878296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-04-10T11:25:48.878325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.878358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:48.878409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.878449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:48.878487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:48.878518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:48.878545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:48.878615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:48.878651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:48.878681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:48.880739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:48.880835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:48.880867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
dReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-10T11:25:48.923546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:48.923659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-04-10T11:25:48.923737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-10T11:25:48.923774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:48.925383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-10T11:25:48.925506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-04-10T11:25:48.925606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.925635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-04-10T11:25:48.925672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T11:25:48.925750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:48.926939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:25:48.927024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:25:48.927241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:48.927324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:48.927361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-10T11:25:48.927448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-10T11:25:48.927547Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.927598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:48.928816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:48.928850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:48.928952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:25:48.929050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:48.929080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:25:48.929116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:25:48.929390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:25:48.929431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:25:48.929506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:25:48.929532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:48.929569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:25:48.929588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:48.929611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:25:48.929636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:25:48.929657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:25:48.929689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:25:48.929736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:25:48.929762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:25:48.929783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:25:48.929814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-10T11:25:48.930332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:48.930416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:48.930443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:25:48.930485Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:25:48.930514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:48.931233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:48.931283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:25:48.931298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:25:48.931337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:25:48.931355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:25:48.931407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:25:48.931571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:25:48.931600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:25:48.931640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:48.933661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:25:48.934053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:25:48.934101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:25:48.934283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:25:48.934310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:25:48.934586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:25:48.934643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:25:48.934669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [1:321:2312] TestWaitNotification: OK eventTxId 102 2025-04-10T11:25:48.934995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:25:48.935143Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 143us result status StatusPathDoesNotExist 2025-04-10T11:25:48.935345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReadRangeCellVec |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> TAsyncIndexTests::Decimal |93.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 15847574157319979475 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26492, MsgBus: 16632 2025-04-10T11:25:39.224044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644193672046133:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.224129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001605/r3tmp/tmpDPtror/pdisk_1.dat 2025-04-10T11:25:39.649893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26492, node 1 2025-04-10T11:25:39.715777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:39.715968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:39.718573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:39.756816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:39.756839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:39.756847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:39.756957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16632 TClient is connected to server localhost:16632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:40.322133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.347353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:25:40.471870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.626955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.693744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.407392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644206556949794:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.407511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.659062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.688895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.755037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.783749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.812905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.847020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.888115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644206556950305:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.888221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.888287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644206556950310:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.892073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:42.902863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644206556950312:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:42.973333Z node 1 :TX_PROXY ERROR: Actor# [1:7491644206556950365:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:44.225342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644193672046133:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:44.225434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9751, MsgBus: 14500 2025-04-10T11:25:44.912894Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644212622192418:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:44.912955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001605/r3tmp/tmpoZKmEI/pdisk_1.dat 2025-04-10T11:25:45.005485Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9751, node 2 2025-04-10T11:25:45.048545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:45.048622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:45.049897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:45.064110Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:45.064136Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:45.064145Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:45.064263Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14500 TClient is connected to server localhost:14500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:45.429403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:45.446286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:45.494653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:45.621061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:45.696292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:47.581621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644225507096096:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:47.581727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:47.624973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.656096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.683483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.708949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.734846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.772151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.804035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644225507096603:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:47.804119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644225507096608:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:47.804128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:47.807040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:47.816296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644225507096610:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:47.916463Z node 2 :TX_PROXY ERROR: Actor# [2:7491644225507096665:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> TAsyncIndexTests::Decimal [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14716, MsgBus: 7507 2025-04-10T11:25:36.053309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644180871874838:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:36.053422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161b/r3tmp/tmpS4rKwr/pdisk_1.dat 2025-04-10T11:25:36.395267Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14716, node 1 2025-04-10T11:25:36.431686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:36.432141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:36.435121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:36.466561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:36.466590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:36.466601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:36.466723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7507 TClient is connected to server localhost:7507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:25:36.954546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:36.977762Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:36.982889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.101350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.244979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:37.316871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:38.928298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644189461811195:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:38.928388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.256033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.284981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.326357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.398067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.474347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.508918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:39.582527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644193756779013:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.582616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.582992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644193756779018:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:39.586841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:39.597704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644193756779020:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:39.656519Z node 1 :TX_PROXY ERROR: Actor# [1:7491644193756779073:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:40.628946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.669115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:40.716290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:25:41.053012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644180871874838:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:41.053132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23669, MsgBus: 29174 2025-04-10T11:25:43.434352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644209555465556:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:43.434505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161b/r3tmp/tmpfImzwz/pdisk_1.dat 2025-04-10T11:25:43.569905Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:43.576827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:43.576909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:43.578699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23669, node 2 2025-04-10T11:25:43.609528Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:43.609558Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:43.609566Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:43.609711Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29174 TClient is connected to server localhost:29174 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:43.982537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:43.998912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:44.099719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:44.235335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:44.289063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:46.300135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644222440369235:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:46.300253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:46.319621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.346584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.375611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.402715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.431829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.502122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:46.541259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644222440369752:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:46.541356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:46.541360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644222440369757:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:46.544844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:46.553088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644222440369759:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:46.613312Z node 2 :TX_PROXY ERROR: Actor# [2:7491644222440369812:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:47.368359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.405025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:47.446421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:25:48.434490Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644209555465556:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:48.434548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:25:50.683674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:50.683743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:50.683775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:50.683797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:50.683830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:50.683850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:50.683908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:50.683956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:50.684197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:50.736279Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:50.736327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.746024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:50.746214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:50.746357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:50.751187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:50.751345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:50.751800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:50.752057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:50.753372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:50.754454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:50.754505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:50.754631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:50.754674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:50.754704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:50.754805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.759258Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:50.837646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:50.837858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.838039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:50.838233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:50.838269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.839997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:50.840130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:50.840299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.840356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:50.840382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:50.840429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:50.841777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.841812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:50.841834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:50.842987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.843022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.843061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:50.843100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:50.845614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:50.846867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:50.846993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:50.847698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:50.847791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:50.847825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:50.848066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:50.848111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:50.848260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:50.848317Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:50.849692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:50.849723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:50.849891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:50.849923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:50.850117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:50.850175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:50.850255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:50.850288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:50.850318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:50.850339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:50.850360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:50.850386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:50.850410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:50.850429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:50.850465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:50.850489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:50.850528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:50.856964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:50.857074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:50.857105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:51.080890Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:25:51.080914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:25:51.080941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-10T11:25:51.081362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:25:51.081385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T11:25:51.081438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:25:51.081456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:25:51.081486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:25:51.081514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:51.081528Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:51.081553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:25:51.081571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:25:51.086955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:51.087277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:51.088278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:51.090007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:25:51.090081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:25:51.090388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:51.090585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:25:51.091014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:25:51.091061Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-10T11:25:51.091138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:25:51.091165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:25:51.091202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:25:51.091227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:25:51.091254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-10T11:25:51.091551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:51.091897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:25:51.091933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:25:51.091989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:25:51.092011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:25:51.092038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:25:51.092078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:25:51.092099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-10T11:25:51.092161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-04-10T11:25:51.092205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:25:51.092243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:25:51.092273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:25:51.092367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:25:51.092394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-10T11:25:51.092407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-10T11:25:51.092425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:25:51.092437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-10T11:25:51.092457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-10T11:25:51.092486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:25:51.094791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:25:51.094826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-04-10T11:25:51.095252Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:25:51.095435Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 214us result status StatusSuccess 2025-04-10T11:25:51.096041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin >> KqpJoinOrder::CanonizedJoinOrderTPCH5 >> KqpJoin::JoinLeftPureCross >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull >> KqpJoinOrder::DatetimeConstantFold+ColumnStore >> KqpJoin::RightTableIndexPredicate >> KqpJoinOrder::TPCDS88+ColumnStore >> KqpJoinOrder::Chain65Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24231, MsgBus: 62327 2025-04-10T11:25:41.263031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644201486372486:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:41.263220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f8/r3tmp/tmpMuSCPD/pdisk_1.dat 2025-04-10T11:25:41.562008Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24231, node 1 2025-04-10T11:25:41.627373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:41.627405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:41.627426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:41.627639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:25:41.633531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:41.633718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:41.635132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62327 TClient is connected to server localhost:62327 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:42.134272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.169765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.313689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.469620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.538968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:44.072262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644214371276152:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.072387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.412657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.440473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.467116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.494119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.521466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.555792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.594802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644214371276661:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.594893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.594924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644214371276666:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:44.598686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:44.608944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644214371276668:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:44.698266Z node 1 :TX_PROXY ERROR: Actor# [1:7491644214371276723:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21672, MsgBus: 12241 2025-04-10T11:25:46.642220Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644221588531447:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:46.642293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f8/r3tmp/tmpD4cddU/pdisk_1.dat 2025-04-10T11:25:46.725868Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21672, node 2 2025-04-10T11:25:46.771521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:46.771601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:46.773343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:46.780962Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:46.780991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:46.780998Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:46.781122Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12241 TClient is connected to server localhost:12241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:47.203721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:47.220959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:25:47.268748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:47.427823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:47.493519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:49.026109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644234473435128:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:49.026204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:49.058417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.085708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.115178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.141720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.168609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.238969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.281573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644234473435641:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:49.281659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:49.281669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644234473435646:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:49.285572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:49.295795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644234473435648:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:25:49.376638Z node 2 :TX_PROXY ERROR: Actor# [2:7491644234473435701:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] >> KqpJoinOrder::TPCDS94+ColumnStore >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::LeftSemi >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16407, MsgBus: 1533 2025-04-10T11:25:39.204171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644192429461843:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:39.204255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001609/r3tmp/tmp4Cf8sV/pdisk_1.dat 2025-04-10T11:25:39.589335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:39.595686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:39.595800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:39.597666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16407, node 1 2025-04-10T11:25:39.681465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:39.681496Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:39.681503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:39.681620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1533 TClient is connected to server localhost:1533 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:40.216906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.238376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.373796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.527972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:40.604241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:42.037312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644205314365522:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.037428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.282609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.307650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.338068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.362716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.391015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.425297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:42.469150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644205314366035:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.469279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.469351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644205314366040:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:42.473257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:42.483152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644205314366042:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:42.581381Z node 1 :TX_PROXY ERROR: Actor# [1:7491644205314366097:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:43.707272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.780425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:43.814660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:25:44.204457Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644192429461843:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:44.204515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23079, MsgBus: 1915 2025-04-10T11:25:46.329800Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644222015975101:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:46.329882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001609/r3tmp/tmp3yijXJ/pdisk_1.dat 2025-04-10T11:25:46.428218Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23079, node 2 2025-04-10T11:25:46.464935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:46.465037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:46.466572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:46.486465Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:46.486490Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:46.486498Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:46.486620Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1915 TClient is connected to server localhost:1915 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:46.908501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:46.925663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:47.010667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:47.151527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:47.214477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:48.846170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644230605911461:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:48.846256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:48.866704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:48.894621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:48.921660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:48.948977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:48.977000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:49.009600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:49.085120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644234900879275:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:49.085187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:49.085424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644234900879280:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:49.088935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:49.098496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644234900879282:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:49.175007Z node 2 :TX_PROXY ERROR: Actor# [2:7491644234900879337:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:50.003687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:25:50.073597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:25:50.103963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:25:51.330195Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644222015975101:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:51.330645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> DataShardReadIteratorSysTables::ShouldRead [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow
>> DataShardReadIterator::ShouldHandleReadAck [GOOD]
>> DataShardReadIterator::ShouldHandleOutOfOrderReadAck
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD]
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted
>> DataShardReadIterator::ShouldReadKeyCellVec [GOOD]
>> DataShardReadIterator::ShouldReadKeyArrow
>> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots]
|93.3%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log}
|93.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest
>> TPQTestSlow::TestOnDiskStoredSourceIds
>> DataShardReadIterator::ShouldReadRangeCellVec [GOOD]
>> DataShardReadIterator::ShouldReadRangeArrow
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false
>> TStorageTenantTest::CreateSolomonInsideSubDomain
>> TStorageTenantTest::CreateTableInsideSubDomain
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD]
Test command err:
2025-04-10T11:20:48.597212Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T11:20:48.679505Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot
2025-04-10T11:20:48.683642Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored
2025-04-10T11:20:48.683990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T11:20:48.702705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T11:20:48.702954Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T11:20:48.710772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:20:48.710934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:20:48.711095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:20:48.711178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:20:48.711258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:20:48.711380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:20:48.711452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T11:20:48.711541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T11:20:48.711635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T11:20:48.711714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T11:20:48.711785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T11:20:48.711848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T11:20:48.728532Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T11:20:48.732261Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T11:20:48.732535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T11:20:48.732576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T11:20:48.732714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:20:48.732847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T11:20:48.732906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T11:20:48.732934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T11:20:48.733015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T11:20:48.733071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T11:20:48.733123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T11:20:48.733153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T11:20:48.733281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:20:48.733341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T11:20:48.733367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T11:20:48.733384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T11:20:48.733464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T11:20:48.733501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T11:20:48.733526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T11:20:48.733544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T11:20:48.733588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T11:20:48.733625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T11:20:48.733644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T11:20:48.733690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T11:20:48.733715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T11:20:48.733734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T11:20:48.734024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32;
2025-04-10T11:20:48.734098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34;
2025-04-10T11:20:48.734194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=58;
2025-04-10T11:20:48.734269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27;
2025-04-10T11:20:48.734381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T11:20:48.734417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T11:20:48.734438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T11:20:48.734627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T11:20:48.734658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T11:20:48.734677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T11:20:48.734787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T11:20:48.734815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T11:20:48.734836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T11:20:48.734947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T11:20:48.734984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T11:20:48.735003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T1 ...
ram_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.223069Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.223105Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-04-10T11:25:52.223146Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);;
2025-04-10T11:25:52.223265Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-04-10T11:25:52.223375Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.223426Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-04-10T11:25:52.223526Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73;
2025-04-10T11:25:52.223569Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp;
2025-04-10T11:25:52.223718Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: uint64;
2025-04-10T11:25:52.223850Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.223972Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.224165Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.224299Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-04-10T11:25:52.224403Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.224472Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.224513Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: Scan [5:581:2597] finished for tablet 9437184
2025-04-10T11:25:52.225086Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[5:574:2590];stats={"p":[{"events":["f_bootstrap"],"t":0.221},{"events":["f_ProduceResults"],"t":1.577},{"events":["l_bootstrap"],"t":3.048},{"events":["f_processing","f_task_result"],"t":3.156},{"events":["l_task_result"],"t":20.66},{"events":["f_ack"],"t":20.757},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":25.528}],"full":{"a":1744284326695717,"name":"_full_task","f":1744284326695717,"d_finished":0,"c":0,"l":1744284352224581,"d":25528864},"events":[{"name":"bootstrap","f":1744284326916991,"d_finished":2827144,"c":1,"l":1744284329744135,"d":2827144},{"a":1744284352224281,"name":"ack","f":1744284347453226,"d_finished":4647134,"c":904,"l":1744284352224204,"d":4647434},{"a":1744284352224267,"name":"processing","f":1744284329851971,"d_finished":16602228,"c":4520,"l":1744284352224206,"d":16602542},{"name":"ProduceResults","f":1744284328273660,"d_finished":8637260,"c":5426,"l":1744284352224496,"d":8637260},{"a":1744284352224500,"name":"Finish","f":1744284352224500,"d_finished":0,"c":0,"l":1744284352224581,"d":81},{"name":"task_result","f":1744284329852366,"d_finished":11858962,"c":3616,"l":1744284347356505,"d":11858962}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.225165Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[5:574:2590];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=;
2025-04-10T11:25:52.225614Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[5:574:2590];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.221},{"events":["f_ProduceResults"],"t":1.577},{"events":["l_bootstrap"],"t":3.048},{"events":["f_processing","f_task_result"],"t":3.156},{"events":["l_task_result"],"t":20.66},{"events":["f_ack"],"t":20.757},{"events":["l_ProduceResults","f_Finish"],"t":25.528},{"events":["l_ack","l_processing","l_Finish"],"t":25.529}],"full":{"a":1744284326695717,"name":"_full_task","f":1744284326695717,"d_finished":0,"c":0,"l":1744284352225204,"d":25529487},"events":[{"name":"bootstrap","f":1744284326916991,"d_finished":2827144,"c":1,"l":1744284329744135,"d":2827144},{"a":1744284352224281,"name":"ack","f":1744284347453226,"d_finished":4647134,"c":904,"l":1744284352224204,"d":4648057},{"a":1744284352224267,"name":"processing","f":1744284329851971,"d_finished":16602228,"c":4520,"l":1744284352224206,"d":16603165},{"name":"ProduceResults","f":1744284328273660,"d_finished":8637260,"c":5426,"l":1744284352224496,"d":8637260},{"a":1744284352224500,"name":"Finish","f":1744284352224500,"d_finished":0,"c":0,"l":1744284352225204,"d":704},{"name":"task_result","f":1744284329852366,"d_finished":11858962,"c":3616,"l":1744284347356505,"d":11858962}],"id":"9437184::15"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-04-10T11:25:52.225684Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:25:26.606613Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0;
2025-04-10T11:25:52.225728Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-04-10T11:25:52.225950Z node 5 :TX_COLUMNSHARD_SCAN INFO: SelfId=[5:581:2597];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL]
Test command err:
2025-04-10T11:25:38.244459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644190861298301:2267];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:38.244975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f4d/r3tmp/tmpxchkiK/pdisk_1.dat
2025-04-10T11:25:38.655370Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:38.667827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:38.668474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:38.683280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8901, node 1
2025-04-10T11:25:38.880679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:38.880705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:38.880715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:38.880850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10318
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:39.380754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:39.414406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:25:41.183791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644203746201051:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:41.184397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:41.717759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T11:25:41.914270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644203746201245:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:41.914356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:41.914530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644203746201250:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:41.920972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T11:25:41.949179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644203746201252:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-10T11:25:42.048251Z node 1 :TX_PROXY ERROR: Actor# [1:7491644208041168632:2821] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:42.331376Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfq5nmr6k5babq8ak7ww4hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU0ODZmZTYtODZhYzQzZTAtYTVkNmVhNGYtZGVmYTI1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=Y2QxODhmNWItYTVkZDkwZTMtYzAwN2I3NGEtZmU5ODU4ZjE=" != "ydb://session/3?node_id=1&id=MmU0ODZmZTYtODZhYzQzZTAtYTVkNmVhNGYtZGVmYTI1NQ==") , with diff: "ydb://session/3?node_id=1&id=(|MmU0ODZmZT)Y(2Qx|t)OD(|Z)h(mNWI|YzQzZTA)tYTVk(ZDkwZTMtYzAw|)N(2I3|mVh)NG(E|Y)tZ(|GV)m(U5ODU4ZjE|YTI1NQ=)="
TBackTrace::Capture()+28 (0x18DC746C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1928F5D0)
NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&)+7545 (0x18987419)
std::__y1::__function::__func, void ()>::operator()()+280 (0x189FCB78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192C6616)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19296149)
NTestSuiteYdbSdkSessions::TCurrentTest::Execute()+1204 (0x189FBA24)
NUnitTest::TTestFactory::Execute()+2438 (0x19297A16)
NUnitTest::RunMain(int, char**)+5213 (0x192C0B8D)
??+0 (0x7F60B085BD90)
__libc_start_main+128 (0x7F60B085BE40)
_start+41 (0x162E7029)
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock
>> TPQTest::TestPQRead [GOOD]
>> TPQTest::TestPQSmallRead
|93.3%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
|93.3%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
>> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD]
>> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk
>> KqpJoin::JoinWithDuplicates
>> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins
>> TStorageTenantTest::DeclareAndDefine
>> KqpJoin::JoinLeftPureCross [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup
>> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD]
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD]
>> DataShardReadIterator::ShouldReadRangeReverse
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleRanges
>> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD]
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead
>> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace
>> KqpJoin::RightTableIndexPredicate [GOOD]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite
>> DataShardReadIterator::ShouldReadKeyArrow [GOOD]
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD]
Test command err:
Trying to start YDB, gRPC: 4435, MsgBus: 6175
2025-04-10T11:25:52.427723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644250119559573:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.428231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001725/r3tmp/tmpRIIZx4/pdisk_1.dat
2025-04-10T11:25:52.791067Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4435, node 1
2025-04-10T11:25:52.853732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.854021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:52.857819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6175
TClient is connected to server localhost:6175
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:53.594983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:53.660235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:53.800019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:25:53.952563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:25:54.041101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:55.328022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263004463246:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.328098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.585522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.616077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.645640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.712018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.740472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.807868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.854696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263004463765:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.854780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.855067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263004463770:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.858496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.869320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644263004463772:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:55.972625Z node 1 :TX_PROXY ERROR: Actor# [1:7491644263004463826:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:57.432089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644250119559573:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:57.432167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull
>> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD]
>> DataShardReadIterator::ShouldReadRangeArrow [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec
>> KqpIndexLookupJoin::LeftSemi [GOOD]
>> TPQTest::TestSourceIdDropByUserWrites [GOOD]
>> TPQTest::TestSourceIdDropBySourceIdCount
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableIndexPredicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 28859, MsgBus: 16909
2025-04-10T11:25:52.427714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644249099733807:2068];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.428291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172b/r3tmp/tmpEe8wH9/pdisk_1.dat
2025-04-10T11:25:52.778373Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28859, node 1
2025-04-10T11:25:52.812941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.813032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:52.814848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16909
TClient is connected to server localhost:16909
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.629619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.649181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:53.666878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:53.797486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:53.952475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.031222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.362165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261984637469:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.362300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.688642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.720960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.753552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.785871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.830092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.878827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.925315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261984637982:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.925389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.925702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261984637987:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.929965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.938289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644261984637989:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:56.039568Z node 1 :TX_PROXY ERROR: Actor# [1:7491644266279605338:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:56.962684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.451953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644249099733807:2068];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:57.452016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 24823, MsgBus: 29093
2025-04-10T11:25:52.427691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644248520065059:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.427776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001723/r3tmp/tmpj5GWPJ/pdisk_1.dat
2025-04-10T11:25:52.761748Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 24823, node 1
2025-04-10T11:25:52.827739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.827886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:52.829660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29093
TClient is connected to server localhost:29093
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.597085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.660118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.816877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.966410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.037644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.193200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261404968717:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.193351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.585520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.614101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.644140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.675630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.704058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.773918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.823695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261404969232:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.823778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.823978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261404969237:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.827663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.837656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644261404969239:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:55.935507Z node 1 :TX_PROXY ERROR: Actor# [1:7491644261404969292:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:56.932067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.960362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.986805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.428116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644248520065059:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:57.428176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD]
>> TStorageTenantTest::CreateTableInsideSubDomain [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemi [GOOD]
Test command err:
Trying to start YDB, gRPC: 11964, MsgBus: 27521
2025-04-10T11:25:53.527553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644251131135439:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:53.527608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171d/r3tmp/tmpoBcEg8/pdisk_1.dat
2025-04-10T11:25:53.870255Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11964, node 1
2025-04-10T11:25:53.893218Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:25:53.912445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:53.912544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:53.914479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:53.922976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:53.922999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:53.923009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:53.923165Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27521
TClient is connected to server localhost:27521
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:54.424642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.449117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.585576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.725297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.789840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:56.167734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264016039103:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:56.167875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:56.411539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.445625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.474735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.501781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.565686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.640611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:25:56.728731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264016039626:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:56.728814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:56.729222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264016039631:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:56.734745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:25:56.754872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644264016039633:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:25:56.843022Z node 1 :TX_PROXY ERROR: Actor# [1:7491644264016039689:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:57.725582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.752168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.778787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.810886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.838646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:25:57.868329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:25:58.528162Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644251131135439:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:58.528230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:25:53.685428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:25:53.685596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:25:53.685664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:25:53.685704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:25:53.687515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:53.687566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:53.687657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:53.687754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:53.689495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:53.777939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:25:53.778013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:53.794759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:53.795031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:53.795179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:53.804913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:53.805242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:53.809761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:53.810353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:53.816276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:53.823938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:53.824010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:53.825152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:53.825210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:53.825310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:53.825475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.831338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:25:53.943858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:53.944934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.946744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:53.947906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:53.947973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.950944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:53.951070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:53.951226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.951341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:53.951366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:53.951399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:53.953890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.953989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:53.954040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:53.959056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.959122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.959199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:53.959291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:53.963289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:53.965502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:53.966733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:53.967764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:53.967910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:53.967952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:53.969204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:53.969295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:53.969576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:53.969693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:53.972483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:53.972538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:53.972778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:53.972829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:53.973134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:53.973211Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:53.973366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:53.973420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:53.973468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:53.973502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:53.973544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:25:53.973589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:53.973629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:25:53.973662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:25:53.973738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:25:53.973779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:25:53.973816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:25:53.984790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:53.984996Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:25:53.985047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... SHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:25:56.927431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:56.927492Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:56.927548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:25:56.927586Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:25:56.927622Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:25:56.927655Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-10T11:25:56.927685Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:25:56.930078Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:56.930201Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-10T11:25:56.930386Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:56.930426Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:56.930584Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:56.930624Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:25:56.931034Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:56.931118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:25:56.931154Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:25:56.931185Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:25:56.931228Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:56.931312Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:25:56.933164Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-10T11:25:56.934230Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.934279Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-10T11:25:56.970448Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-10T11:25:56.970594Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:56.970642Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:56.970832Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:56.970883Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T11:25:56.971397Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-10T11:25:56.973121Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.978513Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-10T11:25:56.978862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.984833Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-10T11:25:56.985184Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.994617Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-10T11:25:56.995036Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.995158Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-10T11:25:56.995635Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:25:56.995732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-10T11:25:56.996136Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:25:56.996324Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 210us result status StatusSuccess 2025-04-10T11:25:56.996737Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxpjBJGbSZf86grKsmvPJ\nqoBUVQoqeFDA2pNy6cq0+dlh81PWhT2orM0ED8MhC18XFGOqvnk/HM2Xd+GszCl0\nD840RupYptQvHHn42ooE5u6qXO72L8Dw10z6vKfSD9KZxGLulseeoWnLHffAFTAK\np8suFTonlzer3eOC/GSexJ0nNeJHFsKE4FT/lIElzdv9xDr0bvDWccmdg9cD5a2Z\nn3qV1XUOwqCwAq6IVB6IQD5+YYC/fCtpKumU3enJtF4Lfrmb92C3NHHOYlZZMl2h\nREaSKSPN7YELnyBmDqS/oDoX/dGqbWLP4/SuJa/HM0116d/BXhI5npc0E7P1fS4N\ndQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744370756962 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:26:00.997699Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:26:01.005071Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-10T11:26:01.005651Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:26:01.016618Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3NTYxLCJpYXQiOjE3NDQyODQzNjEsInN1YiI6InVzZXIxIn0.qmRb13SJvzyNcfRd3iDhvU1zTG-GRf0WKkT7OEw-X8dYxM5jGfW7DiD6ZjeaWq_J40eyyNf_FdPD2Zn56oRd15nyXVYpWgdnNisHcz-LkaKA17Jun5Xtz4c8hqklpNwsJdaq1Ym9v1FqdC7ZMzV-i4AiKc6_gfggvBrX6c54N9SVcgODr0hzCuE1iUhDm_bcFnkCQS-rS9t6WYTwRPoAboXJCmQ-KXgp6sif-PkO72aXXqTvtHyllQ7ZXB0a-vrveEIwJC3KujybcP5hOVbYIXuLM66R-ZfptbBjNoRien0ISWjqUni3bxHJd9setwkSCqu_pTTjVUoSAe_7tRY_7Q" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3NTYxLCJpYXQiOjE3NDQyODQzNjEsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-10T11:26:01.017397Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:26:01.017608Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-04-10T11:26:01.018058Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxpjBJGbSZf86grKsmvPJ\nqoBUVQoqeFDA2pNy6cq0+dlh81PWhT2orM0ED8MhC18XFGOqvnk/HM2Xd+GszCl0\nD840RupYptQvHHn42ooE5u6qXO72L8Dw10z6vKfSD9KZxGLulseeoWnLHffAFTAK\np8suFTonlzer3eOC/GSexJ0nNeJHFsKE4FT/lIElzdv9xDr0bvDWccmdg9cD5a2Z\nn3qV1XUOwqCwAq6IVB6IQD5+YYC/fCtpKumU3enJtF4Lfrmb92C3NHHOYlZZMl2h\nREaSKSPN7YELnyBmDqS/oDoX/dGqbWLP4/SuJa/HM0116d/BXhI5npc0E7P1fS4N\ndQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744370756962 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2025-04-10T11:25:55.900005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644261661585889:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:55.900376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cc3/r3tmp/tmpIC2MbJ/pdisk_1.dat 2025-04-10T11:25:56.331161Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:56.346975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:56.347094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:56.364601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5087 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:25:56.566529Z node 1 :TX_PROXY DEBUG: actor# [1:7491644261661586002:2135] Handle TEvNavigate describe path dc-1 2025-04-10T11:25:56.566600Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644265956553753:2451] HANDLE EvNavigateScheme dc-1 2025-04-10T11:25:56.566747Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644265956553322:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:56.566787Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644265956553322:2149], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:25:56.566999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:56.569097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585650:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644265956553758:2452] 2025-04-10T11:25:56.569169Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644261661585650:2051] Subscribe: subscriber# [1:7491644265956553758:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.569246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585653:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644265956553759:2452] 2025-04-10T11:25:56.569282Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644261661585653:2054] Subscribe: subscriber# [1:7491644265956553759:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.569305Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585656:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644265956553760:2452] 2025-04-10T11:25:56.569321Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644261661585656:2057] Subscribe: subscriber# [1:7491644265956553760:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.569395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553758:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644261661585650:2051] 2025-04-10T11:25:56.569419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553759:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644261661585653:2054] 2025-04-10T11:25:56.569437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553760:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644261661585656:2057] 2025-04-10T11:25:56.569484Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644265956553755:2452] 2025-04-10T11:25:56.569514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644265956553756:2452] 2025-04-10T11:25:56.569590Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644265956553754:2452][/dc-1] Set up state: owner# [1:7491644265956553322:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:56.569712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644265956553757:2452] 2025-04-10T11:25:56.569771Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644265956553754:2452][/dc-1] Path was already updated: owner# [1:7491644265956553322:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:56.569815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553758:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553755:2452], cookie# 1 2025-04-10T11:25:56.569833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553759:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553756:2452], cookie# 1 2025-04-10T11:25:56.569847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553760:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553757:2452], cookie# 1 2025-04-10T11:25:56.569876Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585650:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644265956553758:2452] 2025-04-10T11:25:56.569902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585650:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553758:2452], cookie# 1 2025-04-10T11:25:56.569935Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585653:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644265956553759:2452] 2025-04-10T11:25:56.569955Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585653:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553759:2452], cookie# 1 2025-04-10T11:25:56.569973Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585656:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644265956553760:2452] 2025-04-10T11:25:56.569986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644261661585656:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644265956553760:2452], cookie# 1 2025-04-10T11:25:56.570248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553758:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644261661585650:2051], cookie# 1 2025-04-10T11:25:56.570276Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553759:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644261661585653:2054], cookie# 1 2025-04-10T11:25:56.570317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644265956553760:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644261661585656:2057], cookie# 1 2025-04-10T11:25:56.570368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644265956553755:2452], cookie# 1 2025-04-10T11:25:56.570398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:25:56.570414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644265956553756:2452], cookie# 1 2025-04-10T11:25:56.570433Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:25:56.570452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644265956553757:2452], cookie# 1 2025-04-10T11:25:56.570494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644265956553754:2452][/dc-1] Unexpected sync response: sender# [1:7491644265956553757:2452], cookie# 1 2025-04-10T11:25:56.629666Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644265956553322:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:25:56.630294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644265956553322:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... _TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-04-10T11:25:58.026868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:25:58.026996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T11:25:58.027122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:25:58.027274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:25:58.027296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:25:58.027349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:25:58.027486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:25:58.027504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:25:58.027702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:25:58.036344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:25:58.036378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:25:58.036422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-10T11:25:58.036426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-10T11:25:58.036522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:25:58.036527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:25:58.036572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:25:58.036585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:25:58.036599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-10T11:25:58.036602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-10T11:25:58.036617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:25:58.036624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 
tabletId 72075186224037890 2025-04-10T11:25:58.036645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-10T11:25:58.036671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-10T11:25:58.036694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T11:25:58.036709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-10T11:25:58.036747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-04-10T11:25:58.036789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:25:58.036818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:25:58.036837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:25:58.036892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:25:58.043199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:25:58.402630Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644268676904770:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.402794Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644268676904770:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.402819Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7491644268676904770:2116], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-10T11:25:58.402997Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644272971872353:2302][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:58.403526Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644272971872353:2302][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7491644272971872354:2302] 2025-04-10T11:25:58.403583Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644272971872353:2302][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7491644272971872355:2302] 2025-04-10T11:25:58.403615Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491644272971872353:2302][/dc-1/USER_0] Set up state: owner# 
[3:7491644268676904770:2116], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:58.403636Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644272971872353:2302][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7491644272971872356:2302] 2025-04-10T11:25:58.403685Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491644272971872353:2302][/dc-1/USER_0] Ignore empty state: owner# [3:7491644268676904770:2116], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:58.403785Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491644268676904770:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-10T11:25:58.403852Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491644268676904770:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7491644272971872353:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:25:58.403925Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644268676904770:2116], cacheItem# { Subscriber: { Subscriber: [3:7491644272971872353:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:25:58.403985Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644272971872360:2303], recipient# [3:7491644272971872352:2301], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.404049Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644268676904770:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.404128Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644272971872361:2304], recipient# [3:7491644272971872351:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.404794Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:59.406688Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644268676904770:2116], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:59.406893Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644277266839659:2305], recipient# [3:7491644277266839658:2319], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:59.408732Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-10T11:25:55.898429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644260024950017:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:55.900051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cb4/r3tmp/tmpksDueT/pdisk_1.dat 2025-04-10T11:25:56.328644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:56.348919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:56.349073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:56.363230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6024 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:25:56.563172Z node 1 :TX_PROXY DEBUG: actor# [1:7491644260024950268:2135] Handle TEvNavigate describe path dc-1 2025-04-10T11:25:56.563228Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644264319918007:2439] HANDLE EvNavigateScheme dc-1 2025-04-10T11:25:56.564383Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644264319917589:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:56.564428Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644264319917589:2148], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:25:56.564702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:56.566760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949916:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644264319918012:2440] 2025-04-10T11:25:56.566813Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949916:2051] Subscribe: subscriber# [1:7491644264319918012:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.566892Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949922:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644264319918014:2440] 2025-04-10T11:25:56.566929Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949922:2057] Subscribe: subscriber# [1:7491644264319918014:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.566989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918012:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644260024949916:2051] 2025-04-10T11:25:56.567033Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949919:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644264319918013:2440] 2025-04-10T11:25:56.567044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918014:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644260024949922:2057] 2025-04-10T11:25:56.567079Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949919:2054] Subscribe: subscriber# [1:7491644264319918013:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:56.567088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644264319918009:2440] 2025-04-10T11:25:56.567113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644264319918011:2440] 2025-04-10T11:25:56.567155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949916:2051] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644264319918012:2440] 2025-04-10T11:25:56.567160Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644264319918008:2440][/dc-1] Set up state: owner# [1:7491644264319917589:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:56.567180Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949922:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644264319918014:2440] 2025-04-10T11:25:56.568210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918013:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644260024949919:2054] 2025-04-10T11:25:56.568262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918012:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918009:2440], cookie# 1 2025-04-10T11:25:56.568282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918013:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918010:2440], cookie# 1 2025-04-10T11:25:56.568309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918014:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918011:2440], cookie# 1 2025-04-10T11:25:56.568357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644264319918010:2440] 2025-04-10T11:25:56.568414Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644264319918008:2440][/dc-1] Path was already updated: owner# [1:7491644264319917589:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:56.571478Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949919:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644264319918013:2440] 2025-04-10T11:25:56.571521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949919:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918013:2440], cookie# 1 2025-04-10T11:25:56.571551Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949916:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918012:2440], cookie# 1 2025-04-10T11:25:56.571571Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949922:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644264319918014:2440], cookie# 1 2025-04-10T11:25:56.571671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918013:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644260024949919:2054], cookie# 1 2025-04-10T11:25:56.571695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7491644264319918012:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644260024949916:2051], cookie# 1 2025-04-10T11:25:56.571707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644264319918014:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644260024949922:2057], cookie# 1 2025-04-10T11:25:56.571741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644264319918010:2440], cookie# 1 2025-04-10T11:25:56.571767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:25:56.571781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644264319918009:2440], cookie# 1 2025-04-10T11:25:56.571796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:25:56.571852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644264319918011:2440], cookie# 1 2025-04-10T11:25:56.571864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644264319918008:2440][/dc-1] Unexpected sync response: sender# [1:7491644264319918011:2440], cookie# 1 2025-04-10T11:25:56.618805Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644264319917589:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:25:56.619233Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644264319917589:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... ction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284358100 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: 
"k... (TRUNCATED) 2025-04-10T11:25:58.170588Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:25:58.172132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:25:58.172458Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949916:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644269986004940:2103] 2025-04-10T11:25:58.172507Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949916:2051] Unsubscribe: subscriber# [3:7491644269986004940:2103], path# /dc-1/USER_0 2025-04-10T11:25:58.172555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949919:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644269986004941:2103] 2025-04-10T11:25:58.172574Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949919:2054] Unsubscribe: subscriber# [3:7491644269986004941:2103], path# /dc-1/USER_0 2025-04-10T11:25:58.172608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644260024949922:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644269986004942:2103] 2025-04-10T11:25:58.172635Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644260024949922:2057] Unsubscribe: subscriber# [3:7491644269986004942:2103], path# /dc-1/USER_0 2025-04-10T11:25:58.427729Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644269986004946:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.427925Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644269986004946:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.427956Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7491644269986004946:2107], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-10T11:25:58.428174Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644274280972658:2354][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:58.428802Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644274280972658:2354][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7491644274280972659:2354] 2025-04-10T11:25:58.428847Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644274280972658:2354][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7491644274280972660:2354] 2025-04-10T11:25:58.428907Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7491644274280972658:2354][/dc-1/USER_0] Set up state: owner# [3:7491644269986004946:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:58.428939Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7491644274280972658:2354][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/USER_0 Version: 0 }: sender# [3:7491644274280972661:2354] 2025-04-10T11:25:58.428964Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7491644274280972658:2354][/dc-1/USER_0] Ignore empty state: owner# [3:7491644269986004946:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:58.428989Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7491644269986004946:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-10T11:25:58.429062Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7491644269986004946:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7491644274280972658:2354] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:25:58.429149Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644269986004946:2107], cacheItem# { Subscriber: { Subscriber: [3:7491644274280972658:2354] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:25:58.429250Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644274280972665:2355], recipient# [3:7491644274280972657:2353], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.429341Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644269986004946:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.429427Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644274280972666:2356], recipient# [3:7491644274280972656:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:58.429793Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-04-10T11:25:59.430309Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644269986004946:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:59.430477Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644278575939964:2357], recipient# [3:7491644278575939963:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:59.430587Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> KqpJoinOrder::CanonizedJoinOrderTPCH22 >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore >> TStorageTenantTest::LsLs >> KqpJoin::JoinMismatchDictKeyTypes >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> TStorageTenantTest::DeclareAndDefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2025-04-10T11:24:40.247602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643938379663765:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.247685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:24:40.289342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643939704640903:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:40.289411Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000cdb/r3tmp/tmpbB7nfw/pdisk_1.dat 2025-04-10T11:24:40.500022Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:24:40.512190Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
2025-04-10T11:24:40.713056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.713175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.719438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:40.719510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:40.723675Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:24:40.723887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:24:40.779832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:40.781162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5361, node 1 2025-04-10T11:24:40.818179Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.818877Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:24:40.907021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/000cdb/r3tmp/yandexVUlwJB.tmp 2025-04-10T11:24:40.907082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/000cdb/r3tmp/yandexVUlwJB.tmp 2025-04-10T11:24:40.907308Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/000cdb/r3tmp/yandexVUlwJB.tmp 2025-04-10T11:24:40.907475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:24:40.967034Z INFO: TTestServer started on Port 2687 GrpcPort 5361 TClient is connected to server localhost:2687 PQClient connected to localhost:5361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:41.249788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:24:41.342220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-10T11:24:43.536498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643952589543084:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.536565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491643952589543076:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.536765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:24:43.543070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:24:43.561659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491643952589543090:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:24:43.648587Z node 2 :TX_PROXY ERROR: Actor# [2:7491643952589543118:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:24:43.946953Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491643951264566877:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:43.947265Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWE2ODA1YWQtNjY2ZDkzZDMtZjhkY2RhMjgtMTkxYmUxOGM=, ActorId: [1:7491643951264566828:2340], ActorState: ExecuteState, TraceId: 01jrfq3wpq2zhhwhwtqfpfxmvc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:43.948994Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491643952589543133:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:24:43.949318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:24:43.950697Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGNmZGZkNDItM2MzMWM2MmMtNDI3Y2Q5YWUtNjExNzE2YmQ=, ActorId: [2:7491643952589543074:2308], ActorState: ExecuteState, TraceId: 01jrfq3wme5zp8ddd17ef2cdmq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:24:43.967139Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:43.967378Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:24:44.047532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:24:44.149948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:24:44.390992Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq3xag6yask2e07x2149dj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3Y2JiZjEtN2YyZmYyODktNzdjYjc1NzQtMjEzNmExMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491643955559534555:3075] 2025-04-10T11:24:45.250760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643938379663765:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.250820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:24:45.287018Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643939704640903:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:45.287096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-10T11:24:50.595105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2025-04-10T11:24:51.155582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:24:51.605552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose i ... load_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:49.836860Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7491644234455832126:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:25:49.837126Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MTRkZTNkOGItNzNjNTYxNWUtZjkyMjk1ZDktZjRmNWU1Nzc=, ActorId: [10:7491644234455832054:2312], ActorState: ExecuteState, TraceId: 01jrfq5x7r74adn9xw6323yg20, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:25:49.837629Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:25:49.859000Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:49.992201Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:25:50.231520Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq5xks8f6bk2ceg9h2m729, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NTQyNTgyMzAtM2IxZjNiNWUtNDFlZTMwNjAtMTQ5NWNlM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7491644239905792424:3074] 2025-04-10T11:25:51.254125Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7491644222725921631:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:51.254234Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:51.263328Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491644221570929761:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:51.263395Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T11:25:55.283395Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 2025-04-10T11:25:55.998438Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.807208Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.502066Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:25:58.250727Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-10T11:25:58.965603Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710699:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1744284359685, 1744284359685, 0, 13); 2025-04-10T11:25:59.832005Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710703. Ctx: { TraceId: 01jrfq671gccmyhpv67j219hpw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YTZkZGY3MS1hZGIyNzBiMC03MzZhOGQxOS1kZGJkYzk2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:25:59.851091Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-10T11:25:59.851119Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-10T11:25:59.851132Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-10T11:25:59.851155Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-04-10T11:25:59.851326Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7491644278560499624:3969], Recipient [9:7491644261380629352:3332]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7491644278560499623:3969] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-10T11:25:59.851432Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7491644278560499623:3969], Recipient [9:7491644261380629352:3332]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-04-10T11:25:59.851510Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7491644261380629352:3332], Recipient [9:7491644278560499623:3969]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-10T11:25:59.851550Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-10T11:25:59.851639Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7491644278560499623:3969], Recipient [9:7491644261380629352:3332]: NActors::TEvents::TEvPoison 2025-04-10T11:25:59.851729Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7491644222725921627:2070], Recipient [9:7491644278560499623:3969]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-10T11:25:59.851755Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-04-10T11:25:59.855438Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7491644222725921852:2280], Recipient [9:7491644278560499623:3969]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=NThiZDgzMjktYWE2NDhlNDgtMjgyOWZlYmEtOTY2NDFhYTQ=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-10T11:25:59.855480Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2025-04-10T11:26:00.065379Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7491644222725921852:2280], Recipient [9:7491644278560499623:3969]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NThiZDgzMjktYWE2NDhlNDgtMjgyOWZlYmEtOTY2NDFhYTQ=" PreparedQuery: "e4e38bee-35d77ca6-1d915654-9212911a" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jrfq67bqd2echw9zhjcnaf88" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1744284359685 } items { uint64_value: 1744284359685 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 127 2025-04-10T11:26:00.065613Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-04-10T11:26:00.065642Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all.2025-04-10T11:26:00.065706Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7491644278560499623:3969] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-04-10T11:26:00.291418Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710708. Ctx: { TraceId: 01jrfq67d6a05b5rsft75hhx24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=OWI2MTI3MS1mZDNlOTQ5MS0yMWY0ZjAyNi0xNzJlN2Vj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
>> KqpJoin::JoinWithDuplicates [GOOD]
>> DataShardReadIterator::ShouldReadRangeReverse [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD]
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD]
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 16525, MsgBus: 3201
2025-04-10T11:25:52.430983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644248444558949:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.431322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001731/r3tmp/tmph8HD2j/pdisk_1.dat
2025-04-10T11:25:52.802512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.802602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 16525, node 1
2025-04-10T11:25:52.805010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.806366Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:25:52.808777Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:25:52.813370Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:52.932347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3201
TClient is connected to server localhost:3201
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.599160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.660272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.816871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.977074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.044138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.449706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261329462478:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.449846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.705649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.778820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.806045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.831187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.902227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.972744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.019333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644265624430294:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.019409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.019476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644265624430299:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.023779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:56.039599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644265624430301:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:56.125580Z node 1 :TX_PROXY ERROR: Actor# [1:7491644265624430354:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:57.031907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.078899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.430535Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644248444558949:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:57.449289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23694, MsgBus: 23121 2025-04-10T11:25:58.419383Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644275610077364:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:58.435944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001731/r3tmp/tmpsVeryp/pdisk_1.dat 2025-04-10T11:25:58.544395Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23694, node 2 2025-04-10T11:25:58.556583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:58.556670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:58.559709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:58.614697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:58.614726Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:58.614736Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:58.614853Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23121 TClient is connected to server localhost:23121 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:59.024818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.030616Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:59.041972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.118121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.264613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.369153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:01.457761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644288494981018:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.457890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.506404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.540100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.620918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.654047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.686344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.716618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.760488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644288494981530:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.760594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.760930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644288494981535:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.764489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:01.773158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644288494981537:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:26:01.873785Z node 2 :TX_PROXY ERROR: Actor# [2:7491644288494981592:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:26:02.755004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-10T11:26:02.814685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-10T11:26:03.415800Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644275610077364:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:03.415894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD]
Test command err:
2025-04-10T11:25:58.401726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644275679975446:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:58.401799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ca8/r3tmp/tmp0Pt4wn/pdisk_1.dat
2025-04-10T11:25:58.859753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:58.859850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:58.861001Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:58.875510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:5148
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-10T11:25:59.071542Z node 1 :TX_PROXY DEBUG: actor# [1:7491644275679975708:2139] Handle TEvNavigate describe path dc-1 2025-04-10T11:25:59.071680Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644279974943448:2445] HANDLE EvNavigateScheme dc-1 2025-04-10T11:25:59.071839Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644275679975732:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:59.071873Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644275679975732:2152], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:25:59.072147Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:59.074277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975352:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644279974943453:2446] 2025-04-10T11:25:59.074366Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975352:2050] Subscribe: subscriber# [1:7491644279974943453:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:59.074423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975358:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644279974943455:2446] 2025-04-10T11:25:59.074439Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975358:2056] Subscribe: subscriber# [1:7491644279974943455:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:59.074507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943453:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644275679975352:2050] 2025-04-10T11:25:59.074537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943455:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644275679975358:2056] 2025-04-10T11:25:59.074578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644279974943450:2446] 2025-04-10T11:25:59.074606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644279974943452:2446] 2025-04-10T11:25:59.074673Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644279974943449:2446][/dc-1] Set up state: owner# [1:7491644275679975732:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:59.074798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943453:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7491644279974943450:2446], cookie# 1 2025-04-10T11:25:59.074836Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943454:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644279974943451:2446], cookie# 1 2025-04-10T11:25:59.074853Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943455:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644279974943452:2446], cookie# 1 2025-04-10T11:25:59.074893Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975352:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644279974943453:2446] 2025-04-10T11:25:59.074921Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975352:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644279974943453:2446], cookie# 1 2025-04-10T11:25:59.074942Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975358:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644279974943455:2446] 2025-04-10T11:25:59.074954Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975358:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644279974943455:2446], cookie# 1 2025-04-10T11:25:59.078205Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975355:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644279974943454:2446] 2025-04-10T11:25:59.078274Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975355:2053] Subscribe: subscriber# [1:7491644279974943454:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:59.078339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975355:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644279974943454:2446], cookie# 1 2025-04-10T11:25:59.078413Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943453:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644275679975352:2050], cookie# 1 2025-04-10T11:25:59.078439Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943455:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644275679975358:2056], cookie# 1 2025-04-10T11:25:59.078489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943454:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644275679975355:2053] 2025-04-10T11:25:59.078514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644279974943454:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644275679975355:2053], cookie# 1 2025-04-10T11:25:59.078560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644279974943450:2446], cookie# 1 2025-04-10T11:25:59.078593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:25:59.078610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644279974943452:2446], cookie# 1 
2025-04-10T11:25:59.078645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:25:59.078677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644279974943451:2446] 2025-04-10T11:25:59.078735Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644279974943449:2446][/dc-1] Path was already updated: owner# [1:7491644275679975732:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:59.078758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644279974943451:2446], cookie# 1 2025-04-10T11:25:59.078773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644279974943449:2446][/dc-1] Unexpected sync response: sender# [1:7491644279974943451:2446], cookie# 1 2025-04-10T11:25:59.078799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975355:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644279974943454:2446] 2025-04-10T11:25:59.142422Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644275679975732:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:25:59.142908Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644275679975732:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... LocalPathId: 4] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Point: (Uint64 : 42) }] } 2025-04-10T11:26:01.569111Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-10T11:26:01.569945Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 SEND TEvProposeTransaction to datashard 72075186224037892 with 327 bytes program affected shards 2 followers disallowed marker# P4 2025-04-10T11:26:01.570134Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 SEND TEvProposeTransaction to datashard 72075186224037894 with 327 bytes program affected shards 2 followers disallowed marker# P4 2025-04-10T11:26:01.573885Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037892 read size 0 out readset size 0 marker# P6 2025-04-10T11:26:01.573909Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037894 read size 0 out readset size 0 marker# P6 2025-04-10T11:26:01.573940Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 SEND EvProposeTransaction to# 72075186224037889 Coordinator marker# P7 2025-04-10T11:26:01.574488Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2025-04-10T11:26:01.602418Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2025-04-10T11:26:01.605782Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2025-04-10T11:26:01.606805Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-10T11:26:01.608111Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644288564878839:3035] txid# 281474976710665 MergeResult ExecComplete TDataReq marker# P17 2025-04-10T11:26:01.608255Z node 1 :TX_PROXY INFO: Actor# [1:7491644288564878839:3035] txid# 281474976710665 RESPONSE Status# ExecComplete prepare time: 0.005761s execute time: 0.034307s total time: 0.040068s marker# P13 2025-04-10T11:26:01.633610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975352:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7491644282799356081:2101] 
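As an aside, the SCHEME_BOARD_SUBSCRIBER trace above ("Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1" followed by "Sync is done: cookie# 1, size# 3, half# 1, successes# 2") shows a majority quorum over three replicas. Below is a minimal standalone C++ sketch of that rule, assuming completion simply means successes exceeding half; it is not YDB code (the real subscriber also tracks failures and partial results):

    #include <cstddef>
    #include <iostream>

    // Quorum state for one sync cookie, mirroring the size#/half#/successes#
    // counters printed by the subscriber trace above.
    struct TSyncState {
        std::size_t Size;          // replicas being waited on (size# 3)
        std::size_t Successes = 0; // TEvSyncVersionResponse received so far

        std::size_t Half() const { return Size / 2; }    // half# 1 for 3 replicas
        bool Done() const { return Successes > Half(); } // done at successes# 2
    };

    int main() {
        TSyncState sync{3};
        sync.Successes++;  // first replica answers: still "Sync is in progress"
        std::cout << "done after 1 response: " << sync.Done() << '\n';  // prints 0
        sync.Successes++;  // second replica answers: majority reached
        std::cout << "done after 2 responses: " << sync.Done() << '\n'; // prints 1
    }

This matches the trace: with three replicas the subscriber considers the version sync settled as soon as two of them respond, which is why the third response later shows up as "Unexpected sync response".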
2025-04-10T11:26:01.633639Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975352:2050] Unsubscribe: subscriber# [2:7491644282799356081:2101], path# /dc-1/USER_0 2025-04-10T11:26:01.633737Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975355:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7491644282799356082:2101] 2025-04-10T11:26:01.633752Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975355:2053] Unsubscribe: subscriber# [2:7491644282799356082:2101], path# /dc-1/USER_0 2025-04-10T11:26:01.633911Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-10T11:26:01.634792Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644275679975358:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7491644282799356083:2101] 2025-04-10T11:26:01.634817Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644275679975358:2056] Unsubscribe: subscriber# [2:7491644282799356083:2101], path# /dc-1/USER_0 2025-04-10T11:26:01.635390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:26:02.412137Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644275679975732:2152], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:02.412274Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644275679975732:2152], cacheItem# { Subscriber: { Subscriber: [1:7491644279974943690:2632] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:02.412430Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644292859846160:3047], recipient# [1:7491644292859846159:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:02.531276Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644275679975732:2152], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:02.531476Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644275679975732:2152], cacheItem# { Subscriber: { Subscriber: 
[1:7491644288564878792:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:02.531565Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644292859846165:3048], recipient# [1:7491644292859846164:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:03.399889Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644275679975446:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:03.399964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:03.412813Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644275679975732:2152], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:03.412944Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644275679975732:2152], cacheItem# { Subscriber: { Subscriber: [1:7491644279974943690:2632] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:03.413047Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644297154813480:3054], recipient# [1:7491644297154813479:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:03.531640Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644275679975732:2152], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:26:03.531800Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644275679975732:2152], cacheItem# { Subscriber: { Subscriber: [1:7491644288564878792:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-10T11:26:03.531903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644297154813485:3055], recipient# [1:7491644297154813484:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinWithDuplicates [GOOD]
Test command err:
Trying to start YDB, gRPC: 63875, MsgBus: 8438
2025-04-10T11:25:57.972734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644271526958183:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:57.973405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171c/r3tmp/tmpgBjDbI/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 63875, node 1
2025-04-10T11:25:58.330327Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:58.368765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:58.368877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:58.371142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:58.382137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:58.382176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:58.382186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:58.382293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8438
TClient is connected to server localhost:8438
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:58.897678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:58.929878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.055973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.226895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.294258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:01.022883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644288706829155:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.022994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.328279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.357131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.383552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.411696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.436854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.468028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.520954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644288706829667:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.521044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.521525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644288706829672:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.525222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:01.537514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644288706829674:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:26:01.609005Z node 1 :TX_PROXY ERROR: Actor# [1:7491644288706829728:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:26:02.544187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:26:02.579302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:26:02.973032Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644271526958183:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:02.973085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 23072, MsgBus: 11871
2025-04-10T11:25:52.427777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644247901316871:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.428451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001721/r3tmp/tmpXnesc4/pdisk_1.dat
2025-04-10T11:25:52.778790Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:52.833583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.833645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 23072, node 1
2025-04-10T11:25:52.835595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11871
TClient is connected to server localhost:11871
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.603638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.660024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.821194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:53.981426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.061055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.259100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260786220514:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.259223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.585626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.615761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.645447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.678883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.707703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.740858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.785012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260786221023:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.785109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.785219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260786221028:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.788350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:55.796281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644260786221030:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:55.890165Z node 1 :TX_PROXY ERROR: Actor# [1:7491644260786221084:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:56.941183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.967596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.991322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.018668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.043442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.067388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.427747Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644247901316871:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:57.427815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14996, MsgBus: 19341 2025-04-10T11:25:58.505065Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644274329489233:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:58.505424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001721/r3tmp/tmpxTIEaj/pdisk_1.dat 2025-04-10T11:25:58.610973Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:58.652238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:58.652320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14996, node 2 2025-04-10T11:25:58.653991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:58.701938Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:58.701959Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:58.701967Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:58.702101Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19341 TClient is connected to server localhost:19341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:59.188067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.192636Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:25:59.200162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.257927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.421361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:59.500017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.652525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644287214392755:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.652637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.705576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.741164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.774100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.805056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.874668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.921326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:02.002310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644291509360569:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:02.002390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:02.002490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644291509360574:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:02.005773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:02.014040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644291509360576:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:26:02.084420Z node 2 :TX_PROXY ERROR: Actor# [2:7491644291509360631:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:02.885738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:02.916290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:02.948883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.017153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.048450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.084309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.518173Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644274329489233:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:03.518493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 5725, MsgBus: 21764 2025-04-10T11:25:53.371017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644251609928017:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:53.371092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171e/r3tmp/tmpsJQpb8/pdisk_1.dat 2025-04-10T11:25:53.753045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5725, node 1 2025-04-10T11:25:53.811539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-10T11:25:53.811736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:53.815512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:53.833062Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:53.833090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:53.833210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:53.833369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21764 TClient is connected to server localhost:21764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:54.335661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.352935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.488006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.637287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:54.711422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:56.321888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264494831709:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.321971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.611150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.643925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.676494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.709683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.746921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.822878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.865348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264494832225:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.865453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.865721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644264494832230:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.869932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:56.881449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644264494832232:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:56.958237Z node 1 :TX_PROXY ERROR: Actor# [1:7491644264494832285:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:57.813742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:57.862050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:25:58.372238Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644251609928017:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:58.372319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26881, MsgBus: 28506 2025-04-10T11:25:59.554707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644278752564931:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:59.554759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171e/r3tmp/tmpGY0F2A/pdisk_1.dat 2025-04-10T11:25:59.696893Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26881, node 2 2025-04-10T11:25:59.736841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:59.736942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:59.744372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:59.774631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:59.774654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:59.774661Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:59.774768Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28506 TClient is connected to server localhost:28506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:26:00.210861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:26:00.230099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:00.306842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:00.442503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:00.518604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:02.888037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644291637468617:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:02.888130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:02.933114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:02.963393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:02.989804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.016795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.047477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.119021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:03.170704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644295932436429:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:03.170780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:03.171067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644295932436434:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:03.174756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:03.184595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644295932436436:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:26:03.259398Z node 2 :TX_PROXY ERROR: Actor# [2:7491644295932436489:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:04.059611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.126332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.557422Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644278752564931:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:04.557515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> TSchemeShardLoginTest::BanUnbanUser |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TStorageTenantTest::Boot >> TTopicApiDescribes::GetLocalDescribe >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> KqpJoinOrder::TPCDS87-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-04-10T11:25:56.449652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644266036726902:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:56.449733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000caa/r3tmp/tmpTwJltC/pdisk_1.dat 2025-04-10T11:25:56.898426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:56.932034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:56.932163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:56.939710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2273 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:25:57.112547Z node 1 :TX_PROXY DEBUG: actor# [1:7491644266036727129:2141] Handle TEvNavigate describe path dc-1 2025-04-10T11:25:57.112637Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644270331694862:2439] HANDLE EvNavigateScheme dc-1 2025-04-10T11:25:57.112772Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644266036727166:2159], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:25:57.112834Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644266036727166:2159], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:25:57.113068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:25:57.115188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726768:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644270331694867:2440] 2025-04-10T11:25:57.115190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726771:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644270331694868:2440] 2025-04-10T11:25:57.115249Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644266036726771:2054] Subscribe: subscriber# [1:7491644270331694868:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:57.115251Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644266036726768:2051] Subscribe: subscriber# [1:7491644270331694867:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:57.115294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726774:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644270331694869:2440] 2025-04-10T11:25:57.115308Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644266036726774:2057] Subscribe: subscriber# [1:7491644270331694869:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:25:57.115312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694868:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644266036726771:2054] 2025-04-10T11:25:57.115332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694867:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644266036726768:2051] 2025-04-10T11:25:57.115335Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726771:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644270331694868:2440] 2025-04-10T11:25:57.115351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694869:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644266036726774:2057] 2025-04-10T11:25:57.115353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726768:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7491644270331694867:2440] 2025-04-10T11:25:57.115363Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726774:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644270331694869:2440] 2025-04-10T11:25:57.115384Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644270331694865:2440] 2025-04-10T11:25:57.115403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644270331694864:2440] 2025-04-10T11:25:57.115463Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644270331694863:2440][/dc-1] Set up state: owner# [1:7491644266036727166:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:57.115587Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644270331694866:2440] 2025-04-10T11:25:57.115624Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644270331694863:2440][/dc-1] Path was already updated: owner# [1:7491644266036727166:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:25:57.115653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694867:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694864:2440], cookie# 1 2025-04-10T11:25:57.115662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694868:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694865:2440], cookie# 1 2025-04-10T11:25:57.115678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694869:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694866:2440], cookie# 1 2025-04-10T11:25:57.115707Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726768:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694867:2440], cookie# 1 2025-04-10T11:25:57.115729Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726771:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694868:2440], cookie# 1 2025-04-10T11:25:57.115746Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644266036726774:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644270331694869:2440], cookie# 1 2025-04-10T11:25:57.115780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694867:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644266036726768:2051], cookie# 1 2025-04-10T11:25:57.115819Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7491644270331694868:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644266036726771:2054], cookie# 1 2025-04-10T11:25:57.115834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644270331694869:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644266036726774:2057], cookie# 1 2025-04-10T11:25:57.115857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644270331694864:2440], cookie# 1 2025-04-10T11:25:57.115892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:25:57.115911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644270331694865:2440], cookie# 1 2025-04-10T11:25:57.115935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:25:57.115952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644270331694866:2440], cookie# 1 2025-04-10T11:25:57.115983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644270331694863:2440][/dc-1] Unexpected sync response: sender# [1:7491644270331694866:2440], cookie# 1 2025-04-10T11:25:57.170651Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644266036727166:2159], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:25:57.170986Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644266036727166:2159], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... data/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:05.435176Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644306358949480:2347], recipient# [2:7491644306358949477:2570], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.510525Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644271894034888:2234], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.510694Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644271894034888:2234], cacheItem# { Subscriber: { Subscriber: [3:7491644284778936918:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:05.510765Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644306253786278:6618], recipient# [3:7491644306253786277:5421], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.512768Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644271894034888:2234], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.512868Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644271894034888:2234], cacheItem# { Subscriber: { Subscriber: [3:7491644284778936940:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:05.512951Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644271894034888:2234], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.513010Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644271894034888:2234], cacheItem# { Subscriber: { Subscriber: [3:7491644284778936940:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:05.513084Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644306253786282:6620], recipient# [3:7491644306253786280:5423], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:05.513170Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644306253786281:6619], recipient# [3:7491644306253786279:5422], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.287845Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644289179080057:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.288028Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644289179080057:2236], cacheItem# { Subscriber: { Subscriber: [2:7491644306358949362:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:06.288151Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644310653916778:2348], recipient# [2:7491644310653916777:2572], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.307178Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644289179080057:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.307324Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644289179080057:2236], cacheItem# { Subscriber: { Subscriber: [2:7491644306358949362:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:06.307405Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644310653916780:2349], recipient# [2:7491644310653916779:2573], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.314077Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644289179080057:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:06.314247Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644289179080057:2236], cacheItem# { Subscriber: { Subscriber: [2:7491644306358949396:2315] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:06.314343Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644310653916782:2350], recipient# [2:7491644310653916781:2574], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TStorageTenantTest::LsLs [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10784, MsgBus: 22212 2025-04-10T11:26:01.339809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644288900964194:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:01.339904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001719/r3tmp/tmpAzWhby/pdisk_1.dat 2025-04-10T11:26:01.655760Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:01.661399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:01.661472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:01.664037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10784, node 1 2025-04-10T11:26:01.742300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:01.742343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:01.742392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:01.742570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22212 TClient is connected to server localhost:22212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:02.259843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:02.285626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:02.432895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:02.587365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:02.654831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:04.252297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644301785867873:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:04.252427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:04.641378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.674625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.704064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.744150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.779992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.817493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:04.881056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644301785868385:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:04.881143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:04.881378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644301785868390:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:04.885219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:04.894905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644301785868392:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:26:04.978077Z node 1 :TX_PROXY ERROR: Actor# [1:7491644301785868445:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:05.906117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:05.939947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:05.970525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:06.007650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:06.039545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:06.071197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:06.339556Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644288900964194:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:06.339633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2025-04-10T11:26:03.174285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644298252703091:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:03.187270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c9c/r3tmp/tmp5E6X9X/pdisk_1.dat 2025-04-10T11:26:03.545222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:03.554020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:03.554134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:03.560887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9337 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:26:03.766094Z node 1 :TX_PROXY DEBUG: actor# [1:7491644298252703316:2140] Handle TEvNavigate describe path dc-1 2025-04-10T11:26:03.766259Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644298252703757:2442] HANDLE EvNavigateScheme dc-1 2025-04-10T11:26:03.766460Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644298252703341:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:03.766538Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644298252703341:2154], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:26:03.766777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:26:03.768649Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702963:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644298252703762:2443] 2025-04-10T11:26:03.768676Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702966:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644298252703763:2443] 2025-04-10T11:26:03.768706Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702963:2051] Subscribe: subscriber# [1:7491644298252703762:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:03.768739Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702966:2054] Subscribe: subscriber# [1:7491644298252703763:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:03.768783Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702969:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644298252703764:2443] 2025-04-10T11:26:03.768797Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702969:2057] Subscribe: subscriber# [1:7491644298252703764:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:03.768839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703762:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644298252702963:2051] 2025-04-10T11:26:03.768924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703763:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644298252702966:2054] 2025-04-10T11:26:03.768926Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702963:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644298252703762:2443] 2025-04-10T11:26:03.768948Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702966:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644298252703763:2443] 2025-04-10T11:26:03.768960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703764:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7491644298252702969:2057] 2025-04-10T11:26:03.768970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702969:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644298252703764:2443] 2025-04-10T11:26:03.769010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644298252703759:2443] 2025-04-10T11:26:03.769045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644298252703760:2443] 2025-04-10T11:26:03.769113Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644298252703758:2443][/dc-1] Set up state: owner# [1:7491644298252703341:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:03.769238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644298252703761:2443] 2025-04-10T11:26:03.769312Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644298252703758:2443][/dc-1] Path was already updated: owner# [1:7491644298252703341:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:03.769364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703762:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703759:2443], cookie# 1 2025-04-10T11:26:03.769423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703763:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703760:2443], cookie# 1 2025-04-10T11:26:03.769444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703764:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703761:2443], cookie# 1 2025-04-10T11:26:03.769479Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702963:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703762:2443], cookie# 1 2025-04-10T11:26:03.769504Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702966:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703763:2443], cookie# 1 2025-04-10T11:26:03.769542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702969:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644298252703764:2443], cookie# 1 2025-04-10T11:26:03.769590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703762:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252702963:2051], cookie# 1 2025-04-10T11:26:03.769612Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7491644298252703763:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252702966:2054], cookie# 1 2025-04-10T11:26:03.769625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644298252703764:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252702969:2057], cookie# 1 2025-04-10T11:26:03.769655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252703759:2443], cookie# 1 2025-04-10T11:26:03.769695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:03.769712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252703760:2443], cookie# 1 2025-04-10T11:26:03.769735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:03.769754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644298252703761:2443], cookie# 1 2025-04-10T11:26:03.769767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644298252703758:2443][/dc-1] Unexpected sync response: sender# [1:7491644298252703761:2443], cookie# 1 2025-04-10T11:26:03.810630Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644298252703341:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:26:03.811017Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644298252703341:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644311137606256:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644298252702963:2051], cookie# 2 2025-04-10T11:26:06.305341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644311137606257:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644298252702966:2054], cookie# 2 2025-04-10T11:26:06.305358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644311137606258:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644298252702969:2057], cookie# 2 2025-04-10T11:26:06.305385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644311137606253:2870], cookie# 2 2025-04-10T11:26:06.305402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:06.305456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644311137606254:2870], cookie# 2 2025-04-10T11:26:06.305476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Sync is done: cookie# 2, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:06.305498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7491644311137606255:2870], cookie# 2 2025-04-10T11:26:06.305509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644311137606252:2870][/dc-1/USER_0/SimpleTable] Unexpected sync response: sender# [1:7491644311137606255:2870], cookie# 2 2025-04-10T11:26:06.305548Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644298252703341:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2025-04-10T11:26:06.305612Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644298252703341:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7491644311137606252:2870] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744284366250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-04-10T11:26:06.305693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644298252703341:2154], cacheItem# { Subscriber: { Subscriber: [1:7491644311137606252:2870] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess 
Kind: 3 TableKind: 1 Created: 1 CreateStep: 1744284366250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-10T11:26:06.305851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644311137606262:2874], recipient# [1:7491644311137606261:2873], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:26:06.305876Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644311137606261:2873] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:26:06.305928Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644311137606261:2873] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-04-10T11:26:06.306804Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644311137606261:2873] Handle TEvDescribeSchemeResult Forward to# [1:7491644311137606260:2872] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284366250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 
} Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284366250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-04-10T11:26:06.338676Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702963:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644305236126224:2103] 2025-04-10T11:26:06.338737Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702963:2051] Unsubscribe: subscriber# [3:7491644305236126224:2103], path# /dc-1/USER_0 2025-04-10T11:26:06.338781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702966:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644305236126225:2103] 2025-04-10T11:26:06.338793Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702966:2054] Unsubscribe: subscriber# [3:7491644305236126225:2103], path# /dc-1/USER_0 2025-04-10T11:26:06.338812Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644298252702969:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7491644305236126226:2103] 2025-04-10T11:26:06.338823Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644298252702969:2057] Unsubscribe: subscriber# [3:7491644305236126226:2103], path# /dc-1/USER_0 2025-04-10T11:26:06.339040Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-10T11:26:06.340048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-04-10T11:26:04.173160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644300233638882:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:04.175125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:04.217871Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644298749972918:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:04.218077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c92/r3tmp/tmpUbw1Zm/pdisk_1.dat 2025-04-10T11:26:04.614501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:04.614583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:04.618367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:26:04.618531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:04.620252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:04.621858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:26:04.621909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:04.627127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5248 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:26:04.850515Z node 1 :TX_PROXY DEBUG: actor# [1:7491644300233639040:2141] Handle TEvNavigate describe path dc-1 2025-04-10T11:26:04.850572Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644300233639479:2442] HANDLE EvNavigateScheme dc-1 2025-04-10T11:26:04.850716Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644300233639063:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:04.850763Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644300233639063:2154], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:26:04.850948Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:26:04.853289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638680:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644300233639484:2443] 2025-04-10T11:26:04.853367Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644300233638680:2052] Subscribe: subscriber# [1:7491644300233639484:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:04.853431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638686:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644300233639486:2443] 2025-04-10T11:26:04.853449Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644300233638686:2058] Subscribe: subscriber# [1:7491644300233639486:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:04.853495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639484:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233638680:2052] 2025-04-10T11:26:04.853534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639486:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233638686:2058] 2025-04-10T11:26:04.853626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233639481:2443] 2025-04-10T11:26:04.853671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233639483:2443] 2025-04-10T11:26:04.853754Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644300233639480:2443][/dc-1] Set up state: owner# [1:7491644300233639063:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 
72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:04.853912Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639484:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639481:2443], cookie# 1 2025-04-10T11:26:04.853938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639485:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639482:2443], cookie# 1 2025-04-10T11:26:04.853960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639486:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639483:2443], cookie# 1 2025-04-10T11:26:04.854016Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638680:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644300233639484:2443] 2025-04-10T11:26:04.854064Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638680:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639484:2443], cookie# 1 2025-04-10T11:26:04.854089Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638686:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644300233639486:2443] 2025-04-10T11:26:04.854102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638686:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639486:2443], cookie# 1 2025-04-10T11:26:04.854893Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638683:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644300233639485:2443] 2025-04-10T11:26:04.854986Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644300233638683:2055] Subscribe: subscriber# [1:7491644300233639485:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:04.855134Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638683:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644300233639485:2443], cookie# 1 2025-04-10T11:26:04.855208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639484:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233638680:2052], cookie# 1 2025-04-10T11:26:04.855238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639486:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233638686:2058], cookie# 1 2025-04-10T11:26:04.855275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639485:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233638683:2055] 2025-04-10T11:26:04.855321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644300233639485:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233638683:2055], cookie# 1 2025-04-10T11:26:04.855420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233639481:2443], cookie# 1 2025-04-10T11:26:04.855475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7491644300233639480:2443][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:04.855497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233639483:2443], cookie# 1 2025-04-10T11:26:04.855525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:04.855614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644300233639482:2443] 2025-04-10T11:26:04.855694Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644300233639480:2443][/dc-1] Path was already updated: owner# [1:7491644300233639063:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:04.855740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644300233639482:2443], cookie# 1 2025-04-10T11:26:04.855758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644300233639480:2443][/dc-1] Unexpected sync response: sender# [1:7491644300233639482:2443], cookie# 1 2025-04-10T11:26:04.855787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644300233638683:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644300233639485:2443] 2025-04-10T11:26:04.922512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644300233639063:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSiz ... 
] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:07.618694Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644311634875116:2121], recipient# [2:7491644311634875090:2305], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:07.619048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491644311634875090:2305], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:07.751861Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644298749973154:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:07.751988Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644311634875093:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:07.752041Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644311634875097:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:07.752158Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644311634875117:2122], recipient# [2:7491644311634875090:2305], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:07.752634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491644311634875090:2305], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:08.065593Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644298749973154:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.065714Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644311634875093:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:08.065766Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644311634875097:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:08.065839Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644315929842414:2123], recipient# [2:7491644311634875090:2305], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.065948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7491644311634875090:2305], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:08.277185Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644298749973154:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.277393Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644303044940476:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:08.277567Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644315929842416:2124], recipient# [2:7491644315929842415:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.558577Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644298749973154:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.563105Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644298749973154:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644311634875101:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:08.563284Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644315929842418:2125], recipient# [2:7491644315929842417:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown 
DomainInfo }] } 2025-04-10T11:26:08.563468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> TStorageTenantTest::GenericCases >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> KqpJoinOrder::TPCDS16+ColumnStore >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 14250, MsgBus: 19507 2025-04-10T11:26:04.006593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644300376272359:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:04.006672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001716/r3tmp/tmpchQdej/pdisk_1.dat 2025-04-10T11:26:04.372785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:04.416225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:04.416332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14250, node 1 2025-04-10T11:26:04.417833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:04.454444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:04.454471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:04.454501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:04.454636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19507 TClient is connected to server localhost:19507 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:04.986338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:05.026279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:05.195945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:05.357721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:05.439601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:07.213286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644313261176034:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:07.213451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:07.560054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.594132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.622769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.687758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.723095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.756057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.848596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644313261176548:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:07.848673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:07.848850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644313261176553:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:07.852336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:07.862639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644313261176555:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:07.956798Z node 1 :TX_PROXY ERROR: Actor# [1:7491644313261176612:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:08.839537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:09.006655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644300376272359:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:09.006753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoin::JoinAggregateSingleRow >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> TStorageTenantTest::GenericCases [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 62312, MsgBus: 13367 2025-04-10T11:26:09.197720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644323445657437:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:09.207987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001711/r3tmp/tmp9woakm/pdisk_1.dat 2025-04-10T11:26:09.551641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-10T11:26:09.551731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:09.555523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:09.575035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62312, node 1 2025-04-10T11:26:09.631768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:09.631798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:09.631805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:09.631945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13367 TClient is connected to server localhost:13367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:10.161692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:10.180726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:10.315817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:10.462930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:10.538301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:12.182459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644336330561121:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:12.182576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:12.482788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.516926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.550939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.615285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.647059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.718234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.761320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644336330561642:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:12.761384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:12.763142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644336330561647:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:12.767576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:12.781617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644336330561649:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:12.880553Z node 1 :TX_PROXY ERROR: Actor# [1:7491644336330561703:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:13.816654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.885146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.912611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.951279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.983325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.014469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.196485Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644323445657437:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:14.196578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2025-04-10T11:26:10.963703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644326365174068:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:10.967292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c84/r3tmp/tmpR7DSTD/pdisk_1.dat 2025-04-10T11:26:11.377972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:11.390077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:11.390200Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:11.395144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8044 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T11:26:11.609951Z node 1 :TX_PROXY DEBUG: actor# [1:7491644326365174255:2130] Handle TEvNavigate describe path dc-1 2025-04-10T11:26:11.610025Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644330660142002:2438] HANDLE EvNavigateScheme dc-1 2025-04-10T11:26:11.610183Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644330660141576:2143], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:11.610243Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644330660141576:2143], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:26:11.610566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:26:11.617052Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173911:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644330660142007:2439] 2025-04-10T11:26:11.617101Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173914:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644330660142008:2439] 2025-04-10T11:26:11.617132Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173911:2050] Subscribe: subscriber# [1:7491644330660142007:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:11.617154Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173914:2053] Subscribe: subscriber# [1:7491644330660142008:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:11.617205Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173917:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644330660142009:2439] 2025-04-10T11:26:11.617223Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173917:2056] Subscribe: subscriber# [1:7491644330660142009:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:11.617294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142007:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644326365173911:2050] 2025-04-10T11:26:11.617317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142008:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644326365173914:2053] 2025-04-10T11:26:11.617337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142009:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644326365173917:2056] 2025-04-10T11:26:11.617360Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7491644326365173911:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644330660142007:2439] 2025-04-10T11:26:11.617395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644330660142004:2439] 2025-04-10T11:26:11.617398Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173914:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644330660142008:2439] 2025-04-10T11:26:11.617442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644330660142005:2439] 2025-04-10T11:26:11.617442Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173917:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644330660142009:2439] 2025-04-10T11:26:11.617505Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644330660142003:2439][/dc-1] Set up state: owner# [1:7491644330660141576:2143], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:11.617622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644330660142006:2439] 2025-04-10T11:26:11.617671Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644330660142003:2439][/dc-1] Path was already updated: owner# [1:7491644330660141576:2143], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:11.617714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142007:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142004:2439], cookie# 1 2025-04-10T11:26:11.617733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142008:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142005:2439], cookie# 1 2025-04-10T11:26:11.617746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142009:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142006:2439], cookie# 1 2025-04-10T11:26:11.617781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173911:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142007:2439], cookie# 1 2025-04-10T11:26:11.617800Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173914:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142008:2439], cookie# 1 2025-04-10T11:26:11.617824Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173917:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644330660142009:2439], cookie# 1 
2025-04-10T11:26:11.617855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142007:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644326365173911:2050], cookie# 1 2025-04-10T11:26:11.617868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142008:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644326365173914:2053], cookie# 1 2025-04-10T11:26:11.617906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644330660142009:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644326365173917:2056], cookie# 1 2025-04-10T11:26:11.617940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644330660142004:2439], cookie# 1 2025-04-10T11:26:11.617964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:11.617977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644330660142005:2439], cookie# 1 2025-04-10T11:26:11.617996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:11.618014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644330660142006:2439], cookie# 1 2025-04-10T11:26:11.618027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644330660142003:2439][/dc-1] Unexpected sync response: sender# [1:7491644330660142006:2439], cookie# 1 2025-04-10T11:26:11.686059Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644330660141576:2143], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:26:11.686432Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644330660141576:2143], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... icasList 2025-04-10T11:26:14.224940Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173911:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7491644343545044849:3147] 2025-04-10T11:26:14.224956Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173911:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-10T11:26:14.224986Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173911:2050] Subscribe: subscriber# [1:7491644343545044849:3147], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.225015Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173914:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7491644343545044850:3147] 2025-04-10T11:26:14.225024Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173914:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-10T11:26:14.225042Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173914:2053] Subscribe: subscriber# [1:7491644343545044850:3147], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.225068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173917:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7491644343545044851:3147] 2025-04-10T11:26:14.225078Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173917:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-10T11:26:14.225098Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644326365173917:2056] Subscribe: subscriber# [1:7491644343545044851:3147], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.225126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343545044849:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644326365173911:2050] 2025-04-10T11:26:14.225147Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343545044850:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644326365173914:2053] 2025-04-10T11:26:14.225171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343545044851:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644326365173917:2056] 2025-04-10T11:26:14.225201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343545044839:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644343545044846:3147] 2025-04-10T11:26:14.225230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343545044839:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644343545044847:3147] 2025-04-10T11:26:14.225267Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644343545044839:3147][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7491644330660141576:2143], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:14.225288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343545044839:3147][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7491644343545044848:3147] 2025-04-10T11:26:14.225308Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644343545044839:3147][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7491644330660141576:2143], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:14.225326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173911:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491644343545044849:3147] 2025-04-10T11:26:14.225340Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173914:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491644343545044850:3147] 2025-04-10T11:26:14.225364Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644326365173917:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7491644343545044851:3147] 2025-04-10T11:26:14.225395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644330660141576:2143], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-10T11:26:14.225443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644330660141576:2143], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7491644343545044839:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-10T11:26:14.225506Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644330660141576:2143], cacheItem# { Subscriber: { Subscriber: [1:7491644343545044839:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:14.225588Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644343545044852:3148], recipient# [1:7491644343545044836:2338], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:14.225707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343545044829:3144][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7491644343545044830:3144] 2025-04-10T11:26:14.225739Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644343545044829:3144][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7491644330660141576:2143], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:14.983932Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644330660141576:2143], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:14.984065Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644330660141576:2143], cacheItem# { Subscriber: { Subscriber: [1:7491644330660142298:2660] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:14.984149Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644343545044869:3152], recipient# [1:7491644343545044868:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.224838Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644330660141576:2143], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.224977Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7491644330660141576:2143], cacheItem# { Subscriber: { Subscriber: [1:7491644343545044829:3144] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.225052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7491644347840012170:3153], recipient# [1:7491644347840012169:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-04-10T11:26:08.086809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644317738022365:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:08.086865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c8d/r3tmp/tmp1wTMD3/pdisk_1.dat 2025-04-10T11:26:08.450475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:08.450587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:08.454944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:08.477207Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5533 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:26:08.705295Z node 1 :TX_PROXY DEBUG: actor# [1:7491644317738022602:2102] Handle TEvNavigate describe path dc-1 2025-04-10T11:26:08.705354Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644317738023098:2435] HANDLE EvNavigateScheme dc-1 2025-04-10T11:26:08.705473Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644317738022646:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:08.705504Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644317738022646:2126], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:26:08.705718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:26:08.707749Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022319:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644317738023105:2436] 2025-04-10T11:26:08.707830Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644317738022319:2055] Subscribe: subscriber# [1:7491644317738023105:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:08.707914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023105:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738022319:2055] 2025-04-10T11:26:08.708046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022313:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644317738023103:2436] 2025-04-10T11:26:08.708079Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644317738022313:2049] Subscribe: subscriber# [1:7491644317738023103:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:08.708106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738023102:2436] 2025-04-10T11:26:08.708143Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022316:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644317738023104:2436] 2025-04-10T11:26:08.708144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023103:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738022313:2049] 2025-04-10T11:26:08.708170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738023100:2436] 2025-04-10T11:26:08.708187Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644317738022316:2052] Subscribe: subscriber# [1:7491644317738023104:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:08.708219Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
[main][1:7491644317738023099:2436][/dc-1] Set up state: owner# [1:7491644317738022646:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:08.708221Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022319:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644317738023105:2436] 2025-04-10T11:26:08.708235Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022313:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644317738023103:2436] 2025-04-10T11:26:08.708351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023104:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738022316:2052] 2025-04-10T11:26:08.708381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023103:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023100:2436], cookie# 1 2025-04-10T11:26:08.708395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023104:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023101:2436], cookie# 1 2025-04-10T11:26:08.708409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023105:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023102:2436], cookie# 1 2025-04-10T11:26:08.708429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644317738023101:2436] 2025-04-10T11:26:08.708479Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644317738023099:2436][/dc-1] Path was already updated: owner# [1:7491644317738022646:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:08.709050Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022313:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023103:2436], cookie# 1 2025-04-10T11:26:08.709079Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022319:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023105:2436], cookie# 1 2025-04-10T11:26:08.709139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023103:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644317738022313:2049], cookie# 1 2025-04-10T11:26:08.709153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023105:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644317738022319:2055], cookie# 1 2025-04-10T11:26:08.709196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7491644317738023100:2436], cookie# 1 2025-04-10T11:26:08.709231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:08.709245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644317738023102:2436], cookie# 1 2025-04-10T11:26:08.709273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:08.709312Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022316:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644317738023104:2436] 2025-04-10T11:26:08.709350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644317738022316:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644317738023104:2436], cookie# 1 2025-04-10T11:26:08.709368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644317738023104:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644317738022316:2052], cookie# 1 2025-04-10T11:26:08.709383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644317738023101:2436], cookie# 1 2025-04-10T11:26:08.709398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644317738023099:2436][/dc-1] Unexpected sync response: sender# [1:7491644317738023101:2436], cookie# 1 2025-04-10T11:26:08.755451Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644317738022646:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-10T11:26:08.755852Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7491644317738022646:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: 
true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... nknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.657188Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491644346131522336:2770], recipient# [4:7491644346131522312:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.657235Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491644346131522337:2771], recipient# [4:7491644346131522314:2373], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.657916Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491644346131522312:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:15.658083Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:26:15.740634Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491644333246619369:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.740877Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522315:2767] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.740956Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522316:2768] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.741106Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491644346131522338:2772], recipient# [4:7491644346131522312:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.741384Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491644346131522312:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:15.845608Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491644333246619369:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.845738Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522315:2767] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.845791Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522316:2768] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:15.845891Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491644346131522339:2773], recipient# [4:7491644346131522312:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:15.846109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491644346131522312:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:16.054010Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7491644333246619369:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:16.054141Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522315:2767] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:16.054204Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7491644333246619369:2107], cacheItem# { Subscriber: { Subscriber: [4:7491644346131522316:2768] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:16.054328Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7491644350426489636:2774], recipient# [4:7491644346131522312:2371], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:16.054700Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7491644346131522312:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> KqpJoin::JoinAggregateSingleRow [GOOD] >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteSplit >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregateSingleRow [GOOD] Test command err: Trying to start YDB, gRPC: 4470, MsgBus: 14970 2025-04-10T11:26:11.586038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644330308266304:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:11.587149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170f/r3tmp/tmpr1OLnw/pdisk_1.dat 2025-04-10T11:26:11.988122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:11.988219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:11.990332Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:11.992755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4470, node 1 2025-04-10T11:26:12.064154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:12.064168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:12.064174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:12.064243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14970 TClient is connected to server localhost:14970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:12.571237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:26:12.585666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:12.602569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:12.736423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:12.913875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:12.994520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:14.604838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644343193169967:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:14.604960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:14.906863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.938839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:15.007682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:15.034791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:15.066678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:15.133869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:15.189344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644347488137784:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:15.189416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:15.189660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644347488137789:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:15.192846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:15.201351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644347488137791:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:15.287252Z node 1 :TX_PROXY ERROR: Actor# [1:7491644347488137845:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:16.203686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:16.238328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:16.276937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:16.589141Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644330308266304:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:16.589201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoin::LeftJoinPushdownPredicate_Simple >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogExistingRequest >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeLag >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 2133614794693810975 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-04-10T11:26:08.209726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644319377158649:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:08.209774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:08.262615Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644319426941199:2158];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:08.394354Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:26:08.408780Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0012bd/r3tmp/tmpYOd4iN/pdisk_1.dat 2025-04-10T11:26:08.431087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:08.689270Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:08.689365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:08.690444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:08.690497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:08.693883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:08.694454Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:26:08.695514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:08.721576Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15391, node 1 2025-04-10T11:26:08.760719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:08.760754Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:08.861326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/0012bd/r3tmp/yandexCnAVay.tmp 2025-04-10T11:26:08.861365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/0012bd/r3tmp/yandexCnAVay.tmp 2025-04-10T11:26:08.865513Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/0012bd/r3tmp/yandexCnAVay.tmp 2025-04-10T11:26:08.865750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:26:09.061627Z INFO: TTestServer started on Port 29612 GrpcPort 15391 TClient is connected to server localhost:29612 PQClient connected to localhost:15391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:09.396066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:09.478385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-10T11:26:11.374475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644332311843390:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.374616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644332311843378:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.375590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.380917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644332262061634:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.381049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.381373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644332262061655:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.385469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:26:11.394099Z node 1 :TX_PROXY ERROR: Actor# [1:7491644332262061658:2706] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:26:11.412147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644332311843393:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:26:11.414334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644332262061657:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:26:11.496783Z node 2 :TX_PROXY ERROR: Actor# [2:7491644332311843422:2179] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:11.506616Z node 1 :TX_PROXY ERROR: Actor# [1:7491644332262061754:2769] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:11.840102Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644332262061764:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:26:11.842039Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491644332311843429:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:26:11.844968Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGE0MjQzZDUtNGQwNGFkZjUtMjBjMTRjODAtNTg5YzZkY2M=, ActorId: [2:7491644332311843376:2311], ActorState: ExecuteState, TraceId: 01jrfq6jdaftgms99hbyyp7j7a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:26:11.845604Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVjOGE3MjgtN2NlYTUzNWMtNTNlNzA2ODQtYTM4OGMyOWQ=, ActorId: [1:7491644332262061630:2335], ActorState: ExecuteState, TraceId: 01jrfq6jdjd53aaxfved0zr548, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:26:11.848035Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:26:11.848611Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:26:11.895435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.007572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.155854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", ... 
UE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7491644362376615228:2434] 2025-04-10T11:26:18.907040Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7491644362376615227:2433] 2025-04-10T11:26:18.906990Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7491644362326834388:2517] 2025-04-10T11:26:18.910830Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7491644362376615229:2435] 2025-04-10T11:26:18.911334Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7491644362326834395:2524] 2025-04-10T11:26:18.912099Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7491644362326834393:2522] 2025-04-10T11:26:18.913313Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7491644362326834395:2524] 2025-04-10T11:26:18.910933Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7491644362376615231:2437] 2025-04-10T11:26:18.912979Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7491644362376615229:2435] 2025-04-10T11:26:18.912985Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7491644362376615231:2437] 2025-04-10T11:26:18.913950Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7491644362326834393:2522] 2025-04-10T11:26:18.914880Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7491644362376615233:2439] 2025-04-10T11:26:18.915270Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7491644362376615234:2440] 2025-04-10T11:26:18.916841Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7491644362376615233:2439] 2025-04-10T11:26:18.915692Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7491644362326834397:2526] 2025-04-10T11:26:18.916264Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7491644362326834389:2518] 2025-04-10T11:26:18.917522Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7491644362326834397:2526] 2025-04-10T11:26:18.918094Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7491644362326834389:2518] 2025-04-10T11:26:18.919025Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7491644362326834391:2520] 2025-04-10T11:26:18.921036Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' 
partition 11 generation 1 [1:7491644362326834391:2520] 2025-04-10T11:26:18.918579Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7491644362376615234:2440] 2025-04-10T11:26:18.919108Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7491644362376615239:2444] 2025-04-10T11:26:18.921210Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7491644362376615239:2444] 2025-04-10T11:26:18.922901Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7491644362376615237:2442] 2025-04-10T11:26:18.924845Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7491644362376615237:2442] 2025-04-10T11:26:18.980106Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.980904Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.979680Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.980381Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.980838Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.981301Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.982284Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:18.982612Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2025-04-10T11:26:19.085316Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644366621801958:4052]: Request location 2025-04-10T11:26:19.086372Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801968:4056] connected; active server actors: 1 2025-04-10T11:26:19.086906Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-10T11:26:19.086934Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-10T11:26:19.086952Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-04-10T11:26:19.086966Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-10T11:26:19.086978Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-04-10T11:26:19.086992Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-10T11:26:19.087002Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-04-10T11:26:19.087016Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-04-10T11:26:19.087027Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-04-10T11:26:19.087038Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-04-10T11:26:19.087049Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-04-10T11:26:19.087062Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-04-10T11:26:19.087074Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-04-10T11:26:19.087086Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-04-10T11:26:19.087097Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-04-10T11:26:19.087922Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644366621801958:4052]: Got location 2025-04-10T11:26:19.088765Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801968:4056] disconnected; active server actors: 1 2025-04-10T11:26:19.088793Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801968:4056] disconnected no session 2025-04-10T11:26:19.091161Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801986:4073] connected; active server actors: 1 2025-04-10T11:26:19.092524Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-10T11:26:19.092546Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-10T11:26:19.092564Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-10T11:26:19.093287Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801986:4073] disconnected; active server actors: 1 2025-04-10T11:26:19.093306Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801986:4073] disconnected no session 2025-04-10T11:26:19.090848Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644366621801982:4069]: Request location 
2025-04-10T11:26:19.092986Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644366621801982:4069]: Got location 2025-04-10T11:26:19.095482Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644366621801987:4074]: Request location 2025-04-10T11:26:19.097235Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644366621801992:4079] connected; active server actors: 1 2025-04-10T11:26:19.849214Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491644366621802070:2557] TxId: 281474976710684. Ctx: { TraceId: 01jrfq6tp56cnd8g92h78w6hts, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNkM2FmOWItZmEyMzVlZTgtY2JmZDYxMzUtYzcwZDJjMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-10T11:26:19.850014Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491644366621802076:2557], TxId: 281474976710684, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfq6tp56cnd8g92h78w6hts. SessionId : ydb://session/3?node_id=1&id=ZWNkM2FmOWItZmEyMzVlZTgtY2JmZDYxMzUtYzcwZDJjMTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491644366621802070:2557], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> TPQTest::TestWriteTimeLag [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteTimeLag [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.311916Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.311991Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.332601Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.347556Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.348274Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.350717Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.352682Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.354128Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.381029Z node 1 :PERSQUEUE INFO: new Cookie default|44073a0-7fc1c57a-d2a2e444-34fe99d1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.538688Z node 
1 :PERSQUEUE INFO: new Cookie default|62ca981b-ea1a79f-b3b1f80c-f5f4dc6c_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.624133Z node 1 :PERSQUEUE INFO: new Cookie default|532ede1d-a4c245f6-8f5fe340-b4b214e3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.673454Z node 1 :PERSQUEUE INFO: new Cookie default|75c75b20-5b1c5ee1-216bbc60-3e81f4d7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.684678Z node 1 :PERSQUEUE INFO: new Cookie default|58a1f65b-ad451f9d-154ed343-53cb14f9_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:40.690403Z node 1 :PERSQUEUE INFO: new Cookie default|cdd0011f-9cebabba-6484bc5a-22c79f6e_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:24:41.220958Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.221051Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T11:24:41.244684Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.245800Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:41.246552Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T11:24:41.249312Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T11:24:41.252245Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-04-10T11:24:41.253639Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-04-10T11:24:41.292762Z node 2 :PERSQUEUE INFO: new Cookie default|cdaa11ad-dce13702-268e3264-fe5ba907_0 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:41.435304Z node 2 :PERSQUEUE INFO: new Cookie default|b4927bbb-6ed766d9-7d2f8fa7-84a34940_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:41.519039Z node 2 :PERSQUEUE INFO: new Cookie default|a7351419-120e305e-cfe5f481-abfc1be8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:41.567280Z node 2 :PERSQUEUE INFO: new Cookie default|2f18b1fa-4aeee853-1968f7f9-39426608_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:41.575075Z node 2 :PERSQUEUE INFO: new Cookie default|3e2c790d-6a8136ee-cf745e38-55311a84_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:293:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:296:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:297:2057] recipient: [2:295:2293] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:299:2057] recipient: [2:295:2293] 2025-04-10T11:24:41.621075Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.621140Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:24:41.622110Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:347:2335] 2025-04-10T11:24:41.624744Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:348:2336] 2025-04-10T11:24:41.635006Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:41.635100Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:348:2336] 2025-04-10T11:24:41.636200Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:24:41.636261Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:347:2335] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:298:2294] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:391:2057] recipient: [2:14:2061] 2025-04-10T11:24:42.822014Z node 2 :PERSQUEUE INFO: new Cookie default|b04c139a-76686184-9ff07993-c10d625e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:43.302192Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.302276Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-10T11:24:43.322542Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.323398Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:24:43.324048Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-10T11:24:43.327052Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-10T11:24:43.328857Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-10T11:24:43.331052Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-10T11:24:43.354497Z node 3 :PERSQUEUE INFO: new Cookie default|ff2035d2-74df675f-d50722f-b86af5b6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topi ... 
Q: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:26:22.306335Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 63(current 62) received from actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-04-10T11:26:22.309754Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-04-10T11:26:22.309850Z node 60 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:22.310123Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 63 done 2025-04-10T11:26:22.310490Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:26:22.310549Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:26:22.310607Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:26:22.310661Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:26:22.310706Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] m0000000000cuser
2025-04-10T11:26:22.310734Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser
2025-04-10T11:26:22.310760Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa
2025-04-10T11:26:22.310788Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa
2025-04-10T11:26:22.310814Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother1
2025-04-10T11:26:22.310839Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother1
2025-04-10T11:26:22.310865Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother
2025-04-10T11:26:22.310894Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother
2025-04-10T11:26:22.310919Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cimportant
2025-04-10T11:26:22.310945Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uimportant
2025-04-10T11:26:22.310972Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0
2025-04-10T11:26:22.311017Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ----------------
2025-04-10T11:26:22.311079Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] ===========================
2025-04-10T11:26:22.311193Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 63 done
2025-04-10T11:26:22.311400Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest ===
2025-04-10T11:26:22.311436Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ----------------
2025-04-10T11:26:22.311467Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write -----------------
2025-04-10T11:26:22.311496Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001
2025-04-10T11:26:22.311522Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser
2025-04-10T11:26:22.311550Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser
2025-04-10T11:26:22.311575Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa
2025-04-10T11:26:22.311601Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa
2025-04-10T11:26:22.311629Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother1
2025-04-10T11:26:22.311655Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother1
2025-04-10T11:26:22.311682Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother
2025-04-10T11:26:22.311708Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother
2025-04-10T11:26:22.311732Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cimportant
2025-04-10T11:26:22.311757Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uimportant
2025-04-10T11:26:22.311782Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1
2025-04-10T11:26:22.311810Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ----------------
2025-04-10T11:26:22.311839Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] ===========================
2025-04-10T11:26:22.311929Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV
2025-04-10T11:26:22.312167Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV
2025-04-10T11:26:22.317147Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0
2025-04-10T11:26:22.317410Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged
2025-04-10T11:26:22.318745Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0
2025-04-10T11:26:22.318921Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged
2025-04-10T11:26:22.319249Z node 60 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 63 actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false }
2025-04-10T11:26:22.320019Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:617:2564], now have 1 active actors on pipe
2025-04-10T11:26:22.321005Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:620:2566], now have 1 active actors on pipe
2025-04-10T11:26:22.321164Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId:
2025-04-10T11:26:22.321233Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-04-10T11:26:22.321404Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
2025-04-10T11:26:22.321948Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:622:2568], now have 1 active actors on pipe
2025-04-10T11:26:22.322081Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId:
2025-04-10T11:26:22.322139Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-04-10T11:26:22.322288Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
2025-04-10T11:26:22.322941Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:624:2570], now have 1 active actors on pipe
2025-04-10T11:26:22.323055Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId:
2025-04-10T11:26:22.323146Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-04-10T11:26:22.323291Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
2025-04-10T11:26:22.323811Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:626:2572], now have 1 active actors on pipe
2025-04-10T11:26:22.323947Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId:
2025-04-10T11:26:22.324010Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-04-10T11:26:22.324117Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD]
>> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows
>> TTxDataShardUploadRows::TestUploadRows
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD]
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite
>> BSCRestartPDisk::RestartOneByOne [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull
>> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD]
>> KqpJoin::RightSemiJoin_FullScan
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD]
Test command err: RandomSeed# 17177260818833596299
|93.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|93.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
>> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD]
>> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD]
Test command err: Trying to start YDB, gRPC: 25137, MsgBus: 20203
2025-04-10T11:26:19.214553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644363557548298:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:19.214605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170d/r3tmp/tmpDjiW8i/pdisk_1.dat
2025-04-10T11:26:19.574452Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:26:19.592414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:26:19.592542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:26:19.595136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25137, node 1
2025-04-10T11:26:19.679503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:26:19.679522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:26:19.679528Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:26:19.679679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20203
TClient is connected to server localhost:20203
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:26:20.233090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:20.250587Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:26:20.265023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:20.398309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:20.543529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:20.599860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:22.167146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644376442451966:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:22.167257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:22.498197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.536884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.571203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.600660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.636328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.682301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:26:22.800153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644376442452487:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:22.800241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:22.800541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644376442452492:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:22.804242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:26:22.817526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644376442452494:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:26:22.903864Z node 1 :TX_PROXY ERROR: Actor# [1:7491644376442452548:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:26:23.789279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.857826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.886907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.919393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.955478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.985632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:26:24.215501Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644363557548298:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:24.231178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110]
!Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110]
!Reboot 72057594037927937 (actor [3:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061]
!Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110]
!Reboot 72057594037927937 (actor [4:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed!
new actor is[4:82:2111]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061]
!Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113]
!Reboot 72057594037927937 (actor [5:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed!
new actor is[5:85:2114]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061]
!Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113]
!Reboot 72057594037927937 (actor [6:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed!
new actor is[6:85:2114]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061]
!Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113]
!Reboot 72057594037927937 (actor [7:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed!
new actor is[7:86:2114]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:104:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061]
!Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115]
Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115]
!Reboot 72057594037927937 (actor [8:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed!
new actor is[8:88:2116]
Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061]
!Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange !
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115]
!Reboot 72057594037927937 (actor [9:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed!
new actor is[9:88:2116]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061]
!Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115]
Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115]
!Reboot 72057594037927937 (actor [10:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed!
new actor is[10:89:2116]
Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:107:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061]
!Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117]
Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117]
!Reboot 72057594037927937 (actor [11:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed!
new actor is[11:91:2118]
Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061]
!Reboot 72057594037927937 (a ... 594037927937 (actor [23:56:2097]) tablet resolver refreshed!
new actor is[23:85:2114]
Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061]
!Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:83:2113]
Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113]
!Reboot 72057594037927937 (actor [24:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed!
new actor is[24:86:2114]
Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:140:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061]
!Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:36:2083]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:14:2061]
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:88:2057] recipient: [25:86:2116]
Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:90:2057] recipient: [25:86:2116]
!Reboot 72057594037927937 (actor [25:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed!
new actor is[25:89:2117]
Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:143:2057] recipient: [25:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061]
!Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:36:2083]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061]
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:88:2057] recipient: [26:87:2116]
Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:90:2057] recipient: [26:87:2116]
!Reboot 72057594037927937 (actor [26:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed!
new actor is[26:89:2117]
Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:143:2057] recipient: [26:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095]
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095]
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061]
!Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:85:2057] recipient: [27:36:2083]
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:14:2061]
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:89:2057] recipient: [27:87:2116]
Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116]
!Reboot 72057594037927937 (actor [27:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed!
new actor is[27:90:2117]
Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:144:2057] recipient: [27:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095]
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095]
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061]
!Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:36:2083]
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:91:2057] recipient: [28:14:2061]
Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:92:2057] recipient: [28:90:2119]
Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:94:2057] recipient: [28:90:2119]
!Reboot 72057594037927937 (actor [28:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed!
new actor is[28:93:2120]
Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:147:2057] recipient: [28:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095]
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095]
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061]
!Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:36:2083]
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:91:2057] recipient: [29:14:2061]
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:92:2057] recipient: [29:90:2119]
Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:94:2057] recipient: [29:90:2119]
!Reboot 72057594037927937 (actor [29:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed!
new actor is[29:93:2120]
Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:147:2057] recipient: [29:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095]
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095]
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061]
!Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:89:2057] recipient: [30:36:2083]
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:92:2057] recipient: [30:14:2061]
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:93:2057] recipient: [30:91:2119]
Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:95:2057] recipient: [30:91:2119]
!Reboot 72057594037927937 (actor [30:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed!
new actor is[30:94:2120]
Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:148:2057] recipient: [30:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095]
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095]
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061]
!Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:91:2057] recipient: [31:36:2083]
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:14:2061]
Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:95:2057] recipient: [31:93:2121]
Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:97:2057] recipient: [31:93:2121]
!Reboot 72057594037927937 (actor [31:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed!
new actor is[31:96:2122]
Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:150:2057] recipient: [31:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095]
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095]
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061]
!Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:91:2057] recipient: [32:36:2083]
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:14:2061]
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:95:2057] recipient: [32:93:2121]
Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:97:2057] recipient: [32:93:2121]
!Reboot 72057594037927937 (actor [32:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed!
new actor is[32:96:2122]
Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:150:2057] recipient: [32:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095]
Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095]
Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061]
!Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083]
Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061]
Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121]
Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121]
!Reboot 72057594037927937 (actor [33:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed!
new actor is[33:97:2122]
Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:151:2057] recipient: [33:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095]
Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095]
Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD]
Test command err: Trying to start YDB, gRPC: 28802, MsgBus: 16729
2025-04-10T11:26:20.032596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644370584174662:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:20.032674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170a/r3tmp/tmpAvjTI4/pdisk_1.dat
2025-04-10T11:26:20.431402Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:26:20.456930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:26:20.457052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 28802, node 1
2025-04-10T11:26:20.458815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:26:20.514728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:26:20.514759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:26:20.514769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:26:20.514892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16729
TClient is connected to server localhost:16729
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:26:21.010827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:21.033199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:21.183425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:21.345331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:21.425477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:26:23.056344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644383469078335:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:23.056482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:23.359568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.392287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.426796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.455185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.480364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.555917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:26:23.641310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644383469078854:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:23.641378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:23.641453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644383469078859:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:23.645179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:26:23.656754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644383469078861:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:26:23.750071Z node 1 :TX_PROXY ERROR: Actor# [1:7491644383469078916:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:26:24.650813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:26:24.681291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:26:24.706766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:26:25.031282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644370584174662:2062];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:26:25.031334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpJoinOrder::CanonizedJoinOrderTPCH7
>> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD]
>> DataShardReadIterator::ShouldReadRangeOneByOne
>> TTxDataShardUploadRows::TestUploadRows [GOOD]
>> TTxDataShardUploadRows::TestUploadRowsDropColumnRace
>> KqpJoin::TwoJoinsWithQueryService
>> DataShardReadIterator::ShouldReadNonExistingKey [GOOD]
>> DataShardReadIterator::ShouldReadMultipleKeys
>> DataShardReadIterator::ShouldFailWrongSchema [GOOD]
>> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange
>> TTopicApiDescribes::DescribeTopic
>> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD]
>> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder
>> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD]
>> DataShardReadIterator::ShouldReadKeyPrefix3
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
>> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD]
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD]
>> DataShardReadIterator::ShouldRollbackLocksWhenWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110]
!Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110]
!Reboot 72057594037927937 (actor [3:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061]
!Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110]
!Reboot 72057594037927937 (actor [4:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed!
new actor is[4:82:2111]
Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061]
!Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113]
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113]
!Reboot 72057594037927937 (actor [5:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed!
new actor is[5:85:2114]
Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061]
!Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113]
!Reboot 72057594037927937 (actor [6:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed!
new actor is[6:85:2114]
Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061]
!Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113]
!Reboot 72057594037927937 (actor [7:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed!
new actor is[7:86:2114]
Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061]
!Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115]
Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115]
!Reboot 72057594037927937 (actor [8:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed!
new actor is[8:88:2116]
Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061]
!Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115]
!Reboot 72057594037927937 (actor [9:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed!
new actor is[9:88:2116]
Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061]
!Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115]
Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115]
!Reboot 72057594037927937 (actor [10:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed!
new actor is[10:89:2116]
Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061]
!Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117]
Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117]
!Reboot 72057594037927937 (actor [11:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed!
new actor is[11:91:2118]
Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095]
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061]
!Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed!
new actor is[22:85:2114]
Leader for TabletID 72057594037927937 is [22:85:2114] sender: [22:139:2057] recipient: [22:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061]
!Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRead !
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:36:2083]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:82:2113]
Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:86:2057] recipient: [23:82:2113]
!Reboot 72057594037927937 (actor [23:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed!
new actor is[23:85:2114]
Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095]
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061]
!Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify !
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:104:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:85:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:85:2115] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:86:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:87:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:87:2115] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:107:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:89:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:90:2057] recipient: [28:88:2117] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:92:2057] recipient: [28:88:2117] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:91:2118] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:145:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061]
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD]
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite
>> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD]
Test command err: 2025-04-10T11:26:14.232599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644343801512212:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:14.240648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:14.274494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644344823063727:2116];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c7a/r3tmp/tmpG3rkw3/pdisk_1.dat 2025-04-10T11:26:14.555194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:14.743786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:14.743888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:14.745852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:14.745932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:14.746443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:14.750341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:14.751776Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:26:14.753592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5087 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:26:14.972956Z node 1 :TX_PROXY DEBUG: actor# [1:7491644343801512363:2141] Handle TEvNavigate describe path dc-1 2025-04-10T11:26:14.973023Z node 1 :TX_PROXY DEBUG: Actor# [1:7491644343801512821:2458] HANDLE EvNavigateScheme dc-1 2025-04-10T11:26:14.973188Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7491644343801512405:2159], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:14.973234Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7491644343801512405:2159], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-10T11:26:14.973462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:26:14.975555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512003:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644343801512826:2459] 2025-04-10T11:26:14.975641Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644343801512003:2052] Subscribe: subscriber# [1:7491644343801512826:2459], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.975739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512009:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644343801512828:2459] 2025-04-10T11:26:14.975759Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644343801512009:2058] Subscribe: subscriber# [1:7491644343801512828:2459], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.975816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512826:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512003:2052] 2025-04-10T11:26:14.975860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512828:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512009:2058] 2025-04-10T11:26:14.975908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512823:2459] 2025-04-10T11:26:14.975936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512825:2459] 2025-04-10T11:26:14.976023Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7491644343801512822:2459][/dc-1] Set up state: owner# [1:7491644343801512405:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:14.976159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512826:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7491644343801512823:2459], cookie# 1 2025-04-10T11:26:14.976175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512827:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644343801512824:2459], cookie# 1 2025-04-10T11:26:14.976207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512828:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644343801512825:2459], cookie# 1 2025-04-10T11:26:14.976247Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512003:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644343801512826:2459] 2025-04-10T11:26:14.976279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512003:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644343801512826:2459], cookie# 1 2025-04-10T11:26:14.976331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512009:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644343801512828:2459] 2025-04-10T11:26:14.976347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512009:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644343801512828:2459], cookie# 1 2025-04-10T11:26:14.978219Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512006:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7491644343801512827:2459] 2025-04-10T11:26:14.978296Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7491644343801512006:2055] Subscribe: subscriber# [1:7491644343801512827:2459], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-10T11:26:14.978354Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512006:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7491644343801512827:2459], cookie# 1 2025-04-10T11:26:14.978407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512826:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512003:2052], cookie# 1 2025-04-10T11:26:14.978450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512828:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512009:2058], cookie# 1 2025-04-10T11:26:14.978488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512827:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512006:2055] 2025-04-10T11:26:14.978517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7491644343801512827:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512006:2055], cookie# 1 2025-04-10T11:26:14.978574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512823:2459], cookie# 1 2025-04-10T11:26:14.978614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:26:14.978636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512825:2459], cookie# 1 
2025-04-10T11:26:14.978656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:26:14.978687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7491644343801512824:2459] 2025-04-10T11:26:14.978769Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7491644343801512822:2459][/dc-1] Path was already updated: owner# [1:7491644343801512405:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:26:14.978798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7491644343801512824:2459], cookie# 1 2025-04-10T11:26:14.978824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7491644343801512822:2459][/dc-1] Unexpected sync response: sender# [1:7491644343801512824:2459], cookie# 1 2025-04-10T11:26:14.978845Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7491644343801512006:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7491644343801512827:2459] 2025-04-10T11:26:15.043008Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7491644343801512405:2159], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSiz ... 
R_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.434468Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644389689692658:2198], cacheItem# { Subscriber: { Subscriber: [3:7491644402574594663:2245] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:29.434514Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644389689692658:2198], cacheItem# { Subscriber: { Subscriber: [3:7491644402574594664:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:29.434614Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644406869561981:2253], recipient# [3:7491644402574594661:2580], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.434915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7491644402574594661:2580], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:29.526627Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644389689692658:2198], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.526780Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644389689692658:2198], cacheItem# { Subscriber: { Subscriber: [3:7491644389689692721:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:29.526877Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644406869561983:2254], recipient# [3:7491644406869561982:2584], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.574936Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7491644389689692658:2198], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.575076Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7491644389689692658:2198], cacheItem# { Subscriber: { Subscriber: [3:7491644389689692721:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:29.575177Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7491644406869561985:2255], recipient# [3:7491644406869561984:2585], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown 
DomainInfo }] } 2025-04-10T11:26:29.626716Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644344823063914:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:29.630272Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644344823063914:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644349118031232:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:29.653305Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644409247573490:2153], recipient# [2:7491644409247573489:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:30.077628Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644344823063914:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:30.077845Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644344823063914:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644362002933159:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:30.077967Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644413542540788:2154], recipient# [2:7491644413542540787:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:30.298657Z node 
2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7491644344823063914:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:26:30.298816Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7491644344823063914:2107], cacheItem# { Subscriber: { Subscriber: [2:7491644349118031232:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-10T11:26:30.298920Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7491644413542540790:2155], recipient# [2:7491644413542540789:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> KqpJoin::RightSemiJoin_FullScan [GOOD]
>> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD]
>> TTxDataShardUploadRows::TestUploadRowsLocks
>> KqpJoinOrder::CanonizedJoinOrderTPCH19
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900
>> KqpJoin::TwoJoinsWithQueryService [GOOD]
>> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD]
>> KqpScripting::StreamOperationTimeout
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_FullScan [GOOD]
Test command err: Trying to start YDB, gRPC: 12983, MsgBus: 10077 2025-04-10T11:26:25.758297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644390074097586:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:25.771912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001706/r3tmp/tmp0qlDul/pdisk_1.dat 2025-04-10T11:26:26.157067Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12983, node 1 2025-04-10T11:26:26.199967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:26.200079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:26.202728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:26.217569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:26.217599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:26.217607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:26.217724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10077 TClient is connected to server localhost:10077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:26.726349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:26:26.755459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:26.767770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:26.882813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:26:27.073490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:27.154338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:28.883300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644402959001218:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:28.883398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.197913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.235012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.266752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.295707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.326442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.366051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.462533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644407253969031:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.462676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.463528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644407253969037:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.467726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:29.480658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644407253969039:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:29.586742Z node 1 :TX_PROXY ERROR: Actor# [1:7491644407253969094:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:30.583911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.617506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.660749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.700363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.750555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644390074097586:2089];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:30.750659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:30.777633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
>> TKeyValueTest::TestConcatWorksNewApi [GOOD]
>> TKeyValueTest::TestConcatToLongKey
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::TwoJoinsWithQueryService [GOOD]
Test command err: Trying to start YDB, gRPC: 13282, MsgBus: 29931 2025-04-10T11:26:29.271732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644407319475424:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:29.271805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001704/r3tmp/tmpAlu4Cj/pdisk_1.dat 2025-04-10T11:26:29.701788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:29.704444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:29.704516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:29.706518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13282, node 1 2025-04-10T11:26:29.746524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:29.746549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:29.746556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:29.746679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29931 TClient is connected to server localhost:29931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:30.275508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:32.244958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644420204377964:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.245054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.454463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:26:32.672848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644420204378072:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.672916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.700287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:26:32.842272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644420204378155:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.842353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.855151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:32.930299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644420204378234:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.930402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.933398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644420204378239:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:32.941059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-10T11:26:32.956661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644420204378241:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T11:26:33.045868Z node 1 :TX_PROXY ERROR: Actor# [1:7491644424499345589:2495] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:34.271913Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644407319475424:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:34.271975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> DataShardReadIterator::ShouldReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReadMultipleKeysOneByOne
>> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk5
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD]
>> KqpFlipJoin::Inner_3
>> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD]
>> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted
>> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD]
>> DataShardReadIterator::ShouldReadFromFollower
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:26:07.786781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:26:07.786863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:26:07.786896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:26:07.786921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:26:07.786953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:26:07.786973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:26:07.787015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:26:07.787142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:26:07.787406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:07.862449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-10T11:26:07.862512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:07.879149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:07.879447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:26:07.879618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:26:07.887150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:26:07.887373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:26:07.887992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:26:07.888343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:26:07.890198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:26:07.891531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:26:07.891596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:26:07.891764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:26:07.891812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:26:07.891845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:26:07.892003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:26:07.898498Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:26:08.015816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:26:08.016036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.016225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:26:08.016488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:26:08.016541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.019965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:26:08.020121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-10T11:26:08.020298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.020347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:26:08.020384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:26:08.020432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:26:08.022554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.022611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:26:08.022647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:26:08.024407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.024451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.024523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:26:08.024585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:26:08.028402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:26:08.030586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:26:08.030757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:26:08.031743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:26:08.031873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:26:08.031920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:26:08.032188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:26:08.032243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:26:08.032417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:26:08.032505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:26:08.034875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:26:08.034921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:26:08.035085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:26:08.035127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:26:08.035408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:26:08.035457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:26:08.035545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:26:08.035591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:26:08.035631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:26:08.035667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:26:08.035702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:26:08.035737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:26:08.035777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:26:08.035817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:26:08.035876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:26:08.035910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:26:08.035940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:26:08.038111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:26:08.038234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:26:08.038270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
val# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:26:34.350050Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:26:34.350103Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:26:34.350171Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:26:34.350209Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:26:34.350237Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:26:34.350312Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:26:34.350387Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:26:34.350694Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:34.370643Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:34.372105Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:26:34.372299Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:26:34.372427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:26:34.372468Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:34.372640Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:26:34.373484Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-10T11:26:34.373603Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.373683Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374108Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374208Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-10T11:26:34.374421Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374549Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374637Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374763Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374842Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.374980Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.375270Z node 5 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.375394Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.375784Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.375873Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376043Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376135Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376235Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376504Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376591Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376727Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.376964Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.377158Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.377246Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.377302Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-10T11:26:34.384876Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:26:34.384946Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:26:34.385155Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:26:34.385204Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:26:34.385259Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:26:34.385364Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:370:2339] sender: [5:425:2058] recipient: [5:15:2062] 2025-04-10T11:26:34.435286Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:26:34.435351Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-10T11:26:34.459364Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-04-10T11:26:34.459524Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:26:34.459565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:26:34.459771Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:26:34.459815Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:419:2377], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T11:26:34.460389Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-04-10T11:26:36.466777Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:26:36.477389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3NTk2LCJpYXQiOjE3NDQyODQzOTYsInN1YiI6InVzZXIxIn0.IYw7fwWHm8vMwI-iTJfZllsgntOZuqazDmlqllIGZIEfxMm4GhVAUl2nk8tGx4OV-XAQxKCpyM6LG7gJuzEeawihIC4N_CLIEAcD9spgQYVkTZ0pP1KDO-W-klxLjByW1O1Wf81mXPatEXvOP-q06Zmjc8j9BpzCwXc-B9QhbQ17xkMvjUqCxT4gsftYxQThFXhyTPVEXtZ22uAp5IKBx_tgowAKX1yNNlgoBeqU_xe0nu4w7fzslhBFLZ4XYo3d-DGqONNSMCFR8CP0_VIukEu2mm9JbNiN0BNHa16gRXwX6b7gICft8VzeZYXZKo39PTMOLRfve_4CDHf2rCxzfA" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3NTk2LCJpYXQiOjE3NDQyODQzOTYsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-10T11:26:36.477959Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:26:36.478185Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 246us result status StatusSuccess 2025-04-10T11:26:36.478630Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsi3yDOpoxXvy7OwHnceb\nUlwpvzb3rF9USk3FLTnPvGY6CCePl1I5IF9yT8WGClb5rEDD/NdZ+nJIAD5g88RD\nzuKd62YL2oJbVBhACdD+Zr9grybMzpjD1noZ7Gu4yY9sZah4ooj7xofxN7fU+iqb\nHQGyxzTgy0c6K2BM+g2A1ews96TWHObCQlqjNpD9JfuxY8+OTdh6d5eyWbsbYtaD\nqHB81QfMBOdJQB7/boTPl1OHTI9xF4CdSDN2yKkvqMpDDz2xF3ogMXi2p0m/vMBn\nXJgI8HtszGE7orWkDMPXAJ3Erkva2HXpZ1/HmC56830lMjJdLCdp6DlP6vIcqqf2\n0wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744370792027 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzfqfHRup5HpKSwwb5cTL\n1bt/bCJ/ghsyF6ylRxaPkUieMb9vXimsLLtncyrD/hg/t7pZCVIET9I8HCxv1SZB\nJtflmT3oqbj35ZU/oSqPV0hcbgwdZXGHc08ZlygrG+r36BexvOHfyQUaqJ1NmC0e\nKnImSw5NLF3OuzKKwcq1SatDH4WW79uSUNESYA7/KHI7Uoh+U8CJG0sjdF2ON+LP\nDCGNzEkoHOoPin5D3XkWZ+ITrMKb4aOKMERMrhEtEKVph2stLIW2nQJy9twB1jCu\nZsI/Pa73Td4UaoYwlmUINXyXoLLj7/b+QcEXufLGwlsiyRRCP1RFQq11HojhqoFu\n7wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744370792280 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmWXhmW+d75I0m+BNSBVj\nFiWV1MJx/MjMlJxL/2bejzC0Ei3En1mMROkQQcDydMdvfGCykq376+dkvovQYzex\nElHzitq+Y6RpqtnDDxIXh/ugkN5XEkfK/lBW3OvzjBC01D2I7rCEl43W/RBtRNUA\nQFv2Z/74iKlVVIH8a8Vu1qIBemAUCbIfpfG+xUomFQ3EXWepeZNRYi35Mtrqv69W\nbt2ahddvXLwLLDX9TK8Xd6NijxXPNknHTUnN8laVFUygG4xEu7xYIsN5EeNYnJdp\n3MTqCwHak/2bkI1fNNOwjo8s/nedfpCZe1RK7bsm9VTVj15R2HsTjL9fCxyWGqgV\nqwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744370794455 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> THiveTest::TestDrain >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 28114, MsgBus: 24019 2025-04-10T11:26:25.308213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644390817288959:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:25.308539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001707/r3tmp/tmpCZVfL4/pdisk_1.dat 2025-04-10T11:26:25.774553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28114, node 1 2025-04-10T11:26:25.782555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:25.782671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:25.797992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:25.802201Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:25.803151Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:25.855018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:25.855059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:25.855068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:25.855230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24019 TClient is connected to server localhost:24019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:26.463000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:26.487152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:26.658386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:26.836982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:26.920665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:28.539213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644403702192626:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:28.539321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:28.888296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:28.927924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:28.969503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.001320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.029287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.113638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:29.182566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644407997160437:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.182694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.183037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644407997160442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:29.187005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:29.199393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644407997160444:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:29.257718Z node 1 :TX_PROXY ERROR: Actor# [1:7491644407997160496:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:30.227525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.296978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:30.308604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644390817288959:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:30.308678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6682, MsgBus: 12134 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001707/r3tmp/tmpaBwJXj/pdisk_1.dat 2025-04-10T11:26:32.390375Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:26:32.413540Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:32.431918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:32.431998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:32.439248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6682, node 2 2025-04-10T11:26:32.495126Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:32.495150Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:32.495161Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:32.495270Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12134 TClient is connected to server localhost:12134 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:33.070585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:33.102376Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:26:33.117128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:33.174572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:33.366778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:33.447741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:35.581344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644435138430110:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:35.581425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:35.624492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.664953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.713358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.761942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.800135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.886068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:35.938884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644435138430625:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:35.938955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:35.939108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644435138430630:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:35.942727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:35.957079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644435138430632:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:26:36.027348Z node 2 :TX_PROXY ERROR: Actor# [2:7491644439433397981:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:36.887271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:36.940304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] Test command err: 2025-04-10T11:26:27.427128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:26:27.427381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:26:27.427591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c7e/r3tmp/tmpAuwtIC/pdisk_1.dat 2025-04-10T11:26:27.920672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:26:27.985101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:28.032714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:28.033214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:28.046161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:28.147474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:26:28.217073Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:681:2579] 2025-04-10T11:26:28.217406Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:28.259398Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:28.259602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:26:28.261747Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:26:28.261824Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:26:28.261887Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:26:28.264780Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:26:28.265364Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-04-10T11:26:28.265598Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:28.274137Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:26:28.274250Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2579] in generation 1 2025-04-10T11:26:28.275099Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:28.275216Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:26:28.276508Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:26:28.276564Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:26:28.276615Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:26:28.276896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:26:28.277223Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:26:28.277288Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:725:2581] in generation 1 2025-04-10T11:26:28.279451Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:690:2583] 2025-04-10T11:26:28.279662Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:28.289708Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2585] 2025-04-10T11:26:28.289870Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:26:28.298804Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:28.298935Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:26:28.300221Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-10T11:26:28.300283Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-10T11:26:28.300347Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-10T11:26:28.300673Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:26:28.300858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:26:28.300946Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:751:2583] in generation 1 2025-04-10T11:26:28.301372Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:26:28.301457Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:26:28.302663Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:26:28.302727Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:26:28.302767Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:26:28.303011Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:26:28.303107Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:26:28.303169Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:752:2585] in generation 1 2025-04-10T11:26:28.314249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:26:28.345048Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:26:28.350405Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:26:28.350713Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:757:2620] 2025-04-10T11:26:28.350756Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:26:28.350807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:26:28.350858Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:28.352345Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:26:28.352398Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:26:28.352478Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:26:28.352564Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:758:2621] 
2025-04-10T11:26:28.352603Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:26:28.352631Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:26:28.352655Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:26:28.353101Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:26:28.353135Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-10T11:26:28.353191Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:26:28.353273Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:759:2622] 2025-04-10T11:26:28.353297Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-10T11:26:28.353318Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-10T11:26:28.353339Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:26:28.353580Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:26:28.353686Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:26:28.353787Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:26:28.353814Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:26:28.353859Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:26:28.353923Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:760:2623] 2025-04-10T11:26:28.353944Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:26:28.353976Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:26:28.354010Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:26:28.355456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:26:28.355512Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:26:28.355642Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:26:28.355695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:26:28.355744Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:26:28.355827Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:26:28.355969Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:714:2597], sessionId# [0:0:0] 2025-04-10T11:26:28.356011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:26:28.356037Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:26:28.356059Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:26:28.356100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:26:28.356151Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-10T11:26:28.356208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-10T11:26:28.356400Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:26:28.356783Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:26:28.356908Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:26:28.357370Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:715:2598], sessionId# [0:0:0] 2025-04-10T11:26:28.357416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:26:28.357441Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active ... 25-04-10T11:26:37.508729Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:26:37.519737Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:26:37.519877Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:26:37.680778Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:26:37.681569Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:26:37.681640Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:37.682963Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:26:37.683031Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:26:37.683086Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:26:37.683373Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:26:37.683526Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:26:37.684389Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:26:37.684465Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:26:37.684983Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:26:37.685474Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:26:37.687256Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:26:37.687319Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:37.687930Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:26:37.688015Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:26:37.688935Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:26:37.688987Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:26:37.689046Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:26:37.689125Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:26:37.689186Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:26:37.689303Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:37.692777Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:26:37.695167Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:26:37.695252Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:26:37.695466Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:26:37.751127Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:37.751304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:37.751417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:37.767107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:37.779442Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:26:37.942402Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:26:37.945870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:26:38.023067Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:39.269211Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfq7c5j99jk4bcq92dk60rx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzZlYzk1NDUtODhjNWQwZTMtN2E4OTI0YjgtYjJiNGE1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:26:39.336558Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-04-10T11:26:39.337263Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:26:39.337511Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-10T11:26:39.348979Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:39.720300Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfq7dtg0rcwsef51gezhb7g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkNTQzMmMtNWU1MjlhMDUtYzFjMTJkNDMtNDdhZTk0Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:26:39.731316Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-04-10T11:26:39.750313Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-10T11:26:39.761676Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-10T11:26:39.761760Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:39.761830Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-10T11:26:39.762572Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-10T11:26:39.762638Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:39.822757Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfq7e4k19z1bbwe7g0xdmx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkNTQzMmMtNWU1MjlhMDUtYzFjMTJkNDMtNDdhZTk0Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:26:39.825326Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-10T11:26:39.825493Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-10T11:26:39.835699Z node 3 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-04-10T11:26:39.835976Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:26:39.836172Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-10T11:26:39.836253Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:39.836508Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:913:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:913:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:26:39.837042Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:906:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-04-10T11:26:39.837509Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWNkNTQzMmMtNWU1MjlhMDUtYzFjMTJkNDMtNDdhZTk0Y2U=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jrfq7e4k19z1bbwe7g0xdmx1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:907:2693] from: [3:906:2693] 2025-04-10T11:26:39.837706Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:907:2693] TxId: 281474976715662. Ctx: { TraceId: 01jrfq7e4k19z1bbwe7g0xdmx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWNkNTQzMmMtNWU1MjlhMDUtYzFjMTJkNDMtNDdhZTk0Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:26:39.838028Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-10T11:26:39.838079Z node 3 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-10T11:26:39.838299Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:26:39.838440Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWNkNTQzMmMtNWU1MjlhMDUtYzFjMTJkNDMtNDdhZTk0Y2U=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jrfq7e4k19z1bbwe7g0xdmx1, Create QueryResponse for error on request, msg: >> KqpJoinOrder::GeneralPrioritiesBug4 |93.5%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamOperationTimeout [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> TTopicApiDescribes::DescribeTopic [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TStorageBalanceTest::TestScenario2 >> KqpIndexLookupJoin::RightSemi >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 23151, MsgBus: 24306 2025-04-10T11:25:25.733739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644133567200239:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:25.737316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e89/r3tmp/tmpZueFCD/pdisk_1.dat 2025-04-10T11:25:26.195037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:26.195137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:26.198525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:26.201903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23151, node 1 2025-04-10T11:25:26.273906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:26.273923Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:26.273932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:26.274054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24306 TClient is connected to server localhost:24306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:26.814626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:26.830909Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:25:26.840158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:25:26.968204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:27.130060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:27.198232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:28.996560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644146452103904:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:28.996689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.323044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.356714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.391839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.422558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.492441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.560989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:29.602490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150747071717:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.602602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.602814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644150747071722:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:29.606978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:25:29.616359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644150747071724:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:25:29.710780Z node 1 :TX_PROXY ERROR: Actor# [1:7491644150747071777:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:30.734321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644133567200239:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:30.734409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:30.857605Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284330887, txId: 281474976710672] shutting down 2025-04-10T11:25:31.160602Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331188, txId: 281474976710675] shutting down 2025-04-10T11:25:31.429321Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331461, txId: 281474976710678] shutting down 2025-04-10T11:25:31.721520Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284331755, txId: 281474976710681] shutting down 2025-04-10T11:25:31.968405Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332007, txId: 281474976710684] shutting down 2025-04-10T11:25:32.190785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332224, txId: 281474976710687] shutting down 2025-04-10T11:25:32.490621Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332518, txId: 281474976710690] shutting down 2025-04-10T11:25:32.753046Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284332777, txId: 281474976710693] shutting down 2025-04-10T11:25:33.006670Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333029, txId: 281474976710696] shutting down 2025-04-10T11:25:33.280513Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333309, txId: 281474976710699] shutting down 2025-04-10T11:25:33.573471Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333603, txId: 281474976710702] shutting down 2025-04-10T11:25:33.893868Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284333932, txId: 281474976710705] shutting down 2025-04-10T11:25:34.167472Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334205, txId: 281474976710708] shutting down 2025-04-10T11:25:34.417463Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334450, txId: 281474976710711] shutting down 2025-04-10T11:25:34.680831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334709, txId: 281474976710714] shutting down 2025-04-10T11:25:34.963435Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284334989, txId: 281474976710717] shutting down 2025-04-10T11:25:35.238615Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335262, txId: 281474976710720] shutting down 2025-04-10T11:25:35.493486Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335521, txId: 281474976710723] shutting down 2025-04-10T11:25:35.799229Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284335829, txId: 281474976710726] shutting down 2025-04-10T11:25:36.086696Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336116, txId: 281474976710729] shutting down 2025-04-10T11:25:36.389698Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284336424, txId: 281474976710732] shutting down 2025-04-10T11:25:36.649849Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 174 ... MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284384857, txId: 281474976711206] shutting down 2025-04-10T11:26:25.169723Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284385193, txId: 281474976711209] shutting down 2025-04-10T11:26:25.606955Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284385627, txId: 281474976711212] shutting down 2025-04-10T11:26:25.994857Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284386012, txId: 281474976711215] shutting down 2025-04-10T11:26:26.376906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284386390, txId: 281474976711218] shutting down 2025-04-10T11:26:26.792457Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284386810, txId: 281474976711221] shutting down 2025-04-10T11:26:27.213950Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284387230, txId: 281474976711224] shutting down 2025-04-10T11:26:27.602647Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284387622, txId: 281474976711227] shutting down 2025-04-10T11:26:27.974725Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284387993, txId: 281474976711230] shutting down 2025-04-10T11:26:28.390853Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284388420, txId: 281474976711233] shutting down 2025-04-10T11:26:28.853921Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284388875, txId: 281474976711236] shutting down 2025-04-10T11:26:29.255883Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284389274, txId: 281474976711239] shutting down 2025-04-10T11:26:29.769077Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284389785, txId: 281474976711242] shutting down 2025-04-10T11:26:30.254816Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284390275, txId: 281474976711245] 
shutting down 2025-04-10T11:26:30.734015Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284390688, txId: 281474976711248] shutting down 2025-04-10T11:26:31.227773Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284391234, txId: 281474976711251] shutting down 2025-04-10T11:26:31.635509Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284391647, txId: 281474976711254] shutting down 2025-04-10T11:26:32.130948Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284392137, txId: 281474976711257] shutting down 2025-04-10T11:26:32.617036Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284392627, txId: 281474976711260] shutting down 2025-04-10T11:26:33.078591Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284393096, txId: 281474976711263] shutting down 2025-04-10T11:26:33.451510Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284393460, txId: 281474976711266] shutting down 2025-04-10T11:26:33.890481Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284393915, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 15303, MsgBus: 24599 2025-04-10T11:26:34.970689Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644429547438616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:34.970788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e89/r3tmp/tmpatn0Oj/pdisk_1.dat 2025-04-10T11:26:35.146325Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:35.180038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:35.180156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:35.185937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15303, node 2 2025-04-10T11:26:35.250793Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:35.250820Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:35.250830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:35.250980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24599 TClient is connected to server localhost:24599 WaitRootIsUp 'Root'... 
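
The repeated "KqpSnapshotManager: discarding snapshot ... shutting down" warnings above are the server side of the StreamOperationTimeout scenario: each stream (scan) query acquires a snapshot, hits its deadline, and is torn down. As a minimal sketch of the client side, assuming a reachable endpoint and an existing table (both hypothetical here), with the ydb Python SDK calls used to the best of my knowledge:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
    driver.wait(timeout=5)  # fail fast if the server is unreachable

    # BaseRequestSettings bounds a single call: with_timeout is the client-side
    # deadline, with_cancel_after asks the server to cancel the operation.
    settings = ydb.BaseRequestSettings().with_timeout(3).with_cancel_after(1)

    try:
        it = driver.table_client.scan_query("SELECT * FROM `/Root/Test`", settings=settings)
        for part in it:
            print(part.result_set.rows)
    except ydb.Error as e:
        # Mirrors the log above: when the deadline fires, the server discards
        # the snapshot and the stream shuts down; the client sees an error.
        print("stream terminated:", e)
    finally:
        driver.stop()
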
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:35.836724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:35.844399Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:26:35.852608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:35.953430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:36.224399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:36.319210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:39.162708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644451022276875:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:39.162888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:39.198603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.235006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.273495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.359208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.429585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.509265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:39.606182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644451022277402:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:39.606273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:39.606817Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644451022277407:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:39.611792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:39.625939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644451022277409:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:26:39.725298Z node 2 :TX_PROXY ERROR: Actor# [2:7491644451022277463:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:39.978253Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644429547438616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:39.978334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestPQCacheSizeManagement >> KqpFlipJoin::Inner_3 [GOOD] >> KqpFlipJoin::LeftSemi_1 |93.5%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-04-10T11:26:29.756673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644409844144203:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:29.756842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:29.820494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644407997135352:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:29.820671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:26:30.035151Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:26:30.045205Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00129a/r3tmp/tmplyDshC/pdisk_1.dat 2025-04-10T11:26:30.312216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:30.327841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:30.327978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:30.330456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:30.330554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:30.337177Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:26:30.337342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:30.337870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62501, node 1 2025-04-10T11:26:30.434740Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/2gt4/00129a/r3tmp/yandexpikEiV.tmp 2025-04-10T11:26:30.434770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00129a/r3tmp/yandexpikEiV.tmp 2025-04-10T11:26:30.434970Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00129a/r3tmp/yandexpikEiV.tmp 2025-04-10T11:26:30.435086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:26:30.495388Z INFO: TTestServer started on Port 8839 GrpcPort 62501 TClient is connected to server localhost:8839 PQClient connected to localhost:62501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:30.865705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:30.969368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:26:31.246311Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T11:26:33.327806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644425177004778:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:33.327814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644425177004761:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:33.327906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:33.333678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:26:33.371242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644425177004782:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:26:33.630026Z node 2 :TX_PROXY ERROR: Actor# [2:7491644425177004809:2179] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:33.656172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:33.659682Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491644427024014404:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:26:33.659899Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDhjYjM2ZTEtNGNiNzVmZDktZGE3NzNkNDktYzU3Y2I1OGQ=, ActorId: [1:7491644427024014348:2337], ActorState: ExecuteState, TraceId: 01jrfq77wh1vwagvjavja6taed, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:26:33.662083Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:26:33.662996Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491644425177004823:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:26:33.664435Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjNiZTk5MTUtM2U4NTA4MGMtNjYwNjYwYTUtMzEzZGM4OTY=, ActorId: [2:7491644425177004751:2312], ActorState: ExecuteState, TraceId: 01jrfq77tz5zkhjara7h0vnxew, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:26:33.664760Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:26:33.740591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:33.948806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:26:34.272019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfq78m69jqnvhtz539we0n4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIyYTk1ZjEtODI1MDdiMDEtOTI2MzJkOGItYTdiMTEyMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491644431318982150:3067] 2025-04-10T11:26:34.755134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644409844144203:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:34.755192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:34.818017Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644407997135352:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:34.818089Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-10T11:26:40.246372Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-10T11:26:40.374313Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable met ... } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1744284401 nanos: 172000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } topic_stats { min_last_write_time { seconds: 1744284401 nanos: 186000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-04-10T11:26:41.537384Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-10T11:26:41.537453Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-04-10T11:26:41.537532Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-04-10T11:26:41.538230Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644461383755242:2616]: Request location 2025-04-10T11:26:41.540050Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491644461383755242:2616]: Got location 2025-04-10T11:26:41.538911Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644461383755244:2617] connected; active server actors: 1 2025-04-10T11:26:41.539794Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-10T11:26:41.539806Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-10T11:26:41.539813Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-10T11:26:41.539821Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-10T11:26:41.539827Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-10T11:26:41.539835Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-10T11:26:41.539841Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-10T11:26:41.539848Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 
2, Generation 2 2025-04-10T11:26:41.539854Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-10T11:26:41.539860Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-10T11:26:41.539866Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-10T11:26:41.539871Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-10T11:26:41.539881Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-10T11:26:41.539887Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-10T11:26:41.539894Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-10T11:26:41.540797Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644461383755244:2617] disconnected; active server actors: 1 2025-04-10T11:26:41.540812Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491644461383755244:2617] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744284400663 tx_id: 281474976710677 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: 
true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-04-10T11:26:41.544842Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-10T11:26:41.544899Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-04-10T11:26:41.544979Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744284400663 tx_id: 281474976710677 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-04-10T11:26:41.551511Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-10T11:26:41.551578Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-10T11:26:41.551648Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |93.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH10 >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> KqpJoin::JoinLeftPureInner >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11022, MsgBus: 25552 2025-04-10T11:26:03.820576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644295323670853:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:03.820666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001717/r3tmp/tmpnbP2Zm/pdisk_1.dat 2025-04-10T11:26:04.154333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11022, node 1 2025-04-10T11:26:04.224230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:04.224344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:04.226222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:04.237885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:04.237919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:04.237928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:04.238065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25552 TClient is connected to server localhost:25552 WaitRootIsUp 'Root'... 
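
The DescribeTopic dump above (partitions 0 through 14 with per-node locations, consumer "shared/user", retention and write-speed attributes) is also reachable programmatically. A sketch under stated assumptions: hypothetical endpoint and topic path, with the ydb Python SDK topic client API used to the best of my knowledge:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
    try:
        driver.wait(timeout=5)
        desc = driver.topic_client.describe_topic("/Root/PQ/rt3.dc1--topic-x")
        # The log shows 15 partitions spread across two nodes; the description
        # object exposes the same data as typed fields.
        for p in desc.partitions:
            print(p.partition_id, p.active)
        for c in desc.consumers:
            print(c.name)
    finally:
        driver.stop()
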
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:04.742802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:06.654241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644308208573412:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:06.654674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644308208573404:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:06.654771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:06.664560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:06.681912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644308208573418:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:06.746327Z node 1 :TX_PROXY ERROR: Actor# [1:7491644308208573469:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:07.061412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.193650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.260252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.292738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.323391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.485723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.514029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.540731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.569942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.602350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.627796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.693598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:07.722503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.262728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:26:08.291268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.319839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.348424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.413218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.436485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.462187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.488146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.518271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.553067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.614425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.645897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.675403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.709817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.738649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.769031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.801340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:26:08.820653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644295323670853:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:08.820714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:08.828680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.688477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.688570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.694099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.694265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.700211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.700215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.705903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.706107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.711625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.711941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.717614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.717841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.723591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:26:36.723867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.729805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.729827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.735411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.735650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.741197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.741197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.746777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.746809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.752529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.752532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.757929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.759786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.763465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.765746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.771351Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.771484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.777077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.777685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.782707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.782746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.788210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.788249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.793907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.793908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.799217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.799502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.805130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.805138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.811244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.811254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.817242Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:36.897073Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq6gx05g71km1cwj53zhp4", SessionId: ydb://session/3?node_id=1&id=ZTg1ZjAwMDktY2IwZDIyYmUtMTBiODY5ODUtMjJlYjkxNWM=, Slow query, duration: 27.072425s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:37.228553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:37.229134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491644368338132205:4463];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-04-10T11:26:37.229455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:37.230106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> THiveTest::TestLocalDisconnect
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000
>> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD]
>> THiveTest::TestLocalDisconnect [GOOD]
>> THiveTest::TestLocalReplacement
>> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD]
>> THiveTest::TestHiveBalancerWithPreferredDC3
>> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore
>> THiveTest::TestDrain [GOOD]
>> THiveTest::TestDrainWithMaxTabletsScheduled
>> YdbOlapStore::LogExistingRequest [GOOD]
>> YdbOlapStore::LogCountByResource
>> THiveTest::TestLocalReplacement [GOOD]
>> THiveTest::TestHiveRestart
>> DataShardReadIterator::ShouldReadNotExistingRange [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk1_100
>> DataShardReadIterator::ShouldReadRangePrefix1
>> KqpIndexLookupJoin::RightSemi [GOOD]
>> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD]
>> THiveTest::TestHiveBalancerWithSystemTablets
>> KqpFlipJoin::LeftSemi_1 [GOOD]
>> TPQTest::TestLowWatermark [GOOD]
>> TPQTest::TestGetTimestamps
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD]
Test command err: Trying to start YDB, gRPC: 5350, MsgBus: 64357 2025-04-10T11:26:06.485461Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644310923829964:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:06.485657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001715/r3tmp/tmpS4uo39/pdisk_1.dat 2025-04-10T11:26:06.841399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5350, node 1 2025-04-10T11:26:06.901059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:06.901339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:06.908067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:06.929742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:06.929767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:06.929780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:06.929875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64357 TClient is connected to server localhost:64357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:07.465746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:09.366419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644323808732516:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.366435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644323808732525:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.366536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.370365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:09.385831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644323808732530:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:09.475207Z node 1 :TX_PROXY ERROR: Actor# [1:7491644323808732581:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:09.797836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:09.898850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:09.926831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:09.951931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:09.974994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.103858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.135167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.160147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.185858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.213435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.243376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.267370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.291996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.913630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:26:10.947814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.985060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.017740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.046018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.081323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.112867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.142315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.208246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.249280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.278239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.305755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.381892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.423134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.454909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.485715Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644310923829964:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:11.485806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:11.486234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.523663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:26:11.555570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.526699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.529562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.531786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.534822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.537427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.540912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.543251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.546480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.549180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.552410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.554902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.558226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.560790Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.563962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.566466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.569531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.572120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.575167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.577682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.580552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.583449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.585968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.589162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.592665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.599381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.601930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.605110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.608147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.610870Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.614065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.616485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.620199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.622519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.626128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.628166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.634429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.634828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.640399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.641587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.646502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.647575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.652896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.653809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.660179Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.665165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:40.752726Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq6ksb62ws2jan71wfbkb6", SessionId: ydb://session/3?node_id=1&id=NTk5OGJmZi00NzI5ZGFkNS1hM2Q4OWQyYy03MTI0MGM1NA==, Slow query, duration: 27.972487s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:41.010205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:41.011198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644353873512439:3327];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:26:41.011598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:41.019718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> TPQTest::TestPQCacheSizeManagement [GOOD]
>> THiveTest::TestHiveRestart [GOOD]
>> THiveTest::TestLimitedNodeList
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::RightSemi [GOOD]
Test command err: Trying to start YDB, gRPC: 61369, MsgBus: 25980 2025-04-10T11:26:43.731656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644469834572554:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:43.731716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ff/r3tmp/tmp0FpBcG/pdisk_1.dat 2025-04-10T11:26:44.226416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:44.233108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:44.233246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:44.237033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on
GrpcPort 61369, node 1 2025-04-10T11:26:44.346874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:44.346913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:44.346952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:44.347104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25980 TClient is connected to server localhost:25980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:44.974687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:44.991101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:45.005656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:45.153574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:45.336836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.434402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:47.139077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644487014443503:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.139189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.452407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.486244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.520605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.555566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.597569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.651380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.749127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644487014444022:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.749278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.749560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644487014444027:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.753768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:47.777043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644487014444029:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:47.853571Z node 1 :TX_PROXY ERROR: Actor# [1:7491644487014444087:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:48.712125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.734426Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644469834572554:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:48.734490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:48.749369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.787969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.824251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.889475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.922317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQCacheSizeManagement [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.279753Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.279828Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.308323Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.334840Z 
node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-04-10T11:24:40.335718Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.338234Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.341186Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.342947Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.353306Z node 1 :PERSQUEUE INFO: new Cookie default|7d3e58eb-df16d6ab-c70e02c-58fb4212_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:24:40.836900Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.836985Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
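Note: the read request this test sends repeatedly through the pipe (quoted inline above and again after each reboot) is the following protobuf text, reproduced unchanged apart from indentation; only the Bytes field differs between requests in this log (1 here, 104857600 in the later reads):

PartitionRequest {
  Partition: 0
  CmdRead {
    ClientId: "user"
    SessionId: ""
    Offset: 0
    Count: 1
    Bytes: 1
  }
  Cookie: 123
}
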
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:181:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:184:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:185:2057] recipient: [2:183:2195] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:187:2057] recipient: [2:183:2195] 2025-04-10T11:24:40.881386Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.881459Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:186:2196] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:260:2057] recipient: [2:14:2061] 2025-04-10T11:24:42.542449Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:42.543675Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-04-10T11:24:42.545003Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2258] 2025-04-10T11:24:42.548031Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:266:2258] 2025-04-10T11:24:42.551694Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:267:2259] 2025-04-10T11:24:42.553646Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:267:2259] 2025-04-10T11:24:42.582080Z node 2 :PERSQUEUE INFO: new Cookie default|cb8a975c-1e160550-8ae847d7-f341097f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:43.136202Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.136272Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] 
recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-04-10T11:24:43.181557Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:43.181632Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-04-10T11:24:44.777261Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:44.778054Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "aaa" Generation: 3 Important: true } 2025-04-10T11:24:44.778897Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-04-10T11:24:44.781211Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-04-10T11:24:44.783784Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-04-10T11:24:44.785644Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-04-10T11:24:44.798131Z node 3 :PERSQUEUE INFO: new Cookie default|a2fc1d86-d5e1eea-6d441a46-b4d40e7a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for 
TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-10T11:24:45.199098Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.199266Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [ ... 49681Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:658:2551] 2025-04-10T11:26:50.252374Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:659:2552] 2025-04-10T11:26:50.264025Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.264098Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 7 [79:659:2552] 2025-04-10T11:26:50.288995Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.289087Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 7 [79:658:2551] 2025-04-10T11:26:50.336031Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:691:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:694:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:697:2057] recipient: [79:696:2574] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:698:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:699:2575] sender: [79:700:2057] recipient: [79:696:2574] 2025-04-10T11:26:50.393012Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.393080Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:50.393886Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:758:2626] 2025-04-10T11:26:50.397363Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:759:2627] 2025-04-10T11:26:50.407536Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:26:50.407602Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [79:759:2627] 2025-04-10T11:26:50.427126Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.427221Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [79:758:2626] 2025-04-10T11:26:50.463395Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:699:2575] sender: [79:789:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:699:2575] sender: [79:792:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:699:2575] sender: [79:795:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:699:2575] sender: [79:796:2057] recipient: [79:794:2647] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:798:2057] recipient: [79:794:2647] 2025-04-10T11:26:50.518619Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.518684Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:50.519304Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:858:2701] 2025-04-10T11:26:50.521945Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:859:2702] 2025-04-10T11:26:50.532373Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.532444Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [79:859:2702] 2025-04-10T11:26:50.559986Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.560088Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [79:858:2701] 2025-04-10T11:26:50.594057Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:889:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:892:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:895:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:896:2057] recipient: [79:894:2722] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:898:2057] recipient: [79:894:2722] 2025-04-10T11:26:50.662562Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.662625Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:50.663248Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:960:2778] 2025-04-10T11:26:50.665979Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:961:2779] 2025-04-10T11:26:50.675562Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.675638Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [79:961:2779] 2025-04-10T11:26:50.696852Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.696946Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [79:960:2778] 2025-04-10T11:26:50.752093Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:991:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:994:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:997:2057] recipient: [79:996:2799] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:998:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:999:2800] sender: [79:1000:2057] recipient: [79:996:2799] 2025-04-10T11:26:50.834933Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.835004Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:50.835683Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1064:2857] 2025-04-10T11:26:50.838654Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1065:2858] 2025-04-10T11:26:50.850561Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.850639Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [79:1065:2858] 2025-04-10T11:26:50.871904Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:50.872022Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [79:1064:2857] 2025-04-10T11:26:50.907023Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:999:2800] sender: [79:1097:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:999:2800] sender: [79:1100:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:999:2800] sender: [79:1103:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:999:2800] sender: [79:1104:2057] recipient: [79:1102:2880] Leader for TabletID 72057594037927937 is [79:1105:2881] sender: [79:1106:2057] recipient: [79:1102:2880] 2025-04-10T11:26:50.984287Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.984364Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:50.985182Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1172:2940] 2025-04-10T11:26:50.988327Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1173:2941] 2025-04-10T11:26:51.000491Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:51.000574Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [79:1173:2941] 2025-04-10T11:26:51.035888Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:51.036001Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [79:1172:2940] 2025-04-10T11:26:51.078430Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_1 [GOOD] Test command err: Trying to start YDB, gRPC: 10117, MsgBus: 22256 2025-04-10T11:26:36.946755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644439249693013:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:36.946802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001702/r3tmp/tmpqRonDL/pdisk_1.dat 2025-04-10T11:26:37.412625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:37.416839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:37.416952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:37.419539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10117, node 1 2025-04-10T11:26:37.500565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:37.500587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:37.500595Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:37.500740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22256 TClient is connected to server localhost:22256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:38.037115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:38.056596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:38.253978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:26:38.430379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:38.517565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:40.259122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644456429563955:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:40.259253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:40.575772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.608947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.635464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.666374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.700040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.767905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:40.857905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644456429564475:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:40.857990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:40.858194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644456429564480:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:40.861704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:40.874295Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:26:40.898378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644456429564482:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:40.981481Z node 1 :TX_PROXY ERROR: Actor# [1:7491644456429564540:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:41.947512Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644439249693013:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:41.947592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:41.963161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:42.000603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:42.028471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:42.060771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26707, MsgBus: 26205 2025-04-10T11:26:44.252384Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644471940941460:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:44.252435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001702/r3tmp/tmpiQ62lb/pdisk_1.dat 2025-04-10T11:26:44.439021Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:44.440927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:44.441005Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:44.447426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26707, node 2 2025-04-10T11:26:44.558728Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:44.558753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:44.558760Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:44.558878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26205 TClient is connected to server localhost:26205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:45.084392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:45.110231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:45.193524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:45.372532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:45.458017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:47.786298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644484825845144:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.786423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:47.825709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.858349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.884174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.917612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.999638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.049209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.146082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644489120812957:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:48.146178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:48.146403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644489120812962:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:48.149987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:48.168946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644489120812964:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:48.234948Z node 2 :TX_PROXY ERROR: Actor# [2:7491644489120813020:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:49.184115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:49.223905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:49.252474Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644471940941460:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:49.252862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:49.261827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:49.308146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> KqpJoin::JoinLeftPureInner [GOOD] >> TPQTest::TestGetTimestamps [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.306948Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.307014Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.334448Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.356154Z node 1 
:PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-10T11:24:40.356941Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.358708Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.367594Z node 1 :PERSQUEUE INFO: new Cookie default|8c961700-178f3df-e9353b05-2ca8de69_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:24:48.510605Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:48.510671Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T11:24:48.524394Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:48.524858Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" 
ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-04-10T11:24:48.525239Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T11:24:48.526777Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T11:24:48.535132Z node 2 :PERSQUEUE INFO: new Cookie default|c7d67e9b-7f422bb0-83de195e-def9c978_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:56.580442Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:56.580496Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-10T11:24:56.593156Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:56.593582Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-04-10T11:24:56.593958Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-10T11:24:56.595480Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-04-10T11:24:56.602624Z node 3 :PERSQUEUE INFO: new Cookie default|59e99c50-5d14ca74-7379d82c-999d8a8d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" 
SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-10T11:25:04.532192Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:04.532295Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-10T11:25:04.557422Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:04.558231Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "important_user" Generation: 4 Important: true } 2025-04-10T11:25:04.559143Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-04-10T11:25:04.562234Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-04-10T11:25:04.579428Z node 4 :PERSQUEUE INFO: new Cookie default|562d6dce-ed37ebe6-aec7965f-6ac74d56_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3" ... 
Config 2025-04-10T11:26:47.729664Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:47.730417Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [19:292:2283] 2025-04-10T11:26:47.762550Z node 19 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:47.762700Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [19:292:2283] 2025-04-10T11:26:47.803218Z node 19 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [19:243:2242] sender: [19:315:2057] recipient: [19:14:2061] 2025-04-10T11:26:47.815770Z node 19 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:47.824319Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1001 actor [19:312:2296] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1001 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1000 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1000 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:108:2057] recipient: [20:101:2135] 2025-04-10T11:26:48.569144Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:48.569295Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927938 is [20:153:2174] sender: [20:154:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:179:2057] recipient: [20:14:2061] 2025-04-10T11:26:48.601521Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:48.602449Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1002 actor [20:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 
Generation: 1002 Important: false } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-04-10T11:26:48.603269Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:185:2198] 2025-04-10T11:26:48.606849Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [20:185:2198] 2025-04-10T11:26:48.622970Z node 20 :PERSQUEUE INFO: new Cookie default|5aa2e69c-7249272-cbab69ef-c624bec0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:26:49.659527Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-10T11:26:49.719182Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:240:2057] recipient: [20:99:2134] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:243:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:244:2057] recipient: [20:242:2243] Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:246:2057] recipient: [20:242:2243] 2025-04-10T11:26:49.767314Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:49.767410Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:49.768128Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:294:2285] 2025-04-10T11:26:49.796081Z node 20 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:49.796237Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [20:294:2285] 2025-04-10T11:26:49.832217Z node 20 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:317:2057] recipient: [20:14:2061] 2025-04-10T11:26:49.842823Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:49.847533Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1003 actor [20:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:108:2057] recipient: [21:101:2135] 2025-04-10T11:26:50.691844Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.691931Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927938 is [21:153:2174] sender: [21:154:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:177:2057] recipient: [21:14:2061] 2025-04-10T11:26:50.715438Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:50.716167Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1004 actor [21:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-04-10T11:26:50.716824Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:183:2196] 2025-04-10T11:26:50.720005Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [21:183:2196] 2025-04-10T11:26:50.744384Z node 21 :PERSQUEUE INFO: new Cookie default|9e121034-d38fa39f-12d1d0ba-6f6e3a7b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:26:51.938225Z node 21 
:PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-10T11:26:52.018701Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:238:2057] recipient: [21:99:2134] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:241:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:242:2057] recipient: [21:240:2241] Leader for TabletID 72057594037927937 is [21:243:2242] sender: [21:244:2057] recipient: [21:240:2241] 2025-04-10T11:26:52.058348Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.058416Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:52.059055Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:292:2283] 2025-04-10T11:26:52.090052Z node 21 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:52.090170Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [21:292:2283] 2025-04-10T11:26:52.125963Z node 21 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [21:243:2242] sender: [21:315:2057] recipient: [21:14:2061] 2025-04-10T11:26:52.133173Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.136515Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1005 actor [21:312:2296] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> TKeyValueTest::TestConcatToLongKey [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestGetTimestamps [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:45.032075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-10T11:24:45.036819Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-10T11:24:45.037170Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-10T11:24:45.037214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-10T11:24:45.037254Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-10T11:24:45.037304Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-10T11:24:45.037358Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.037433Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-04-10T11:24:45.059237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:179:2194], now have 1 active actors on pipe 2025-04-10T11:24:45.059387Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-10T11:24:45.080549Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-10T11:24:45.083582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-10T11:24:45.083768Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:45.084614Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-10T11:24:45.084721Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-10T11:24:45.085154Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-10T11:24:45.085552Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-04-10T11:24:45.088080Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-10T11:24:45.088145Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2199] 2025-04-10T11:24:45.088213Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-10T11:24:45.090452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-10T11:24:45.090621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-10T11:24:45.090673Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-10T11:24:45.090718Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-04-10T11:24:45.090754Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-04-10T11:24:45.090934Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-04-10T11:24:45.090977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-04-10T11:24:45.091017Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-04-10T11:24:45.091072Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-04-10T11:24:45.091121Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-04-10T11:24:45.091148Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-04-10T11:24:45.091166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-04-10T11:24:45.091185Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-04-10T11:24:45.091214Z node 1 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-04-10T11:24:45.091248Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-04-10T11:24:45.091349Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:45.091407Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-10T11:24:45.091551Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-10T11:24:45.102597Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-10T11:24:45.103089Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-04-10T11:24:45.105526Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-10T11:24:45.105649Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-10T11:24:45.105693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-10T11:24:45.107590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-10T11:24:45.108158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-10T11:24:45.108715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-10T11:24:45.109293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-04-10T11:24:45.109419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-04-10T11:24:45.109459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-04-10T11:24:45.109592Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-10T11:24:45.109770Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-10T11:24:45.109816Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-10T11:24:45.110259Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2208], now have 1 active actors on pipe 2025-04-10T11:24:45.110344Z node 1 :PERSQUEUE DEBUG: 
Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-10T11:24:45.110391Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-10T11:24:45.110486Z node 1 :PERSQUEUE INFO: new Cookie default|648657b-eb564af2-3ebd6381-5308c0da_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:24:45.110596Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-10T11:24:45.110683Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T11:24:45.110989Z node 1 :PERSQUEUE DEBUG: [P ... eGenerations: 188 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 188 Important: false } Consumers { Name: "user1" Generation: 188 Important: false } 2025-04-10T11:26:52.077583Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:185:2198] 2025-04-10T11:26:52.080821Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [72:185:2198] 2025-04-10T11:26:52.084280Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:186:2199] 2025-04-10T11:26:52.086651Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [72:186:2199] 2025-04-10T11:26:52.095669Z node 72 :PERSQUEUE INFO: new Cookie default|e11bc13e-1543681f-f6343f37-a9321ea_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:26:52.129211Z node 72 :PERSQUEUE INFO: new Cookie default|69b39491-a54f87fd-bbe1864f-e148f9e3_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:280:2057] recipient: [72:99:2134] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:283:2057] recipient: [72:14:2061] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:284:2057] recipient: [72:282:2279] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:286:2057] recipient: [72:282:2279] 2025-04-10T11:26:52.178400Z node 72 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.178486Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:52.179208Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:334:2321] 2025-04-10T11:26:52.182186Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:335:2322] 2025-04-10T11:26:52.191976Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:52.192070Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [72:335:2322] 2025-04-10T11:26:52.194527Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:26:52.194615Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [72:334:2321] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:362:2057] recipient: [72:14:2061] 2025-04-10T11:26:52.197745Z node 72 :PERSQUEUE INFO: new Cookie default|14d73b1b-5a49f421-6f875c84-390cb332_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [72:177:2192] 2025-04-10T11:26:52.216728Z node 72 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800426 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:108:2057] recipient: [73:101:2135] 2025-04-10T11:26:52.741793Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.741869Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927938 is [73:153:2174] sender: [73:154:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:177:2057] recipient: [73:14:2061] 2025-04-10T11:26:52.761582Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.762590Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 189 actor [73:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 189 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 189 ReadRuleGenerations: 189 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 189 Important: false } Consumers { Name: "user1" Generation: 189 Important: false } 2025-04-10T11:26:52.763342Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:183:2196] 2025-04-10T11:26:52.766168Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [73:183:2196] 2025-04-10T11:26:52.769179Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:184:2197] 2025-04-10T11:26:52.771483Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [73:184:2197] 2025-04-10T11:26:52.779323Z node 73 :PERSQUEUE INFO: new Cookie default|7a794936-f35f40ab-829a81df-bffc820e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:26:52.822517Z node 73 :PERSQUEUE INFO: new Cookie default|f22d46c4-70bc9d57-b4bca406-f2db02e5_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:276:2057] recipient: [73:99:2134] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:278:2057] recipient: [73:14:2061] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:280:2057] recipient: [73:279:2275] Leader for TabletID 72057594037927937 is [73:281:2276] sender: [73:282:2057] recipient: [73:279:2275] 2025-04-10T11:26:52.862359Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:26:52.862421Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:26:52.862990Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:330:2317] 2025-04-10T11:26:52.865497Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:331:2318] 2025-04-10T11:26:52.874804Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:26:52.874877Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [73:331:2318] 2025-04-10T11:26:52.877109Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:26:52.877169Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [73:330:2317] Leader for TabletID 72057594037927937 is [73:281:2276] sender: [73:358:2057] recipient: [73:14:2061] 2025-04-10T11:26:52.879661Z node 73 :PERSQUEUE INFO: new Cookie default|c617168-f7611f3e-80537a9e-5d8c7763_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [73:175:2190] 2025-04-10T11:26:52.899905Z node 73 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800425 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } >> KqpFlipJoin::RightSemi_2 >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInner [GOOD] Test command err: Trying to start YDB, gRPC: 27192, MsgBus: 17080 2025-04-10T11:26:46.683798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644479854416952:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:46.683862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fa/r3tmp/tmpA0BJ9T/pdisk_1.dat 2025-04-10T11:26:47.057933Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:47.072532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:47.072607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:47.075208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 27192, node 1 2025-04-10T11:26:47.154698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:47.154735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:47.154749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:47.154882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17080 TClient is connected to server localhost:17080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:47.824178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:47.842281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:26:48.017060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:48.181792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:48.263319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:49.869037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644492739320640:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:49.869139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:50.288764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.367563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.421766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.458272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.507632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.555586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:50.626523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644497034288451:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:50.626619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:50.628931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644497034288456:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:50.632612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:50.646418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644497034288458:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:50.714545Z node 1 :TX_PROXY ERROR: Actor# [1:7491644497034288511:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:51.686181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644479854416952:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:51.686245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:94:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:95:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:95:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! 
new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:79:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:78:2110] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:82:2057] recipient: [37:78:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:81:2111] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:77:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:81:2057] recipient: [38:80:2110] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:83:2057] recipient: [38:80:2110] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:82:2111] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:136:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:82:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:84:2057] recipient: [39:83:2113] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:86:2057] recipient: [39:83:2113] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:85:2114] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:139:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2113] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:86:2057] recipient: [40:82:2113] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! 
new actor is[40:85:2114] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:81:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:85:2057] recipient: [41:83:2113] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:87:2057] recipient: [41:83:2113] !Reboot 72057594037927937 (actor [41:56:2097]) rebooted! !Reboot 72057594037927937 (actor [41:56:2097]) tablet resolver refreshed! new actor is[41:86:2114] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:140:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:57:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:74:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:84:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:86:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:88:2057] recipient: [42:87:2116] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:90:2057] recipient: [42:87:2116] !Reboot 72057594037927937 (actor [42:56:2097]) rebooted! !Reboot 72057594037927937 (actor [42:56:2097]) tablet resolver refreshed! new actor is[42:89:2117] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:143:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:57:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:74:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:84:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:87:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:88:2057] recipient: [43:86:2116] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:90:2057] recipient: [43:86:2116] !Reboot 72057594037927937 (actor [43:56:2097]) rebooted! !Reboot 72057594037927937 (actor [43:56:2097]) tablet resolver refreshed! 
new actor is[43:89:2117] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:143:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:57:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:74:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:85:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:88:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:89:2057] recipient: [44:87:2116] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:91:2057] recipient: [44:87:2116] !Reboot 72057594037927937 (actor [44:56:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:90:2117] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:144:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore >> KqpJoinOrder::TPCDS16-ColumnStore >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 >> KqpIndexLookupJoin::MultiJoins >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys >> KqpJoin::RightTableValuePredicate >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-04-10T11:20:47.195405Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:20:47.301230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:20:47.325863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:20:47.326122Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:20:47.336079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-04-10T11:20:47.336308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:47.336557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:47.336683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:47.336820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:47.336943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:47.337062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:47.337190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:47.337350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:47.337481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:47.337590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:47.337696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:47.373866Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:20:47.374112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:20:47.374186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:20:47.374393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:47.374588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:20:47.374678Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:20:47.374719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:20:47.374823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:20:47.374897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:20:47.374961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:20:47.374996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:20:47.375166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:20:47.375223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:20:47.375262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:20:47.375291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:20:47.375393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:20:47.375457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:20:47.375503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:20:47.375534Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:20:47.375600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:20:47.375635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:20:47.375683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:20:47.375741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-10T11:20:47.375779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:20:47.375807Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:20:47.376197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-10T11:20:47.376297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-10T11:20:47.376377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-10T11:20:47.376477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:20:47.376637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:20:47.376694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:20:47.376729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:20:47.376965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:20:47.377023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:20:47.377054Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:20:47.377223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:20:47.377269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:20:47.377316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:20:47.377503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:20:47.377565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:20:47.377597Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:20:47.377743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:20:47.377784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:20:47.377848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... e=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:106:2848:0]; 2025-04-10T11:26:51.430519Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:107:2792:0]; 2025-04-10T11:26:51.430588Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:108:2776:0]; 2025-04-10T11:26:51.430656Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:109:2792:0]; 2025-04-10T11:26:51.430720Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:110:2776:0]; 2025-04-10T11:26:51.430788Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:111:2784:0]; 2025-04-10T11:26:51.430851Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:112:2760:0]; 2025-04-10T11:26:51.430915Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:113:2776:0]; 2025-04-10T11:26:51.430980Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:114:9568:0]; 2025-04-10T11:26:51.431042Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:115:2848:0]; 2025-04-10T11:26:51.431126Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:116:2840:0]; 2025-04-10T11:26:51.431208Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:117:2856:0]; 2025-04-10T11:26:51.431291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:118:2856:0]; 2025-04-10T11:26:51.431351Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:119:2848:0]; 2025-04-10T11:26:51.431409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:120:2840:0]; 2025-04-10T11:26:51.431483Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:121:2856:0]; 2025-04-10T11:26:51.431564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:122:2848:0]; 2025-04-10T11:26:51.431627Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:123:2848:0]; 2025-04-10T11:26:51.431686Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:124:2856:0]; 2025-04-10T11:26:51.431748Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:125:2856:0]; 2025-04-10T11:26:51.431809Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:126:2848:0]; 2025-04-10T11:26:51.431868Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:127:2856:0]; 2025-04-10T11:26:51.431930Z 
node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:128:2832:0]; 2025-04-10T11:26:51.432010Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:129:2840:0]; 2025-04-10T11:26:51.432089Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:130:2848:0]; 2025-04-10T11:26:51.432141Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:131:2840:0]; 2025-04-10T11:26:51.432200Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:132:2840:0]; 2025-04-10T11:26:51.432260Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:133:2848:0]; 2025-04-10T11:26:51.432319Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:134:2848:0]; 2025-04-10T11:26:51.432417Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:135:2832:0]; 2025-04-10T11:26:51.432486Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:136:2848:0]; 2025-04-10T11:26:51.432542Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:137:2848:0]; 2025-04-10T11:26:51.432602Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:138:2840:0]; 2025-04-10T11:26:51.432661Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:139:2832:0]; 2025-04-10T11:26:51.432751Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:140:2840:0]; 2025-04-10T11:26:51.432822Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:141:2848:0]; 2025-04-10T11:26:51.432892Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:142:2848:0]; 2025-04-10T11:26:51.432960Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:143:2776:0]; 2025-04-10T11:26:51.433022Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:144:2792:0]; 2025-04-10T11:26:51.433101Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:145:2784:0]; 2025-04-10T11:26:51.433166Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:146:2784:0]; 2025-04-10T11:26:51.433244Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:147:2776:0]; 2025-04-10T11:26:51.433307Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:148:2792:0]; 2025-04-10T11:26:51.433380Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:149:2776:0]; 2025-04-10T11:26:51.433457Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:150:2768:0]; 2025-04-10T11:26:51.433518Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:151:2776:0]; 2025-04-10T11:26:51.433590Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:152:9576:0]; 2025-04-10T11:26:51.439160Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=100;task=cpu=0;mem=3571882;external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91;type=CS::INDEXATION;priority=0;; 2025-04-10T11:26:51.439217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91;mem=3571882;cpu=0; 2025-04-10T11:26:51.439281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91;task_id=100;mem=3571882;cpu=0; 2025-04-10T11:26:51.441392Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91; 2025-04-10T11:26:53.408199Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=b2cd90fc-15fe11f0-854a52ae-79fe3b91;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-10T11:26:53.411076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-04-10T11:26:53.422991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=101;task=cpu=0;mem=2799958;external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e;type=CS::INDEXATION;priority=0;; 2025-04-10T11:26:53.423064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e;mem=2799958;cpu=0; 2025-04-10T11:26:53.423108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e;task_id=101;mem=2799958;cpu=0; 2025-04-10T11:26:53.427239Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e; 2025-04-10T11:26:54.930299Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=b2cdd90e-15fe11f0-829977ea-8d934a1e;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-10T11:26:54.936172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-04-10T11:26:55.474828Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:26:55.475637Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[306] (CS::GENERAL) apply at tablet 9437184 2025-04-10T11:26:55.568902Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 422 2025-04-10T11:26:55.573818Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3445356;raw_bytes=5239242;count=3;records=53332} compacted 
{blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183316;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=215061504;raw_bytes=326598142;count=144;records=3322060} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22817, MsgBus: 25734 2025-04-10T11:26:53.965176Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644512689094835:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:53.968712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f7/r3tmp/tmp8Lrl4s/pdisk_1.dat 2025-04-10T11:26:54.598922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:54.601390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:54.601459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:54.631563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22817, node 1 2025-04-10T11:26:54.794733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:54.794764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:54.794772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:54.794889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25734 TClient is connected to server localhost:25734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:55.433699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.486407Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:55.506055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.683790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:26:55.866936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.961037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:57.816820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644529868965797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:57.816901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.110851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.151379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.188256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.223270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.265900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.350799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.433262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644534163933612:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.433335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.433721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644534163933617:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.437649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:58.449195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644534163933619:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:58.522497Z node 1 :TX_PROXY ERROR: Actor# [1:7491644534163933674:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:58.966261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644512689094835:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:58.966358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:59.584084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.619809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.656193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.732879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.791728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.851105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16448, MsgBus: 19962 2025-04-10T11:26:08.908521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644316018680888:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:08.908716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001713/r3tmp/tmpKfuBAr/pdisk_1.dat 2025-04-10T11:26:09.223871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16448, node 1 2025-04-10T11:26:09.287760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:09.287941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:09.293396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-10T11:26:09.310722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:09.310744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:09.310748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:09.310888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19962 TClient is connected to server localhost:19962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:09.818316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:11.647923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644328903583433:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.648028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.648349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644328903583445:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:11.652304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:11.664189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644328903583447:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:11.756802Z node 1 :TX_PROXY ERROR: Actor# [1:7491644328903583498:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:12.057294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.189321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.217560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.245230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.271795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.394267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.432382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.467853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.503713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.538212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.602281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.681375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:12.721974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.341085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:26:13.414752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.445673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.474967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.540925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.570045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.605821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.640770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.669816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.707574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.777152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.808528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.848954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.879614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.904589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:26:13.908446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644316018680888:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:13.908514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:13.936843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.003352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.036854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.313889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.318471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.319527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.324155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.325557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.329779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.329808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.335725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.335751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.341411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.341511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.346582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.347257Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.353312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.353314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.359733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.359746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.365651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.366021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.371621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038444;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.371680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.377935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038430;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.378175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038460;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.383939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.384482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.389936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.391064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.396197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.397405Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.402363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.404795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.408164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.409879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038434;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.413839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.415699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.419929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.423020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.425834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.429189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.431481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.433354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.438076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.438886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.444136Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.444141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.477101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:39.514461Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq6p3jf5ecrkjnzb5zmexd", SessionId: ydb://session/3?node_id=1&id=NzkzZmZkODctNTg0MWE0MGYtN2MxYjFiZWQtZGNmZGQxNjY=, Slow query, duration: 24.359005s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:40.044813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:40.045275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:40.046009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> KqpJoin::RightTableValuePredicate [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 14620, MsgBus: 12032 2025-04-10T11:26:56.666768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644522783143878:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:56.670520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f0/r3tmp/tmpVr2QPX/pdisk_1.dat 2025-04-10T11:26:57.255216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:57.260627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:57.260710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:57.263093Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14620, node 1 2025-04-10T11:26:57.418698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:57.418720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:57.418728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:57.418837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12032 TClient is connected to server localhost:12032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:58.040639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.079719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:26:58.230362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.396001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.472990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:00.355457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644539963014836:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:00.355611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:00.707295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.746322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.791709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.830619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.868843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.956320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.012885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644544257982649:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.012955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.013248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644544257982654:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.016466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:01.030888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644544257982656:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:01.097706Z node 1 :TX_PROXY ERROR: Actor# [1:7491644544257982709:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:01.685604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644522783143878:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:01.685707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:02.191371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS96+ColumnStore >> KqpIndexLookupJoin::MultiJoins [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::MultiJoins [GOOD] Test command err: Trying to start YDB, gRPC: 1988, MsgBus: 6459 2025-04-10T11:26:56.569938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644523758663863:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:56.570379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f1/r3tmp/tmpVuNwLB/pdisk_1.dat 2025-04-10T11:26:57.075584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:57.079977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:57.080070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:57.084917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1988, node 1 2025-04-10T11:26:57.248593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:57.248624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:57.248635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:57.248759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6459 TClient is connected to server localhost:6459 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:57.996837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.018600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:58.037492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.217252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.429149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.505918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:00.456756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644540938534647:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:00.456885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:00.956800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.023841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.098677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.136415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.178767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.266618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.382498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644545233502467:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.382597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.387676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644545233502472:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:01.398956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:01.416222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644545233502474:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:01.480920Z node 1 :TX_PROXY ERROR: Actor# [1:7491644545233502531:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:01.570492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644523758663863:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:01.570543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:02.534225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.579680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.612150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.652090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.719951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.769687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink >> OlapEstimationRowsCorrectness::TPCDS78 >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFull >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite >> KqpFlipJoin::RightSemi_1 >> KqpFlipJoin::RightSemi_3 [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11368, MsgBus: 10314 2025-04-10T11:26:54.388773Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644513158610764:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:54.388804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f4/r3tmp/tmp25iyOQ/pdisk_1.dat 2025-04-10T11:26:54.926543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:54.947280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:54.947430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:54.949463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11368, node 1 2025-04-10T11:26:55.087904Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:55.087953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:55.087963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:55.088074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10314 TClient is connected to server localhost:10314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:55.751507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.789494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.940002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:56.164817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:26:56.272895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:58.003370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644526043514443:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.003546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.315573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.355670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.391835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.471428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.502852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.557138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.657162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644530338482257:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.657247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.657490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644530338482262:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.661534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:58.678657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644530338482264:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:58.749409Z node 1 :TX_PROXY ERROR: Actor# [1:7491644530338482320:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:59.392867Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644513158610764:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:59.392930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:59.733232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.783322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.856669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.881351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.925541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.999035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3537, MsgBus: 3976 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f4/r3tmp/tmpl8ITBC/pdisk_1.dat 2025-04-10T11:27:02.075565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:27:02.080556Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:02.109093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:02.109423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:02.112002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3537, node 2 2025-04-10T11:27:02.246681Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:02.246703Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:02.246710Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:02.246821Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3976 TClient 
is connected to server localhost:3976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:02.967449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:02.976937Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:27:02.992292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:03.119274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:03.336071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:03.417905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:05.807045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644560410025165:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:05.807140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:05.878810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.928692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.965641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.000457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.041368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.092642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.144377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644564704992974:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.144453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.144612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644564704992979:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.148136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:06.159601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644564704992981:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:27:06.221278Z node 2 :TX_PROXY ERROR: Actor# [2:7491644564704993034:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:07.201050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.255661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.296187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.333253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.375354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.419093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 15258, MsgBus: 1494 2025-04-10T11:26:54.403309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644514683429204:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:54.404778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f6/r3tmp/tmpurKZfz/pdisk_1.dat 2025-04-10T11:26:54.957055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:54.982874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:54.982968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:54.984789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15258, node 1 2025-04-10T11:26:55.129059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:55.129105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:55.129120Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:55.129245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1494 TClient is connected to server localhost:1494 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:55.805549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.836051Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:55.857328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:55.990592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:56.133358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:56.214728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:57.971164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644527568332758:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:57.971295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.287093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.326403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.381796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.437101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.505841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.545592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:58.652827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644531863300576:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.652920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.653239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644531863300581:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:58.657675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:26:58.673623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644531863300583:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:26:58.731598Z node 1 :TX_PROXY ERROR: Actor# [1:7491644531863300639:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:59.395306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644514683429204:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:59.395360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:59.682848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.768465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.862027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.897406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27101, MsgBus: 3683 2025-04-10T11:27:01.831402Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644547139618027:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:01.832498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f6/r3tmp/tmpBcdg7k/pdisk_1.dat 2025-04-10T11:27:02.093574Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27101, node 2 2025-04-10T11:27:02.155812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:02.155897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:02.159099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:02.273287Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:02.273331Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:02.273338Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:02.273437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3683 TClient is connected to server localhost:3683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:02.855393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:02.872084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:02.974889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:03.165663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:03.270353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:05.603219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644564319488865:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:05.603303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:05.657944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.707439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.794001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.857370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.937190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.013098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:06.106072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644568614456682:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.106209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.106452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644568614456687:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:06.111274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:06.124569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644568614456689:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:06.213148Z node 2 :TX_PROXY ERROR: Actor# [2:7491644568614456744:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:06.838248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644547139618027:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:06.838320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:07.214369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.261044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.297536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:07.348617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8064, MsgBus: 8155 2025-04-10T11:25:52.428484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644249297732972:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:52.429193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001726/r3tmp/tmpOxYR0q/pdisk_1.dat 2025-04-10T11:25:52.784218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8064, node 1 2025-04-10T11:25:52.821838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:52.821946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:52.823570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:52.933100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-10T11:25:52.933124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:52.933130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:52.933290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8155 TClient is connected to server localhost:8155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.609137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.059555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644262182635523:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.059584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644262182635515:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.059709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.066713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:25:55.075968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644262182635529:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:25:55.138990Z node 1 :TX_PROXY ERROR: Actor# [1:7491644262182635580:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:55.609753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.841179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.841504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.841796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.841961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:25:55.842090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.842271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.842401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.842518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.842665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.842800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.842925Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.843017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644262182635849:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.843152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.843243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.843504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.843684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:25:55.843811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.843931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.844039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.844196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.844305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.844415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.844528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.844648Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644262182635851:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.883478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644262182635843:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.883545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644262182635843:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.883802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id= ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.821332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.823029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.828236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.828406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.835134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.835134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.840886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.840894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.847224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.847224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.853610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.853610Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.860587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.860587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.867597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.867599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.874278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.874277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.880963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.880962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.887804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.887804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.895190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.895190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.901751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.903048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.907022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.910266Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.912546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.916132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.917737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.921934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.922328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.927555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.927564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.933767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.933809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.940192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.940375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.946657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.946912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.953395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.953496Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.960154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.960159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:59.142521Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq740m24re9acgbq9vg24j", SessionId: ydb://session/3?node_id=1&id=NGRlNmNiMzQtYmU2OGQyMTctZTA3NThhNzItOWZhNWVhYmY=, Slow query, duration: 29.745722s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:59.424104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:59.424866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491644459751178519:9306];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:26:59.425305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:59.426033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> KqpJoin::FullOuterJoinSizeCheck >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> KqpJoinOrder::TestJoinHint2-ColumnStore >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] 
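For readability, the DDL captured in the KQP_SLOW_LOG entry above, with the log's escaped "\n" sequences expanded (the statements are verbatim from the log; only the whitespace is reformatted):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);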
>> DataShardReadIterator::ShouldReadRangeChunk3 >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightOnly_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:25:55.101442Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:55.101558Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:25:55.132378Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:55.152483Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:25:55.155684Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:25:55.158779Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:25:55.164165Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:25:55.166117Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:25:55.178764Z node 1 :PERSQUEUE INFO: new Cookie default|c78371a6-7932afa3-69b5614a-1cf90d80_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:55.186500Z node 1 :PERSQUEUE INFO: new Cookie default|e21445e6-f2ede5e2-4bb503da-af033199_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:55.209878Z node 1 :PERSQUEUE INFO: new Cookie default|e733551e-9ed2a9ed-444afe3e-3388e348_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:55.217934Z node 1 :PERSQUEUE INFO: new Cookie default|22f4737c-3e8e7fb-8f1b568-d8068f56_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-04-10T11:25:55.224900Z node 1 :PERSQUEUE INFO: new Cookie default|321e0d64-5dd46950-76902dfe-2bfee366_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:55.231869Z node 1 :PERSQUEUE INFO: new Cookie default|45b3a874-755c7227-d8839d88-9f36afaa_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:25:55.736688Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:55.736778Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:181:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:184:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:185:2057] recipient: [2:183:2195] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:187:2057] recipient: [2:183:2195] 2025-04-10T11:25:55.787012Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:55.787084Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:186:2196] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:260:2057] recipient: [2:14:2061] 2025-04-10T11:25:57.426336Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:57.427613Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:25:57.428863Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2258] 2025-04-10T11:25:57.432366Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:266:2258] 2025-04-10T11:25:57.435113Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:267:2259] 2025-04-10T11:25:57.437162Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:267:2259] 2025-04-10T11:25:57.451424Z node 2 :PERSQUEUE INFO: new Cookie default|c42eec34-ae9a35c2-8b5659bc-f0687790_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:57.459416Z node 2 :PERSQUEUE INFO: new Cookie default|5c2439c4-350772c9-a2a6a6da-7c94da1_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:57.505041Z node 2 :PERSQUEUE INFO: new Cookie default|631053d6-17f81ce7-7c73c058-943620b_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:57.517902Z node 2 :PERSQUEUE INFO: new Cookie default|50975b61-d4ed2498-869a9b61-a037439b_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:57.529418Z node 2 :PERSQUEUE INFO: new Cookie default|ceb81b87-7d2e76d8-7896f01f-99f85f08_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:25:57.542096Z node 2 :PERSQUEUE INFO: new Cookie default|bb6064e6-13574fc8-dcbef9b8-8040feb6_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:25:58.023160Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:58.023252Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] 
Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-04-10T11:25:58.078669Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:58.078752Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-04-10T11:25:59.744442Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:25:59.745417Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:25:59.746413Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-04-10T11:25:59.748894Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-04-10T11:25:59.750891Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-04-10T11:25:59.752818Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-04-10T11:25:59.770468Z node 3 :PERSQUEUE INFO: new Cookie default|2e114f9a-82811ae9-60a84809-c ... 
2.847351Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 47 actor [47:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 47 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 47 Important: false } 2025-04-10T11:27:12.848736Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:185:2198] 2025-04-10T11:27:12.852004Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:185:2198] 2025-04-10T11:27:12.854368Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:186:2199] 2025-04-10T11:27:12.856768Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:186:2199] 2025-04-10T11:27:12.870138Z node 47 :PERSQUEUE INFO: new Cookie default|5f53bf9b-4d255cae-3411adbb-8b253305_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:12.881941Z node 47 :PERSQUEUE INFO: new Cookie default|fa0c7dd2-af1b78-2ccc6858-a13d00f3_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:12.923498Z node 47 :PERSQUEUE INFO: new Cookie default|3d8df494-1b8d0f7a-732e0b10-f80b335d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:12.936165Z node 47 :PERSQUEUE INFO: new Cookie default|aa452e9-d2429001-9c05de11-4a0aed3e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:12.948949Z node 47 :PERSQUEUE INFO: new Cookie default|799d949f-5d22f0db-1dbbbdcf-316ad250_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:12.961640Z node 47 :PERSQUEUE INFO: new Cookie default|529a81ea-69dc97fd-1686c6e-c30f6511_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:284:2057] recipient: [47:99:2134] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:287:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:288:2057] recipient: [47:286:2281] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:290:2057] recipient: [47:286:2281] 2025-04-10T11:27:13.056489Z node 47 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:27:13.056575Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-10T11:27:13.058041Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:338:2323] 2025-04-10T11:27:13.061199Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:339:2324] 2025-04-10T11:27:13.077605Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:27:13.077716Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:339:2324] 2025-04-10T11:27:13.080040Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:27:13.080127Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:338:2323] !Reboot 72057594037927937 (actor [47:107:2139]) rebooted! !Reboot 72057594037927937 (actor [47:107:2139]) tablet resolver refreshed! 
new actor is[47:289:2282] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:386:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:108:2057] recipient: [48:101:2135] 2025-04-10T11:27:15.036378Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:27:15.036459Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927938 is [48:153:2174] sender: [48:154:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:179:2057] recipient: [48:14:2061] 2025-04-10T11:27:15.059220Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:27:15.060177Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-04-10T11:27:15.061111Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:185:2198] 2025-04-10T11:27:15.064069Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:185:2198] 2025-04-10T11:27:15.066142Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:186:2199] 2025-04-10T11:27:15.068401Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:186:2199] 2025-04-10T11:27:15.079034Z node 48 :PERSQUEUE INFO: new Cookie default|8749f769-b0ebc13f-35c9d3b5-7b823aa9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:15.085603Z node 48 :PERSQUEUE INFO: new Cookie default|de3d19f0-17554429-98721bbb-bbffa7c0_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:15.121030Z node 48 :PERSQUEUE INFO: new Cookie default|db0ccbd1-86a12a4d-3a0e3d74-8c3cbd0b_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:15.131390Z node 48 :PERSQUEUE INFO: new Cookie default|52fc71c5-85509a11-e9e1d37f-57ef8b51_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:15.141872Z node 48 :PERSQUEUE INFO: new Cookie default|89c5fdf-7d36c0ad-e62cde13-da711797_4 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:15.152803Z node 48 :PERSQUEUE INFO: new Cookie default|305a45b0-6f2df57f-ce27299-b2e0136_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:108:2057] recipient: [49:101:2135] 2025-04-10T11:27:15.982860Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:27:15.982943Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927938 is [49:153:2174] sender: [49:154:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:179:2057] recipient: [49:14:2061] 2025-04-10T11:27:16.009553Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:27:16.010455Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-04-10T11:27:16.011296Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:185:2198] 2025-04-10T11:27:16.014475Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:185:2198] 2025-04-10T11:27:16.016360Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:186:2199] 2025-04-10T11:27:16.018589Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:186:2199] 2025-04-10T11:27:16.028733Z node 49 :PERSQUEUE INFO: new Cookie default|7f0fe3a0-738ef57a-52d096ba-43990f20_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:16.035120Z node 49 :PERSQUEUE INFO: new Cookie default|1506835b-7506c0c9-b934e5d2-aa9fa93b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:16.070537Z node 49 :PERSQUEUE INFO: new Cookie default|5fd174a9-77fdc468-36a9193b-cdbe66f9_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:16.080973Z node 49 :PERSQUEUE INFO: new Cookie default|18ff2aae-31177b60-9b24b0ad-95c5a7da_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-04-10T11:27:16.092547Z node 49 :PERSQUEUE INFO: new Cookie default|ddff19af-cea78289-b806b362-6960b695_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-10T11:27:16.102496Z node 49 :PERSQUEUE INFO: new Cookie default|270ea55a-ea5042b4-d5b5a3dc-ae48cc81_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug [GOOD] |93.6%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> THiveTest::TestDeleteTablet >> THiveTest::TestLockTabletExecutionGoodUnlock >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> YdbProxy::ReadNonExistentTopic [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink >> THiveTest::TestLockTabletExecutionLocalGone >> TSchemeShardTest::ListNotCreatedDirCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] Test command err: Trying to start YDB, gRPC: 27626, MsgBus: 26385 2025-04-10T11:25:58.126474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644272726749796:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:58.126550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00171a/r3tmp/tmpDt6ttw/pdisk_1.dat 2025-04-10T11:25:58.488636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:58.488744Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:58.490929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:58.512578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27626, node 1 2025-04-10T11:25:58.526821Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:58.533471Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:58.615440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:58.615466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:58.615472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:58.615558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26385 TClient is connected to server localhost:26385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:59.189573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:59.214900Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:01.160309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644285611652360:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.160309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644285611652352:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.160394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:01.163905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:01.173016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644285611652366:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:01.239973Z node 1 :TX_PROXY ERROR: Actor# [1:7491644285611652417:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:01.496283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:01.749912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:01.750120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:01.750454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:01.750580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:01.750701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:01.750815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:01.750918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:01.751024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:01.751134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:01.751237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:01.751330Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:01.751431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644285611652658:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:01.767854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:01.767928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:01.768169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:01.768293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:01.768411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:01.768517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:01.768634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:01.768796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:01.768933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:01.769048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:01.769154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:01.769274Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644285611652672:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:01.786029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644285611652738:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26 ... =TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.450515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.454489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.456293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.461494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.466018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.468398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.474049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.480372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.486931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.493414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.499848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.507525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.515161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.522720Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.529185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.530309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.536235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.537448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.544627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.552670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.567718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.569252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.575642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.576477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.583609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.583637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.590570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.591022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.598347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:27:04.598355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.606300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.606609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.612171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.613463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.618067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.618550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.624334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.626550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.630634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.632698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.638758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.640455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.645483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.646516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.651115Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.652315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.656902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.658182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.660723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.663822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.665896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.669597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:04.671478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinSizeCheck [GOOD] Test command err: Trying to start YDB, gRPC: 3476, MsgBus: 21461 2025-04-10T11:27:13.554813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644598212529428:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:13.555435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e5/r3tmp/tmprqa8ao/pdisk_1.dat 2025-04-10T11:27:14.144393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:14.144505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:14.155634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:14.200440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3476, node 1 2025-04-10T11:27:14.322085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:14.322106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:14.322114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-10T11:27:14.322250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21461 TClient is connected to server localhost:21461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:15.139747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:15.154109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:15.170165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:15.348189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:15.556412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:15.655323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:17.575218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644615392400317:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:17.575341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:17.848112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:27:17.888877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:27:17.933231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:27:17.993384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:27:18.070668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:27:18.158876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:27:18.259678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644619687368130:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:18.259742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:18.260057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644619687368135:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:18.264341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:27:18.276416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644619687368137:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:27:18.350181Z node 1 :TX_PROXY ERROR: Actor# [1:7491644619687368191:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:27:18.510314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644598212529428:2155];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:27:18.510376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:27:19.318446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:27:19.363233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:27:19.404320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
>> THiveTest::TestDeleteOwnerTablets [GOOD]
>> THiveTest::TestDeleteOwnerTabletsMany
>> TSchemeShardTest::ListNotCreatedDirCase [GOOD]
>> TSchemeShardTest::ListNotCreatedIndexCase
|93.7%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log}
>> THiveTest::TestLockTabletExecutionLocalGone [GOOD]
>> THiveTest::TestHiveBalancerDifferentResources2 [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD]
>> KqpFlipJoin::RightOnly_3 [GOOD]
>> THiveTest::TestHiveBalancerUselessNeighbourMoves
>> DataShardReadIterator::ShouldReadRangePrefix5
>> THiveTest::TestLocalRegistrationInSharedHive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD]
Test command err:
2025-04-10T11:25:50.543583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.543725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.543841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e82/r3tmp/tmpmlbnUU/pdisk_1.dat 2025-04-10T11:25:50.855038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.891831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.928318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.928413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.939755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.020582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.056676Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.057749Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.058167Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.058396Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.103639Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.104186Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.104248Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.105425Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.105492Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.105536Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.105787Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.105863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.105913Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.116494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.143854Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.144010Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.144110Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.144150Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.144177Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.144200Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.144353Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.144397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.144774Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.144844Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.144895Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.144920Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.144962Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.144992Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.145018Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.145050Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.145094Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.145189Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.145212Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.145242Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.145534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.145568Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.145649Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.145818Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.145850Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.145916Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.145963Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.145994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.146016Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.146037Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.146251Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.146276Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.146297Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.146326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.146379Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.146404Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.146447Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.146467Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.146493Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.147534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.147564Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.158016Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.158074Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.158110Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.158180Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.158222Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.304516Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.304552Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.304577Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.304795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.304827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.304918Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.304959Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.304988Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.305009Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.307656Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.307700Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.308361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.308392Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.308425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... de 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037888 has finished 2025-04-10T11:27:17.661114Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:17.661172Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:27:17.661244Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:27:17.661309Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:27:17.661547Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:885:2711], Recipient [13:885:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:17.661581Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:17.661631Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:27:17.661663Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:27:17.661692Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:17.661727Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-10T11:27:17.661755Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-04-10T11:27:17.661787Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-10T11:27:17.661815Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-04-10T11:27:17.661866Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-04-10T11:27:17.661905Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-04-10T11:27:17.662027Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-10T11:27:17.662066Z node 13 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-10T11:27:17.662092Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-04-10T11:27:17.662117Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:17.666169Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:27:17.666283Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-04-10T11:27:17.666329Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-04-10T11:27:17.666374Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-04-10T11:27:17.666428Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-10T11:27:17.666461Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:17.666494Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-10T11:27:17.666530Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-10T11:27:17.666642Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-10T11:27:17.666690Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-10T11:27:17.666735Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-10T11:27:17.666776Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-10T11:27:17.666818Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-10T11:27:17.666846Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-10T11:27:17.666873Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-04-10T11:27:17.666902Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:17.667054Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-04-10T11:27:17.667082Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-04-10T11:27:17.667121Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:27:17.667161Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:27:17.667192Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-10T11:27:17.667218Z node 13 :TX_DATASHARD TRACE: Advance execution plan 
for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:27:17.667243Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-04-10T11:27:17.667284Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:17.667324Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:17.667360Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:27:17.667404Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:27:17.679662Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-10T11:27:17.679827Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:27:17.679912Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:27:17.680028Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1080:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:17.680120Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:17.680517Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-10T11:27:17.680571Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:27:17.680599Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:17.680640Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1080:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:17.680677Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:27:17.686812Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-10T11:27:17.687084Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:17.687204Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:17.687359Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:27:17.687436Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:17.687512Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:17.687579Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:17.687626Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-10T11:27:17.687697Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2025-04-10T11:27:17.687727Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:17.687750Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:17.687779Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:17.687937Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:17.688374Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2025-04-10T11:27:17.688470Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:593:2518], 1} after executionsCount# 1 2025-04-10T11:27:17.688564Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:17.688811Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} finished in read 2025-04-10T11:27:17.688912Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:27:17.688940Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:17.688966Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:17.688995Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:17.689049Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:27:17.689069Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:17.689105Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-10T11:27:17.689170Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:27:17.689395Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH12 >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD]
>> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD]
>> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD]
>> TSchemeShardTest::FindSubDomainPathId
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink
>> THiveTest::TestHiveBalancerWithImmovableTablets
>> DataShardReadIterator::ShouldReadRangeChunk100
>> DataShardReadIteratorBatchMode::RangeFromNonInclusive
>> TSchemeShardTest::FindSubDomainPathId [GOOD]
>> TSchemeShardTest::FindSubDomainPathIdActor
>> TSchemeShardTest::FindSubDomainPathIdActor [GOOD]
>> TSchemeShardTest::FindSubDomainPathIdActorAsync
>> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD]
>> YdbOlapStore::LogCountByResource [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD]
Test command err:
2025-04-10T11:20:27.918207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:27.918632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:27.918715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d7a/r3tmp/tmpSDhA6D/pdisk_1.dat 2025-04-10T11:20:28.349236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11949, node 1 2025-04-10T11:20:28.646781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:28.646853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:28.646896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:28.647380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:28.655232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:28.774567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:28.774730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:28.788947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65240 2025-04-10T11:20:29.416272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:32.907469Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:32.947161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.947346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.983884Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:32.986508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.252347Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.253067Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.253849Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254074Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254440Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254593Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254694Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254791Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.254934Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.447603Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.447914Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.461802Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.622646Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:33.685854Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:33.685984Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:33.719049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:33.719550Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:33.719785Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:33.719843Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:33.719909Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:33.719977Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:33.720036Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:33.720094Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:33.720596Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:33.747391Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:33.749870Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:33.769406Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:33.769483Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:33.769577Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:33.775300Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.775442Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.794676Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:33.795471Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:33.859008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:33.866979Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:33.867145Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:34.055326Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:34.218188Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:34.326133Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:35.501809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2245:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.501959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.522745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:36.097569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2551:3128], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.097876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.099605Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2556:3132]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:20:36.099805Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:20:36.099890Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2558:3134] 2025-04-10T11:20:36.099975Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2558:3134] 2025-04-10T11:20:36.100602Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2559:2996] 2025-04-10T11:20:36.101079Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2558:3134], server id = [2:2559:2996], tablet id = 72075186224037894, status = OK 2025-04-10T11:20:36.101201Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2559:2996], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:20:36.101276Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:20:36.101530Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:20:36.101614Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2556:3132], StatRequests.size() = 1 2025-04-10T11:20:36.123378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2563:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.123511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.124002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2568:3143], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:36.130555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:20:36.323778Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-10T11:20:36.323897Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-10T11:20:36.418611Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2558:3134], schemeshard count = 1 2025-04-10T11:20:36.846656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 0T11:26:08.220483Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:10.837650Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:10.838120Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:10.838548Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:10.849855Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:10.849939Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:13.425543Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:13.425633Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:14.598765Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:15.759217Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:15.759319Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:16.833646Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:16.833818Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:16.834116Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:18.229362Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:18.229421Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:20.717508Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:20.728421Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:20.728488Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:23.143043Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:23.143425Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:23.143675Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:23.155040Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:23.155116Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:25.577574Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:25.577652Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:26:26.869825Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:28.181691Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:26:28.181766Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:28.181800Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:28.181835Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:26:28.400592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:28.400665Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:29.670726Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:29.670903Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:29.671171Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:31.154826Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:31.154900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:33.910679Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:33.922758Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:33.922832Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:36.573605Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:36.573815Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:36.574219Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:36.585300Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:36.585387Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:39.200239Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:39.200315Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:40.534796Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:42.098983Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:42.099056Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:43.433957Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:43.434193Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:43.434545Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:44.617173Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:44.617275Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:47.141547Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:47.152486Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:47.152560Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
No force traversals. 2025-04-10T11:26:50.061857Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:50.062064Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:50.062434Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:50.073401Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:50.073471Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:52.923276Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:52.923346Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:54.227471Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:55.728105Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:55.728172Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:57.064984Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:26:57.065182Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:57.065537Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:26:58.558449Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:58.558519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:01.236820Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:01.247699Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:01.247786Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:03.965248Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:27:03.965482Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:27:03.965815Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:27:03.978771Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:03.978847Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:06.419272Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:06.419353Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:07.806032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:09.222139Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:27:09.222245Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:27:09.222281Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:27:09.222320Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:27:09.450033Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:09.450112Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:27:10.898922Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:27:10.899317Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:27:10.899580Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:27:10.923075Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:27:10.923170Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 217.000000s, at schemeshard: 72075186224037897 2025-04-10T11:27:10.923423Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49
... waiting for TEvSchemeShardStats 2 (done)
... waiting for TEvPropagateStatistics
2025-04-10T11:27:10.956511Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:27:12.514337Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:12.514444Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:15.210737Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:15.223051Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:15.223125Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:17.918854Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-10T11:27:17.919089Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
... waiting for TEvPropagateStatistics (done)
2025-04-10T11:27:17.919577Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13632:7564]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:27:17.920145Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:27:17.927672Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:27:17.927751Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13632:7564], StatRequests.size() = 1
|93.7%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD]
Test command err: 2025-04-10T11:25:50.428295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.428513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.428684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eba/r3tmp/tmpwDHXEF/pdisk_1.dat 2025-04-10T11:25:50.811306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.862622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.054756Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.055823Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.056221Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.056457Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.101552Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.102085Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.102168Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.103324Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.103394Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.103432Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.103678Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.103762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.103823Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.114385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.139061Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.139204Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.139299Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.139332Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.139359Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.139383Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.139521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.139579Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.139792Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.139853Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.139889Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.139923Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.139962Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.139988Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.140010Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.140031Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.140072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.140159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.140178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.140206Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.140450Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.140479Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.140556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.140714Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.140746Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.140803Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.140860Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.140896Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.140921Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.140939Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.141137Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.141162Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.141186Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.141208Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.141246Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.141283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.141305Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.141326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.141342Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.142598Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.142645Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.153310Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.153389Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.153424Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.153468Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.153522Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.300378Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.300414Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.300448Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.300702Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.300740Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.300812Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.300852Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.300883Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.300907Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.303792Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.303861Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.305743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.305778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.305826Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... -04-10T11:27:19.669494Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.669521Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropIndexNotice 2025-04-10T11:27:19.669546Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveTable 2025-04-10T11:27:19.669571Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveTable 2025-04-10T11:27:19.669598Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.669624Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveTable 2025-04-10T11:27:19.669647Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveIndex 2025-04-10T11:27:19.669672Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveIndex 2025-04-10T11:27:19.669699Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.669724Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveIndex 2025-04-10T11:27:19.669746Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateCdcStream 2025-04-10T11:27:19.669771Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateCdcStream 2025-04-10T11:27:19.669797Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.669822Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateCdcStream 2025-04-10T11:27:19.669845Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit AlterCdcStream 2025-04-10T11:27:19.669868Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit AlterCdcStream 2025-04-10T11:27:19.669899Z node 13 :TX_DATASHARD TRACE: Execution status 
for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.669924Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit AlterCdcStream 2025-04-10T11:27:19.669947Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit DropCdcStream 2025-04-10T11:27:19.669970Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit DropCdcStream 2025-04-10T11:27:19.669993Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.670019Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropCdcStream 2025-04-10T11:27:19.670043Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateIncrementalRestoreSrc 2025-04-10T11:27:19.670072Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateIncrementalRestoreSrc 2025-04-10T11:27:19.670102Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.670128Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateIncrementalRestoreSrc 2025-04-10T11:27:19.671026Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompleteOperation 2025-04-10T11:27:19.671068Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-10T11:27:19.671499Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is DelayComplete 2025-04-10T11:27:19.671560Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompleteOperation 2025-04-10T11:27:19.671639Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T11:27:19.671702Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-10T11:27:19.671749Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-04-10T11:27:19.671776Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T11:27:19.671813Z node 13 :TX_DATASHARD TRACE: Execution plan for [2500:281474976715663] at 72075186224037890 has finished 2025-04-10T11:27:19.671878Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:19.671939Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-10T11:27:19.672021Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T11:27:19.672089Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T11:27:19.674438Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:985:2786]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 2000} 2025-04-10T11:27:19.674504Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 
2025-04-10T11:27:19.674579Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 2000 2025-04-10T11:27:19.674647Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:27:19.675396Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2500} 2025-04-10T11:27:19.675543Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:27:19.676271Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:885:2711]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:27:19.676317Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-10T11:27:19.676691Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:985:2786]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:27:19.676726Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-10T11:27:19.677217Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:27:19.677256Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:27:19.677803Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:27:19.677863Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CreateTable 2025-04-10T11:27:19.677939Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:27:19.678028Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-10T11:27:19.678087Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-04-10T11:27:19.679072Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:19.679188Z node 13 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-10T11:27:19.679366Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:27:19.681524Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1026:2819], Recipient [13:985:2786]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1030:2823] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:27:19.681589Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:27:19.682591Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:985:2786]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2500 ReadStep# 2500 } 2025-04-10T11:27:19.682648Z node 13 :TX_DATASHARD TRACE: StateWork, processing event 
TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:27:19.682728Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2500 2025-04-10T11:27:19.683269Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:985:2786]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715663 2025-04-10T11:27:19.683322Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:27:19.683400Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037890 state Ready 2025-04-10T11:27:19.683940Z node 13 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-10T11:27:19.691925Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1045:2832], Recipient [13:985:2786]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:19.692031Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:19.692110Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1044:2831], serverId# [13:1045:2832], sessionId# [0:0:0] 2025-04-10T11:27:19.692321Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [13:1043:2830], Recipient [13:985:2786]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T11:27:19.694053Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72075186224037888 TableId: 2 SchemaVersion: 1111 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T11:27:19.694277Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:19.694525Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:27:19.694894Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1047:2834], Recipient [13:985:2786]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:19.694946Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:19.695035Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1046:2833], serverId# [13:1047:2834], sessionId# [0:0:0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD]
Test command err: 2025-04-10T11:26:47.673169Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:47.676793Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:47.677009Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0}
Temporary# false 2025-04-10T11:26:47.677602Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:26:47.678785Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:26:47.678835Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:47.679730Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-10T11:26:47.679771Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:47.679893Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:47.680227Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:47.680369Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-10T11:26:47.680403Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-10T11:26:47.680447Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:47.680501Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:47.680664Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:47.680788Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:26:47.697847Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:47.697906Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:47.700255Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.700424Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.700579Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.700718Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.700832Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.700968Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.701106Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:47.701130Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:47.701306Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:26:47.701361Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:47.701389Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:26:47.702184Z node 1 :PIPE_CLIENT 
DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-10T11:26:47.702225Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-10T11:26:47.722230Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:26:47.722307Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:47.722344Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:26:47.726133Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:26:47.726742Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:26:47.726922Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:26:47.728286Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:26:47.728456Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:26:47.728752Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:47.729138Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:47.729210Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-10T11:26:47.729267Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-10T11:26:47.729347Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-10T11:26:47.729402Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-10T11:26:47.729451Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:26:47.729608Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-10T11:26:47.729650Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-10T11:26:47.729711Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-10T11:26:47.729770Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:26:47.729832Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:26:47.729872Z node 1 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] queue send [1:51:2091] 2025-04-10T11:26:47.730251Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-10T11:26:47.730287Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-10T11:26:47.730367Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:26:47.730505Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:26:47.730581Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:26:47.730625Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-10T11:26:47.730739Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:26:47.730799Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-10T11:26:47.730855Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-10T11:26:47.733580Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:26:47.735153Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:26:47.735304Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:26:47.739437Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:26:47.739530Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:26:47.739563Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-10T11:26:47.739664Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:26:47.739779Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-10T11:26:47.739812Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-10T11:26:47.739986Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-10T11:26:47.740024Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-10T11:26:47.740091Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-10T11:26:47.740222Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-10T11:26:47.740252Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:26:47.740361Z node 1 :BS_NODE DEBUG: 
{NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-10T11:26:47.740479Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-10T11:26:47.740525Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-10T11:26:47.740861Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 32Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-04-10T11:27:26.000787Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-04-10T11:27:26.001005Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:340:2273] CurrentGeneration: 2 CurrentStep: 0} 2025-04-10T11:27:26.001098Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:340:2273] CurrentGeneration: 2 CurrentStep: 0} 2025-04-10T11:27:26.001241Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:340:2273] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:27:26.001301Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-04-10T11:27:26.001375Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [34:326:2265] 2025-04-10T11:27:26.001501Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 34 [35:548:2090] 2025-04-10T11:27:26.001647Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [35:548:2090] 2025-04-10T11:27:26.001733Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:548:2090] 2025-04-10T11:27:26.002023Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [35:548:2090] 2025-04-10T11:27:26.002339Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [35:548:2090] 2025-04-10T11:27:26.002409Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [35:548:2090] 2025-04-10T11:27:26.002515Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [35:548:2090] 2025-04-10T11:27:26.002542Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [35:548:2090] 2025-04-10T11:27:26.002606Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:548:2090] 2025-04-10T11:27:26.002737Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [35:547:2090] EventType# 271122945 2025-04-10T11:27:26.002879Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-10T11:27:26.002966Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-10T11:27:26.003229Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:27:26.003315Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:26.008080Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [35:554:2091] 2025-04-10T11:27:26.008130Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [35:554:2091] 2025-04-10T11:27:26.008176Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [35:555:2092] 2025-04-10T11:27:26.008210Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [35:555:2092] 2025-04-10T11:27:26.008486Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:26.010269Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [34:325:2264] 2025-04-10T11:27:26.010445Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [35:554:2091] 2025-04-10T11:27:26.010540Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [35:555:2092] 2025-04-10T11:27:26.012346Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:26.012621Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 34 [35:554:2091] 2025-04-10T11:27:26.013002Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [35:554:2091] 2025-04-10T11:27:26.013049Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:554:2091] 2025-04-10T11:27:26.013238Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:27:26.014598Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [35:554:2091] 2025-04-10T11:27:26.015133Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:27:26.015201Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:27:26.015239Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:27:26.015445Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [35:554:2091] 2025-04-10T11:27:26.015479Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [35:554:2091] 2025-04-10T11:27:26.015523Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [35:554:2091] 2025-04-10T11:27:26.015624Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:554:2091] 2025-04-10T11:27:26.022609Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:27:26.022802Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: 
{EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:27:26.022929Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:27:26.022969Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-10T11:27:26.023046Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [34:463:2365] 2025-04-10T11:27:26.023132Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 34 [35:555:2092] 2025-04-10T11:27:26.023350Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [35:551:2091] EventType# 268959744 2025-04-10T11:27:26.023631Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-10T11:27:26.023726Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:26.023961Z node 34 :HIVE WARN: HIVE#72057594037927937 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:26.024068Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T11:27:26.024142Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:26.024412Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-10T11:27:26.024492Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:26.024589Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:27:26.024676Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:26.024738Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [35:555:2092] 2025-04-10T11:27:26.024773Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [35:555:2092] 2025-04-10T11:27:26.024937Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [35:555:2092] 2025-04-10T11:27:26.025090Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [35:555:2092] 2025-04-10T11:27:26.025125Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [35:555:2092] 2025-04-10T11:27:26.025155Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [35:555:2092] 2025-04-10T11:27:26.025222Z node 35 :PIPE_CLIENT DEBUG: 
TClient[72075186224037888]::SendEvent [35:555:2092] 2025-04-10T11:27:26.025375Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [35:552:2092] EventType# 268959744 2025-04-10T11:27:26.025494Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-10T11:27:26.025537Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:26.025638Z node 34 :HIVE WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:26.025706Z node 34 :HIVE WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:26.025764Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T11:27:26.025831Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:26.025995Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-10T11:27:26.026034Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:26.026087Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:27:26.026126Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] Test command err: Trying to start YDB, gRPC: 12645, MsgBus: 61553 2025-04-10T11:26:02.300320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644293616609902:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:02.300655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001718/r3tmp/tmpaAmm5y/pdisk_1.dat 2025-04-10T11:26:02.673533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12645, node 1 2025-04-10T11:26:02.712548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:02.712685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:02.718773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:02.754033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:02.754087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:02.754097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:02.754259Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:61553 TClient is connected to server localhost:61553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:03.242663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:05.370031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644306501512456:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:05.370031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644306501512464:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:05.370141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:05.373919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:05.385140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644306501512470:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:05.466846Z node 1 :TX_PROXY ERROR: Actor# [1:7491644306501512521:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:05.771786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:05.993613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:05.993619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:05.993830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:05.994163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:05.994245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:05.994283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:05.994384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:05.994474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:05.994517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:05.994580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:05.994651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-10T11:26:05.994702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:05.994790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:05.994819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:05.995292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:05.995307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:05.995431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:05.995442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:05.995539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:05.995568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:05.995636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:05.995662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644306501512780:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:05.995730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:05.995821Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644306501512776:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:06.031815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644306501512832:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:06.031894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644306501512832:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:06.032148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.332512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.339059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.339428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.345522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.348596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.352317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.355302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.358947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.361959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.365786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.367528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.372100Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.372775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.379052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.379651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.385655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.385976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.392483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.392482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.398274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.398861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.403748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.404977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.409113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.411062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.414811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.416834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.419804Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.423201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.425185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.429790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.430711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.436267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.436557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.440969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.442621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.447613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.449470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.453795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.454836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.459449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.459545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.465121Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.465341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.471211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:09.628333Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq7ekmbvr4qwsmbfjyfn6v", SessionId: ydb://session/3?node_id=1&id=MjY3OTUxOTQtZjU2NDU4ZWUtMzQwMjdhYTAtNTFkMmE4NGM=, Slow query, duration: 29.383543s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:09.986103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:09.986718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644465415339185:7693];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:27:09.990973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:09.991981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63410, MsgBus: 7086 2025-04-10T11:27:19.760740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644622616994007:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:19.760809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e3/r3tmp/tmpTS6H1b/pdisk_1.dat 2025-04-10T11:27:20.322988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:20.327529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:20.327605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:20.331223Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63410, node 1 2025-04-10T11:27:20.564020Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:20.564052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:20.564064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:20.564200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7086 TClient is connected to server localhost:7086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:21.223088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:21.269046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:21.466022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:21.621773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:21.701650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:23.584447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644639796864793:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:23.584583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:23.868295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:23.897760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:23.938817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:23.967831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:23.994274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:24.064523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:24.114409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644644091832604:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:24.114480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:24.114592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644644091832609:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:24.118686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:24.131101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644644091832611:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:24.190013Z node 1 :TX_PROXY ERROR: Actor# [1:7491644644091832664:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:24.749720Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644622616994007:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:24.749789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:25.150538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:25.234311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:25.273730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-04-10T11:24:58.382684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644016742073899:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:24:58.383076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ed/r3tmp/tmp0wV1WW/pdisk_1.dat 2025-04-10T11:24:58.811117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:24:58.839600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:24:58.839719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:24:58.841550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28127 TServer::EnableGrpc on GrpcPort 21571, node 1 2025-04-10T11:24:59.110773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:24:59.110800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:24:59.110808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:24:59.110907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28127 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:24:59.613150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:24:59.929927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T11:25:01.430075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644029626976718:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:01.430076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644029626976703:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:01.430205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:01.430403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644029626976723:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:01.435319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-10T11:25:01.439194Z node 1 :TX_PROXY ERROR: Actor# [1:7491644029626976729:2455] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:25:01.444312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644029626976722:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:25:01.444313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644029626976726:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:25:01.500694Z node 1 :TX_PROXY ERROR: Actor# [1:7491644029626976773:2483] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:01.505509Z node 1 :TX_PROXY ERROR: Actor# [1:7491644029626976791:2491] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:02.665349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:25:03.119065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:25:03.382676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644016742073899:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:03.382773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:25:03.604208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:25:03.998283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:25:04.352689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:25:13.803657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:25:13.803695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:17.623346Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644614605691818:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:17.623398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0013ed/r3tmp/tmpOnilZx/pdisk_1.dat 2025-04-10T11:27:17.987497Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:18.020822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:18.020934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:18.023669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14365 TServer::EnableGrpc on GrpcPort 
17202, node 2 2025-04-10T11:27:18.411146Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:18.411174Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:18.411182Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:18.411348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14365 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:18.880027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:18.888318Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] Test command err: Trying to start YDB, gRPC: 31838, MsgBus: 12321 2025-04-10T11:26:41.928484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644459794812277:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:41.928882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001700/r3tmp/tmpI0YCNC/pdisk_1.dat 2025-04-10T11:26:42.309990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:42.310091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:42.311877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:42.328426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31838, node 1 2025-04-10T11:26:42.339603Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:42.383116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:42.383159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:42.383171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:42.383285Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12321 TClient is connected to server localhost:12321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:42.998378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:43.011793Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:45.041069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644476974681982:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:45.041209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:45.041529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644476974681994:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:45.045524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:45.056608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644476974681996:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:45.142696Z node 1 :TX_PROXY ERROR: Actor# [1:7491644476974682047:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:45.439271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.572692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.607734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.642088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.709438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.906278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:45.978397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.014258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.044940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.104940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.176209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.213985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.248886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.838881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:26:46.866680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.889280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:46.925450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644459794812277:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:46.925522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:46.931792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.003559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.049508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.082964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.114530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.154847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.186655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.262440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.361742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.409809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.477278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.529375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-04-10T11:26:47.594305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:26:47.627448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation pa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.602006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.606626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.613157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.613826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.619640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.622725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.625139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.630706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.634812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.636103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.640138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.643970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.645557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.649964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.656252Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.660537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.665860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.668211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.671795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.676160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.682441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.685757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.688041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.692205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038427;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.697987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.702019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.703405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.709128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.713654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.717952Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.724713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.726557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.731293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.732470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.738673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.738809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.746368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.748663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.753330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.755919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.767899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.769426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.774822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.779440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.782730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:17.950863Z 
node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq7q5d0kyjxfh8m778yjnc", SessionId: ydb://session/3?node_id=1&id=NjVlMWFhMjgtYjlmNjlkNjEtY2QwM2JjODUtMzk0NDE5M2Y=, Slow query, duration: 28.944459s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
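(For readability, the DDL embedded in the slow-query entry above is reproduced here with its \n escapes expanded. This is the same YQL quoted from the log, not new code; the two later slow-query entries in this report carry the identical statement text. The -- random_field comments are present in the original.)

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);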
2025-04-10T11:27:18.286928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:18.287263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:18.287326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491644562874050059:5517];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170;
2025-04-10T11:27:18.287725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationTpcdsMapJoinBug [GOOD]
Test command err:
Trying to start YDB, gRPC: 18976, MsgBus: 25358
2025-04-10T11:25:52.430898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644247745838847:2264];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.431028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001727/r3tmp/tmpRLnMNE/pdisk_1.dat
2025-04-10T11:25:52.774609Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:52.802675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.802831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 18976, node 1
2025-04-10T11:25:52.806396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25358
TClient is connected to server localhost:25358
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:53.601336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:55.322465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260630741198:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.322465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260630741190:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.322591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.325944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.333408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644260630741204:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:25:55.404155Z node 1 :TX_PROXY ERROR: Actor# [1:7491644260630741255:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:55.732695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.951128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:25:55.951385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:25:55.951521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:25:55.951574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:25:55.951678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:25:55.951730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:25:55.951796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:25:55.951830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:25:55.951896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:25:55.951955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:25:55.952020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:25:55.952064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.952139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.952163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.952289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.952291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.952427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.952458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.952575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.952590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.952703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.952711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.952832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644260630741516:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.952949Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644260630741557:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.989905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644260630741587:2362];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.989965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644260630741536:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.989988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74916442 ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.414962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.416464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.426764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.426768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.434016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.436831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.442657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.446614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.448930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.453824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.454888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.460699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:26:56.460847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.466784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.467019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.472946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.475175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.479451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.481131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.485862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.488139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.491945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.494516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.497266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.503317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.505724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.511646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.512575Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.517772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.518647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.524159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.525028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.530180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.531063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.536933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.542736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.545486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.552039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.552145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.558859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.559776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.566432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.566687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.573192Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:26:56.574874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:26:56.774636Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq73n3dzf0z033rnf9xq9j", SessionId: ydb://session/3?node_id=1&id=NzY1NjlhMjYtZTgwNzI1ZmMtYjUzZTljYjQtYmZiYmNjZDY=, Slow query, duration: 27.746468s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:26:57.300252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:26:57.300739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:26:57.301073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491644488264063685:10559];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933;
2025-04-10T11:26:57.301456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD]
Test command err:
Trying to start YDB, gRPC: 61058, MsgBus: 28319
2025-04-10T11:25:52.427629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644248424019853:2067];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:25:52.427700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001729/r3tmp/tmp7XzZnU/pdisk_1.dat
2025-04-10T11:25:52.776156Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61058, node 1
2025-04-10T11:25:52.823362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.823430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:52.824585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.932298Z node 1 :NET_CLASSIFIER WARN:
distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:25:52.932349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:25:52.932357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:25:52.932480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28319
TClient is connected to server localhost:28319
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:25:53.632856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:25:53.647625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:25:55.264985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261308922404:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.264986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644261308922396:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.265086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.268482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.277368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644261308922410:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:25:55.361788Z node 1 :TX_PROXY ERROR: Actor# [1:7491644261308922461:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:25:55.666972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:25:55.868081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:25:55.868081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:25:55.868252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:25:55.868259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:25:55.868582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:25:55.868597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:25:55.868718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:25:55.868755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:25:55.868834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:25:55.868879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:25:55.869000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:25:55.869001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.869136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.869453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.869476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.869610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.869636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.869795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.869816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.869972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.870006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.870141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644261308922737:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.870178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.870330Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491644261308922870:2363];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.903816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644261308922698:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.903901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644261308922698:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... ARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.243982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.246181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.248598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.250730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.253344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.255883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.258416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.261714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.264060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.267576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.269858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.274609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.275437Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.280493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.280910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.286161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.286576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.291445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.292047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.297013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.298638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.302119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.303776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.307147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.309702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.312201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.317123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.317151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:26:56.322117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.328774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.331509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.333926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.336752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.339068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.341692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.346686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.348308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.352131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.355425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.357020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.360743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.426384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.436513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.436813Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.499776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:56.532681Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq73b0b67mjvn2m97vnn8w", SessionId: ydb://session/3?node_id=1&id=MWU5NjRlYzQtY2MxZjNiYzQtNjQ4NzVhZWQtN2U2MDMyYg==, Slow query, duration: 27.828005s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:56.865387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:56.866001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:56.866463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:56.867028Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224038933;local_tx_no=11;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4874, MsgBus: 1809 2025-04-10T11:26:07.180970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644311496989498:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:07.191042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001714/r3tmp/tmpuS12Ht/pdisk_1.dat 2025-04-10T11:26:07.559602Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4874, node 1 2025-04-10T11:26:07.607855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:07.608368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:07.611804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:07.641152Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:07.641180Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-10T11:26:07.641191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:07.641321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1809 TClient is connected to server localhost:1809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:08.111808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:09.973968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644320086924755:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.973968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644320086924763:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.974060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:09.977815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:09.988176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644320086924769:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:10.072262Z node 1 :TX_PROXY ERROR: Actor# [1:7491644324381892116:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:10.361987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:10.572168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:10.572398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:10.572620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:10.572717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:10.572808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:10.572897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:10.572984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:10.573095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:10.573210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:10.573341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:10.573448Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:10.573525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644324381892376:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:10.593791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:10.593850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:10.594040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:10.594140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:10.594256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:10.594351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:10.594448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:10.594563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:10.594685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:10.594793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:10.594899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:10.594986Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491644324381892393:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:10.602798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644324381892397:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:10.602853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644324381892397:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:10.603015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id= ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.205873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.212506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.215210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.220231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.222500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.226648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.229530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.233843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.237866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.240295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.246684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.246800Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.253817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.255783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.260735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.262042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.268937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.268956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.276693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.277597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.283816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.283945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.290890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.290954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.297449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.300608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.306324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.308094Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.312811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.315119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.319608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.321271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.327379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.328290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.335158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.336738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.341913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.342982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.349198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.352096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.358471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.362441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.364900Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.369482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.371876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:14.623695Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq7mb9f6c9cdprvv4fca0f", SessionId: ydb://session/3?node_id=1&id=NDU5MjZmYzUtMzI3ZjhiOTQtNmVkMGU2NWUtZjk1NTE4OGI=, Slow query, duration: 28.501497s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:14.929311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:14.929726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:14.930451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644483295719088:7834];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:27:14.930827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2025-04-10T11:20:28.035260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:28.035688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:28.035772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000d9d/r3tmp/tmp2o2bQf/pdisk_1.dat 2025-04-10T11:20:28.478301Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27617, node 1 2025-04-10T11:20:28.776186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:28.776256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:28.776300Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:28.776855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:28.779864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:28.881207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:28.881372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:28.895464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19160 2025-04-10T11:20:29.465844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:33.040931Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:33.078378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.078549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.116085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:33.120400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.374798Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.375360Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.375946Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376092Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376334Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376415Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376491Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376568Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.376642Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:33.556938Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:33.557051Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:33.570494Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:33.715722Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:33.779904Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:33.780023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:33.814597Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:33.815086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:33.815301Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:33.815373Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:33.815427Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:33.815495Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:33.815550Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:33.815610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:33.816103Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:33.840191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1868:2593] 2025-04-10T11:20:33.842613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:33.884929Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:33.885011Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:33.885116Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:33.888732Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.888846Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1918:2621], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:33.909629Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1979:2648] 2025-04-10T11:20:33.910474Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1979:2648], schemeshard id = 72075186224037897 2025-04-10T11:20:33.936562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:33.944602Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:33.944809Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:34.178410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:34.347763Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:34.426389Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:35.616207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2245:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.616357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:35.639506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.868648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.868927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:35.869264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:35.869438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:35.869558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:35.869672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:35.869799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:35.869916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:35.870032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:35.870486Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:35.870662Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:35.870781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2359:2867];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:35.927936Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2362:2869];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.928052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2362:2869];tablet_id=72075186224037900;process= ... tTraversal 2025-04-10T11:26:01.360251Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:03.987387Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:03.987606Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:04.122703Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:04.122793Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:06.697441Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:06.697513Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:07.832651Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:09.210723Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:09.210794Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:10.359338Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:10.359523Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:11.708594Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:11.708665Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:14.188278Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:14.316373Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:14.316450Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:15.643590Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:26:15.643650Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:15.643679Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:15.643708Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:26:17.326605Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:17.326830Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:17.542943Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:17.543020Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:20.178036Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:20.178108Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:26:21.328495Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:22.774180Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:22.774260Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:23.908525Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:23.908749Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:25.300481Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:25.300558Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:27.805202Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:27.919446Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:27.919524Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:30.770798Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:30.771018Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:30.898520Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:30.898601Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:33.663013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:33.663094Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:34.926794Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:36.562873Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:36.562947Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:37.907082Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:37.907309Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:39.426252Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:39.426327Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:41.928539Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:42.034166Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:42.034253Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:44.796745Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:44.796980Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:44.908515Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:44.908577Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:47.342991Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:47.343080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:26:48.501898Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:49.710900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:49.710974Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:50.799683Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:50.799905Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:52.162832Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:52.162901Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:54.782961Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:26:54.906854Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:54.906928Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:26:56.249770Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-10T11:26:56.249851Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:56.249890Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-10T11:26:56.249926Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-10T11:26:57.954509Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:26:57.954738Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:26:58.106906Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:26:58.106982Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:00.796058Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:00.796131Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:02.046135Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:03.638930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:03.639011Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:04.900947Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:27:04.901142Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:27:06.452080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:06.452159Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:08.923162Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:09.055050Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:09.055134Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-10T11:27:11.996557Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:27:11.996822Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-10T11:27:12.155916Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:12.155998Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:13.270929Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-10T11:27:13.271016Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 196.000000s, at schemeshard: 72075186224037897 2025-04-10T11:27:13.271312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-10T11:27:13.305942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-10T11:27:14.782922Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:14.783019Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:16.046716Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:17.775039Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:17.775121Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-10T11:27:19.123368Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-10T11:27:19.123727Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done) 2025-04-10T11:27:19.124132Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:15533:9477]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:27:19.129989Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-10T11:27:19.130076Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:15533:9477], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 14430, MsgBus: 17161 2025-04-10T11:27:09.656194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644577872127875:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:09.668969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e7/r3tmp/tmpFTvst7/pdisk_1.dat 2025-04-10T11:27:10.176683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:10.179796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:10.179898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:10.187483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14430, node 1 2025-04-10T11:27:10.366847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:10.366876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:10.366883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:10.367004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17161 TClient is connected to server localhost:17161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:11.000040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:11.017979Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:27:11.033889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:11.207310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:11.374519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:11.447139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:13.398506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644595051998754:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:13.406316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:13.752261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:27:13.794917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:27:13.870444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:27:13.901894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:27:13.956141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:27:14.011878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:27:14.099472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644599346966568:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:14.099562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:14.099938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644599346966573:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:14.103566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:27:14.116530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644599346966575:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:14.182919Z node 1 :TX_PROXY ERROR: Actor# [1:7491644599346966630:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:14.658281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644577872127875:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:14.658343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:15.277800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:15.343133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:15.377742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:15.416197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5958, MsgBus: 14962 2025-04-10T11:27:17.367417Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644615306493523:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:17.384543Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e7/r3tmp/tmpnstZKh/pdisk_1.dat 2025-04-10T11:27:17.719291Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:17.767988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:17.768074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:17.775227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5958, node 2 2025-04-10T11:27:17.945151Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:17.945176Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:17.945182Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:17.945319Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14962 TClient is connected to server localhost:14962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:18.673654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:18.680775Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:18.685596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:18.771012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:27:18.958253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:19.079358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:21.290610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644632486364338:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:21.290691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:21.346136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.393842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.438795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.482516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.538194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.619699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:27:21.716878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644632486364862:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:21.716968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:21.717263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644632486364867:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:21.721421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:27:21.735108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644632486364869:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:21.813175Z node 2 :TX_PROXY ERROR: Actor# [2:7491644632486364924:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:22.346602Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644615306493523:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:22.346718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:22.869918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:22.923521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:22.961567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:23.001612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers |93.7%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:23:12.976965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:23:12.977089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.977144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:23:12.977191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:23:12.977251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:23:12.977310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:23:12.977383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:23:12.977522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:23:12.977978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:23:13.041129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:23:13.041181Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:13.052891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:23:13.053202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:23:13.053405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:23:13.061126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:23:13.061396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:23:13.061959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.062285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:23:13.068054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.076874Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:23:13.076932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.076989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:23:13.077139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.083517Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:23:13.212150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:23:13.212382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.212605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:23:13.212865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:23:13.212933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.215160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.215284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:23:13.215451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.215507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:23:13.215546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:23:13.215596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:23:13.217436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.217495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:23:13.217535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:23:13.219215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.219250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.219297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.219331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.231566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:23:13.233420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:23:13.233564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:23:13.234394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:23:13.234519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:23:13.234575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.234830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:23:13.234870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:23:13.235015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:23:13.235073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:23:13.236948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:23:13.236984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:23:13.237104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:23:13.237147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:23:13.237350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:23:13.237383Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:23:13.237467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:23:13.237491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.237524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-04-10T11:23:13.237550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.237578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:23:13.237608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:23:13.237635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:23:13.237661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:23:13.237714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:23:13.237746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:23:13.237785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:23:13.239315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.239393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:23:13.239421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:27:29.335619Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:27:29.335654Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:27:29.335690Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:209:2211], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T11:27:29.336164Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:27:29.336218Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:27:29.336419Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:27:29.336483Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:27:29.336546Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:27:29.336601Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:27:29.336675Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:27:29.336754Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:27:29.336827Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:27:29.336876Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:27:29.337088Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 
2025-04-10T11:27:29.337174Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:27:29.337254Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-10T11:27:29.337308Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:27:29.338595Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:27:29.338673Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:27:29.338711Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:27:29.338793Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:27:29.338874Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:27:29.339635Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:27:29.339715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:27:29.339749Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:27:29.339800Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:27:29.339836Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:27:29.339908Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:27:29.344042Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:27:29.347135Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:27:29.352563Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:27:29.352631Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:27:29.353188Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:27:29.353342Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:27:29.353415Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:516:2469] TestWaitNotification: OK eventTxId 102 2025-04-10T11:27:29.354103Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:27:29.354488Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 422us result status StatusSuccess 2025-04-10T11:27:29.354970Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:27:29.355677Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:27:29.355892Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 250us result status StatusSuccess 2025-04-10T11:27:29.356358Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } 
YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:27:29.823311Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:27:29.823679Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 363us result status StatusSuccess 2025-04-10T11:27:29.824305Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:27:30.013898Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> KqpFlipJoin::LeftSemi_2 >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> 
KqpJoin::FullOuterJoinNotNullJoinKey >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> KqpFlipJoin::RightOnly_1 >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12698, MsgBus: 16198 2025-04-10T11:27:25.226372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644649563717792:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:25.229448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e1/r3tmp/tmpquLwrm/pdisk_1.dat 2025-04-10T11:27:25.646187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:25.649695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:25.649818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:25.651569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12698, node 1 2025-04-10T11:27:25.762648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:25.762675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:25.762683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:25.762834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16198 TClient is connected to server localhost:16198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:26.416678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:26.447099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:26.616156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:26.794483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:26.880378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:27:28.572666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644662448621390:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:28.572758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:28.897177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:27:28.934191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:27:28.966741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:27:28.992227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:27:29.024710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:27:29.097984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:27:29.172835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644666743589203:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:29.172955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:29.173463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644666743589208:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:29.178413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:27:29.192090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644666743589210:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:29.285854Z node 1 :TX_PROXY ERROR: Actor# [1:7491644666743589267:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:30.159653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:30.191606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:30.222924Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644649563717792:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:30.222975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:30.231686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:30.270338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:30.300565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:30.332364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup >> KqpJoinOrder::TPCDS95-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH9 >> OlapEstimationRowsCorrectness::TPCH2 >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore >> KqpJoinOrder::ShuffleEliminationOneJoin >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal |93.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoin::FullOuterJoin >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] Test command err: 2025-04-10T11:25:50.438381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.438569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.438723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eab/r3tmp/tmpDvcQEn/pdisk_1.dat 2025-04-10T11:25:50.811140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.863627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.054548Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.055366Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.055700Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.055868Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.088971Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.089516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.089594Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.092449Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.092536Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.092583Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.093716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.093859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.093933Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.104505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.129020Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.131261Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.131515Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.131568Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.131627Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.131681Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.131905Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.131955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.133243Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.133362Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.133431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.133471Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.133529Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.133563Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.133596Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.133628Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.133695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.133812Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.133847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.133895Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.135030Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.135079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.135202Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.135585Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.135667Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.136417Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.136499Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.136543Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.136584Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.136635Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.137051Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.137087Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.137119Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.137154Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.137219Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.137250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.137303Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.137338Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.137381Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.138874Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.138919Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.149576Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.149662Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.149717Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.149769Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.149830Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.298445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298556Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.298954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.298993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.299135Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.299195Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.299237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.299287Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.303777Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.303852Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.305913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.305963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.306020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:31.783722Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:27:31.783793Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:27:31.783867Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-04-10T11:27:31.784232Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1063:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND 2025-04-10T11:27:31.784378Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 6, sender: [15:593:2518], selfId: [15:57:2104], source: [15:1162:2929] 2025-04-10T11:27:31.784522Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:31.784553Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:31.784595Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:27:31.784630Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:27:31.784661Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-04-10T11:27:31.784707Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:31.784815Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:31.785221Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-10T11:27:31.785272Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 3} after 
executionsCount# 2 2025-04-10T11:27:31.785316Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:31.785466Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} finished in read 2025-04-10T11:27:31.785535Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:27:31.785567Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:31.785595Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:31.785622Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:31.785662Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-10T11:27:31.785685Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:31.785708Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-10T11:27:31.785737Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:31.785792Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:27:31.785871Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:27:31.785943Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:27:31.795079Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=NWMxNzUxMTUtYjA2YzVlYzItZmUzYmEwNDktMzFlNGRlNjc=, workerId: [15:1162:2929], local sessions count: 0 2025-04-10T11:27:31.796819Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-10T11:27:31.797061Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:31.797210Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:31.797401Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-10T11:27:31.797499Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:31.797573Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:31.797646Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:31.797714Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-04-10T11:27:31.797786Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-10T11:27:31.797872Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-04-10T11:27:31.797908Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:31.797947Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:31.798131Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:31.798615Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-10T11:27:31.798717Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 4} after executionsCount# 1 2025-04-10T11:27:31.798830Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:31.799098Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 4} finished in read 2025-04-10T11:27:31.799216Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-10T11:27:31.799245Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:31.799272Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:31.799299Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:31.799352Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-10T11:27:31.799380Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:31.799414Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-04-10T11:27:31.799488Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:27:31.800613Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-10T11:27:31.800809Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:31.800936Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:31.801103Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-10T11:27:31.801180Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:31.801260Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:31.801311Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:31.801376Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2025-04-10T11:27:31.801445Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-10T11:27:31.801474Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:31.801498Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:31.801524Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:31.801672Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:31.802129Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-10T11:27:31.811173Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 5} after executionsCount# 1 2025-04-10T11:27:31.811310Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:31.811673Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 5} finished in read 2025-04-10T11:27:31.811806Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-10T11:27:31.811840Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:31.811872Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:31.811923Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:31.811991Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-10T11:27:31.812019Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:31.812066Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-04-10T11:27:31.812147Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-04-10T11:25:50.341477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.341770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.342010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eb7/r3tmp/tmp1eGiSS/pdisk_1.dat 2025-04-10T11:25:50.814391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.860807Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.055808Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.056775Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.057150Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.057367Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.090995Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.091614Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.091726Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.093308Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.093390Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.093434Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.093736Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.093842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.093936Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.104623Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.131833Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.132026Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.132150Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.132193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.132221Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.132251Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.132546Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.132586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.133283Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.133374Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.133423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.133457Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.133506Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.133538Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.133567Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.133597Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.133660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.133792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.133830Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.133872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.135085Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.135136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.135271Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.135505Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.135565Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.136387Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.136792Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.136833Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.136873Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.136906Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.137188Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.137229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.137271Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.137309Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.137383Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.137407Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.137439Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.137474Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.137492Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.138906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.138947Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.149543Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.149608Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.149641Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.149689Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.149736Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.298805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298909Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.299329Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.299391Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.299523Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.299582Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.299625Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.299669Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.315658Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.315744Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.316863Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.316916Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.316968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037888 has finished 2025-04-10T11:27:31.954638Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:31.954702Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:27:31.954767Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:27:31.954826Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:27:31.955109Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:31.955152Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:31.955205Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:27:31.955241Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:27:31.955298Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:31.955340Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-10T11:27:31.955372Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-10T11:27:31.955404Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:27:31.955431Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-10T11:27:31.955458Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-10T11:27:31.955487Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-10T11:27:31.955633Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-10T11:27:31.955680Z node 15 :TX_DATASHARD TRACE: 
Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:27:31.955708Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-10T11:27:31.955748Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:31.955777Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:27:31.955815Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-10T11:27:31.955857Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-10T11:27:31.955897Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-10T11:27:31.955940Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:27:31.955965Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:31.955990Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-10T11:27:31.956018Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-10T11:27:31.956119Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-10T11:27:31.956146Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-10T11:27:31.956185Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-10T11:27:31.956224Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-10T11:27:31.956253Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:27:31.956279Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-10T11:27:31.956303Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-10T11:27:31.956327Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:31.956469Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-10T11:27:31.956496Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-10T11:27:31.956530Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:27:31.956565Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:27:31.956596Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:27:31.956620Z node 15 :TX_DATASHARD TRACE: Advance execution plan 
for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:27:31.956645Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-10T11:27:31.956678Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:31.956711Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:31.956746Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:27:31.957537Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:27:31.983002Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-10T11:27:31.983154Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:27:31.983228Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:27:31.983337Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:31.983426Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:31.983790Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-10T11:27:31.983841Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:27:31.983869Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:31.983906Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:31.983941Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:27:31.985558Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-04-10T11:27:31.985756Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:31.985865Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:31.986034Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:31.986107Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:31.990239Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:31.990337Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:31.990396Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-10T11:27:31.990461Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-04-10T11:27:31.990492Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:31.990519Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:31.990547Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:31.990759Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:31.991259Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-10T11:27:31.991352Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 1} after executionsCount# 1 2025-04-10T11:27:31.991433Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:31.991787Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} finished in read 2025-04-10T11:27:31.991922Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:31.991955Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:31.991982Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:31.992009Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:31.992059Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:31.992084Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:31.992115Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-10T11:27:31.992180Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:27:31.992373Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21963, MsgBus: 26955 2025-04-10T11:26:49.462652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644491679967424:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:49.462694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f9/r3tmp/tmpl0Tasg/pdisk_1.dat 2025-04-10T11:26:49.863886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:49.863975Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21963, node 1 2025-04-10T11:26:49.868012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:49.868232Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:49.868367Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:49.973747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:49.973785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:49.973810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:49.973930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:26:49.977584Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26955 TClient is connected to server localhost:26955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:50.612074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:52.816187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644504564869906:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:52.816319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:52.816594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644504564869918:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:52.820713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:52.835550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644504564869920:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:52.900331Z node 1 :TX_PROXY ERROR: Actor# [1:7491644504564869971:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:53.213360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.338625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.378352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.411547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.448292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.627995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.700563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.733252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.771266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.796043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.828838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.858044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:26:53.889940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.470263Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644491679967424:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:54.470387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:26:54.587342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:26:54.689256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.725212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.764537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.794907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.831164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.909483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.960268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:26:54.996715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.022334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.047026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.075895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.160911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.198847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.230980Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.261003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:26:55.298679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.428935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.434616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.434729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.440997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.441037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.447263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.447263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.453531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.454037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.459878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.459896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.465973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.466087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.472036Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.472082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.478593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.478593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.484707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.484758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.490806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.490806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.497026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.497044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.503689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.503869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.510117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.510127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.515856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.515980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:27:25.522466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.522466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.528514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.528676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.534982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.535246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.540877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.541319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.546558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.547357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.552234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.552996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.558566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.559641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.565832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:25.570111Z node 1 :TX_COLUMNSHARD_TX WARN: 
2025-04-10T11:27:25.915320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:25.915397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:25.915898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491644616234046410:5955];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170;
2025-04-10T11:27:25.916314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> THiveTest::TestCreateTabletBeforeLocal [GOOD]
>> THiveTest::TestCreateTabletReboots
>> THiveTest::TestHiveBalancerHighUsage [GOOD]
>> THiveTest::TestHiveBalancerHighUsageAndColumnShards
>> KqpJoinOrder::TestJoinOrderHintsManyHintTrees
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD]
Test command err:
2025-04-10T11:23:26.892973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643620911092416:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:26.893117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:23:26.913393Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491643619735565943:2072];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:26.913441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:23:26.917359Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491643620400704775:2072];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:26.917439Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfc/r3tmp/tmpwnTEU3/pdisk_1.dat
2025-04-10T11:23:27.276489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:27.276612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:27.277085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:27.277160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:27.280529Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-10T11:23:27.280587Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3
2025-04-10T11:23:27.281547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:23:27.281876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:23:27.315055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:27.315164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:27.319603Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17619, node 1
2025-04-10T11:23:27.334609Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:23:27.334645Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:23:27.334951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:23:27.364806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:23:27.364836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:23:27.364843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:23:27.364969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24018
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:23:27.620543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:23:31.893131Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491643620911092416:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:31.893261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:23:31.913591Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491643619735565943:2072];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:31.913685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:23:31.917594Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491643620400704775:2072];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:23:31.917668Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Killing node 1
Killing node 2
2025-04-10T11:23:42.318947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:23:42.319002Z node 1 :IMPORT WARN: Table profiles were not loaded
Killing node 3
2025-04-10T11:23:49.833964Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491643719593758238:2211];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001cfc/r3tmp/tmpOBeqzV/pdisk_1.dat
2025-04-10T11:23:49.877768Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:23:50.009584Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:23:50.041866Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:23:50.041966Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:23:50.056720Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23814, node 5
2025-04-10T11:23:50.206738Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:23:50.206773Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:23:50.206783Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:23:50.206964Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6265
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:23:50.439422Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:6265
2025-04-10T11:23:50.802655Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Root" DiffACL: "\n\033\010\000\022\027\010\001\020\200\200\002\032\ralice@builtin \003" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480
2025-04-10T11:23:50.802832Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T11:23:50.803038Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 1] name: Root type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 1]
2025-04-10T11:23:50.803055Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 1]
2025-04-10T11:23:50.803232Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480
2025-04-10T11:23:50.803255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T11:23:50.803450Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1
2025-04-10T11:23:50.803475Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1
2025-04-10T11:23:50.803523Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1
2025-04-10T11:23:50.803553Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1
2025-04-10T11:23:50.803616Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0
2025-04-10T11:23:50.803704Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: false
2025-04-10T11:23:50.803740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480
2025-04-10T11:23:50.803765Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1
2025-04-10T11:23:50.803785Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0
2025-04-10T11:23:50.803805Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still ... channels: 1
2025-04-10T11:27:29.217176Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646926
2025-04-10T11:27:29.217183Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7491644666367411753:3429], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { }
2025-04-10T11:27:29.217241Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [50:7491644666367411753:3429],
2025-04-10T11:27:29.217361Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7491644666367411753 RawX2: 4503814375738725 } } DstEndpoint { ActorId { RawX1: 7491644666367411750 RawX2: 4503814375738438 } } InMemory: true }
2025-04-10T11:27:29.217576Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1
2025-04-10T11:27:29.217777Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [50:7491644666367411750:3142] TaskId: 1 Stats: CpuTimeUs: 566 Tasks { TaskId: 1 CpuTimeUs: 227 FinishTimeMs: 1744284449217 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 59 BuildCpuTimeUs: 168 HostName: "ghrun-x5vqckr7qy" NodeId: 50 CreateTimeMs: 1744284449216 } MaxMemoryUsage: 1048576
2025-04-10T11:27:29.217829Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922
2025-04-10T11:27:29.217883Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1]
2025-04-10T11:27:29.218177Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [50:7491644657777476321:3142], seqNo: 1, nRows: 1
2025-04-10T11:27:29.218381Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7491644666367411753:3429], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 566 Tasks { TaskId: 1 CpuTimeUs: 227 FinishTimeMs: 1744284449217 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 59 BuildCpuTimeUs: 168 HostName: "ghrun-x5vqckr7qy" NodeId: 50 CreateTimeMs: 1744284449216 } MaxMemoryUsage: 1048576 }
2025-04-10T11:27:29.218504Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [50:7491644666367411753:3429],
2025-04-10T11:27:29.218577Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, Forwarded TEvStreamData to [50:7491644657777476319:3141]
2025-04-10T11:27:29.219427Z node 50 :KQP_EXECUTER DEBUG: TxId: 281474976715674, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [50:7491644666367411754:3429]
2025-04-10T11:27:29.219523Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922
2025-04-10T11:27:29.219564Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. Tasks execution finished
2025-04-10T11:27:29.219591Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7491644666367411753:3429], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=. CustomerSuppliedId : . TraceId : 01jrfq8wgv13n563w0qs2whr51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished
2025-04-10T11:27:29.219721Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. pass away
2025-04-10T11:27:29.219850Z node 50 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED };
2025-04-10T11:27:29.220092Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7491644666367411753:3429], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1424 Tasks { TaskId: 1 CpuTimeUs: 230 FinishTimeMs: 1744284449219 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 62 BuildCpuTimeUs: 168 HostName: "ghrun-x5vqckr7qy" NodeId: 50 CreateTimeMs: 1744284449216 } MaxMemoryUsage: 1048576 }
2025-04-10T11:27:29.220159Z node 50 :KQP_EXECUTER INFO: TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [50:7491644666367411753:3429]
2025-04-10T11:27:29.220301Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution.
2025-04-10T11:27:29.220364Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7491644666367411750:3142] TxId: 281474976715674. Ctx: { TraceId: 01jrfq8wgv13n563w0qs2whr51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001424s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1
2025-04-10T11:27:29.220453Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS
2025-04-10T11:27:29.220836Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1032.722 QueriesCount: 1
2025-04-10T11:27:29.220917Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status
2025-04-10T11:27:29.221047Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-10T11:27:29.221093Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, EndCleanup, isFinal: 1
2025-04-10T11:27:29.221162Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: ExecuteState, TraceId: 01jrfq8wgv13n563w0qs2whr51, Sent query response back to proxy, proxyRequestId: 5, proxyId: [50:7491644584763028162:2225]
2025-04-10T11:27:29.221222Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: unknown state, TraceId: 01jrfq8wgv13n563w0qs2whr51, Cleanup temp tables: 0
2025-04-10T11:27:29.221979Z node 50 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284448000, txId: 18446744073709551615] shutting down
2025-04-10T11:27:29.222128Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=Yzc0Mjg5Y2EtNDMyODc0ZmEtODYwYjAxY2ItYzUxODkzMjU=, ActorId: [50:7491644657777476321:3142], ActorState: unknown state, TraceId: 01jrfq8wgv13n563w0qs2whr51, Session actor destroyed
RESULT: [[3u]]
---------------------
STATS:
total CPU: 1447
duration: 1020278 usec cpu: 776281 usec { name: "/Root/OlapStore/log1" reads { rows: 2 bytes: 16 } }
duration: 4757 usec cpu: 6669 usec
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
Test command err:
2025-04-10T11:25:52.007358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:25:52.007500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T11:25:52.007613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e7d/r3tmp/tmpxYJs4s/pdisk_1.dat
2025-04-10T11:25:52.314686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T11:25:52.353035Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:52.391440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:52.391547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:52.402681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:52.483861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:25:52.523669Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-04-10T11:25:52.524917Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-04-10T11:25:52.525473Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-10T11:25:52.525760Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:25:52.576047Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T11:25:52.576918Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:25:52.577041Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T11:25:52.579055Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-10T11:25:52.579175Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-10T11:25:52.579242Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-10T11:25:52.579669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T11:25:52.579813Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T11:25:52.579914Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-10T11:25:52.590743Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T11:25:52.616579Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-10T11:25:52.616840Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T11:25:52.617000Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-10T11:25:52.617062Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-10T11:25:52.617105Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-10T11:25:52.617148Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T11:25:52.617423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:52.617482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:52.617848Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-10T11:25:52.617956Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-10T11:25:52.618024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T11:25:52.618089Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:25:52.618181Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-10T11:25:52.618236Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T11:25:52.618276Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T11:25:52.618313Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-10T11:25:52.618380Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T11:25:52.618535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:52.618575Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:52.618625Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-10T11:25:52.619075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-04-10T11:25:52.619125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T11:25:52.619308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-10T11:25:52.619554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-10T11:25:52.619622Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-10T11:25:52.619720Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-10T11:25:52.619801Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-10T11:25:52.619851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-10T11:25:52.619894Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-10T11:25:52.619936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:25:52.620259Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-10T11:25:52.620300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-10T11:25:52.620354Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-10T11:25:52.620390Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:25:52.620451Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-10T11:25:52.620484Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-10T11:25:52.620523Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-10T11:25:52.620556Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:25:52.620585Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-10T11:25:52.622105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-10T11:25:52.622174Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T11:25:52.633444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-10T11:25:52.633534Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:25:52.633574Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:25:52.633633Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-10T11:25:52.633695Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
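The TX_DATASHARD TRACE lines above walk one scheme transaction (txId 281474976715657) through the datashard's execution-unit pipeline: CheckSchemeTx validates the proposal, StoreSchemeTx persists it, FinishPropose replies PREPARED to the proposer, and WaitForPlan parks the operation until a mediator plan step arrives. The sketch below is not YDB source code, just a minimal model of that unit sequence with names taken from the log, to make the repeated Trying/Advance/Add lines easier to follow:

#include <cstdio>

int main() {
    // Unit order observed in the trace for tx 281474976715657 at tablet
    // 72075186224037888; statuses are simplified to the happy path seen above.
    const char* units[] = {
        "CheckSchemeTx", // validate the proposed scheme tx -> ExecutedNoMoreRestarts
        "StoreSchemeTx", // persist the tx body -> DelayCompleteNoMoreRestarts
        "FinishPropose", // reply PREPARED to the proposer -> DelayComplete
        "WaitForPlan",   // park until TEvPlanStep assigns a step
        "PlanQueue",     // execute once planned (step 1000 in this test)
    };
    for (const char* unit : units)
        std::printf("advance to execution unit %s\n", unit);
    return 0;
}

As the next lines show, the TEvPlanStep for step 1000 is what moves the operation out of WaitForPlan and into PlanQueue.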
2025-04-10T11:25:52.800434Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:52.800520Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:52.800563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-10T11:25:52.800966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-10T11:25:52.801009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T11:25:52.801167Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:25:52.801237Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-04-10T11:25:52.801301Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-04-10T11:25:52.801335Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-04-10T11:25:52.805463Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-10T11:25:52.805535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T11:25:52.806714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:52.806770Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:52.806837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T11:25:5 ... :2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.010530Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.010888Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.011098Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.011156Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.011201Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.011560Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.011754Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.011812Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.011857Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.012222Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.012383Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.012438Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.012484Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.012844Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.013050Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.013113Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.013160Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.013548Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.013705Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.013759Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.013804Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.014143Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.014368Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.014423Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.014468Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.014808Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.014982Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.015032Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.015079Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.015426Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.015596Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.015648Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.015696Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.016061Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.016238Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.016294Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.016336Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.016684Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.016869Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.016923Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.016970Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.017332Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.017515Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.017565Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.017613Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.017960Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.018116Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.018191Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.018247Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.018594Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.018802Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.018852Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.018896Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.019240Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.019424Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.019482Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.019531Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.019857Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.020041Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:986:2787], Recipient [15:986:2787]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:34.020097Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0
2025-04-10T11:27:34.020144Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0
2025-04-10T11:27:34.020288Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-04-10T11:27:34.020384Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:593:2518], 1} finished in ReadContinue
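The ReadContinue run above is easier to follow once the arithmetic is explicit: the iterator starts with an effectively unlimited quota of 2^64 - 1 rows and bytes, and every full chunk it streams subtracts 99 rows and 6336 bytes (64 bytes per row) until a final 1-row chunk drains the range — 10000 rows in total, of which only the last chunks are visible here because of the elision earlier in the log. A standalone sketch of that bookkeeping, not YDB code, with the constants read off the log:

#include <cstdint>
#include <cstdio>

int main() {
    const uint64_t kChunkRows = 99;  // rows per full ReadContinue chunk in the log
    const uint64_t kRowBytes  = 64;  // 6336 bytes / 99 rows
    uint64_t rowsLeft  = UINT64_MAX; // "quota rows left" starts at 2^64 - 1
    uint64_t bytesLeft = UINT64_MAX; // "quota bytes left" likewise

    for (int chunk = 0; chunk < 101; ++chunk) { // 101 full chunks precede the tail
        rowsLeft  -= kChunkRows;
        bytesLeft -= kChunkRows * kRowBytes;
    }
    rowsLeft  -= 1;         // final chunk: rowCount# 1, bytes# 64,
    bytesLeft -= kRowBytes; // after which hasUnreadQueries# drops to 0

    // Prints the exact final values from the log:
    // quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615
    std::printf("quota rows left# %llu, quota bytes left# %llu\n",
                (unsigned long long)rowsLeft, (unsigned long long)bytesLeft);
    return 0;
}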
>> KqpJoinOrder::TPCDS92+ColumnStore
>> KqpJoin::JoinLeftPureExclusion
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD]
Test command err:
2025-04-10T11:25:50.386686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:25:50.386839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T11:25:50.386985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e9c/r3tmp/tmpa8Atkb/pdisk_1.dat
2025-04-10T11:25:50.811144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T11:25:50.862326Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:25:50.908714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:25:50.908829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:25:50.920030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:25:51.010248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:25:51.052140Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-04-10T11:25:51.053142Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-04-10T11:25:51.053604Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-10T11:25:51.053800Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:25:51.090815Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T11:25:51.091400Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:25:51.091495Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T11:25:51.093131Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-10T11:25:51.093205Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-10T11:25:51.093278Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-10T11:25:51.093720Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T11:25:51.093848Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T11:25:51.093964Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-10T11:25:51.104659Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T11:25:51.138460Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-10T11:25:51.138689Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T11:25:51.138859Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-10T11:25:51.138931Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-10T11:25:51.138976Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-10T11:25:51.139019Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T11:25:51.139323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:51.139384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T11:25:51.139826Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-10T11:25:51.139925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-10T11:25:51.140003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T11:25:51.140074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:25:51.140140Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-10T11:25:51.140182Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T11:25:51.140219Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T11:25:51.140272Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-10T11:25:51.140343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T11:25:51.140501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:51.140540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:51.140581Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-10T11:25:51.140977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-04-10T11:25:51.141038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T11:25:51.141160Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-10T11:25:51.141404Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-10T11:25:51.141474Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-10T11:25:51.141576Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-10T11:25:51.141640Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-10T11:25:51.141677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-10T11:25:51.141706Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-10T11:25:51.141744Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:25:51.141993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-10T11:25:51.142026Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-10T11:25:51.142061Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-10T11:25:51.142101Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:25:51.142159Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-10T11:25:51.142189Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-10T11:25:51.142227Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-10T11:25:51.142269Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:25:51.142294Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-10T11:25:51.143398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-10T11:25:51.143431Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T11:25:51.153976Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-10T11:25:51.154055Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:25:51.154087Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:25:51.154160Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-10T11:25:51.154209Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-10T11:25:51.302204Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:51.302250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:25:51.302282Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-10T11:25:51.302619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-10T11:25:51.302649Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T11:25:51.302786Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:25:51.302844Z node 1 :TX_DATASHARD TRACE: Execution status
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.302882Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.302935Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.306242Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.306298Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.307113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.307147Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.307194Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... planned 0 immediate 0 planned 0 2025-04-10T11:27:34.498462Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:27:34.498544Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:27:34.498619Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:27:34.499021Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:34.499075Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:34.499145Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:27:34.499198Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:27:34.499239Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:34.499306Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-10T11:27:34.499356Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-10T11:27:34.499429Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:34.499475Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-10T11:27:34.499520Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-10T11:27:34.499564Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-10T11:27:34.499781Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-10T11:27:34.499857Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:34.499895Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-10T11:27:34.499934Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:34.499976Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:27:34.500038Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-10T11:27:34.500099Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-10T11:27:34.500155Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-10T11:27:34.500217Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:34.500250Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:34.500283Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-10T11:27:34.500318Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-10T11:27:34.500460Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-10T11:27:34.500499Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-10T11:27:34.500559Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-10T11:27:34.500616Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-10T11:27:34.500650Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:34.500682Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-10T11:27:34.500714Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-10T11:27:34.500743Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:34.500931Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-10T11:27:34.500975Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-10T11:27:34.501028Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:27:34.501089Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:27:34.501132Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:34.501169Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:27:34.501222Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-10T11:27:34.501299Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:34.501368Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:34.501419Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:27:34.501467Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:27:34.522546Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-10T11:27:34.522774Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:27:34.522898Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:27:34.523060Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1088:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:34.523180Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:34.523760Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-10T11:27:34.523838Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:27:34.523900Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:34.523963Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1088:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:34.524015Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:27:34.526420Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-10T11:27:34.526674Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:34.526821Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:34.526997Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:34.527089Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:34.527183Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:34.527264Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:34.527362Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-10T11:27:34.527453Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:34.527495Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:34.527546Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:34.527579Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:34.527773Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:34.528341Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:27:34.528440Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-10T11:27:34.528540Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 3} after executionsCount# 1 2025-04-10T11:27:34.528655Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:34.528966Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} finished in read 2025-04-10T11:27:34.529101Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:34.529139Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:34.529174Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:34.529222Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:34.529285Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:34.529317Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:34.529367Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:27:34.529450Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:27:34.529701Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] >> KqpJoinOrder::TPCDS23+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18572, MsgBus: 23354 2025-04-10T11:25:52.735796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644247127202903:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:52.735847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001720/r3tmp/tmpUWAy26/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18572, node 1 2025-04-10T11:25:53.041520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:53.042228Z node 
1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:53.042288Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:25:53.071386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:53.071429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:53.071441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:53.071564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:25:53.124225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:53.124340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:53.125760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23354 TClient is connected to server localhost:23354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.636639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.283985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260012105328:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.283994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644260012105333:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.284113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.287367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:25:55.297687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644260012105342:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:25:55.374733Z node 1 :TX_PROXY ERROR: Actor# [1:7491644260012105393:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:55.639242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.897766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.897767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.897938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.898214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.898333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:25:55.898388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.898452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.898556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.898600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.898664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:25:55.898707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-10T11:25:55.898775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.898809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.898880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.898956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.898988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.899106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.899111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.899216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.899218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.899312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644260012105660:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.899322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.899494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.899590Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644260012105658:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.929989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644260012105654:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.930051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644260012105654:2350];tablet_id=72075186224037891;process=TTxInitSchema::Exe ... 76710714; 2025-04-10T11:26:57.592264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.595382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.598422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.601659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.604955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.607716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.610381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.613396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.619633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.622338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.631842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.635077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.641836Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.644835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.651118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.654461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.660435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.665375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.675479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.679402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.681087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.685158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.687156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.690613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.692327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.695768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.697342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.703359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.703375Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.718078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.720637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.725883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.729789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.730824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.735065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.738055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.739988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.743196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.817796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:57.900732Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq73k73hven2ked87w9819", SessionId: ydb://session/3?node_id=1&id=YTBlOTYwODEtYzAyMGJkNDItMjhjMzg2YzEtNmRhN2ZhZGQ=, Slow query, duration: 28.932798s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:58.577570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:58.577911Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:58.577975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644410335997147:7632];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:26:58.578679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:24.303649Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8cae1y9tfrrhhe7kk6h0", SessionId: ydb://session/3?node_id=1&id=YTBlOTYwODEtYzAyMGJkNDItMjhjMzg2YzEtNmRhN2ZhZGQ=, Slow query, duration: 13.632451s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 62182, MsgBus: 22565 2025-04-10T11:26:13.927779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644339755269684:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:13.928516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170e/r3tmp/tmph3BVwY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62182, node 1 2025-04-10T11:26:14.326594Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:14.334819Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:26:14.378893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:14.379008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:14.381681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:14.382878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-10T11:26:14.382910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:14.382926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:14.383045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22565 TClient is connected to server localhost:22565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:14.910959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:16.872406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644352640172090:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:16.872827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644352640172079:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:16.872926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:16.876065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:16.888534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644352640172093:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:16.966393Z node 1 :TX_PROXY ERROR: Actor# [1:7491644352640172146:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:17.274063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:17.486718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:17.486942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:17.487190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:17.487297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:17.487379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:17.487476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:17.487631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:17.487736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:17.487827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:17.487904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:17.488005Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:17.488097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644356935139680:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:17.489990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:17.490043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:17.490206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:17.490349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:17.490459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:17.490586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:17.490730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:17.490821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:17.490945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:17.491061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:17.491174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:17.491286Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491644356935139682:2359];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:17.522260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644356935139708:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:17.522344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644356935139708:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2 ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.444276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.449706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.449706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.455214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.455214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.459580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.460381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.463667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.465603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.467571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.470462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.471560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.475458Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.475792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.479443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.480483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.483106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.485308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.487439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.490985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.493393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.497398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.498536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.502857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.503525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.507325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.509441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.511769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.514761Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.516505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.518866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.522235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.523317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.527941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.528152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.532705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.533831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.538282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.538667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.543611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.544615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.549450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.552313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:24.555393Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:27:24.558790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:27:24.709565Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq7tb6d9ejfw6q4xh18yk4", SessionId: ydb://session/3?node_id=1&id=ODIxMTcyNmQtYzkwN2NjMzItZDQ5MTJmN2UtYTgyODY5ZDE=, Slow query, duration: 32.446641s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:27:25.055269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:25.055610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:27:25.055634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644515848964938:7639];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094;
2025-04-10T11:27:25.056600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> HttpRequest::Probe [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH4
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD]
Test command err: 2025-04-10T11:25:50.655595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
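For readability, the DDL carried inside the KQP_SLOW_LOG record above (the 32.4s query), reproduced verbatim with only its escaped newlines expanded; nothing here is added, and the minimal indentation is as captured:

    CREATE TABLE t1 (
     id1 Int32 NOT NULL,
     PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
     id2 Int64 NOT NULL,
     t1_id1 Int64 NOT NULL,
     -- random_field2 Int32
     PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
     id3 Int16 NOT NULL,
     -- random_field3 Int32
     PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

All three tables are column-store (STORE = COLUMN) with at least 240 partitions each, which is consistent with the long run of column-shard tablets reporting finished_tx for tx 281474976710714 above.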
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.655767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.655887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001e8d/r3tmp/tmpZJkf9w/pdisk_1.dat 2025-04-10T11:25:50.930572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.963982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:51.001496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:51.001623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:51.012740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.093995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.129998Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.131226Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.131636Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.131897Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.176935Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.177534Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.177621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.178892Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.178973Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.179008Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.179293Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.179379Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.179430Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.190009Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.205533Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.205704Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.205789Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.205822Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.205848Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.205870Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.206042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.206077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.206370Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.206446Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.206487Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.206513Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.206556Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.206592Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.206615Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.206635Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.206681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.206767Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.206792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.206820Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.207104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.207133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.207252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.207464Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.207508Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.207577Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.207623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.207656Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.207685Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.207707Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.207883Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.207909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.207940Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.207962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.208002Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.208020Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.208041Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.208060Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.208077Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.209050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.209083Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.219563Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.219636Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.219662Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.219694Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.219739Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.367097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.367150Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.367186Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.367538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.367580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.367707Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.367761Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.367806Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.367839Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.372110Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.372175Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.373112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.373156Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.373206Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... teState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-10T11:27:36.263688Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2025-04-10T11:27:36.263755Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:27:36.263795Z node 15 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:27:36.263854Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:36.265114Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1167:2934], Recipient [15:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:36.265206Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:27:36.265252Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1166:2933], serverId# [15:1167:2934], sessionId# [0:0:0] 2025-04-10T11:27:36.266182Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-04-10T11:27:36.266427Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:36.266529Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-10T11:27:36.266608Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-04-10T11:27:36.266718Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:36.266904Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:36.266987Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 
executing on unit CheckRead 2025-04-10T11:27:36.267064Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:36.267124Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:36.267201Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-10T11:27:36.267269Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:36.267305Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:36.267343Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:36.267377Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:36.267576Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-10T11:27:36.268116Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-04-10T11:27:36.268174Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-04-10T11:27:36.268273Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:27:36.292928Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1063:2855], Recipient [15:666:2570]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:27:36.293095Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:27:36.293184Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-04-10T11:27:36.293404Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:27:36.293679Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1156:2909], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:36.293820Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:27:36.293903Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-04-10T11:27:36.294437Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2909] TxId: 281474976715667. Ctx: { TraceId: 01jrfq950n1x4qh669s87f8ra1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZWU2NWE0MDYtZTlmNzllYjUtZWE5M2NjODctNzlhMmI5ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-10T11:27:36.294741Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2909] TxId: 281474976715667. 
Ctx: { TraceId: 01jrfq950n1x4qh669s87f8ra1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZWU2NWE0MDYtZTlmNzllYjUtZWE5M2NjODctNzlhMmI5ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:27:36.294866Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1156:2909] TxId: 281474976715667. Ctx: { TraceId: 01jrfq950n1x4qh669s87f8ra1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZWU2NWE0MDYtZTlmNzllYjUtZWE5M2NjODctNzlhMmI5ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:27:36.295087Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:36.295183Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:36.295469Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:36.296994Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:593:2518], selfId: [15:57:2104], source: [15:1134:2909] 2025-04-10T11:27:36.297484Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:27:36.297572Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-10T11:27:36.297651Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-04-10T11:27:36.297732Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:36.297979Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-10T11:27:36.300111Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-10T11:27:36.300225Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 1} after executionsCount# 2 2025-04-10T11:27:36.300340Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-04-10T11:27:36.300693Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:36.300774Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:36.300846Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:36.300910Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:36.300979Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:27:36.301005Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:36.301050Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished
2025-04-10T11:27:36.301124Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:27:36.301249Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-04-10T11:27:36.301332Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T11:27:36.301403Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T11:27:36.301883Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:36.302001Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 2
2025-04-10T11:27:36.302367Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 2
2025-04-10T11:27:36.302650Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2
2025-04-10T11:27:36.303088Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::TEvDataShard::TEvReadContinue
2025-04-10T11:27:36.303159Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 4
2025-04-10T11:27:36.303378Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 4
2025-04-10T11:27:36.303521Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4
2025-04-10T11:27:36.303660Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:593:2518], 1} finished in ReadContinue
2025-04-10T11:27:36.304869Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZWU2NWE0MDYtZTlmNzllYjUtZWE5M2NjODctNzlhMmI5ZDQ=, workerId: [15:1134:2909], local sessions count: 0
2025-04-10T11:27:36.305159Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1063:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND
>> KqpJoinOrder::CanonizedJoinOrderTPCH16
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD]
Test command err: 2025-04-10T11:20:26.491681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:20:26.492050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:20:26.492109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000e7d/r3tmp/tmphLPGIV/pdisk_1.dat 2025-04-10T11:20:27.025119Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5684, node 1 2025-04-10T11:20:27.605440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:20:27.605504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:20:27.605541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:20:27.606084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:20:27.620243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:20:27.709769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:27.709907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:27.732502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16643 2025-04-10T11:20:28.340023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:20:31.661666Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:20:31.708211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:31.708304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:31.737351Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:20:31.739502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.006862Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.007563Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008085Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008212Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008422Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008490Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008573Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008615Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.008664Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:20:32.185679Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:20:32.185838Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:20:32.200589Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:20:32.371118Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:20:32.419538Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:20:32.419647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:20:32.462948Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:20:32.463177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:20:32.463455Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:20:32.463519Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:20:32.463580Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:20:32.463640Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:20:32.463690Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:20:32.463758Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:20:32.464187Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:20:32.486805Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.486922Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:20:32.542727Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1884:2606] 2025-04-10T11:20:32.550598Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1921:2621] 2025-04-10T11:20:32.551484Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1921:2621], schemeshard id = 72075186224037897 2025-04-10T11:20:32.557056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:20:32.574827Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:20:32.574888Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:20:32.574965Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:20:32.590025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:20:32.602512Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:20:32.602682Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:20:32.796926Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:20:32.987234Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:20:33.074857Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:20:34.579043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:34.579192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:20:34.712782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-10T11:20:35.122138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.122601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:20:35.122953Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:20:35.123175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:20:35.123345Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:20:35.123511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:20:35.123725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:20:35.123891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:20:35.124029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:20:35.124203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:20:35.124353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:20:35.124481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:20:35.181339Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2390:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:20:35.181444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2390:2885];tablet_id=72075186224037900;process=T ... 985Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:27:33.448013Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:27:33.448043Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-10T11:27:34.878746Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-10T11:27:34.878843Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=tU460 2025-04-10T11:27:34.878895Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-10T11:27:35.634946Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-10T11:27:36.323383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-10T11:27:36.323769Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-10T11:27:36.323849Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:27:36.324844Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-10T11:27:36.348007Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-10T11:27:36.348537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-10T11:27:36.348624Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-10T11:27:36.349142Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-10T11:27:36.371896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-10T11:27:36.372157Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-10T11:27:36.373369Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16459:10264], server id = [2:16464:10269], tablet id = 72075186224037899, status = OK 2025-04-10T11:27:36.373512Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16459:10264], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.373764Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16460:10265], server id = [2:16465:10270], tablet id = 72075186224037900, status = OK 2025-04-10T11:27:36.373829Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16460:10265], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.375238Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16461:10266], server id = [2:16466:10271], tablet id = 72075186224037901, status = OK 2025-04-10T11:27:36.375312Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16461:10266], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.376168Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16462:10267], server id = [2:16467:10272], tablet id = 72075186224037902, status = OK 2025-04-10T11:27:36.376241Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16462:10267], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.376967Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16463:10268], server id = [2:16468:10273], tablet id = 72075186224037903, status = OK 2025-04-10T11:27:36.377032Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16463:10268], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.382905Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-10T11:27:36.383245Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-10T11:27:36.384199Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-10T11:27:36.384367Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16459:10264], server id = [2:16464:10269], tablet id = 72075186224037899 2025-04-10T11:27:36.384417Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.384778Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16460:10265], server id = [2:16465:10270], tablet id = 72075186224037900 2025-04-10T11:27:36.384812Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.385077Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-10T11:27:36.385227Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-10T11:27:36.385671Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16461:10266], server id = [2:16466:10271], tablet id = 72075186224037901 2025-04-10T11:27:36.385706Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.385790Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16474:10279], server id = [2:16477:10282], tablet id = 72075186224037904, status = OK 2025-04-10T11:27:36.385878Z node 2 :STATISTICS DEBUG: 
TEvStatisticsRequest send, client id = [2:16474:10279], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.399293Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16475:10280], server id = [2:16478:10283], tablet id = 72075186224037905, status = OK 2025-04-10T11:27:36.399416Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16475:10280], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.400294Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16462:10267], server id = [2:16467:10272], tablet id = 72075186224037902 2025-04-10T11:27:36.400331Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.400446Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16463:10268], server id = [2:16468:10273], tablet id = 72075186224037903 2025-04-10T11:27:36.400471Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.400991Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16476:10281], server id = [2:16479:10284], tablet id = 72075186224037906, status = OK 2025-04-10T11:27:36.401065Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16476:10281], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.401184Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16480:10285], server id = [2:16483:10288], tablet id = 72075186224037907, status = OK 2025-04-10T11:27:36.401249Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16480:10285], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.402624Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:16481:10286], server id = [2:16482:10287], tablet id = 72075186224037908, status = OK 2025-04-10T11:27:36.402690Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:16481:10286], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-10T11:27:36.402769Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-10T11:27:36.403491Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-10T11:27:36.404038Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16474:10279], server id = [2:16477:10282], tablet id = 72075186224037904 2025-04-10T11:27:36.404068Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.404499Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16475:10280], server id = [2:16478:10283], tablet id = 72075186224037905 2025-04-10T11:27:36.404527Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.404564Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-10T11:27:36.404763Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-10T11:27:36.405057Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-10T11:27:36.405117Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-10T11:27:36.405359Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-10T11:27:36.405582Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-10T11:27:36.405875Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:27:36.409162Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16476:10281], server id = [2:16479:10284], tablet id = 72075186224037906 2025-04-10T11:27:36.409213Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.409354Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16480:10285], server id = [2:16483:10288], tablet id = 72075186224037907 2025-04-10T11:27:36.409379Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.410095Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:27:36.410638Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:16481:10286], server id = [2:16482:10287], tablet id = 72075186224037908 2025-04-10T11:27:36.410670Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-10T11:27:36.445971Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjY2OGQzMGMtM2E3N2ViYWUtNTQyMjRlNzktNTljMjdmZDY=, TxId: 2025-04-10T11:27:36.446080Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjY2OGQzMGMtM2E3N2ViYWUtNTQyMjRlNzktNTljMjdmZDY=, TxId: 2025-04-10T11:27:36.447351Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-10T11:27:36.467854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-10T11:27:36.467956Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
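The statement that TQueryBase reports via RunDataQuery above, reflowed here for readability. In the captured log both list parameters appear as a bare `List`; the element types were most likely stripped along with their angle brackets during log capture, so the `Uint32`/`String` arguments below are an assumption, not something the log states:

    -- Reflowed from the RunDataQuery record above; layout changes only, except where noted.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>; -- element type assumed (stripped in the captured log)
    DECLARE $data AS List<String>;        -- element type assumed (stripped in the captured log)
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);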
Send TEvAnalyzeResponse, OperationId=tU460, ActorId=[1:4075:3313]
2025-04-10T11:27:36.472031Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:16525:9595]], StatType[ 2 ], StatRequestsCount[ 1 ]
2025-04-10T11:27:36.472350Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-04-10T11:27:36.472414Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ]
2025-04-10T11:27:36.477638Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-04-10T11:27:36.477714Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ]
2025-04-10T11:27:36.477778Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ]
2025-04-10T11:27:36.491212Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4'
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull
>> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD]
Test command err: 2025-04-10T11:25:50.384297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.384489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.384637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eb4/r3tmp/tmpMTlYYq/pdisk_1.dat 2025-04-10T11:25:50.813859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.862447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.058828Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.059910Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.060346Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.060588Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.093664Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.094247Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.094343Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.095596Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.095670Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.095711Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.096001Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.096100Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.096168Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.106835Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.133174Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.133415Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.133542Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.133578Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.133614Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.133653Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.133873Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.133925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.134274Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.134361Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.134456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.134496Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.134549Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.134581Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.134616Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.134652Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.134736Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.134845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.134879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.134922Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.135261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.135305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.135432Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.135637Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.135698Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.136539Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.136620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.136663Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.136696Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-10T11:25:51.136733Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.137036Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.137092Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.137126Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.137159Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.137207Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.137239Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.137292Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.137329Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.137376Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.138864Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.138922Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.149541Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.149613Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.149653Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.149762Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.149818Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.298686Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298742Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298779Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.299221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.299274Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.299419Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.299463Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.299501Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.299538Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.304107Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.304180Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.305909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.305955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.306009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... ed 0 immediate 0 planned 0 2025-04-10T11:27:37.481216Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:27:37.481284Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:27:37.481355Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:27:37.481605Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:37.481643Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:27:37.481698Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:27:37.481736Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:27:37.481776Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:37.481817Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-10T11:27:37.481860Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-10T11:27:37.481896Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:37.481931Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-10T11:27:37.481965Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-10T11:27:37.481999Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-10T11:27:37.482175Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-10T11:27:37.482238Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:37.482271Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-10T11:27:37.482302Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:37.482335Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:27:37.482383Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-10T11:27:37.482425Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-10T11:27:37.482465Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-10T11:27:37.482517Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:37.482543Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:37.482568Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-10T11:27:37.482595Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-10T11:27:37.482705Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-10T11:27:37.482739Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-10T11:27:37.482781Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-10T11:27:37.482818Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-10T11:27:37.482846Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:37.482875Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-10T11:27:37.482901Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-10T11:27:37.482928Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:37.483080Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-10T11:27:37.483115Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-10T11:27:37.483158Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:27:37.483196Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:27:37.483257Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-10T11:27:37.483290Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:27:37.483321Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-10T11:27:37.483360Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:27:37.483398Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:27:37.483438Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:27:37.483473Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:27:37.495256Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-10T11:27:37.495447Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:27:37.495530Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:27:37.495660Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1088:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:37.495755Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:27:37.496097Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-10T11:27:37.496134Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:27:37.496156Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:27:37.496192Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1088:2866], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:27:37.496232Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:27:37.497793Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-10T11:27:37.497989Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:27:37.498088Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-10T11:27:37.498247Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:37.498329Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:27:37.498395Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:27:37.498444Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:27:37.498482Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-10T11:27:37.498542Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:37.498587Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:27:37.498612Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:27:37.498633Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:27:37.498750Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-10T11:27:37.499171Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:27:37.499273Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-10T11:27:37.499359Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 10} after executionsCount# 1 2025-04-10T11:27:37.499440Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:27:37.570391Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 10} finished in read 2025-04-10T11:27:37.570536Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:37.570575Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:27:37.570609Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:27:37.570642Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:27:37.570689Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:27:37.570706Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:27:37.570742Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:27:37.570804Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:27:37.570991Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.8%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup >> OlapEstimationRowsCorrectness::TPCH10 >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> KqpJoin::FullOuterJoin [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug2 >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 30922, MsgBus: 29585 2025-04-10T11:27:33.614282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644680868912494:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.623839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d9/r3tmp/tmpRciNzT/pdisk_1.dat 2025-04-10T11:27:34.336216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.346944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.347027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.351169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30922, node 1 2025-04-10T11:27:34.543245Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.543266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.543276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.543397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29585 TClient is connected to server localhost:29585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.287610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.311898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:35.326489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:35.505901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:27:35.740727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.846068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.755034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644698048783324:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.755157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.036704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.071224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.107060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.155972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.232179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.280326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.339083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702343751135:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.339158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.339350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702343751140:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.343402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.355517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644702343751142:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.410742Z node 1 :TX_PROXY ERROR: Actor# [1:7491644702343751195:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.530271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644680868912494:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.530349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:39.465656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.509172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_ComplexKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 23634, MsgBus: 29357 2025-04-10T11:27:33.479115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644681773105561:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.479164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d5/r3tmp/tmpGdDEM2/pdisk_1.dat 2025-04-10T11:27:34.054818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.057650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.057728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.060118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23634, node 1 2025-04-10T11:27:34.296549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.296573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.296580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.296724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29357 TClient is connected to server localhost:29357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.009110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.026646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:35.045993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.213541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.451388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.553851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.560100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644698952976510:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.560227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.885917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.960058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.012502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.049532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.101329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.181843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.283286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644703247944331:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.283355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.283564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644703247944336:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.287572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.301692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644703247944338:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.383500Z node 1 :TX_PROXY ERROR: Actor# [1:7491644703247944393:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.479545Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644681773105561:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.479604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:39.296396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.338023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.383573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.418488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.454643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.493112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19179, MsgBus: 4329 2025-04-10T11:27:33.526628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644681310619558:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.526996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016dc/r3tmp/tmpiDijBy/pdisk_1.dat 2025-04-10T11:27:34.239072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.239166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.251349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:34.258404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19179, node 1 2025-04-10T11:27:34.533930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T11:27:34.533951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.533964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.534099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4329 TClient is connected to server localhost:4329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.278034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.334256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.563709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.766528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.918083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.727157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644698490490397:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.727257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.008977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.049504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.089400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.124373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.151459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.213980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.300053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702785458208:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.300151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.300266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702785458213:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.304095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.328744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644702785458215:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.408024Z node 1 :TX_PROXY ERROR: Actor# [1:7491644702785458270:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.494315Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644681310619558:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.494390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:39.586904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.631758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.661218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.735090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.777741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.813488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18734, MsgBus: 12769 2025-04-10T11:27:34.388295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644686504547613:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:34.388346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016cd/r3tmp/tmpndKtRJ/pdisk_1.dat 2025-04-10T11:27:35.100984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:35.141800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:35.150270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:35.159367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18734, node 1 2025-04-10T11:27:35.326692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-10T11:27:35.326713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:35.326727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:35.326840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12769 TClient is connected to server localhost:12769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:36.081644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:36.131175Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:36.152973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:36.289039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:36.491495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:36.613138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:38.437302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644703684418432:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.437431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.670187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.716750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.784952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.815308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.872120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.933910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.990464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644703684418948:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.990553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.990869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644703684418953:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.994105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:39.004965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644703684418955:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:39.082620Z node 1 :TX_PROXY ERROR: Actor# [1:7491644707979386304:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:39.370249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644686504547613:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:39.450554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:40.176470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.255962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.322793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore >> KqpJoin::JoinLeftPureExclusion [GOOD] >> KqpJoin::FullOuterJoin2 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61+ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureExclusion [GOOD] Test command err: Trying to start YDB, gRPC: 19906, MsgBus: 7847 2025-04-10T11:27:37.088050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644699144141416:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:37.088096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c6/r3tmp/tmpTTGcMy/pdisk_1.dat 2025-04-10T11:27:37.821551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:37.841349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:37.841451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:37.845725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19906, node 1 2025-04-10T11:27:38.008935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:38.008961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:38.008968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:38.009080Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:7847 TClient is connected to server localhost:7847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:38.889688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:38.942188Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:38.964372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.191970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.378283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.474723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.348160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644716324012377:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.348254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.754073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.801658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.857145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.896195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.953809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.014247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.088360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644699144141416:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:42.088422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:42.109216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644720618980185:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.109290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.109618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644720618980190:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.113719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:42.146956Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:27:42.151350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644720618980192:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:42.221423Z node 1 :TX_PROXY ERROR: Actor# [1:7491644720618980250:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 19417, MsgBus: 30916 2025-04-10T11:27:32.851648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644678316401875:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:32.861347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016df/r3tmp/tmpR2w6vs/pdisk_1.dat 2025-04-10T11:27:33.324011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:33.355548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:33.355674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:33.359744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19417, node 1 2025-04-10T11:27:33.466383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:33.466409Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:33.466421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:33.466553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30916 TClient is connected to server localhost:30916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:34.106553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:34.135597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:27:34.348264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:34.591924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:34.677947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:36.632028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644695496272702:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:36.632275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:36.991196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.022490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.056398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.099936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.134785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.173548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.246493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699791240511:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.246564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.248498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699791240516:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.252000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.263225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644699791240518:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:37.341755Z node 1 :TX_PROXY ERROR: Actor# [1:7491644699791240572:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:37.834267Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644678316401875:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:37.834336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:38.464436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.557229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10825, MsgBus: 26765 2025-04-10T11:27:40.175670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644710748336064:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:40.175836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016df/r3tmp/tmpRgoQro/pdisk_1.dat 2025-04-10T11:27:40.443337Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:40.444695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:40.444762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:40.447060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10825, node 2 2025-04-10T11:27:40.578766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:40.578794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:40.578802Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:40.578925Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26765 TClient is connected to server localhost:26765 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:41.270731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.283099Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:27:41.292457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.391639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:41.577871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.665767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:44.466398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644727928206863:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.466500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.510080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.549964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.587285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.625819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.675292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.745654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.834324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644727928207374:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.834432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.834527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644727928207380:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.838535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:44.852935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644727928207382:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:27:44.948760Z node 2 :TX_PROXY ERROR: Actor# [2:7491644727928207437:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:45.178507Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644710748336064:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.178586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:46.091691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.183690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63753, MsgBus: 4128 2025-04-10T11:26:21.438122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644372501552304:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:21.438220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001709/r3tmp/tmpGaJwQB/pdisk_1.dat 2025-04-10T11:26:21.829110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63753, node 1 2025-04-10T11:26:21.856098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:21.856322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:21.859665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:21.886181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:21.886202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:21.886209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:21.886319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4128 TClient is connected to server localhost:4128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:22.405215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:22.419374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:24.456488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644385386454869:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:24.456491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644385386454858:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:24.456583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:24.461364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:24.472037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644385386454872:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:24.551003Z node 1 :TX_PROXY ERROR: Actor# [1:7491644385386454925:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:24.880776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:25.126417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:25.126421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:25.126619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:25.126710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:25.127103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:25.127107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:25.127257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:25.127266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:25.127395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:25.127404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:25.127551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:26:25.127554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:25.127678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:25.127694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:25.127819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:25.127819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:25.127937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:25.127985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:25.128064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:25.128102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:25.128210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:25.128239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:25.128363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644389681422475:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:25.128898Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644389681422477:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:25.161846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644389681422495:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:25.161919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644389681422495:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstra ... 10714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.870563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.871844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.875168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.876447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.879925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.881083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.884511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.885920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.889188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.890897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.893701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.896164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.898584Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.901449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.903713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.907413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.909854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.913340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.916281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.918815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.921602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.924158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.927656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.929921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.933924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.935695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.940140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.941700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.946481Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.947591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.952681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.953572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.958060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.959409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.963802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.965073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.969816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.970864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.976064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.976639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.982217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.983080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.988030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:30.988598Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.052361Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq83f63m9e23q0ee62h9bz", SessionId: ydb://session/3?node_id=1&id=OWVjZTI5NzUtNTk5NzhiYWItNjYzMjVmMmEtZjNiMzMyZDk=, Slow query, duration: 29.444945s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:31.282214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:31.282214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:31.282775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
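The escaped query text inside the KQP_SLOW_LOG entry above decodes to the following YQL DDL (reproduced verbatim for readability; only the \n escapes and indentation are expanded):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);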
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
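The "Unapplied hint" warnings above echo the hint text verbatim. A minimal sketch of how such hints are attached to a query, assuming YDB's optimizer-hints pragma (the pragma name is an assumption here; the hint bodies are copied from the warnings):

    -- Sketch only: pragma name assumed, hint bodies taken from the warnings above.
    PRAGMA ydb.OptimizerHints =
        'JoinOrder( (Unused1 Unused2) (Unused3 Unused4) )
         Rows(Unused # 10e8)
         Rows(R T # 1)';

Hints that reference labels absent from the query (here the Unused* names) are presumably what the optimizer reports as "Unapplied hint" with code 4534, as in the warnings repeated below.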
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpJoinOrder::TPCDS90-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2025-04-10T11:26:38.762508Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.765835Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.766050Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.766797Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:153:2077] ControllerId# 72057594037932033 2025-04-10T11:26:38.766829Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.766906Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.767148Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.768873Z node 4 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.768989Z node 4 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.771012Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:159:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771181Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:160:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771323Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:161:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771512Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:162:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771661Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:163:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771791Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:164:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771907Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:165:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.771929Z node 4 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.772012Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [4:153:2077] 2025-04-10T11:26:38.772042Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [4:153:2077] 2025-04-10T11:26:38.772099Z node 4 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.772151Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.772676Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.772773Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.775311Z node 5 :BS_NODE 
DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.775424Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.776222Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:173:2077] ControllerId# 72057594037932033 2025-04-10T11:26:38.776264Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.776334Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.776507Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.778735Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.778788Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.780407Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:179:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.780544Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:180:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.780681Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:181:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.780835Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:182:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.781029Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:183:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.781179Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:184:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.781318Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:185:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.781355Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.781408Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:173:2077] 2025-04-10T11:26:38.781432Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:173:2077] 2025-04-10T11:26:38.781463Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.781506Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.782030Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.782112Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.784528Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 
ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.784636Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.785457Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:193:2077] ControllerId# 72057594037932033 2025-04-10T11:26:38.785489Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.785547Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.785766Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.787620Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.787661Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.789111Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:199:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789279Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:200:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789412Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:201:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789556Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:202:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789684Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:203:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789834Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:204:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789981Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:205:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.790016Z node 6 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.790165Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [6:193:2077] 2025-04-10T11:26:38.790195Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [6:193:2077] 2025-04-10T11:26:38.790235Z node 6 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.790295Z node 6 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.790670Z node 6 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.791612Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.794093Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.794279Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:26:38.794873Z node 1 
:BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:26:38.795927Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:26:38.795973Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.796773Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:214:2080] ControllerId# 72057594037932033 2025-04-10T11:26:38.796861Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.796933Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.797086Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.809166Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.809212Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.810806Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:213:2079] Create Queue# [1:222:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.811872Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:213:2079] Create Queue# [1:223:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.812121Z node 1 :BS_PROX ... 495Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037892 CurrentLeader: [61:1947:2265] CurrentLeaderTablet: [61:1953:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-04-10T11:27:46.453580Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [61:1947:2265] CurrentLeaderTablet: [61:1953:2268] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-04-10T11:27:46.453618Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2025-04-10T11:27:46.453666Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1947:2265] 2025-04-10T11:27:46.453778Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 61 [56:2076:2736] 2025-04-10T11:27:46.453944Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [56:2076:2736] 2025-04-10T11:27:46.454013Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [56:2076:2736] 2025-04-10T11:27:46.458498Z node 61 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [56:2076:2736] 2025-04-10T11:27:46.458934Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [56:2076:2736] 2025-04-10T11:27:46.458981Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [56:2076:2736] 2025-04-10T11:27:46.460243Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [56:2080:2738] 2025-04-10T11:27:46.460285Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [56:2080:2738] 2025-04-10T11:27:46.460366Z node 56 :TABLET_RESOLVER DEBUG: Handle 
TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.460426Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1295:2098] 2025-04-10T11:27:46.460519Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 61 [56:2080:2738] 2025-04-10T11:27:46.460619Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [56:2080:2738] 2025-04-10T11:27:46.460662Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [56:2080:2738] 2025-04-10T11:27:46.460869Z node 61 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [56:2080:2738] 2025-04-10T11:27:46.461208Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [56:2080:2738] 2025-04-10T11:27:46.461253Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [56:2080:2738] 2025-04-10T11:27:46.465737Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [56:2083:2740] 2025-04-10T11:27:46.465796Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2083:2740] 2025-04-10T11:27:46.465867Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.465921Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [60:1300:2099] 2025-04-10T11:27:46.466038Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 60 [56:2083:2740] 2025-04-10T11:27:46.466212Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2083:2740] 2025-04-10T11:27:46.466282Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2083:2740] 2025-04-10T11:27:46.466651Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [56:2083:2740] 2025-04-10T11:27:46.466694Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [56:2083:2740] 2025-04-10T11:27:46.466726Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2083:2740] 2025-04-10T11:27:46.466785Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2025-04-10T11:27:46.466942Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.467036Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:27:46.467210Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-04-10T11:27:46.467285Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-04-10T11:27:46.467342Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-04-10T11:27:46.467418Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1948:2266] CurrentLeaderTablet: [61:1955:2269] 
CurrentGeneration: 3 CurrentStep: 0} 2025-04-10T11:27:46.467536Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1948:2266] CurrentLeaderTablet: [61:1955:2269] CurrentGeneration: 3 CurrentStep: 0} 2025-04-10T11:27:46.467632Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [61:1948:2266] CurrentLeaderTablet: [61:1955:2269] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-04-10T11:27:46.467687Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-04-10T11:27:46.467740Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1948:2266] 2025-04-10T11:27:46.467865Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 61 [56:2083:2740] 2025-04-10T11:27:46.467993Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2083:2740] 2025-04-10T11:27:46.468032Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2083:2740] 2025-04-10T11:27:46.468269Z node 61 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [56:2083:2740] 2025-04-10T11:27:46.468635Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [56:2083:2740] 2025-04-10T11:27:46.468678Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [56:2083:2740] 2025-04-10T11:27:46.469794Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [56:2087:2742] 2025-04-10T11:27:46.469836Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [56:2087:2742] 2025-04-10T11:27:46.469893Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.469943Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1793:2193] 2025-04-10T11:27:46.470043Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 61 [56:2087:2742] 2025-04-10T11:27:46.474815Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [56:2087:2742] 2025-04-10T11:27:46.474877Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [56:2087:2742] 2025-04-10T11:27:46.475166Z node 61 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [56:2087:2742] 2025-04-10T11:27:46.475566Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [56:2087:2742] 2025-04-10T11:27:46.475614Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [56:2087:2742] 2025-04-10T11:27:46.476836Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [56:2090:2744] 2025-04-10T11:27:46.476888Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [56:2090:2744] 2025-04-10T11:27:46.476961Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.477036Z 
node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1796:2195] 2025-04-10T11:27:46.477156Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 61 [56:2090:2744] 2025-04-10T11:27:46.477298Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [56:2090:2744] 2025-04-10T11:27:46.477336Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [56:2090:2744] 2025-04-10T11:27:46.477556Z node 61 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [56:2090:2744] 2025-04-10T11:27:46.477955Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [56:2090:2744] 2025-04-10T11:27:46.477999Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [56:2090:2744] 2025-04-10T11:27:46.483446Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [56:2092:2745] 2025-04-10T11:27:46.483544Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [56:2092:2745] 2025-04-10T11:27:46.483694Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:46.483801Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [56:597:2274] 2025-04-10T11:27:46.483943Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [56:2092:2745] 2025-04-10T11:27:46.484058Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [56:2092:2745] 2025-04-10T11:27:46.484141Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [56:2092:2745] 2025-04-10T11:27:46.484234Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [56:2092:2745] 2025-04-10T11:27:46.484491Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [56:2092:2745] 2025-04-10T11:27:46.484762Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [56:2092:2745] 2025-04-10T11:27:46.484836Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [56:2092:2745] 2025-04-10T11:27:46.484895Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [56:2092:2745] 2025-04-10T11:27:46.485000Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [56:2092:2745] 2025-04-10T11:27:46.485076Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [56:2092:2745] 2025-04-10T11:27:46.485165Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [56:569:2269] EventType# 268697616 >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> KqpFlipJoin::LeftSemi_3 [GOOD] >> KqpFlipJoin::RightOnly_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3015, MsgBus: 32035 2025-04-10T11:27:33.575064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644680815594961:2096];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.575697Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d1/r3tmp/tmpXpcCeo/pdisk_1.dat 2025-04-10T11:27:34.090642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.090734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.091313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.095207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3015, node 1 2025-04-10T11:27:34.266613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.266633Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.266638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.266731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32035 TClient is connected to server localhost:32035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.005219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.017449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:35.032497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.270330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.437994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:35.522595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.504838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644697995465868:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.504970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.777395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.839915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.894248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.927050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:37.995545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.071069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.164295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702290433688:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.164371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.164626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702290433693:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.168624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.185959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644702290433695:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.268543Z node 1 :TX_PROXY ERROR: Actor# [1:7491644702290433750:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.575179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644680815594961:2096];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.599034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:39.372571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.407060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.475505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.530084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.595340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.632339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24443, MsgBus: 15273 2025-04-10T11:27:41.707233Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644716210716750:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d1/r3tmp/tmpmjsXSL/pdisk_1.dat 2025-04-10T11:27:41.782891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:27:41.880174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:41.892179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:41.892265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:41.899094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24443, node 2 2025-04-10T11:27:42.070603Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:42.070625Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:42.070631Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:42.070747Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15273 TClient is connected to server localhost:15273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:42.679263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.698606Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:42.716394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.876809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.057759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.149963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:45.838341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644733390587544:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.838447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.904136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.986761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.033982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.086756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.144436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.216304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.326569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644737685555355:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.326668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.327107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644737685555360:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.331597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:46.348016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644737685555362:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:46.425197Z node 2 :TX_PROXY ERROR: Actor# [2:7491644737685555417:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:46.694636Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644716210716750:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:46.694716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:47.597593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.643770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.689924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.781777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.867434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.926672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH20 >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> KqpJoin::JoinConvert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBoot [GOOD] Test command err: 2025-04-10T11:26:38.737520Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.743837Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.746001Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.746910Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:68:2073] ControllerId# 72057594037932033 2025-04-10T11:26:38.746952Z node 3 :BS_NODE DEBUG: 
{NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.748767Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.749162Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.752682Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.752752Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.758364Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:74:2077] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.758618Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:75:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.758756Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:76:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.758954Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:77:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.759089Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:78:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.759251Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:79:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.759398Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:80:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.759430Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.759544Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:68:2073] 2025-04-10T11:26:38.759582Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:68:2073] 2025-04-10T11:26:38.759639Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.759688Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.761162Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.762334Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.765265Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.766206Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:26:38.769136Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:26:38.772617Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:26:38.772709Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 
2025-04-10T11:26:38.773593Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:91:2077] ControllerId# 72057594037932033 2025-04-10T11:26:38.773654Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.773742Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:38.773959Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.787589Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.787645Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.789462Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:99:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789639Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:100:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789803Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:101:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.789934Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:102:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.790104Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:103:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.790299Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:104:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.790432Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:105:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.790472Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.790544Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:91:2077] 2025-04-10T11:26:38.790588Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2077] 2025-04-10T11:26:38.790635Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.790673Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.793538Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.793807Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:38.796443Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:38.796565Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:38.797435Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:114:2074] ControllerId# 72057594037932033 2025-04-10T11:26:38.797475Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:38.797538Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 
4294967295 2025-04-10T11:26:38.797719Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:38.799797Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:38.799840Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:38.801539Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:120:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.801686Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:121:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.801853Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:122:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.802001Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:123:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.803910Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:124:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.804142Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:125:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.804288Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:126:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:38.804315Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:38.804381Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:114:2074] 2025-04-10T11:26:38.804411Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:114:2074] 2025-04-10T11:26:38.804471Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:38.804514Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:38.806030Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:38.806975Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:26:38.807730Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:68:2073] 2025-04-10T11:26:38.807814Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:38.807853Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:26:38.808062Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:26:38.808375Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:114:2074] 2025-04-10T11:26:38.808440Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:38.808470Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:26:38.819534Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:26:38.819620Z node 3 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:26:38.826201Z node 
3 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:26:38.827404Z node 3 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:26:38.854478Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:91:2077] 2025-04-10T11:26:38. ... rd result local node, try to connect [28:317:2292] 2025-04-10T11:27:49.897772Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [28:317:2292] 2025-04-10T11:27:49.897852Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [28:317:2292] 2025-04-10T11:27:49.897981Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [28:317:2292] 2025-04-10T11:27:49.898017Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [28:317:2292] 2025-04-10T11:27:49.898048Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [28:317:2292] 2025-04-10T11:27:49.898097Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [28:283:2269] EventType# 268637702 2025-04-10T11:27:49.902354Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-10T11:27:49.902470Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:49.906039Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:27:49.906212Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:49.906629Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:27:49.906723Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:49.907190Z node 28 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005356640}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-10T11:27:49.907330Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-10T11:27:49.907437Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:49.918961Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] bootstrap ActorId# [28:320:2295] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:693:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:27:49.919111Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Id# [72057594037927937:2:4:0:0:693:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:27:49.919161Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] restore Id# [72057594037927937:2:4:0:0:693:0] optimisticReplicas# 1 
optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:27:49.919221Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG33 2025-04-10T11:27:49.919268Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG32 2025-04-10T11:27:49.919414Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:693:1] FDS# 693 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:27:49.923404Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:693:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85456 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:27:49.923534Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:27:49.923591Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:27:49.923724Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.639 sample PartId# [72057594037927937:2:4:0:0:693:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 4.661 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-04-10T11:27:49.923854Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:27:49.923990Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-10T11:27:49.924320Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:27:49.924423Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:27:49.924472Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:27:49.924508Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:27:49.924551Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:27:49.924605Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:27:49.924650Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:27:49.925020Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [28:324:2298] 2025-04-10T11:27:49.925080Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [28:324:2298] 2025-04-10T11:27:49.925198Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward 
tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:27:49.925284Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [28:276:2265] 2025-04-10T11:27:49.925361Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [28:324:2298] 2025-04-10T11:27:49.925426Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [28:324:2298] 2025-04-10T11:27:49.925494Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [28:324:2298] 2025-04-10T11:27:49.925575Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [28:324:2298] 2025-04-10T11:27:49.925733Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [28:324:2298] 2025-04-10T11:27:49.935928Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [28:324:2298] 2025-04-10T11:27:49.936093Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [28:324:2298] 2025-04-10T11:27:49.936166Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:324:2298] 2025-04-10T11:27:49.936261Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [28:324:2298] 2025-04-10T11:27:49.936327Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [28:324:2298] 2025-04-10T11:27:49.936533Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:323:2297] EventType# 268697624 2025-04-10T11:27:49.936814Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-10T11:27:49.936921Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:27:49.937176Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-10T11:27:49.937307Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:27:49.949220Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [28:327:2301] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:27:49.949413Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037927937:2:5:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:27:49.949508Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037927937:2:5:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:27:49.949607Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG33 2025-04-10T11:27:49.949680Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG32 2025-04-10T11:27:49.949927Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:92:1] 
FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:27:49.954895Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:27:49.955082Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:27:49.955200Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:27:49.955424Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.022 sample PartId# [72057594037927937:2:5:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 6.012 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-04-10T11:27:49.955666Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:27:49.955851Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 7452, MsgBus: 13016 2025-04-10T11:27:33.443881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644684222852353:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.443932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016de/r3tmp/tmpLKjGhX/pdisk_1.dat 2025-04-10T11:27:34.071493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.072633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.072733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.074366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7452, node 1 2025-04-10T11:27:34.230677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.230702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.230708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.230804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13016 TClient is connected to server localhost:13016 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.105259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.138498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.312942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.552604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.666211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.627017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701402723341:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.627110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.960078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.004301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.046744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.099776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.157922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.206042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.298487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644705697691154:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.298559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.302457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644705697691159:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.306685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.321319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644705697691161:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.391257Z node 1 :TX_PROXY ERROR: Actor# [1:7491644705697691217:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.450584Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644684222852353:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.450735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:39.484228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.548284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.602593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.644971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61568, MsgBus: 5396 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016de/r3tmp/tmphpRvOi/pdisk_1.dat 2025-04-10T11:27:42.109118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:27:42.185347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:42.189406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:42.189827Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:42.207619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61568, node 2 2025-04-10T11:27:42.398596Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:42.398620Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:42.398628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:42.398747Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5396 TClient is connected to server localhost:5396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:43.005920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.015977Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:43.033340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.123543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.310140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.393308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:46.343463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644736898286693:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.343551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.420258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.472798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.525338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.560979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.608595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.728090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.801230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644736898287209:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.801311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.801684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644736898287214:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.806615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:46.828317Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:27:46.828927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644736898287216:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:46.914666Z node 2 :TX_PROXY ERROR: Actor# [2:7491644736898287272:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:48.128078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.178633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.257918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.341537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_2 [GOOD] Test command err: Trying to start YDB, gRPC: 32729, MsgBus: 3186 2025-04-10T11:27:33.546240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644681513503191:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.552079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d6/r3tmp/tmpLTv6hn/pdisk_1.dat 2025-04-10T11:27:34.265212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.265319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.270270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.273075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32729, node 1 2025-04-10T11:27:34.470342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.470369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.470379Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.470481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3186 TClient is connected to server localhost:3186 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.311376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.335970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:35.357274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.595238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.796901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.914389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.803198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644698693374179:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.803335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.144425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.187984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.229338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.282017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.354589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.434242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.535347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702988342001:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.535481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.540056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644702988342006:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:38.542804Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644681513503191:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.542868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:38.546629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:38.577719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644702988342008:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:38.642477Z node 1 :TX_PROXY ERROR: Actor# [1:7491644702988342065:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:39.796409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.832069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.881794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.926527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7130, MsgBus: 13051 2025-04-10T11:27:41.661035Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644718412054453:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:41.661079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d6/r3tmp/tmpkLkCOx/pdisk_1.dat 2025-04-10T11:27:41.998656Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:42.036753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:42.036840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:42.038315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7130, node 2 2025-04-10T11:27:42.210784Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:42.210812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:42.210821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:42.210946Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13051 TClient is connected to server localhost:13051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:42.853387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.860464Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:42.873531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.960708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.144592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.234230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:46.143982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644739886892565:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.144100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.201791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.245943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.286349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.328951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.375317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.420278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.493973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644739886893075:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.494058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.494297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644739886893080:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.498756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:46.513247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644739886893082:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:46.581008Z node 2 :TX_PROXY ERROR: Actor# [2:7491644739886893134:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:46.661110Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644718412054453:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:46.661177Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:48.328356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.383131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.480662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.540639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin >> KqpJoinOrder::GeneralPrioritiesBug3 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpJoin::FullOuterJoin2 [GOOD] >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpJoinOrder::TPCDS94-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] Test command err: Trying to start YDB, gRPC: 14026, MsgBus: 2642 2025-04-10T11:27:43.123727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644727266253055:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:43.137925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b0/r3tmp/tmpjf7FTq/pdisk_1.dat 2025-04-10T11:27:43.781908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:43.794479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:43.794553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:43.809250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14026, node 1 2025-04-10T11:27:44.106108Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:44.106135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:44.109124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:44.109306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2642 TClient is connected to server localhost:2642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:45.080142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:45.117832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:45.284418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:45.590538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:45.693942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.546882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644744446123935:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:47.546991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:48.124452Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644727266253055:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:48.130233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:48.180336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.217486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.266703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.342663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.382904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.491592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.594625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644748741091761:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:48.594734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:48.595808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644748741091766:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:48.600907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:48.620153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644748741091768:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:48.691031Z node 1 :TX_PROXY ERROR: Actor# [1:7491644748741091823:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:49.866464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.919343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.985220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:50.029391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:50.078831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:56: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 30240, MsgBus: 7333 2025-04-10T11:27:45.777060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644732795258696:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.777513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ad/r3tmp/tmpYrVVqS/pdisk_1.dat 2025-04-10T11:27:46.508903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:46.543050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:46.543127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:46.571181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30240, node 1 2025-04-10T11:27:46.810952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:46.810972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:46.810979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:46.811089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7333 TClient is connected to server localhost:7333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:47.792874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.826914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:47.853407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:48.117050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:48.384240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:48.471037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:50.576391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644754270096815:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.576496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.761478Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644732795258696:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:50.761536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:51.176923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.262674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.307510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.350110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.387526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.447002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.514328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644758565064628:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.514448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.514718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644758565064634:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.518581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:51.537927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644758565064636:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:51.593049Z node 1 :TX_PROXY ERROR: Actor# [1:7491644758565064692:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:52.842311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.882786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.963676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.009799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.099556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.188712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin2 [GOOD] Test command err: Trying to start YDB, gRPC: 5067, MsgBus: 26723 2025-04-10T11:27:45.766353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644736102517712:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.766408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ae/r3tmp/tmpEmEUtD/pdisk_1.dat 2025-04-10T11:27:46.471884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:46.510196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:46.510273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:46.515428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5067, node 1 2025-04-10T11:27:46.763230Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:46.763249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:46.763256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:46.763347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26723 TClient is connected to 
server localhost:26723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:47.676105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.716644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:47.724893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.933102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:48.195344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:48.295918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:50.388370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644757577355730:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.388517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.764179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644736102517712:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:50.764490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:50.867945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:50.910928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:50.950330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.019434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.072151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.123657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.225254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644761872323549:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.225346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.226013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644761872323554:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.230874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:51.243224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644761872323556:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:51.351270Z node 1 :TX_PROXY ERROR: Actor# [1:7491644761872323612:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:52.670293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.712735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.793137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3392, MsgBus: 12500 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c4/r3tmp/tmpBUzk7L/pdisk_1.dat 2025-04-10T11:27:37.638918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:27:37.912135Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:37.926767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:37.926861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:37.928938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3392, node 1 2025-04-10T11:27:38.054637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:38.054661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:38.054672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:38.054773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12500 TClient is connected to server localhost:12500 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:38.841710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:38.874562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:38.886793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.108616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.432450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.562632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.643004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644719184920657:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.643120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.109549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.171309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.242614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.303623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.349452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.430970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.552257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644723479888480:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.552358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.552667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644723479888485:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:42.556981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:42.573421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644723479888487:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:42.637577Z node 1 :TX_PROXY ERROR: Actor# [1:7491644723479888545:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:43.941130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:43.983297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.030170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.083814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.131177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.180236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26297, MsgBus: 14605 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c4/r3tmp/tmpHp8COH/pdisk_1.dat 2025-04-10T11:27:46.641394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:27:46.641678Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:46.697652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:46.697731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:46.707083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26297, node 2 2025-04-10T11:27:46.917619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:46.917641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:46.917649Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:46.917756Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14605 TClient is connected to server localhost:14605 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:47.635463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.648806Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:27:47.665257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.754839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.925677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:48.037521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:50.988813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644757411604723:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.988931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.061070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.134371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.204751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.235326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.273583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.344454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.434344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644761706572534:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.434486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.434912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644761706572539:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.440033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:51.479512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644761706572541:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:27:51.581283Z node 2 :TX_PROXY ERROR: Actor# [2:7491644761706572601:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:52.805963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.890859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.957932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.025378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.079667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.118414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoin-ColumnStore >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH17 >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] >> KqpIndexLookupJoin::LeftOnly+StreamLookup >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpJoinOrder::TPCDS88-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 9763, MsgBus: 26515 2025-04-10T11:27:40.181350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644711884358684:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:40.189284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ba/r3tmp/tmp1uOMq2/pdisk_1.dat 2025-04-10T11:27:40.840056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:40.841179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:40.841305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:40.846733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9763, node 1 2025-04-10T11:27:41.022220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:41.022255Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-04-10T11:27:41.022263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:41.022390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26515 TClient is connected to server localhost:26515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:41.848663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.880232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:41.886697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.145538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.450325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.558299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:44.565767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644729064229641:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.570396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.941412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.986870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.059946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.140862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.184110Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644711884358684:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.184689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:45.186555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.229086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.292339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644733359197452:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.292434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.292706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644733359197457:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.297128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:45.311124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644733359197459:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:45.405938Z node 1 :TX_PROXY ERROR: Actor# [1:7491644733359197518:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:46.564306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.659665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.751840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.867399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.916382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.961768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9121, MsgBus: 13569 2025-04-10T11:27:49.373123Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644753560724043:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:49.412609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ba/r3tmp/tmpZ7WF12/pdisk_1.dat 2025-04-10T11:27:49.586764Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:49.615477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:49.615560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:49.619946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9121, node 2 2025-04-10T11:27:49.825780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:49.825804Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:49.825813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:49.825927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13569 TClient is connected to server localhost:13569 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:50.942259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.005577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.178409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.416522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.512243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:54.310569Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644753560724043:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:54.310637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:54.610101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644775035562124:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:54.610221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:54.672087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.768262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.815346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.895230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.940096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.999114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.087475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644779330529936:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:55.087576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:55.087960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644779330529941:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:55.091853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:55.107661Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:27:55.107925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644779330529943:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:55.208712Z node 2 :TX_PROXY ERROR: Actor# [2:7491644779330529998:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:56.491323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.534920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.578344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.673027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.740568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.800651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18954, MsgBus: 12035 2025-04-10T11:26:10.990012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644327131690907:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:10.990058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001710/r3tmp/tmpGUmQ6z/pdisk_1.dat 2025-04-10T11:26:11.338595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18954, node 1 2025-04-10T11:26:11.406745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:11.406840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:11.408615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:26:11.423273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:11.423314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:11.423321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:11.423441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12035 TClient is connected to server localhost:12035 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:11.916679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:14.064330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644344311560761:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:14.064348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644344311560753:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:14.064484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:14.068368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:14.077204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644344311560767:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:14.157520Z node 1 :TX_PROXY ERROR: Actor# [1:7491644344311560818:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:14.450949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:14.668194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:14.668195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:14.668426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:14.668457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:14.668744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:14.668836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:14.668875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:14.668972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:14.669016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:14.669092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:14.669141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:26:14.669189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:14.669278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:14.669334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:14.669397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:14.669447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:14.669539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:14.669595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:14.669663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:14.669715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:14.669774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:14.669816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:14.669900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644344311561081:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:14.669936Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491644344311561105:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:14.708492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644344311561083:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:14.708492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644344311561093:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:14.708552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:74916443 ... p:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.101016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.106067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.107985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.111157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.114044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.115117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.120141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.125136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.125371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.130918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.130939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.136308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:27:21.136329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.141294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.145591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.150415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.158229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.162392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.167417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.168387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.173112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.173904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.178779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.178890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.189093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.189438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.196556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.196814Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.205281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.206276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.210711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.211258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.218247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.218762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.224282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.224411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.229630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.229838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:21.428942Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq7qgd4a1g68afg33yy2gf", SessionId: ydb://session/3?node_id=1&id=ZTk3OGI3YmQtNzVjMjFiYzItYzg4MzhhODgtNmZlMWFmZDY=, Slow query, duration: 32.070291s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:21.975948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:21.975964Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:21.976313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644503225387220:7728];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:27:21.976716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:47.265026Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq92g6fz1z3dp45t4ge1vr", SessionId: ydb://session/3?node_id=1&id=ZTk3OGI3YmQtNzVjMjFiYzItYzg4MzhhODgtNmZlMWFmZDY=, Slow query, duration: 13.882099s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29306, MsgBus: 5090 2025-04-10T11:26:56.202206Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644524165755967:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:56.202246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f2/r3tmp/tmpIrTYOo/pdisk_1.dat 2025-04-10T11:26:56.764465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:56.764553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:56.770698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:56.783244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29306, 
node 1 2025-04-10T11:26:56.931812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:56.931843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:56.931851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:56.931974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5090 TClient is connected to server localhost:5090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:57.699005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:57.727897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:59.838810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644537050658522:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.838833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644537050658510:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.838950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.842942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:59.855643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644537050658524:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:59.917165Z node 1 :TX_PROXY ERROR: Actor# [1:7491644537050658575:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:00.318560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.434249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.476896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.515537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.549045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.694318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.734484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.769533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.850295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.897362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.929522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:00.965367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.037063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.207026Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644524165755967:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:01.207085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:01.658317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:01.695664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.773016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.814354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.855344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.894438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.941931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:01.977749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.028766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.068639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.110841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.149563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.188693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.228180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.272967Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.307647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:02.352687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... p:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.371277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.375367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.376758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.379340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.382172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.383357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.387994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.393874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.399200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.405046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.410578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.416001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.421648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.428011Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.428626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.433924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.433937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.439369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.439392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.444714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.444886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.450099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.450446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.455272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.455660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.460624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.460956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.465972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.466734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:27:31.471539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.472699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.477189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.478608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.482406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.483875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.487169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.488944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.491538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:31.561835Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq85t350tymgy5rw6yqrbh", SessionId: ydb://session/3?node_id=1&id=ZDdkZTNmZjMtMzQyYTIzMzgtNjQwZjA0MWItZDQyNzRkMDM=, Slow query, duration: 27.557842s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:32.014101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:32.014195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:32.014424Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038331;self_id=[1:7491644601475184955:4532];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:27:32.014730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:53.161040Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9ar4f02fpvsnm9y18yh6", SessionId: ydb://session/3?node_id=1&id=ZDdkZTNmZjMtMzQyYTIzMzgtNjQwZjA0MWItZDQyNzRkMDM=, Slow query, duration: 11.332165s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] Test command err: Trying to start YDB, gRPC: 25991, MsgBus: 21562 2025-04-10T11:26:28.545163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644401842412791:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:28.546250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001705/r3tmp/tmpKzVuZF/pdisk_1.dat 2025-04-10T11:26:29.041186Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:29.045033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:29.045385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:29.048501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25991, node 1 2025-04-10T11:26:29.134951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:29.134971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-10T11:26:29.134979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:29.135082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21562 TClient is connected to server localhost:21562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:29.733538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:31.690921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644414727315345:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:31.691086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:31.694285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644414727315357:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:31.698409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:31.713166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644414727315359:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:31.785977Z node 1 :TX_PROXY ERROR: Actor# [1:7491644414727315410:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:32.145035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:32.396531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:32.396557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:32.396727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:32.397049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:32.397189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:32.397338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:32.397453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:32.397482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:32.397567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:32.397674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:32.397680Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:32.397803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:32.397844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:32.397919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:32.397949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:32.398393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:32.398516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644419022282950:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:32.402426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:32.402561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:32.402664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:32.402769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:32.402868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:32.402961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:32.403055Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037890;self_id=[1:7491644419022283008:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:32.442205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644419022283049:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:32.442283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644419022283049:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:32.442542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;sel ... tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.485010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.490063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.495013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.499775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.504864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.509896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.514882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.519880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.524882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.530050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.535464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.540676Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.545858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.551920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.557777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.563059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.565996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.568506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.572326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.573913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.578689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.579581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.584491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.584895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.591109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.591131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.596935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.597468Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.601777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.603809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.608284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.609965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.615673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.617737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.623047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.624686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.630757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.631060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.637343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.642432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.649135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.652611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.659325Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.664650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.666674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.673763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:36.930989Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8a9q44fe1s5m14mb64xh", SessionId: ydb://session/3?node_id=1&id=N2I3NWNjNDQtYTQwOGMwZDctYzIzYWU2OTktZmEzOTU4NzM=, Slow query, duration: 28.331391s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:37.302638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:37.302654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:37.303513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoin::JoinConvert [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 3939, MsgBus: 63244 2025-04-10T11:27:52.534513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644765669949115:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:52.534555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a5/r3tmp/tmpK4OJMG/pdisk_1.dat 2025-04-10T11:27:53.252045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:53.263444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-10T11:27:53.263551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:53.266374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3939, node 1 2025-04-10T11:27:53.482455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:53.482478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:53.482486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:53.482596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63244 TClient is connected to server localhost:63244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:54.505452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:54.550417Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:54.573457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:27:54.768511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.981587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:55.082350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:57.542242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644765669949115:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:57.542557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:57.558715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644787144787150:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:57.558822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:58.248329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.310356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.384845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.453400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.558973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.656529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:58.751493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644791439754976:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:58.751565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:58.751741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644791439754981:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:58.755750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:27:58.779826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644791439754983:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:27:58.849119Z node 1 :TX_PROXY ERROR: Actor# [1:7491644791439755038:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:00.080728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.147656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.200032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.726352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> KqpJoin::HashJoinWithAsTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-04-10T11:25:50.463651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.463840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.463981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001eac/r3tmp/tmpbJAO2e/pdisk_1.dat 2025-04-10T11:25:50.810982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.863660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.052390Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.053290Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.053625Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.053802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.088879Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.089513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.089603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.092407Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.092496Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.092552Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.093719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.093852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.093920Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.104483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.133246Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.133429Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.133561Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.133613Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.133652Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.133687Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.133916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.133964Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.134262Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.134365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.134431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.134470Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.134520Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.134554Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.134587Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.134619Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.134687Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.134824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.134866Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.134908Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.135245Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.135294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.135446Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.135637Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.135686Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.136539Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.136623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.136661Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.136700Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
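The TX_DATASHARD TRACE records here show the datashard driving transaction 281474976715657 through a plan of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), where each unit returns a status (Executed, DelayCompleteNoMoreRestarts, DelayComplete, or "not ready" on WaitForPlan) that decides whether the plan advances or parks until an external event such as the later TEvPlanStep. As a rough illustration of that control flow — a minimal sketch, not YDB's actual classes; only the unit names and statuses are taken from the log, everything else is hypothetical — the pattern looks like this in C++:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Status values mirroring the ones visible in the trace records.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    // Advance a transaction through its unit plan, as the
    // "Trying to execute ... / Advance execution plan ..." lines do.
    void RunPlan(const std::vector<TUnit>& plan) {
        for (const auto& unit : plan) {
            std::cout << "Trying to execute on unit " << unit.Name << "\n";
            EStatus st = unit.Execute();
            if (st == EStatus::NotReady) {
                // Matches "is not ready to execute on unit WaitForPlan":
                // stop here and wait for an external event (e.g. TEvPlanStep).
                std::cout << "not ready on unit " << unit.Name << "\n";
                return;
            }
            // DelayComplete units get a second callback at commit time,
            // matching the later "Complete execution for ..." records.
            std::cout << "Advance execution plan past unit " << unit.Name << "\n";
        }
        std::cout << "Execution plan has finished\n";
    }

    int main() {
        RunPlan({
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        });
    }

Under this reading, the later TEvPlanStep record (step 1000) is the external event that re-queues the parked transaction, after which the plan resumes from PlanQueue, exactly as the subsequent trace lines show.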
2025-04-10T11:25:51.136736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.137021Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.137059Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.137095Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.137134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.137208Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.137240Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.137312Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.137358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.137449Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.138861Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.138911Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.149549Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.149615Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.149649Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.149691Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.149771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.298138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298213Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.298247Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.298525Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.298555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.298694Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.298755Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.298794Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.298835Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.310090Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.310179Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.311002Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.311039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.311074Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... ssTransaction::Execute at 72075186224037889 2025-04-10T11:28:01.030392Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:28:01.030440Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:28:01.030475Z node 16 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-10T11:28:01.030506Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-10T11:28:01.030535Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:28:01.030559Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-10T11:28:01.030586Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-10T11:28:01.030614Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-10T11:28:01.030802Z node 16 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-10T11:28:01.030881Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:28:01.030915Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-10T11:28:01.030948Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:28:01.030984Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:28:01.031038Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-10T11:28:01.031090Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-10T11:28:01.031143Z node 16 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-10T11:28:01.031204Z node 16 
:TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:28:01.031236Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:28:01.031264Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-10T11:28:01.031297Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-10T11:28:01.031385Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-10T11:28:01.031418Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-10T11:28:01.031457Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-10T11:28:01.031493Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-10T11:28:01.031518Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:28:01.031540Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-10T11:28:01.031563Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-10T11:28:01.031585Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:28:01.031709Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-10T11:28:01.031732Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-10T11:28:01.031768Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:28:01.031811Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:28:01.031848Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-10T11:28:01.031878Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:28:01.031909Z node 16 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-10T11:28:01.031952Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:28:01.031997Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-10T11:28:01.032040Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-10T11:28:01.032083Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-10T11:28:01.055178Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-10T11:28:01.055360Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:28:01.055453Z 
node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-10T11:28:01.055587Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [16:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:28:01.055720Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:28:01.056128Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-10T11:28:01.056188Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:28:01.056227Z node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:28:01.056276Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [16:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:28:01.056324Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:28:01.063137Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:593:2518], Recipient [16:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-04-10T11:28:01.063448Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:28:01.063612Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-10T11:28:01.063849Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:28:01.063943Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:28:01.064030Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:28:01.064100Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:28:01.064173Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-10T11:28:01.064270Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:28:01.064313Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:28:01.064346Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:28:01.064383Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:28:01.064600Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-04-10T11:28:01.064709Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-10T11:28:01.065071Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is 
Executed 2025-04-10T11:28:01.065104Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:28:01.065139Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:28:01.065188Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:28:01.065253Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:28:01.065282Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:28:01.065339Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-10T11:28:01.065419Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:28:01.065610Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:28:01.067183Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [16:1068:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-04-10T11:28:01.067442Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [16:1068:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-04-10T11:28:01.067849Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [16:666:2570], Recipient [16:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:28:01.067901Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:28:01.068024Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:28:01.068109Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:28:01.068186Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:28:01.068265Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:28:01.068336Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:28:01.068419Z node 16 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:28:01.068521Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> KqpJoinOrder::TPCDS96-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 32228, MsgBus: 12750 2025-04-10T11:27:55.111805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644777539676911:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:55.112011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00169a/r3tmp/tmpeRI5Jn/pdisk_1.dat 2025-04-10T11:27:55.891359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:55.894865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-10T11:27:55.894954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:55.901420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32228, node 1 2025-04-10T11:27:56.135626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:56.135651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:56.135658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:56.135774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12750 TClient is connected to server localhost:12750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:57.001582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.043348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:57.060002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.275901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.565902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.662219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:59.547845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644794719547642:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.547978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.893794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:59.944785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:59.993236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.067908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.110683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.111911Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644777539676911:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:00.112005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:00.150878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.248928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644799014515458:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:00.249009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:00.249086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644799014515463:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:00.253563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:00.267739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644799014515465:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:00.363526Z node 1 :TX_PROXY ERROR: Actor# [1:7491644799014515521:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:01.514770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.557467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TestJoinHint2+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 32411, MsgBus: 30626 2025-04-10T11:27:15.010714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644603200118060:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:15.030918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e4/r3tmp/tmpxocoxJ/pdisk_1.dat 2025-04-10T11:27:15.562465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:15.562572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:15.562766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:15.566722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32411, node 1 2025-04-10T11:27:15.706236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:15.706262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:15.706270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:15.706411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30626 TClient is connected to server localhost:30626 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:16.257642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:18.652358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644620379987901:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:18.652549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:18.653021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644620379987913:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:18.657951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:18.685692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644620379987915:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:18.771367Z node 1 :TX_PROXY ERROR: Actor# [1:7491644620379987966:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:19.085484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.249010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.292803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.323859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.357541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.508791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.545252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.622816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.657561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.731424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.773129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.812942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:27:19.857354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.013023Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644603200118060:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:20.013093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:20.652396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:20.737534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.778918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.818422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.854224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.892660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.926434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:20.965161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.007582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.051126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.140678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.179741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.221859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.257027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.294977Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.343718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.385187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:27:21.425375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.783868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.788728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.794068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.804203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.806839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.812685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.818608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.826999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.832802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.840121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.843082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.850336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.857231Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.864555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.869362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.879751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.884517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.896447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.896820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.910895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.915651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.916919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.921446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.922897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.927680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.928528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.934606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.939874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.942864Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.945863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.948761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.952612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.956128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.958562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.962360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.964612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.968115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.970457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.973700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.976137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.979392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.982175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.985776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:54.988591Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:55.053036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:55.150473Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8ra1191htwh6janrpgh5", SessionId: ydb://session/3?node_id=1&id=NThmNGVjNWYtNDIyY2I2MGUtODVjNjgwYzUtZDNhZjZlOTY=, Slow query, duration: 32.204605s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:55.452625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:55.453062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:55.453864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644646149797813:2781];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:27:55.455942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> KqpJoin::IdxLookupLeftPredicate >> KqpJoinOrder::DatetimeConstantFold-ColumnStore >> KqpJoin::RightSemiJoin_SecondaryIndex >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] Test command err: Trying to start YDB, gRPC: 1170, MsgBus: 29146 2025-04-10T11:26:34.589112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644428911174139:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:34.603493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001703/r3tmp/tmpvtlmi6/pdisk_1.dat 2025-04-10T11:26:34.942804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:34.943481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:34.943532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-10T11:26:34.947416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1170, node 1 2025-04-10T11:26:35.013703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:35.013727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:35.013734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:35.013835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29146 TClient is connected to server localhost:29146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:35.508421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:37.578258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644441796076543:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:37.578348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:37.578411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644441796076552:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:26:37.582574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:26:37.594380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644441796076557:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:37.680583Z node 1 :TX_PROXY ERROR: Actor# [1:7491644441796076610:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:38.067804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:38.276418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:38.276612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:38.276832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:38.276930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:38.277018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:38.277103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:38.277190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:38.277292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:38.277393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:38.277481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:38.277565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:38.277650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644446091044124:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:38.300550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:38.300604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:38.300749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:38.300859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:38.300944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:38.301031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:38.301118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:38.301202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:38.301330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:38.301432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:38.301523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:38.301608Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644446091044142:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:38.311485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644446091044144:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:38.311539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644446091044144:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:38.311706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_ ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.028641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.035694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.042513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.046858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.048359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.053086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.057724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.067523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.072489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.077557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.082610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.088255Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.093159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.098507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.099495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.104388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.109559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.110244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.115213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.120097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.126316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.128604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.132361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.136442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.143572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.146459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.154121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.159467Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.166533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.171463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.174088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.181337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.187978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.191777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.195009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.197915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.201020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.205744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.206662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.211570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.213290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.217734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.220128Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.274002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.276809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:45.385948Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8h8a93cdftckvq2r6d3r", SessionId: ydb://session/3?node_id=1&id=N2E1MjQwZDUtOTQxM2EzYTktY2YwMGZmNDgtNjNmNThhZjA=, Slow query, duration: 29.662896s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:45.675254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:45.675742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:45.676774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644622184741484:8171];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:27:45.677118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18867, MsgBus: 26143 2025-04-10T11:27:59.806316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644795234605082:2103];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:59.806422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00168f/r3tmp/tmpKIUl0p/pdisk_1.dat 2025-04-10T11:28:00.417363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:00.417469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:00.425516Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:00.427410Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18867, node 1 2025-04-10T11:28:00.562751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:00.563157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:00.563172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:00.563311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26143 TClient is connected to server localhost:26143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:01.248914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:01.297742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:01.469514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:01.722112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:01.888835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:04.105130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644816709443284:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:04.105258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:04.424420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.473555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.519469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.562872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.649559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.711662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:28:04.790749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644816709443797:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:04.790843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:04.791277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644816709443802:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:04.795467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:28:04.806914Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644795234605082:2103];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:04.806965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:28:04.810928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644816709443804:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:04.899161Z node 1 :TX_PROXY ERROR: Actor# [1:7491644816709443862:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:06.200164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.277046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.355638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.440657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.519026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.568990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCH9_100 >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> KqpJoin::HashJoinWithAsTable [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 17886, MsgBus: 13066 2025-04-10T11:27:54.380762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644772581832670:2113];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:54.380807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00169f/r3tmp/tmpBjhrdS/pdisk_1.dat 2025-04-10T11:27:55.128353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:55.140207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:55.140277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:55.147277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17886, node 1 2025-04-10T11:27:55.354650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:55.354671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:55.354677Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-04-10T11:27:55.354771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13066 TClient is connected to server localhost:13066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:56.327248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:56.346171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:56.363799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:56.603165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:56.811028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:56.912346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:59.382334Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644772581832670:2113];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:59.382423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:59.636464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644794056670895:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:59.636586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:00.172865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.263832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.317517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.396869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.483227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.586078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:28:00.686314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644798351638726:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:00.686435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:00.687151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644798351638731:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:00.691962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:28:00.715204Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T11:28:00.716202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644798351638733:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:00.801731Z node 1 :TX_PROXY ERROR: Actor# [1:7491644798351638788:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:02.438759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.540144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16805, MsgBus: 64853 2025-04-10T11:28:04.432717Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644814293106155:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:04.443640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00169f/r3tmp/tmpFOINqi/pdisk_1.dat 2025-04-10T11:28:04.635359Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:04.639736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:04.639837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:04.648086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16805, node 2 2025-04-10T11:28:04.866626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:04.866646Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:04.866653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:04.866773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64853 TClient is connected to server localhost:64853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:28:05.760505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:05.773766Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:05.781260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:05.945465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:06.204511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:06.308024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:09.047943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644835767944273:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:09.048041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:09.100519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.141157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.196341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.249453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.342923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.422650Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644814293106155:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:09.422691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:28:09.422854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:28:09.560748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644835767944790:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:09.560846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:09.561279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491644835767944795:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:28:09.565191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:28:09.581042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491644835767944797:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:09.645333Z node 2 :TX_PROXY ERROR: Actor# [2:7491644835767944851:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:11.073163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:11.188316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::HashJoinWithAsTable [GOOD] Test command err: Trying to start YDB, gRPC: 13951, MsgBus: 14950 2025-04-10T11:28:04.695360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644814268850496:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:04.696269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00168c/r3tmp/tmpnJ9Y3e/pdisk_1.dat 2025-04-10T11:28:05.350771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:05.350892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:05.383064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:05.390917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13951, node 1 2025-04-10T11:28:05.706604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:05.706628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:05.706641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:05.706748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14950 TClient is connected to server localhost:14950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:06.647831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:06.700072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:06.728602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:06.924472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:07.177859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:07.273520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:09.711242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644814268850496:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:09.711475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:09.766715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644835743688743:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:09.766843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.107175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.167758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.233959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.321559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.378634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.452811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.582592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644840038656570:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.582691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.582994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644840038656575:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.587603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:10.606373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644840038656577:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:10.678641Z node 1 :TX_PROXY ERROR: Actor# [1:7491644840038656631:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:12.057965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-ColumnStore >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore >> KqpJoin::IdxLookupLeftPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16669, MsgBus: 26517 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001687/r3tmp/tmpjJU2IG/pdisk_1.dat 2025-04-10T11:28:08.330332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:28:08.560137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:08.600671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:08.600745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:08.619222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16669, node 1 2025-04-10T11:28:08.950613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:08.950633Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:08.950639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:08.950746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26517 TClient is connected to server localhost:26517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:28:10.196909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.227672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:10.244642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.466529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.690497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.787584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:13.111774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644853343696266:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:13.111879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:13.675832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.723966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.758785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.841061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.901300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.978480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.086245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644857638664089:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.086339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.094288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644857638664094:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.100574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:14.116621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644857638664097:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:14.205961Z node 1 :TX_PROXY ERROR: Actor# [1:7491644857638664151:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:15.398238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.434135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.467113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.510523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.544481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.578529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 19671, MsgBus: 22794 2025-04-10T11:28:07.782495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644827104239755:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:07.782902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001683/r3tmp/tmpHYwgeW/pdisk_1.dat 2025-04-10T11:28:08.573541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:08.573620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:08.582605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:08.583770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19671, node 1 2025-04-10T11:28:08.847674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:08.847697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:08.847704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:08.847819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22794 TClient is connected to server localhost:22794 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:09.925003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:09.973280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.251651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.528046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:28:10.676537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:12.774333Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644827104239755:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:12.807442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:13.936651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644852874045163:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:13.936772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.527437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.679500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.761644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.823702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.875453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.980253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.117034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644861463980286:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:15.117119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:15.117593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644861463980291:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:15.122712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:15.143664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644861463980293:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:15.236507Z node 1 :TX_PROXY ERROR: Actor# [1:7491644861463980351:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:16.244282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.278830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.380547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 27951, MsgBus: 13371 2025-04-10T11:26:39.469342Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644452204618664:2112];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:39.469484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001701/r3tmp/tmpU69qvU/pdisk_1.dat 2025-04-10T11:26:39.946980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:39.950122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:39.950222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:39.953744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27951, node 1 2025-04-10T11:26:40.120915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:40.120937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:40.120944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:40.121055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13371 TClient is connected to server localhost:13371 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:40.858562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:42.999948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644465089521160:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:43.000061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:43.000537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644465089521172:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:43.004172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:43.016197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644469384488470:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:43.075409Z node 1 :TX_PROXY ERROR: Actor# [1:7491644469384488521:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:43.442100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:43.732596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:43.732603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:43.732775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:43.732881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:43.733250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:43.733264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:43.733395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:43.733427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:43.733511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:43.733552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:43.733666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:26:43.733668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:43.733806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:43.733810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:43.733936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:43.734058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:43.734190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:43.734333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:43.734338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:43.734451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644469384488755:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:43.734485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:43.734585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:43.734705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:43.734804Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7491644469384488761:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:43.772499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644469384488769:2359];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:43.772566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644469384488769:2359];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:43.772809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;sel ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.432889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.439467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.439696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.445916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.445962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.452965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.454967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.596357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.600644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.608443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.609557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.621097Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.631652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.631652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.636677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.641899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.654012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.655291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.664368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.668113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.674047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.674280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.680989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.680990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.687709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.693010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.695255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.701798Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.707743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.708177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.714329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.714329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.720010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.720010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.726745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.727149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.734977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.741962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.742879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.748683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.748685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.755188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.755570Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.765897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.766104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:27:57.961624Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8qq25bn5yf6591qdac3p", SessionId: ydb://session/3?node_id=1&id=ZjdhNDgzMC1lYmIzNTdlYS1mZjNlYjU3ZS05MGI4MTgwYg==, Slow query, duration: 35.622719s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:27:58.390467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:58.395983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:27:58.400412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644645478183810:7675];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:27:58.400858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug1 >> KqpJoinOrder::TPCDS92-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 14026, MsgBus: 22763 2025-04-10T11:28:08.514838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644833806661733:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:08.534804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001680/r3tmp/tmp8Uymzj/pdisk_1.dat 2025-04-10T11:28:09.370137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:09.423183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-10T11:28:09.423281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:09.428102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14026, node 1 2025-04-10T11:28:09.629903Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:09.629937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:09.629946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:09.630065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22763 TClient is connected to server localhost:22763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:10.873522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.910262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:11.180516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:11.470794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:11.605190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:28:13.518800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644833806661733:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:13.518907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:13.711803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644855281499975:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:13.711943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.211088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.260384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.299301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.372786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.407119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.465565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.570691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644859576467793:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.570779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.571353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644859576467798:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:14.575752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:14.595458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:28:14.595721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644859576467801:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:14.678306Z node 1 :TX_PROXY ERROR: Actor# [1:7491644859576467860:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:16.568286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.656909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.752882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.831971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.921250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:18.018265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
<main>: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
<main>: Warning: Execution, code: 1060
:3:53: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 26718, MsgBus: 22245 2025-04-10T11:27:33.618269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644684810835821:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.618741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d8/r3tmp/tmpzcoE1T/pdisk_1.dat 2025-04-10T11:27:34.319320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.319405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.320896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:34.325564Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26718, node 1 2025-04-10T11:27:34.505045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.505065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.505076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.505202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22245 TClient is connected to server localhost:22245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.316371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:27:35.346942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:37.522444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701990705542:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.526264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701990705530:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.526359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.526949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.538980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644701990705544:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.614049Z node 1 :TX_PROXY ERROR: Actor# [1:7491644701990705595:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:37.910388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.090241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.161550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.212530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.244574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.387483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.436688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.474844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.521870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.564240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.598186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.606943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644684810835821:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.607250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:38.648394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:27:38.726478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.546504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:39.588446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.634025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.727459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.769794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.810365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.853040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.894894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.934522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.016295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.065352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.157422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.207578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.279423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.326676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.364838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.407727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.847805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.852156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.852631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.858375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.858406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.867730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.872146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.877356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.883104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.888718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.890457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.895600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.898245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.901348Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.906709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.911658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.914101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.919621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.920879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.925453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.930096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.935440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.935984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.940934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.941193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.947904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.952940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.959057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.966054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.968547Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.978122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.979843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.989400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.992056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.001589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.003341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.007492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.013506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.018867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.021967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.024414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.029481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.031539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.063532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.114008Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.138442Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9azpcwvenychkjsm0r2v", SessionId: ydb://session/3?node_id=1&id=NjJjMmMwZTktZDkzNGZmOGMtYmJhMTEzZTUtZDI4ODYwOTI=, Slow query, duration: 34.063788s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:16.501566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:16.502037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:16.502712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491644822249815277:5766];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-10T11:28:16.503114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestStorageRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14527, MsgBus: 19057 2025-04-10T11:27:33.640011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644681991916867:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.689895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d2/r3tmp/tmprCnF31/pdisk_1.dat 2025-04-10T11:27:34.248076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.272758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.272831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.277321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14527, node 1 2025-04-10T11:27:34.494645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.494666Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.494672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.494776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19057 TClient is connected to server localhost:19057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.515281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.544143Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:37.691060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699171786579:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.691166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.694243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699171786591:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.699111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.713134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644699171786593:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.776774Z node 1 :TX_PROXY ERROR: Actor# [1:7491644699171786647:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.078429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.210438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.265570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.303990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.345432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.537309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.583259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.636048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.638797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644681991916867:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.639361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:38.687789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.758749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.810977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.850852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:27:38.927463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.875588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:39.914698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.956181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.990032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.068607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.153946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.190744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.225633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.273177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.319950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.387752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.427835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.481523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.523839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.565658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.598910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.680827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.965844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.971505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.977294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.979637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.988475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.991782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.997520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.001885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.013437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.018287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.023319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.027182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.035036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.041053Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.048044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.052002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.058017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.062069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.069122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.073658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.084602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.086690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.095409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.100636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.107244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.111997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.116811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.121827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.131526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.135099Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.141289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.145088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.154754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.159644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.165464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.168051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.176584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.190876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.196999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.199007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.203664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.208572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.209812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.215886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.217210Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.363924Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9b4667n8bh8v8zg01crb", SessionId: ydb://session/3?node_id=1&id=MTg2N2UwZGEtN2U1ZDNlZDAtYzZhYjhhZTktZjRlYjBiZjU=, Slow query, duration: 35.148937s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:17.709348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:17.709770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:17.710537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491644780776183887:4743];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:28:17.710932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 12595, MsgBus: 3905 2025-04-10T11:26:45.891868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644478612442954:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:45.899526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fb/r3tmp/tmpJEupfm/pdisk_1.dat 2025-04-10T11:26:46.307297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:46.310194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:46.310273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:46.314765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 12595, node 1 2025-04-10T11:26:46.429620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:46.429646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:46.429653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:46.429758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3905 TClient is connected to server localhost:3905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:47.025867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:47.043659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:49.166899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644495792312806:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:49.167035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644495792312818:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:49.167125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:49.171429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:49.183022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644495792312820:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:49.288102Z node 1 :TX_PROXY ERROR: Actor# [1:7491644495792312871:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:49.664496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:49.889126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:49.889334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:49.889609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:49.889730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:49.889832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:49.889932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:49.890026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:49.890116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:49.890498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:49.890623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:49.890721Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:49.890813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644495792313136:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:49.944597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:49.944662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:49.944906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:49.945009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:49.945111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:49.945205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:49.945312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:49.945413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:49.945877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:49.946018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:49.946290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:49.946399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644495792313144:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:49.954259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644495792313138:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:49.954324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644495792313138:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstra ... tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.164725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.171735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.174578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.178373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.184681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.184773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.195883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.199299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.209415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.210452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.216421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.221401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.232843Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.239047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.244704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.246700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.254661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.257268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.267827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.271011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.276536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.280616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.291190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.296956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.297258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.306783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.312855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.319459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:28:05.319790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.326387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.331461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.332382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.342497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.347183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.354543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.358888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.364881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.369451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.375649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.380648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.392544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.398815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.405411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.406547Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.422423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.515844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:05.646399Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq8y047ezkq1hgnpc4pqae", SessionId: ydb://session/3?node_id=1&id=NTU5MWI2M2EtMjNlZGJlNDEtNDFhMTMzOC1iNGI3M2RhZQ==, Slow query, duration: 36.873736s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:06.105585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:06.105986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:06.106741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TPQTest::TestStatusWithMultipleConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 27639, MsgBus: 31084 2025-04-10T11:27:40.435219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644713044698425:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:40.435259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b9/r3tmp/tmpbX1ccd/pdisk_1.dat 2025-04-10T11:27:41.193872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:41.195248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:41.195344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:41.204989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27639, node 1 2025-04-10T11:27:41.466662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-04-10T11:27:41.466687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:41.466696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:41.466804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31084 TClient is connected to server localhost:31084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:42.406373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:42.453794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:27:44.792189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644730224568070:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.798138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:44.798281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644730224568062:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.798443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:44.814736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644730224568076:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:27:44.894310Z node 1 :TX_PROXY ERROR: Actor# [1:7491644730224568127:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:45.264547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.438664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644713044698425:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.438735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:45.444840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.505179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.618024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.669543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.873407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.911166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:45.997407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.076843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.116169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.173339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.247596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 
2025-04-10T11:27:46.286837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.970777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:47.014860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.046505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.120988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.167790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.214685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.256509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.358959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.451663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.535309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.598835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.689531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.755744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.829651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.908660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:47.953409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:48.001063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.285136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.289094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.298572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.302876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.308074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.311814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.317824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.321269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.328290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.332576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.337772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.347361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.352427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.356882Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.361768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.365879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.375345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.379733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.384572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.389047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.393858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.397954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.403211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.407187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.408292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.413832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.421850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.423596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.433254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.436426Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.445869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.448036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.458432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.462003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.470783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.471921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.479346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.485394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.492592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038425;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.497872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.498715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.503758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.510180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.511442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.520792Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:28:24.717682Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9jjdex8vp9th2dkpmqy2", SessionId: ydb://session/3?node_id=1&id=M2E4YmM0ZGQtMzE1OGZjM2QtZjA0NmJjYTEtNWFjODQyMWM=, Slow query, duration: 34.880112s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:25.039395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; 2025-04-10T11:28:25.039850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; 2025-04-10T11:28:25.042644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644764584313339:3008];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:28:25.043090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17766, MsgBus: 12761 2025-04-10T11:26:55.619322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644519500414253:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:55.619381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f3/r3tmp/tmpmoRRGO/pdisk_1.dat 2025-04-10T11:26:56.155879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:56.157753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:56.157812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:56.167307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17766, node 1 2025-04-10T11:26:56.301609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:56.301634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:56.301646Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-10T11:26:56.301794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12761 TClient is connected to server localhost:12761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:56.995915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:57.023291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:59.027308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644536680284098:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.027432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.027771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644536680284110:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:59.032021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:59.046266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644536680284112:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:59.104706Z node 1 :TX_PROXY ERROR: Actor# [1:7491644536680284163:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:59.467244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:59.746614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:59.746861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:59.747159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:59.747290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:59.747400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:59.747446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:59.747622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:59.747664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:59.747736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:59.747788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:59.747832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-10T11:26:59.747903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:59.747929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:59.748253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:59.748398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:59.748520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:59.748676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:59.748803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644536680284445:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:59.754557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:59.754718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:59.754834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:59.754949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:59.755077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:59.755180Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491644536680284425:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:59.802435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644536680284453:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:59.802494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644536680284453:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.802891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.812843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.814838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.824639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.827171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.837241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.839025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.844979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.851102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.864125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.866579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.876289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.879062Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.889879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.891061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.901838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.904512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.917006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.927146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.932196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.950727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.972650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.972651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.983737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.993515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:16.998896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.013066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.029873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.057846Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.086751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.110782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.116675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.123656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.129606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.135711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.141722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.147853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.151039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.153259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.158339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.159744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.164971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.168386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.172950Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.393426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:17.470314Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq97x338fwmr1x5c1np073", SessionId: ydb://session/3?node_id=1&id=MzJlNmExYmMtMmYwYmFlNGEtZmZkZGNkMTEtN2QzMTg0NDc=, Slow query, duration: 38.554088s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:18.039712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:18.040178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:18.040665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644717068946925:7658];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:28:18.041098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestStatusWithMultipleConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-04-10T11:24:40.294507Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.294609Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-10T11:24:40.317262Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.342032Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config 
applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-10T11:24:40.343184Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-10T11:24:40.345797Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-10T11:24:40.348057Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-04-10T11:24:40.350140Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-04-10T11:24:40.357638Z node 1 :PERSQUEUE INFO: new Cookie owner1|645e62d0-721610c2-e9cc2e65-a09959c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-10T11:24:40.358268Z node 1 :PERSQUEUE INFO: new Cookie owner2|73ea717e-99799242-36997110-9de046c0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-10T11:24:40.374325Z node 1 :PERSQUEUE INFO: new Cookie owner1|8377012d-22b340ba-6468930a-ab9e9eaa_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-04-10T11:24:40.987743Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:40.987811Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-04-10T11:24:41.025715Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.026887Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 
2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-10T11:24:41.027646Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-10T11:24:41.030553Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-04-10T11:24:41.032588Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-04-10T11:24:41.034644Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-04-10T11:24:41.040732Z node 2 :PERSQUEUE INFO: new Cookie owner1|8ce06264-7dbb3289-d9509c5-edc8ca4e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-10T11:24:41.041267Z node 2 :PERSQUEUE INFO: new Cookie owner2|cd598c72-6d05b241-41903818-f54a4ad5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-10T11:24:41.061838Z node 2 :PERSQUEUE INFO: new Cookie owner1|8f068b0-2dd26419-3270d763-269df9f7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-04-10T11:24:41.408163Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.408235Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-04-10T11:24:41.425772Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.426655Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-10T11:24:41.427247Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-04-10T11:24:41.429807Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' 
partition 0 generation 2 [3:185:2198] 2025-04-10T11:24:41.431615Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-04-10T11:24:41.433553Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-04-10T11:24:41.439705Z node 3 :PERSQUEUE INFO: new Cookie owner1|91290478-ce43d090-8b76a990-7189f9c3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-10T11:24:41.440126Z node 3 :PERSQUEUE INFO: new Cookie owner2|fa9370b9-563f243d-a161cc3e-c939c389_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-04-10T11:24:41.455602Z node 3 :PERSQUEUE INFO: new Cookie owner1|4fab2341-c378130f-42bee36c-234d372a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-04-10T11:24:41.872879Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.872962Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-04-10T11:24:41.894879Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:24:41.895824Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-04-10T11:24:41.896467Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-04-10T11:24:41.899181Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-04-10T11:24:41.901023Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-04-10T11:24:41.902994Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-04-10T11:24:41.910006Z node 4 :PERSQUEUE INFO: new Cookie default|e50847e8-d29a45c7-9a0f6b6d-fe70b0ee_0 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner d ... R INFO: [72057594037927938][rt3.dc1--topic] pipe [134:394:2388] connected; active server actors: 1 2025-04-10T11:28:34.687336Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:399:2393] connected; active server actors: 1 2025-04-10T11:28:34.697983Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:404:2398] connected; active server actors: 1 2025-04-10T11:28:34.700858Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:409:2403] connected; active server actors: 1 2025-04-10T11:28:34.707366Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:414:2408] connected; active server actors: 1 2025-04-10T11:28:34.709894Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:419:2413] connected; active server actors: 1 2025-04-10T11:28:34.717568Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:424:2418] connected; active server actors: 1 2025-04-10T11:28:34.720216Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:429:2423] connected; active server actors: 1 2025-04-10T11:28:34.727328Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:434:2428] connected; active server actors: 1 2025-04-10T11:28:34.730011Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:439:2433] connected; active server actors: 1 2025-04-10T11:28:34.739555Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:444:2438] connected; active server actors: 1 2025-04-10T11:28:34.742390Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:449:2443] connected; active server actors: 1 2025-04-10T11:28:34.751090Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:454:2448] connected; active server actors: 1 2025-04-10T11:28:34.753844Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:459:2453] connected; active server actors: 1 2025-04-10T11:28:34.756532Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:464:2458] connected; active server actors: 1 2025-04-10T11:28:34.763296Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:469:2463] connected; active server actors: 1 2025-04-10T11:28:34.771448Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:474:2468] connected; active server actors: 1 2025-04-10T11:28:34.774212Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:479:2473] connected; active server actors: 1 2025-04-10T11:28:34.776714Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:484:2478] connected; active server actors: 1 2025-04-10T11:28:34.783458Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:489:2483] connected; active server actors: 1 2025-04-10T11:28:34.791439Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:494:2488] connected; active server actors: 1 2025-04-10T11:28:34.794194Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:499:2493] connected; active server actors: 1 2025-04-10T11:28:34.803538Z node 134 
:PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:504:2498] connected; active server actors: 1 2025-04-10T11:28:34.806381Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:509:2503] connected; active server actors: 1 2025-04-10T11:28:34.808797Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:514:2508] connected; active server actors: 1 2025-04-10T11:28:34.815401Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:519:2513] connected; active server actors: 1 2025-04-10T11:28:34.817961Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:524:2518] connected; active server actors: 1 2025-04-10T11:28:34.827520Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:529:2523] connected; active server actors: 1 2025-04-10T11:28:34.830292Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:534:2528] connected; active server actors: 1 2025-04-10T11:28:34.839551Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:539:2533] connected; active server actors: 1 2025-04-10T11:28:34.842371Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:544:2538] connected; active server actors: 1 2025-04-10T11:28:34.844856Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:549:2543] connected; active server actors: 1 2025-04-10T11:28:34.850428Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:554:2548] connected; active server actors: 1 2025-04-10T11:28:34.852965Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:559:2553] connected; active server actors: 1 2025-04-10T11:28:34.855378Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:564:2558] connected; active server actors: 1 2025-04-10T11:28:34.857689Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:569:2563] connected; active server actors: 1 2025-04-10T11:28:34.860034Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:574:2568] connected; active server actors: 1 2025-04-10T11:28:34.862249Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:579:2573] connected; active server actors: 1 2025-04-10T11:28:34.864479Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:584:2578] connected; active server actors: 1 2025-04-10T11:28:34.866633Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:589:2583] connected; active server actors: 1 2025-04-10T11:28:34.868779Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:594:2588] connected; active server actors: 1 2025-04-10T11:28:34.870851Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:599:2593] connected; active server actors: 1 2025-04-10T11:28:34.872905Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:604:2598] connected; active server actors: 1 2025-04-10T11:28:34.874989Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:609:2603] connected; active server actors: 1 2025-04-10T11:28:34.877078Z node 134 :PERSQUEUE_READ_BALANCER INFO: 
[72057594037927938][rt3.dc1--topic] pipe [134:614:2608] connected; active server actors: 1 2025-04-10T11:28:34.879336Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:619:2613] connected; active server actors: 1 2025-04-10T11:28:34.881370Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:624:2618] connected; active server actors: 1 2025-04-10T11:28:34.883328Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:629:2623] connected; active server actors: 1 2025-04-10T11:28:34.885435Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:634:2628] connected; active server actors: 1 2025-04-10T11:28:34.887493Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:639:2633] connected; active server actors: 1 2025-04-10T11:28:34.889555Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:644:2638] connected; active server actors: 1 2025-04-10T11:28:34.891574Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:649:2643] connected; active server actors: 1 2025-04-10T11:28:34.893606Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:654:2648] connected; active server actors: 1 2025-04-10T11:28:34.895735Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:659:2653] connected; active server actors: 1 2025-04-10T11:28:34.897860Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:664:2658] connected; active server actors: 1 2025-04-10T11:28:34.899846Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:669:2663] connected; active server actors: 1 2025-04-10T11:28:34.901878Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:674:2668] connected; active server actors: 1 2025-04-10T11:28:34.903862Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:679:2673] connected; active server actors: 1 2025-04-10T11:28:34.905785Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:684:2678] connected; active server actors: 1 2025-04-10T11:28:34.907818Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:689:2683] connected; active server actors: 1 2025-04-10T11:28:34.909799Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:694:2688] connected; active server actors: 1 2025-04-10T11:28:34.911919Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:699:2693] connected; active server actors: 1 2025-04-10T11:28:34.916211Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:704:2698] connected; active server actors: 1 2025-04-10T11:28:34.918474Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:709:2703] connected; active server actors: 1 2025-04-10T11:28:34.920698Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:714:2708] connected; active server actors: 1 2025-04-10T11:28:34.922803Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:719:2713] connected; active server actors: 1 2025-04-10T11:28:34.924857Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:724:2718] 
connected; active server actors: 1 2025-04-10T11:28:34.926826Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:729:2723] connected; active server actors: 1 2025-04-10T11:28:34.928895Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:734:2728] connected; active server actors: 1 2025-04-10T11:28:34.930935Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:739:2733] connected; active server actors: 1 2025-04-10T11:28:34.932965Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:744:2738] connected; active server actors: 1 2025-04-10T11:28:34.934954Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:749:2743] connected; active server actors: 1 2025-04-10T11:28:34.937244Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:754:2748], now have 1 active actors on pipe 2025-04-10T11:28:34.938507Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:757:2751], now have 1 active actors on pipe 2025-04-10T11:28:34.939687Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:760:2754], now have 1 active actors on pipe 2025-04-10T11:28:34.940748Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:763:2757] connected; active server actors: 1 >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11665, MsgBus: 32271 2025-04-10T11:26:53.146038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644508984060505:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:26:53.146177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016f8/r3tmp/tmp0TN1vV/pdisk_1.dat 2025-04-10T11:26:53.752177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:26:53.753832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:26:53.753916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:26:53.757935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11665, node 1 2025-04-10T11:26:53.896804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:26:53.896830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:26:53.896838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:26:53.896956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32271 TClient is connected to server localhost:32271 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:26:54.678620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:26:54.698975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:26:56.824163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644521868962841:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:56.824320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:56.824604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644521868962853:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:26:56.828567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:26:56.839020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644521868962855:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:26:56.902401Z node 1 :TX_PROXY ERROR: Actor# [1:7491644521868962906:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:57.208999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:26:57.436567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:57.436753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:57.436995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:57.437101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:57.437196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:57.437314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:57.437409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:57.437510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:57.437609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:57.437703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:57.437798Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:57.437892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644526163930477:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:57.461968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:57.463268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:26:57.463475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:26:57.463591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:26:57.463690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:26:57.463793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:26:57.463885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:26:57.463980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:26:57.464097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:26:57.464199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:26:57.464307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:26:57.464401Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7491644526163930482:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:26:57.487985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644526163930458:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:26:57.488050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644526163930458:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.496445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.499799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.502693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.505552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.508377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.511432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.515155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.517123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.520908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.522371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.525989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.527600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.531744Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.533908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.538026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.542255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.544190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.548264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.551211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.554401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.556822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.560664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.562496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.566343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.567780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.572359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.575518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.582092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.584489Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.588774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.589727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.599031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.599908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.608162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.608895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.615661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.615678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.624741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.626704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.635112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.635913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.641812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.646496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.663006Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.669204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:15.848973Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq949q7yzw4hjsw2yzqemb", SessionId: ydb://session/3?node_id=1&id=M2E1YjlkYTktYTFkMWU3N2QtZWEzYmNkMGItYTg2ZmMwNTI=, Slow query, duration: 40.625022s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:16.138836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:16.139293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:16.140008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491644805336860431:10806];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:28:16.140390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13823, MsgBus: 26304 2025-04-10T11:28:26.880990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644908747918598:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:26.881483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001671/r3tmp/tmp9cS5ri/pdisk_1.dat 2025-04-10T11:28:27.638920Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:27.669892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:27.681798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:27.684499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13823, node 1 2025-04-10T11:28:27.977954Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:27.977974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:27.977981Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:27.978082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26304 TClient is connected to server localhost:26304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:28.893304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:28.919784Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:28.940566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:28:29.172263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.403062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:29.495814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:31.794436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644930222756738:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:31.794549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:31.869432Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644908747918598:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:31.881169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:32.138903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.173346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.259292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.309679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.362704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.452228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.563113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644934517724558:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:32.563244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:32.563491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644934517724563:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:32.574055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:32.588255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644934517724565:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:32.646911Z node 1 :TX_PROXY ERROR: Actor# [1:7491644934517724619:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:33.723673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:33.806133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:33.844976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:33.881651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:33.924537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:33.969514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] |94.0%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoin::ComplexJoin >> KqpJoin::IdxLookupSelf |94.0%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] >> KqpJoinOrder::TPCDS61-ColumnStore >> KqpJoinOrder::TPCDS23-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] Test command err: Trying to start YDB, gRPC: 27134, MsgBus: 8984 2025-04-10T11:27:42.402700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644720341773578:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:42.403216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b7/r3tmp/tmpTmfyDB/pdisk_1.dat 2025-04-10T11:27:43.028996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:43.029088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:43.043002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:43.045693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27134, node 1 2025-04-10T11:27:43.398425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:43.398447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:43.398455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:43.398592Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8984 TClient is connected to server localhost:8984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:44.399785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:44.438427Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:47.197286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644741816610573:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:47.197286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644741816610585:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:47.197389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:47.202385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:27:47.219678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644741816610587:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:27:47.316810Z node 1 :TX_PROXY ERROR: Actor# [1:7491644741816610639:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:27:47.364892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644720341773578:2201];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:27:47.364981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:27:47.779059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:27:47.932143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-10T11:27:47.983441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.034016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.092748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.352353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.381128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.409826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.443286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.487043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.535088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-04-10T11:27:48.580109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
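Note: the run of ESchemeOpCreateTable warnings above is this test's schema setup. The DDL itself is captured further down in this block, escaped inside a KQP_SLOW_LOG entry (duration 38.596859s). Unescaped for readability, the logged statements are exactly the following — a transcription of the logged query text, not new code:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each table requests 240 column-store partitions, which is consistent with the tens-of-seconds setup durations these blocks report under asan.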
2025-04-10T11:27:48.654126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.362306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:49.412571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.461814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.494803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.526632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.556453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.599241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.638849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.681802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.724135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.769886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.809941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.849957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.897972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.944236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:49.981875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:50.018883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.820874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.831705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.834603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.840644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.845571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.855406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.855633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.861332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.865779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.870393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.871357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.876141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.880085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.886097Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.889331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.891870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.897313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.902857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.906349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.912115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.915967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.921113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.925432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.935655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.938413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.943655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.945319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.949658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.955539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.959301Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.964413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.965776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.971144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.974594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.984146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.990041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.991279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:29.999628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.005216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.008806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.018275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.022602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.030538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.083892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.085732Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:30.211325Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9m9y4z83fzeaqgfh24na", SessionId: ydb://session/3?node_id=1&id=YWFiN2IxYmYtOWI5MDJkNWYtMTBhMzJhNDQtMzZkNTJhNDM=, Slow query, duration: 38.596859s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:30.504562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:30.504988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:30.505455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644771881388141:2871];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:28:30.505763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1512, MsgBus: 63569 2025-04-10T11:25:52.427820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644250286485207:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:52.428455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001728/r3tmp/tmpY5LwLK/pdisk_1.dat 2025-04-10T11:25:52.762320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1512, node 1 2025-04-10T11:25:52.812396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:52.812548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:52.814448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:52.932269Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:25:52.932300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:52.932307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:52.932422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63569 TClient is connected to server localhost:63569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.632944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.265369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263171387764:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.265393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263171387756:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.265486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:25:55.268641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:25:55.278350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644263171387770:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:25:55.360862Z node 1 :TX_PROXY ERROR: Actor# [1:7491644263171387821:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:25:55.660288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.870032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.870036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.870292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.870574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.870695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:25:55.870792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:25:55.870802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.870929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.870969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:25:55.871066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.871095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
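Note: the NOT_FOUND / ESchemeOpCreateResourcePool / "path exist, request accepts it" sequence at the start of each test block appears to be the workload manager lazily bootstrapping /Root/.metadata/workload_manager/pools/default on first use: the pool fetchers first fail with NOT_FOUND, the pool is then created, and the follow-up create triggered by the "doublechecking" retry finds the path already present and accepts it as idempotent (severity 1 — a warning, not a failure). As a rough sketch only — the statement form assumes the YQL CREATE RESOURCE POOL syntax of recent YDB releases, and the settings are invented placeholders, not values taken from this log — the bootstrap amounts to something like:

    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- placeholder value for the sketch, not from this log
        QUEUE_SIZE = -1              -- placeholder value for the sketch, not from this log
    );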
2025-04-10T11:25:55.871194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:25:55.871203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.871293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:25:55.871315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.871442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.871517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:25:55.871555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.871620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:25:55.871662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644263171388079:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.871759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:25:55.871872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:25:55.871984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:25:55.872091Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7491644263171388078:2348];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:25:55.906365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644263171388118:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.906366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644263171388089:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:25:55.906419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644263 ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.541929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.543591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.549711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.557296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.564107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.568098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.577476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.580033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.586084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.593567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.595362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.600543Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.601252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.610085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.624549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.634662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.641890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.649906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:26:58.855219Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq73xkctb795hgmbnk111h", SessionId: ydb://session/3?node_id=1&id=MTYyYTcwMy00OWVmYTk2NC0zYjBmMjM1Mi01OTNmYzlkOA==, Slow query, duration: 29.554809s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:26:59.398929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:59.399336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491644469329868208:9773];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-10T11:26:59.403179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:26:59.403511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:25.052446Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq92w417gw763ksh6svdnx", SessionId: ydb://session/3?node_id=1&id=MTYyYTcwMy00OWVmYTk2NC0zYjBmMjM1Mi01OTNmYzlkOA==, Slow query, duration: 
51.287171s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join 
household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16074, MsgBus: 2114 2025-04-10T11:27:00.926301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644542984818337:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:00.926947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ef/r3tmp/tmpJg8TW8/pdisk_1.dat 2025-04-10T11:27:01.399092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16074, node 1 2025-04-10T11:27:01.426200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:01.426293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:01.433445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:01.508014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:01.508043Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:01.508056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:01.508186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2114 TClient is connected to server localhost:2114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:02.109820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:02.136913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:04.345157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644560164687989:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:04.347012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644560164687981:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:04.347111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:04.349026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:27:04.361026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644560164687995:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:04.462760Z node 1 :TX_PROXY ERROR: Actor# [1:7491644560164688046:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:04.824405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:05.107666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:05.107830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:05.108088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:05.108217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:05.108318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:05.108417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:05.108523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:05.108619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:05.108714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:05.108803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:05.108899Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:05.109008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644564459655608:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:05.115603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:05.115704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:05.115891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:05.116021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:05.116124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:05.116225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:05.116318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:05.116423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:05.116559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:05.116678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:05.116865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:05.116987Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644564459655616:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:05.147241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644560164688306:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:05.147295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644560164688306:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstra ... E=RestorePortionFromChunks; 2025-04-10T11:28:16.629912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039341;self_id=[1:7491644860812456100:10986];tablet_id=72075186224039341;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:16.630049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039341;self_id=[1:7491644860812456100:10986];tablet_id=72075186224039341;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:16.630215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039341;self_id=[1:7491644860812456100:10986];tablet_id=72075186224039341;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:16.630327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039341;self_id=[1:7491644860812456100:10986];tablet_id=72075186224039341;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:16.648009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:28:16.648059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:28:16.648203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:28:16.648248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:28:16.648503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:28:16.648536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:28:16.648661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:28:16.648712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:28:16.648802Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:28:16.648834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:28:16.648877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:28:16.648906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:28:16.648952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:28:16.648991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:28:16.649083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:28:16.649111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:28:16.649325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:28:16.649359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:28:16.649486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:28:16.649544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:28:16.649555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:28:16.649602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:28:16.649635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:28:16.649674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:28:16.649731Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:28:16.649772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:28:16.649858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:28:16.649894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:28:16.650077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:28:16.650135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:28:16.650529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:28:16.650568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:28:16.650825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:28:16.650862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:28:16.651042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:28:16.651086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:28:16.651282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:28:16.651309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:28:16.651321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:28:16.651343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:28:16.651494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:28:16.651525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039188;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:28:16.651531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:28:16.651563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039407;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:28:16.661347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039397;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:28:16.661394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039397;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:28:16.661522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039397;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:28:16.661555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039397;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:28:16.661574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039408;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:28:16.661636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039408;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16018, MsgBus: 13940 2025-04-10T11:27:45.397803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644736143491325:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:45.397864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016af/r3tmp/tmpTHEOsS/pdisk_1.dat 2025-04-10T11:27:46.136189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:46.136295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:46.139635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:46.147327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16018, node 1 2025-04-10T11:27:46.477436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T11:27:46.477459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:46.477467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:46.477562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13940 TClient is connected to server localhost:13940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:47.453348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:47.486588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:50.171183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644757618328474:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.171189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644757618328486:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.171335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:50.175565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:50.198375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644757618328488:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:50.303774Z node 1 :TX_PROXY ERROR: Actor# [1:7491644757618328539:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:50.402431Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644736143491325:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:50.402504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:50.818782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.034078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.083445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.136846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.179936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.486726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.542033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.577743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.656223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.712863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.767148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:51.823181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
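The TX_COLUMNSHARD normalizer chain traced earlier in this output is first registered per tablet (event=normalizer_register, abstract.cpp:11, in TTxInitSchema::Execute) and then executed by TTxUpdateSchema::Execute, which brackets each stage with normalizer_switched (abstract.cpp:42) and normalizer_finished (abstract.cpp:62) and closes with normalization_finished (abstract.cpp:45); the finished ids logged above are Granules=1, Chunks=2, TablesCleaner=4, CleanGranuleId=6, CleanInsertionDedup=8, GCCountersNormalizer=9, RestorePortionFromChunks=10, SyncPortionFromChunks=11, SyncMinSnapshotFromChunks=13, RestoreV1Chunks_V2=15, RestoreV2Chunks=16. A minimal C++ sketch of a driver loop consistent with this event shape (hypothetical types and names, not the actual YDB implementation):

    #include <cstdio>
    #include <memory>
    #include <vector>

    // Hypothetical interface standing in for the normalizers the log refers
    // to via abstract.cpp; the real ones live under ydb/core/tx/columnshard.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual const char* ClassName() const = 0;
        virtual void Apply() = 0;  // one normalization stage
    };

    // Runs every registered normalizer in order, logging the same
    // switched/finished/finished-all events seen in the trace above.
    void RunNormalizationChain(const std::vector<std::unique_ptr<INormalizer>>& chain) {
        for (const auto& n : chain) {
            std::printf("event=normalizer_switched;description=CLASS_NAME=%s;\n", n->ClassName());
            n->Apply();
            std::printf("event=normalizer_finished;description=CLASS_NAME=%s;\n", n->ClassName());
        }
        std::printf("event=normalization_finished;\n");
    }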
2025-04-10T11:27:51.898554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.804867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:52.866802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.924497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.979318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.031700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.116486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.157320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.199455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.247340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.313063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.395409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.472712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.510272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.570253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.612407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.645254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:53.699164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.382005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.388187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.393078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.395249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.401859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.405433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.411362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.415321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.418860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.420755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.424707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.427454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.433375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.438794Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.440285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.449875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.450815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.456658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.467885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.468219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.474992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.480900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.482588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.488790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.496881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.501488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.508441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.513981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.525922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.526644Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.532404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.540595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.542446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.549708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.554514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.564015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.566818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.573934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.580156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.581268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.586907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.588463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.593201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.595502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.601195Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.765689Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9r0kfnma69prtyswe655", SessionId: ydb://session/3?node_id=1&id=NzliM2M5NzQtYjY3MDc4MjctNTRhYmMzZTMtYmMxZWM4ZjY=, Slow query, duration: 41.353786s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:37.253036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:37.253545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:37.253914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644791978073094:2831];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:28:37.254328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] >> KqpJoin::IdxLookupSelf [GOOD] >> KqpFlipJoin::Right_1 >> KqpJoin::ComplexJoin [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH18 >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] Test command err: Trying to start YDB, gRPC: 20089, MsgBus: 20578 2025-04-10T11:27:55.324071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644779042501785:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:55.324747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00169b/r3tmp/tmpPG3H1W/pdisk_1.dat 2025-04-10T11:27:56.059630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:56.091545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:56.091640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:56.095639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 20089, node 1 2025-04-10T11:27:56.319884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:56.319906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:56.319913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:56.320043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20578 TClient is connected to server localhost:20578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:57.273683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.340001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:59.749318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644796222371515:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.749445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644796222371504:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.749601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.767216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:59.801399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644796222371518:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:59.886759Z node 1 :TX_PROXY ERROR: Actor# [1:7491644796222371569:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:00.330316Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644779042501785:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:00.330390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:00.434600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.629860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.695711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.734421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.810940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.038489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.133572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.197664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.262522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.350697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.401281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.487325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
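Both KQP_SLOW_LOG entries in this section (duration 41.353786s, earlier under KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore, and duration 36.716352s, later under KqpJoinOrder::GeneralPrioritiesBug3) quote the same \n-escaped DDL text. Expanded for readability (content verbatim from the log; only the escaped whitespace is restored):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);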
2025-04-10T11:28:01.549489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.464207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:02.510217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.557563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.595752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.634010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.686252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.725213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.789541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.856313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.892490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.935682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.975105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.057025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.119111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.157577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.198867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.243551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.236851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.240094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.245828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.247828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.256834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.262579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.263206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.268670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.272755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.282060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.282267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.291345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.297464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.298991Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.307848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.311997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.317025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.325029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.327022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.336481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.342330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.342917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.359843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.365613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.370696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.372846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.378217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.383700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.385186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.393889Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.400230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.405355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.407137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.416587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.420112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.429435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.430378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.440069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.443287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.449535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.452360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.461744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.463297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.476386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.660084Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.711271Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqa1c23sq2vt8ywhdhkgqg", SessionId: ydb://session/3?node_id=1&id=MzFhNDY3OGQtNDg1ZjlkYjMtYjhjODQxOGEtMzAzNWViODc=, Slow query, duration: 36.716352s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:42.255352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:42.255805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:42.256479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491644929366383523:5820];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-10T11:28:42.256806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupSelf [GOOD] Test command err: Trying to start YDB, gRPC: 16689, MsgBus: 18709 2025-04-10T11:28:39.652656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644967438357721:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:39.653256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00165e/r3tmp/tmpVOTv8U/pdisk_1.dat 2025-04-10T11:28:40.442378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:40.442454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:40.455109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:40.516311Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16689, node 1 2025-04-10T11:28:40.746704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:40.746724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:40.746735Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:40.746825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18709 TClient is connected to server localhost:18709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:41.905861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.926667Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:41.948646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:42.292424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:28:42.612300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:42.701904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:44.642261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644967438357721:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:44.642320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:45.203317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644993208163124:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.203441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.657015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.755612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.852387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.908250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.959953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.028224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.107660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644997503130937:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:46.107729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:46.107999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644997503130942:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:46.111881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:46.133595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644997503130944:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:46.203056Z node 1 :TX_PROXY ERROR: Actor# [1:7491644997503131001:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:47.472841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.537230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.593962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:29: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 64785, MsgBus: 25967 2025-04-10T11:28:39.317382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644966196300039:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:39.318175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00165f/r3tmp/tmphoCieg/pdisk_1.dat 2025-04-10T11:28:40.176206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:40.184332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:40.184413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:40.193671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64785, node 1 2025-04-10T11:28:40.467416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:40.467439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:40.467459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:40.467581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25967 TClient is connected to server localhost:25967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:41.273760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.332243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.353289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:28:41.521471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:28:41.732146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.846066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:44.018472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644987671138141:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.018572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.299117Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644966196300039:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:44.299174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:44.369336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.407228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.462543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.530213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.569621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.622667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.684433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644987671138655:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.684521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.684732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644987671138660:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.688713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:44.709133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644987671138662:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:44.817953Z node 1 :TX_PROXY ERROR: Actor# [1:7491644987671138720:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:46.108583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.162281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.221905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.305193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.404666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 30839, MsgBus: 4206 2025-04-10T11:28:31.522929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644933009380354:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:31.523823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00166f/r3tmp/tmpEFba3U/pdisk_1.dat 2025-04-10T11:28:32.226345Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:32.227937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:32.228002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:32.237079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30839, node 1 2025-04-10T11:28:32.602685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:32.602705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:32.602712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:32.602835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4206 TClient is connected to server localhost:4206 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:33.897715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:33.950062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:34.193447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:34.470087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:34.599575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:36.523420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644933009380354:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:36.523524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:37.435925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644958779185903:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:37.436032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:37.770837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:37.819274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:37.897714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:37.943683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:38.027892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:38.096791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:38.197380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644963074153728:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.197504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.202441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644963074153733:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.206624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:38.228750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644963074153735:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:38.302944Z node 1 :TX_PROXY ERROR: Actor# [1:7491644963074153790:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:39.794831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:39.864459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:39.916096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:39.994327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:40.056260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:40.109142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12275, MsgBus: 13307 2025-04-10T11:28:43.301367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491644983526018682:2270];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00166f/r3tmp/tmpapUFxQ/pdisk_1.dat 2025-04-10T11:28:43.434692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:28:43.569052Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:43.586724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:43.586817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:43.595390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12275, node 2 2025-04-10T11:28:43.810672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:43.810696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:43.810704Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:43.810831Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13307 TClient is connected to server localhost:13307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:28:44.786606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.795774Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:28:44.809333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:44.927357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:45.098758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:28:45.182640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.739589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645000705889416:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:47.739694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:47.792375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.834835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.905212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.942881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.990234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.040976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.106304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645005000857224:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:48.106517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:48.107128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645005000857229:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:48.110993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:48.130374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645005000857232:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:28:48.188865Z node 2 :TX_PROXY ERROR: Actor# [2:7491645005000857285:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:48.261248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491644983526018682:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:48.261296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:49.492199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.535222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.603866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.666484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.751753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.794840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23608, MsgBus: 14632 2025-04-10T11:27:33.709525Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644682713931844:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.709926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016da/r3tmp/tmpgtapV6/pdisk_1.dat 2025-04-10T11:27:34.405099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.407845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.407932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.412985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 23608, node 1 2025-04-10T11:27:34.606780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.606801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.606819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.606964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14632 TClient is connected to server localhost:14632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.461150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.499350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:37.707183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699893801534:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.707186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644699893801525:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.707274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.712037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.728602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644699893801539:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.787349Z node 1 :TX_PROXY ERROR: Actor# [1:7491644699893801592:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.130283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.243964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.277959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.315915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.360125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.557340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.627100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.705232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.706291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644682713931844:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:38.706389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:38.744387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.778982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.817977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.849142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:27:38.890617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.708051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:39.796720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.838768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.872236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.932719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:39.975713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.012045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.050051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.099544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.152738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.238426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.341337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.394911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.478946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.512130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.552643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.591485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.910798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.916826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.921558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.931373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.936158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.941953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.945066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.956638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.963241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.969693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.970251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.980725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:21.984814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.000108Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.001537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.007304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.016178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.022645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.030247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.035583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.045302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.049813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.052281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.057730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.059117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.069798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.074188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.080297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.083922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.089298Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.092302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.101208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.103200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.110009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.112708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.116802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.119522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.123229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.215724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:22.319671Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9b6r0xbtpb650anam0ns", SessionId: ydb://session/3?node_id=1&id=N2UxMmIxNC1hYjQyMGVlOS00ZTk1YzZlNi0zMzBjYjU1, Slow query, duration: 40.022546s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:23.099010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:23.099511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:23.100182Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:46.846947Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqaypa3whqhby5cngchr05", SessionId: ydb://session/3?node_id=1&id=N2UxMmIxNC1hYjQyMGVlOS00ZTk1YzZlNi0zMzBjYjU1, Slow query, duration: 11.827729s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::TPCH12_100 >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] Test command err: Trying to start YDB, gRPC: 18326, MsgBus: 8789 2025-04-10T11:27:05.826423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644560648423943:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:05.826481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ee/r3tmp/tmpc98LfC/pdisk_1.dat 2025-04-10T11:27:06.444482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:06.444561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:06.451185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:06.487021Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18326, node 1 2025-04-10T11:27:06.666784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:06.666810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:06.666817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:06.666956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8789 TClient is connected to server localhost:8789 WaitRootIsUp 'Root'... 
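For readability, the SQL quoted with escaped newlines in the two KQP_SLOW_LOG entries above (the 40.0s CREATE TABLE batch and the 11.8s TPC-DS query95 run) is reproduced here with the \n and \" escapes expanded; the statement text is copied from the log as-is, nothing added or renamed.

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

pragma TablePathPrefix = "/Root/test/ds/";
-- NB: Subquerys
$ws_wh =
(select ws1.ws_order_number ws_order_number, ws1.ws_warehouse_sk wh1, ws2.ws_warehouse_sk wh2
 from web_sales ws1 cross join web_sales ws2
 where ws1.ws_order_number = ws2.ws_order_number
   and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);
-- start query 1 in stream 0 using template query95.tpl and seed 2031708268
select
    count(distinct ws1.ws_order_number) as `order count`
   ,sum(ws_ext_ship_cost) as `total shipping cost`
   ,sum(ws_net_profit) as `total net profit`
from
    web_sales ws1
    cross join date_dim
    cross join customer_address
    cross join web_site
where
    cast(d_date as date) between cast('2002-4-01' as date) and
        (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))
and ws1.ws_ship_date_sk = d_date_sk
and ws1.ws_ship_addr_sk = ca_address_sk
and ca_state = 'AL'
and ws1.ws_web_site_sk = web_site_sk
and web_company_name = 'pri'
and ws1.ws_order_number in (select ws_order_number
                            from $ws_wh)
and ws1.ws_order_number in (select wr_order_number
                            from web_returns cross join $ws_wh ws_wh
                            where wr_order_number = ws_wh.ws_order_number)
order by `order count`
limit 100;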
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:07.407200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:09.452796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644577828293561:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.452904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.456550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644577828293573:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.462398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:09.478780Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:27:09.479257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644577828293575:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:09.577616Z node 1 :TX_PROXY ERROR: Actor# [1:7491644577828293626:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:09.932564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:10.230069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.230302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:10.230546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:10.230660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:10.230768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:10.230868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:10.230979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:10.231097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:10.231256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.231330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:10.231489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:10.231578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:10.231695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:10.231742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:10.231789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:10.231870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:10.231915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:10.231984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:10.232010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:10.232080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644582123261167:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:10.232116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:10.232234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:10.232340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:10.232424Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644582123261190:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:10.265155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644582123261165:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.265239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644582123261165:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstra ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.464730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.467193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.476026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.478967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.488444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.490051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.500948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.501333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.507497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.511491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.522829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.527861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.533811Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.536691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.544488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.547284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.555656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.561676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.566068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.576355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.582539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.587742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.591961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.600328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.601068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.610962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.616658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.622616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.625518Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.635471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.639240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.645417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.649854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.659463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.664271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.671663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.678297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.685147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.689125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.699278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.702626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.712719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.715073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.727587Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:33.936344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.046835Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9k7y7y2dkq0n6d11zvn2", SessionId: ydb://session/3?node_id=1&id=ZWYyNzNiYi0xZDIxZTAyMi03ZTY0MDFjNS0xNzkxYmY5MQ==, Slow query, duration: 43.519664s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:34.565632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:34.565690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:34.566026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491644887065995839:11072];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:28:34.566456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1391, MsgBus: 63209 2025-04-10T11:28:45.909025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644992331571520:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:45.938680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001654/r3tmp/tmpX1SO30/pdisk_1.dat 2025-04-10T11:28:46.725005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:46.734321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:46.734398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:46.747506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1391, node 1 2025-04-10T11:28:46.990659Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:46.990679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:46.990685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:46.990785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63209 TClient is connected to server localhost:63209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:48.153721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:48.214378Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:48.232105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:48.450616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:48.655632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:48.795620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:50.910509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644992331571520:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:50.910588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:51.172456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645018101376979:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:51.172550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:51.590600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.687333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.731808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.781666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.843202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.932786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.042544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645022396344796:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:52.042700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:52.046484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645022396344801:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:52.054414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:52.065185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645022396344803:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:52.143802Z node 1 :TX_PROXY ERROR: Actor# [1:7491645022396344858:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:53.253202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.287026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.344663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.390543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.474207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.553391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Inner_1 >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1953, MsgBus: 27400 2025-04-10T11:27:58.284991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644789237524456:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:58.285726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001697/r3tmp/tmpTHqIna/pdisk_1.dat 2025-04-10T11:27:58.886578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1953, node 1 2025-04-10T11:27:59.004757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:59.004779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:59.004786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:59.004905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:27:59.026922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:59.027022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:59.028193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27400 TClient is connected to server localhost:27400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:59.739485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:59.766914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:02.183892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644806417394279:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:02.184028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:02.184373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644806417394291:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:02.188745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:02.209198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644806417394293:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:02.303546Z node 1 :TX_PROXY ERROR: Actor# [1:7491644806417394344:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:02.750744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.903547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.938432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.974965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.018250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.205921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.250322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.287293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644789237524456:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:03.287352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:03.315552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.353255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.393198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.441995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:03.514387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:03.593107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.527529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:04.623814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.678309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.758035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.799678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.878546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.934496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.003627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.059765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.097657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.169896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.217269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.265673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.346774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.393002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.442057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:05.496641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.870065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.870575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.876607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.888188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.893958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.894684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.904715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.908661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.915836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.927937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.928380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.936181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.939413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:45.958454Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.961295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.968706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.972920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.983993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.985528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.998058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:45.998828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.008681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.013333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.023034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.026712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.032823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.036147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.044285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.044958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.059278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.062860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.066336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.073290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.073432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.080137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.087189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.087420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.093907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.094567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.100489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.100846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.107092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.110844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.118873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.124834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:46.280531Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqa3mz3mzb5agey9qz6gk2", SessionId: ydb://session/3?node_id=1&id=YzAyNjYxOGYtNDI4MWVjZWUtZTIwNGY5ODUtYTRkOGE5MTk=, Slow query, duration: 38.952898s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:28:46.575828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:46.576079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:46.576189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644836482171553:2912];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331;
2025-04-10T11:28:46.576535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::TPCDS90-ColumnStore [GOOD]
>> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF
>> KqpJoinOrder::TPCDS96+ColumnStore [GOOD]
>> KqpFlipJoin::Right_1 [GOOD]
>> KqpFlipJoin::Right_2
>> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
>> KqpJoinOrder::TPCDS90+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 26749, MsgBus: 7748
2025-04-10T11:28:38.636108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644961896778476:2209];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:38.636652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001665/r3tmp/tmp519KNs/pdisk_1.dat
2025-04-10T11:28:39.382376Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:28:39.414756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:28:39.414868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:28:39.420323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26749, node
1 2025-04-10T11:28:39.690655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:39.690679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:39.690686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:39.690789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7748 TClient is connected to server localhost:7748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:40.683887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:40.730657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:40.744814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.003240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.230630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:41.361603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:43.537670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644961896778476:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:43.537730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:43.831883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644983371616566:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:43.831983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.283858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.324690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.412175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.451871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.521514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.595609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.675875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644987666584383:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.675958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.676201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644987666584388:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:44.681311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:44.696599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644987666584390:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:44.774746Z node 1 :TX_PROXY ERROR: Actor# [1:7491644987666584444:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:46.183905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.245422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1943, MsgBus: 18610 2025-04-10T11:28:49.907363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645010868611005:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001665/r3tmp/tmp4ttqTq/pdisk_1.dat 2025-04-10T11:28:49.950586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:28:50.045888Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:50.060803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:50.060888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:50.063305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1943, node 2 2025-04-10T11:28:50.209300Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:50.209323Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:50.209332Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:50.209466Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18610 TClient is connected to server localhost:18610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:28:50.669917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:50.682735Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:50.704277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:50.786713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:50.956829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:51.034421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:54.170250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645032343449071:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:54.170315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:54.259987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.343711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.410169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.455489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.499970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.555081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.631343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645032343449586:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:54.631415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:54.631719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645032343449591:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:54.635581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:54.666248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645032343449593:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:28:54.755449Z node 2 :TX_PROXY ERROR: Actor# [2:7491645032343449648:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:28:54.807447Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645010868611005:2215];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:54.807507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:28:55.773163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:28:55.835122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 2967, MsgBus: 29757
2025-04-10T11:27:49.854082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644752694808838:2215];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:27:49.854363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a9/r3tmp/tmpKrlOc3/pdisk_1.dat
2025-04-10T11:27:50.466655Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:27:50.484473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:27:50.484562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:27:50.486929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2967, node 1
2025-04-10T11:27:50.648385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:27:50.648419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:27:50.648429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:27:50.648570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29757
TClient is connected to server localhost:29757
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:51.202736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.222982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:53.446976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644769874678535:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:53.447108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:53.450972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644769874678547:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:53.458098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:53.476872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644769874678549:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:53.562177Z node 1 :TX_PROXY ERROR: Actor# [1:7491644769874678600:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:54.077720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.197285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.241378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.318601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.360046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.586598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.652021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.700992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.743889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.780194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.811335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:27:54.841229Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644752694808838:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:54.841416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:54.846643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:27:54.886010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.535851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:27:55.584659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.623096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.660677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.703137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.744514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.799150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.888738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.949097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:27:55.991043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.059077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.094272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.129153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.189969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.220381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.265218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:27:56.298584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.458759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.465095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.469514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.477083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.485660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.486143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.491847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.499452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.505573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.510367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.517617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.523601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.527122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.532868Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.536290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.554575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.559756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.566582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.568929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.586479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.592194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.596692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.605964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.608812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.617659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.625769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.627457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.634742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.639269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.643448Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.645094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.651488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.653556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.662916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.665931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.673362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.677028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.686649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.686908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.692247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.699727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.700787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.705188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.749853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.790366Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:36.878477Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9t9xawcb4xr6jtntfhh9", SessionId: ydb://session/3?node_id=1&id=NWQ3YWY3Ny0yMTgzNTIxMi1hMzM3NDA3NS0xYzk0NDU4YQ==, Slow query, duration: 39.120957s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:28:37.588400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:37.588843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:37.589541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491644907313658125:5833];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331;
2025-04-10T11:28:37.590003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 27317, MsgBus: 23694
2025-04-10T11:27:06.144141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644568621264076:2267];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:27:06.144200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ea/r3tmp/tmp7ILgd0/pdisk_1.dat
2025-04-10T11:27:06.856259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:27:06.856419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:27:06.862797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:27:06.863751Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27317, node 1
2025-04-10T11:27:07.086424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:27:07.086449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:27:07.086458Z node 1 :NET_CLASSIFIER WARN: failed to initialize
from file: (empty maybe) 2025-04-10T11:27:07.086569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23694 TClient is connected to server localhost:23694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:07.775353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:07.786964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:09.934982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644581506166411:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.935103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.935704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644581506166423:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:09.940089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:09.953890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644581506166425:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:10.031158Z node 1 :TX_PROXY ERROR: Actor# [1:7491644585801133772:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:10.367470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:10.759669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.759867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:10.760166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:10.760264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:10.760378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:10.760496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:10.760595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:10.760689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:10.760781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:10.760867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:10.760967Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:10.761080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644585801133987:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:10.764387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.764449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:10.764690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:10.764814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:10.764938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:10.765033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:10.765122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:10.765245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:10.765383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:10.765495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:10.765605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:10.765703Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644585801134044:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:10.853327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644585801134035:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:10.855689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644585801134019:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.906437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.907019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.912414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.912744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.918695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.918728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.924036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.927075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.930542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.933403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.936446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.939279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.942439Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.945028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.948461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.951498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.954832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.961074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.964038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.976158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.982326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:34.997707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.000954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.008441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.009860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.014527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.018508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.021407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.025294Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.027925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.033413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.035354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.040648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.043084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.047157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.049785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.054092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.057062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.060792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.064137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.067727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.070561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.074786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.079327Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.081364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.258657Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9m1y1msq0tdjqac33ncy", SessionId: ydb://session/3?node_id=1&id=ZWUzYTBmNzMtZTNhMjI1OTMtYjU0MzQ0MWMtMWRiZGQzODI=, Slow query, duration: 43.899707s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:35.942614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:35.943092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:35.943454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491644890743868511:10991];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-10T11:28:35.947083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoin::LeftJoinWithNull-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21647, MsgBus: 3293 2025-04-10T11:28:07.936512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644827384022304:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:07.958985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001682/r3tmp/tmpxHKPqh/pdisk_1.dat 2025-04-10T11:28:08.745145Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:08.745496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:08.745604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:08.756326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21647, node 1 2025-04-10T11:28:09.114284Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:09.114305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:09.114313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:09.114422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3293 TClient is connected to server localhost:3293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:10.223182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.242191Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:12.724415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644848858859381:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:12.724575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:12.724983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644848858859393:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:12.729219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:12.747417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644848858859395:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:12.835036Z node 1 :TX_PROXY ERROR: Actor# [1:7491644848858859447:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:12.942280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644827384022304:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:12.962440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:13.541206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.720725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.804447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.851085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:13.914081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.179552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.237035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.280373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.324929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.378704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.442921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:14.527261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:14.567079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.836169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:15.875328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.923998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:15.971402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.018113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.081744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.116773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.154727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.186231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.251898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.286307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.330762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.365300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.399481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.436528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.477450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:16.526275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.632230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.636359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.646458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.650614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.658425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.663042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.669122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.671267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.681333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.684233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.695376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.699594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.705914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.709778Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.719996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.723619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.733866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.734325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.744646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.747633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.758413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.762963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.768560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.772981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.782967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.786999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.792348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.793224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.808966Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.814684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.820665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.826805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.832906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.840930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.843057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.853477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.857056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.865709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.871104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.877553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.879860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.887960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.892046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.904919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.910178Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:55.956965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:56.145728Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqae9h2q6g1rrndvmhzw1s", SessionId: ydb://session/3?node_id=1&id=MmRlYWU5ZWEtN2IwMjBlNjgtOWU1YjMzYTItN2M4ZTY4MzA=, Slow query, duration: 37.920033s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:56.483528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:56.483986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:56.485042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH3 >> KqpJoin::IdxLookupPartialLeftPredicate >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> KqpJoinOrder::TestJoinHint1+ColumnStore >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] >> KqpFlipJoin::Right_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 9243, MsgBus: 29894 2025-04-10T11:28:56.018719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645035502186656:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:56.019339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001646/r3tmp/tmppi2VXf/pdisk_1.dat 2025-04-10T11:28:56.876033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:56.886313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:56.886434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:56.898213Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9243, node 1 2025-04-10T11:28:57.166676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:57.166697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:57.166710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:57.166828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29894 TClient is connected to server localhost:29894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:58.161524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:58.191676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:58.202281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:58.427920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:58.679665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:58.787568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:00.682525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645056977024775:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:00.682742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:00.974549Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645035502186656:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:00.985923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:01.302497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.376096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.458523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.516106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.562142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.640881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.754503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645061271992595:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:01.754585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:01.754989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645061271992600:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:01.759024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:01.772722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645061271992602:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:01.845777Z node 1 :TX_PROXY ERROR: Actor# [1:7491645061271992656:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:03.091131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.152918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.221075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.284562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.335975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:47: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 23626, MsgBus: 5897 2025-04-10T11:28:59.221371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645054077627098:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:59.221765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001639/r3tmp/tmpbaH3hD/pdisk_1.dat 2025-04-10T11:28:59.914060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:59.914259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:59.922758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:59.981112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23626, node 1 2025-04-10T11:29:00.254799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:00.254819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:00.254829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:00.254964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5897 TClient is connected to server localhost:5897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:01.541858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.593720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.844486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:02.076612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.205812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:04.210489Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645054077627098:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:04.210579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:04.716494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645075552465222:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:04.716613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.299133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.388626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.437628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.479255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.551527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.644520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.766630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645079847433047:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.766723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.767288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645079847433052:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.771787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:05.799181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645079847433055:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:05.863230Z node 1 :TX_PROXY ERROR: Actor# [1:7491645079847433109:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:07.100119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.172718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.230479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.322543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.392006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.446505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Right_2 [GOOD] Test command err: Trying to start YDB, gRPC: 12344, MsgBus: 14425 2025-04-10T11:28:51.803677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645016159610105:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:51.804045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164f/r3tmp/tmpLo83U8/pdisk_1.dat 2025-04-10T11:28:52.626387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:52.640320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:52.640418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:52.644146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12344, node 1 2025-04-10T11:28:52.869541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:52.869560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:52.869566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:52.869662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14425 TClient is connected to server localhost:14425 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:53.944830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:53.970368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:53.981905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:54.240816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:54.556030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:54.684439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:56.739993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645037634448219:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:56.740096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:56.798255Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645016159610105:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:56.798315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:57.153329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.201427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.249190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.314127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.400018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.463025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.543927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645041929416035:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.544005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.544319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645041929416040:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.548756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:28:57.575783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645041929416042:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:28:57.642309Z node 1 :TX_PROXY ERROR: Actor# [1:7491645041929416098:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:58.758344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:58.823392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:58.881933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:28:58.925180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22797, MsgBus: 24651 2025-04-10T11:29:00.766274Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645056296001465:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:00.767264Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164f/r3tmp/tmpfd2UIV/pdisk_1.dat 2025-04-10T11:29:01.122385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:01.178872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:01.178947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:01.186438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22797, node 2 2025-04-10T11:29:01.398693Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:01.398714Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:01.398722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:01.398841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24651 TClient is connected to server localhost:24651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:02.308717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.351286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.439797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.649504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.745686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:05.492170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645077770839703:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.492240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.611919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.662226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.714227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.766483Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645056296001465:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:05.766554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:05.816568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.861805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.917745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.026492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645082065807517:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:06.026593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:06.030389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645082065807522:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:06.037821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:06.058317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645082065807524:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:06.153588Z node 2 :TX_PROXY ERROR: Actor# [2:7491645082065807580:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:07.460343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.527447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.611382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.791144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17319, MsgBus: 25231 2025-04-10T11:28:05.247022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644820207786444:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:05.247072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00168b/r3tmp/tmpdy0vjS/pdisk_1.dat 2025-04-10T11:28:06.104270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:06.104351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:06.114224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:06.116750Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17319, node 1 2025-04-10T11:28:06.434894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:06.434914Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:06.434921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:06.435013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25231 TClient is connected to server localhost:25231 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:07.385338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.254262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644820207786444:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:10.254341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:10.353084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644841682623594:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.353263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.354022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644841682623606:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.361094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:10.382004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644841682623608:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:28:10.447074Z node 1 :TX_PROXY ERROR: Actor# [1:7491644841682623659:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:28:11.054685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.173050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.205093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.282045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.360188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.545301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.597879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.631991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.666600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.699930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.777471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.831725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:28:11.890949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.591435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480
2025-04-10T11:28:12.640101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.684815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.722284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.777341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.850756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.895962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.936796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-10T11:28:12.979117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.029312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.069473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.121792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.180025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.239536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.315907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.392267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.454863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480
2025-04-10T11:28:13.547581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.084567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.092228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.096606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.106691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.112051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.118007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.120290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.130076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.131671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.140108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.143801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.151836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.157664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.165447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.167266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.175704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.182570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.189381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.191573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.200338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.206494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.214261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.220653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.227926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.231309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.245829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.246449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.256763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.260420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.266583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.274560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.280796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.284996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.294522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.299425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.304187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.314259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.318782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.324633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.329056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.339418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.343523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.353869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.366022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.368336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.380525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:28:53.602441Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqabba6gnqsx92phyk161e", SessionId: ydb://session/3?node_id=1&id=MmU2NDkzNGMtNTZhMmI1NjMtZDVhZTVmMzQtMzg1MmE1NTc=, Slow query, duration: 38.391549s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:28:54.416730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:54.416769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:28:54.417363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small
>> KqpJoin::JoinDupColumnRightPure
>> KqpJoin::ExclusionJoin
>> OlapEstimationRowsCorrectness::TPCH5
>> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS94-ColumnStore [GOOD]
>> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD]
>> KqpJoin::IdxLookupPartialLeftPredicate [GOOD]
>> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD]
>> KqpFlipJoin::Inner_2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 1464, MsgBus: 3273
2025-04-10T11:28:14.064881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644857119215821:2192];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:14.064992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00167e/r3tmp/tmpAsuepQ/pdisk_1.dat
2025-04-10T11:28:14.846948Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:28:14.876445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:28:14.876522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:28:14.887939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1464, node 1
2025-04-10T11:28:15.234357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:28:15.234377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:28:15.234383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:28:15.234498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3273
TClient is connected to server localhost:3273
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:28:16.343163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:28:19.070318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644857119215821:2192];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:28:19.070400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:28:19.689234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644878594052844:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:19.689368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:19.689804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644878594052856:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:19.694048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:19.716946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644878594052858:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:19.814984Z node 1 :TX_PROXY ERROR: Actor# [1:7491644878594052909:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:20.342135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.473450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.574184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.614481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.661298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.861509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:20.914884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.002121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.071740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.154134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.247603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.328537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:21.401355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.364049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:28:22.437119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.484044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.559421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.602715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.648266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.691403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.737906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.775318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.837151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.916632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:22.961745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.004426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.051009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.096052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.149459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.204612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.280588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propos ... tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.910559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.912786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.917030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.919712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.925611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.926471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.933997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.937647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.945584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.947149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.952752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.954628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.959383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.962263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.965595Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.972258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.976128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.983425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.991918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:05.998035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.014513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.021751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.026300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.035237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.040744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.047337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.054081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.055122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.060253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.063407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.067559Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.069397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.075819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.077246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.082443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.084246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.095019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.099373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.105858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.106995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.112441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.116192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.123401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.126764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.133242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.133826Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:06.279395Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqamv25bcpckjk93vx7acq", SessionId: ydb://session/3?node_id=1&id=N2RkNDEwOTktZWQ5YjljNTItMTQ2MGM5NGItMmM0YTM3ZjU=, Slow query, duration: 41.348564s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:06.568886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:06.568963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:06.569566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 22229, MsgBus: 22677 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00162d/r3tmp/tmpgX044R/pdisk_1.dat 2025-04-10T11:29:06.804439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:29:07.125616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:07.196723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:07.196809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:07.204289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22229, node 1 2025-04-10T11:29:07.486533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:07.486559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:07.486567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:07.486685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22677 TClient is connected to server localhost:22677 WaitRootIsUp 'Root'... 
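[Editor's transcription, for readability only: the DDL captured in the KQP_SLOW_LOG entry above (Slow query, duration: 41.348564s) is reproduced here with its \n escapes expanded. This is a direct unescaping of the logged query text, not new code; nothing below is added or assumed beyond what the log line already contains.]

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);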
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:08.677680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:08.709604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:08.725063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:08.929645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:09.246950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:09.389890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:12.448500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645107346498029:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:12.448626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.015258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.076849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.168332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.280369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.359227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.440806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.539355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645111641465853:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.539454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.539668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645111641465858:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.543975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:13.560765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645111641465860:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:13.655020Z node 1 :TX_PROXY ERROR: Actor# [1:7491645111641465915:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:14.821598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.855956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.893112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 27894, MsgBus: 11752 2025-04-10T11:29:08.134254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645092481691499:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:08.134473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00162c/r3tmp/tmp5ACugP/pdisk_1.dat 2025-04-10T11:29:08.923406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:08.930423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:08.930498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:08.939276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27894, node 1 2025-04-10T11:29:09.164227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:09.164249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:09.164256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:09.164366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11752 TClient is connected to server localhost:11752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:10.572955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:10.603095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:10.612503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:10.922820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:11.204133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:11.329865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:13.098327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645092481691499:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:13.098403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:13.807681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645113956529615:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.807837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.282075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.319722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.395468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.436089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.495470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.577976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.666339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645118251497433:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.666460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.670359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645118251497438:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.678269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:14.692043Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:29:14.693185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645118251497440:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:14.753506Z node 1 :TX_PROXY ERROR: Actor# [1:7491645118251497496:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:16.066095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:16.135267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:16.185335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 query_phases { duration_us: 6136 table_access { name: "/Root/Join1_1" reads { rows: 8 bytes: 136 } partitions_count: 1 } cpu_time_us: 4353 affected_shards: 1 } query_phases { duration_us: 9671 table_access { name: "/Root/Join1_2" reads { rows: 3 bytes: 57 } partitions_count: 1 } cpu_time_us: 7348 affected_shards: 1 } compilation { duration_us: 691300 cpu_time_us: 686835 } process_cpu_time_us: 599 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"E-Size\":\"No estimate\",\"PlanNodeId\":8,\"LookupKeyColumns\":[\"Key1\"],\"Node Type\":\"TableLookup\",\"Path\":\"\\/Root\\/Join1_2\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Rows\":\"No estimate\",\"Table\":\"Join1_2\",\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Iterator\":\"PartitionByKey\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"PartitionByKey\",\"Input\":\"precompute_0_0\"}],\"Node Type\":\"ConstantExpr-Aggregate\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1744284557269,\"Host\":\"ghrun-x5vqckr7qy\",\"ResultRows\":2,\"ResultBytes\":7,\"OutputRows\":2,\"ComputeTimeUs\":56,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":2,\"DstStageId\":0,\"Bytes\":7}],\"TaskId\":1,\"OutputBytes\":7}],\"PeakMemoryUsageBytes\":2031616,\"CpuTimeUs\":1575}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[4,7]}},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[4,1048576]},\"ResultRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1744284557266,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1153,\"Max\":1153,\"Min\":1153,\"History\":[4,1153]},\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7}},\"CTE Name\":\"precompute_0_0\"}],\"PlanNodeType\":\"Connection\",\"E-Cost\":\"No 
estimate\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"},{\"Inputs\":[{\"InternalOperatorId\":3},{\"InternalOperatorId\":2}],\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"},{\"Inputs\":[],\"ToFlow\":\"precompute_0_0\",\"Name\":\"ToFlow\"},{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\" ... "Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[4,34]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[4,34]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1410,\"Max\":1410,\"Min\":1410,\"History\":[4,1410]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeType\":\"Materialize\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":691300,\"CpuTimeUs\":686835},\"ProcessCpuTimeUs\":599,\"TotalDurationUs\":727122,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Rows\":\"No estimate\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\",\"Name\":\"TableLookup\",\"Table\":\"Join1_2\",\"LookupKeyColumns\":[\"Key1\"]}],\"Node Type\":\"TableLookup\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Join1_1\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Join1_1\",\"ReadColumns\":[\"Fk21\",\"Fk22\",\"Key\",\"Value\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Size\":\"No estimate\",\"A-SelfCpu\":0.978,\"Name\":\"Filter\",\"Predicate\":\"Exist(item.Fk21) AND item.Value == \\\"Value3\\\"\",\"A-Rows\":2,\"E-Rows\":\"No estimate\",\"A-Cpu\":0.978,\"E-Cost\":\"No estimate\",\"A-Size\":34}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"InnerJoin 
(MapJoin)\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":1.969,\"A-Cpu\":2.947,\"A-Size\":108,\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"}],\"Node Type\":\"TopSort\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":0.861,\"A-Cpu\":3.808,\"A-Size\":108,\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"Fk21\" (OptionalType (DataType \'Int32))) \'(\'\"Fk22\" (OptionalType (DataType \'String))) \'(\'\"Key\" (OptionalType (DataType \'Int32))) \'(\'\"Value\" (OptionalType (DataType \'String))))))\n(let $1 (KqpTable \'\"/Root/Join1_1\" \'\"72057594046644480:16\" \'\"\" \'1))\n(let $2 \'(\'\"Fk21\" \'\"Fk22\" \'\"Key\" \'\"Value\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) \'()))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($32) (FromFlow (Filter (ToFlow $32) (lambda \'($33) (And (Exists (Member $33 \'\"Fk21\")) (Coalesce (== (Member $33 \'\"Value\") (String \'\"Value3\")) (Bool \'false))))))) \'(\'(\'\"_logical_id\" \'1035) \'(\'\"_id\" \'\"bace1c7d-de17d5a5-71ad1af2-663b612c\"))))\n(let $5 (DqCnUnionAll (TDqOutput $4 \'0)))\n(let $6 (DqPhyStage \'($5) (lambda \'($34) $34) \'(\'(\'\"_logical_id\" \'1413) \'(\'\"_id\" \'\"8ea5a376-fe871e82-e631ad45-b982b811\"))))\n(let $7 (DqCnResult (TDqOutput $6 \'0) \'()))\n(let $8 \'(\'(\'\"type\" \'\"data\")))\n(let $9 (KqpPhysicalTx \'($4 $6) \'($7) \'() $8))\n(let $10 \'\"%kqp%tx_result_binding_0_0\")\n(let $11 (DataType \'Int32))\n(let $12 (OptionalType $11))\n(let $13 (OptionalType (DataType \'String)))\n(let $14 (StructType \'(\'\"Fk21\" $12) \'(\'\"Fk22\" $13) \'(\'\"Key\" $12) \'(\'\"Value\" $13)))\n(let $15 (ListType $14))\n(let $16 %kqp%tx_result_binding_0_0)\n(let $17 \'(\'(\'\"_logical_id\" \'1078) \'(\'\"_id\" \'\"cac6ae50-85245ef1-79c1f407-e07799b8\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $18 (DqPhyStage \'() (lambda \'() (Iterator (PartitionByKey $16 (lambda \'($35) (Member $35 \'\"Fk21\")) (Void) (Void) (lambda \'($36) (Map (Filter (FlatMap $36 (lambda \'($37) (Map (Take (Nth $37 \'1) (Uint64 \'1)) (lambda \'($38) (AsStruct \'(\'\"Fk21\" (Member $38 \'\"Fk21\"))))))) (lambda \'($39) (Exists (Member $39 \'\"Fk21\")))) (lambda \'($40) (AsStruct \'(\'\"Key1\" (Member $40 \'\"Fk21\"))))))))) $17))\n(let $19 (KqpTable \'\"/Root/Join1_2\" \'\"72057594046644480:17\" \'\"\" \'1))\n(let $20 \'(\'\"Fk3\" \'\"Key1\" \'\"Key2\" \'\"Value\"))\n(let $21 (KqpCnStreamLookup (TDqOutput $18 \'0) $19 $20 (ListType (StructType \'(\'\"Key1\" $12))) \'(\'(\'\"Strategy\" \'\"LookupRows\"))))\n(let $22 (Uint64 \'\"1001\"))\n(let $23 (StructType \'(\'\"t1.Fk21\" $12) \'(\'\"t1.Fk22\" $13) \'(\'\"t1.Key\" $12) \'(\'\"t1.Value\" $13) \'(\'\"t2.Fk3\" $13) \'(\'\"t2.Key1\" $12) \'(\'\"t2.Key2\" $13) \'(\'\"t2.Value\" $13)))\n(let $24 \'(\'(\'\"_logical_id\" \'1298) \'(\'\"_id\" \'\"b4c906df-a7a896e6-6c29c20a-244efc08\") \'(\'\"_wide_channels\" $23)))\n(let $25 (DqPhyStage \'($21) (lambda \'($41) (block \'(\n (let $42 \'(\'Many \'Hashed \'Compact))\n (let $43 (SqueezeToDict (FlatMap (ToFlow $16) (lambda \'($46) (block \'(\n (let $47 (Member $46 \'\"Fk21\"))\n (let $48 (Nothing (OptionalType (TupleType $11 $14))))\n (let $49 (IfPresent $47 (lambda \'($50) (Just \'($50 $46))) $48))\n (return (If (Exists $47) $49 $48))\n )))) (lambda \'($51) (Nth $51 \'0)) (lambda \'($52) (Nth $52 
\'1)) $42))\n (let $44 (TopSort (FlatMap $43 (lambda \'($53) (block \'(\n (let $54 \'(\'\"Fk3\" \'\"t2.Fk3\" \'\"Key1\" \'\"t2.Key1\" \'\"Key2\" \'\"t2.Key2\" \'\"Value\" \'\"t2.Value\"))\n (let $55 \'(\'\"Fk21\" \'\"t1.Fk21\" \'\"Fk22\" \'\"t1.Fk22\" \'\"Key\" \'\"t1.Key\" \'\"Value\" \'\"t1.Value\"))\n (return (MapJoinCore (OrderedFilter (ToFlow $41) (lambda \'($56) (Exists (Member $56 \'\"Key1\")))) $53 \'\"Inner\" \'(\'\"Key1\") \'(\'\"Fk21\") $54 $55 \'(\'\"t2.Key1\") \'(\'\"t1.Fk21\")))\n )))) $22 (Bool \'true) (lambda \'($57) (Member $57 \'\"t2.Value\"))))\n (let $45 (lambda \'($58) (Member $58 \'\"t1.Fk21\") (Member $58 \'\"t1.Fk22\") (Member $58 \'\"t1.Key\") (Member $58 \'\"t1.Value\") (Member $58 \'\"t2.Fk3\") (Member $58 \'\"t2.Key1\") (Member $58 \'\"t2.Key2\") (Member $58 \'\"t2.Value\")))\n (return (FromFlow (ExpandMap $44 $45)))\n))) $24))\n(let $26 (DqCnMerge (TDqOutput $25 \'0) \'(\'(\'\"7\" \'\"Asc\"))))\n(let $27 (DqPhyStage \'($26) (lambda \'($59) (FromFlow (NarrowMap (Take (ToFlow $59) $22) (lambda \'($60 $61 $62 $63 $64 $65 $66 $67) (AsStruct \'(\'\"t1.Fk21\" $60) \'(\'\"t1.Fk22\" $61) \'(\'\"t1.Key\" $62) \'(\'\"t1.Value\" $63) \'(\'\"t2.Fk3\" $64) \'(\'\"t2.Key1\" $65) \'(\'\"t2.Key2\" $66) \'(\'\"t2.Value\" $67)))))) \'(\'(\'\"_logical_id\" \'1311) \'(\'\"_id\" \'\"9183d1d0-2efe9e11-646c5428-34aa34b0\"))))\n(let $28 \'($18 $25 $27))\n(let $29 (DqCnResult (TDqOutput $27 \'0) \'()))\n(let $30 (KqpTxResultBinding $15 \'0 \'0))\n(let $31 (KqpPhysicalTx $28 \'($29) \'(\'($10 $30)) $8))\n(return (KqpPhysicalQuery \'($9 $31) \'((KqpTxResultBinding (ListType $23) \'1 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 727122 total_cpu_time_us: 699135 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_2\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":17},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key1\\\",\\\"Key2\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\",\"{\\\"DoesExist\\\":tr
ue,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_1\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk21\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Fk22\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744284557\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"901e1502-155cc099-682deca7-536fa57f\",\"version\":\"1.0\"}" >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Inner_2 [GOOD] Test command err: Trying to start YDB, gRPC: 31834, MsgBus: 2451 2025-04-10T11:28:57.325101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645044052516179:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:57.325462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001641/r3tmp/tmpMOluma/pdisk_1.dat 2025-04-10T11:28:58.018546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:58.020435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:58.020512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:58.032391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31834, node 1 2025-04-10T11:28:58.355040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:58.355062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:58.355069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-10T11:28:58.355160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2451 TClient is connected to server localhost:2451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:59.410648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:59.424562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:59.444973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:59.656958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:28:59.894075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.033210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.326268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645044052516179:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:02.326349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:03.206380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645069822321595:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:03.206596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:03.605811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.668538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.747173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.781194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.817505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.879091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.984414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645069822322117:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:03.984488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:03.984852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645069822322122:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:03.988399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:04.014358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645069822322124:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:04.083421Z node 1 :TX_PROXY ERROR: Actor# [1:7491645074117289474:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:05.456247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.517008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.578440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.631183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["FJ_Table_1"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1744284546585,"Host":"ghrun-x5vqckr7qy","OutputRows":2,"StartTimeMs":1744284546584,"IngressRows":2,"ComputeTimeUs":158,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":1,"Bytes":24}],"TaskId":1,"OutputBytes":24}],"PeakMemoryUsageBytes":65536,"DurationUs":1000,"CpuTimeUs":1567}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[5,24]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":3196,"Max":3196,"Min":3196,"History":[5,3196]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[5,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/FJ_Table_2","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":22,"Max":22,"Min":22}}],"BaseTimeMs":1744284546581,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":768,"Max":768,"Min":768,"History":[5,768]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64,"History":[5,64]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2 ... 
2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[8,38]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":4281,"Max":4281,"Min":4281,"History":[8,4281]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":2}}}],"CpuTimeUs":{"Count":1,"Sum":537,"Max":537,"Min":537,"History":[8,537]},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResultBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"OutputBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[8,38]}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[8,38]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":4223,"Max":4223,"Min":4223,"History":[8,4223]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":2}}}],"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":523098,"CpuTimeUs":518977},"ProcessCpuTimeUs":504,"TotalDurationUs":567171,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"4","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":12,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"2","Condition":"t1.Key = t2.Fk1","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"14.4"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"A-Rows":2,"A-SelfCpu":2.033,"A-Cpu":2.033,"A-Size":38,"Name":"TopSort","Limit":"1001","TopSortBy":"[row.t1.Value,row.t2.Value]"}],"Node Type":"TopSort"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.537,"A-Cpu":2.57,"A-Size":38,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start 
YDB, gRPC: 24576, MsgBus: 4732 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001641/r3tmp/tmpvr6zyq/pdisk_1.dat 2025-04-10T11:29:08.598346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:29:08.625750Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:08.645272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:08.650304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:08.659439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24576, node 2 2025-04-10T11:29:08.930829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:08.930854Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:08.930879Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:08.931032Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4732 TClient is connected to server localhost:4732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:09.753979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:09.762921Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:29:09.775442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:09.888592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:10.265575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:10.376785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:13.602299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645114051952269:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.602625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:13.646783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.709854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.801150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.846377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:13.930200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.012466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.188129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645118346920088:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.188226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.188630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645118346920093:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.193055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:14.209468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645118346920095:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:29:14.296521Z node 2 :TX_PROXY ERROR: Actor# [2:7491645118346920148:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:15.571867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:15.644487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:15.696343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:15.758549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 24777, MsgBus: 22360 2025-04-10T11:28:58.724120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645047123137862:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:58.724479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00163f/r3tmp/tmpVAM2Qf/pdisk_1.dat 2025-04-10T11:28:59.603951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:59.616010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:59.616095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:59.617731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24777, node 1 2025-04-10T11:28:59.890811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:59.890835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:59.890842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:59.890940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22360 TClient is connected to server localhost:22360 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:01.000788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.068607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.286261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.501069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:01.648866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:03.722290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645047123137862:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:03.792580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:04.040901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645072892943286:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:04.040995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:04.525498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:04.612460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:04.700532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:04.797169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:04.888010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:04.972968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:05.086342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645077187911106:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.086473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.086986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645077187911111:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.091863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:05.117972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645077187911113:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:05.204680Z node 1 :TX_PROXY ERROR: Actor# [1:7491645077187911169:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:06.709963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.796883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63260, MsgBus: 19747 2025-04-10T11:29:09.159128Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645093011842758:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00163f/r3tmp/tmpzJKMpH/pdisk_1.dat 2025-04-10T11:29:09.239023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:29:09.453732Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:09.474661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:09.474746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:09.483224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63260, node 2 2025-04-10T11:29:09.690916Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:09.690939Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:09.690947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:09.691058Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19747 TClient is connected to server localhost:19747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:29:10.699740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:10.731476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:10.885798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:11.235360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:11.361455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:14.054399Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645093011842758:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:14.054478Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:14.474337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645114486680852:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.474463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.557924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.602624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.644667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.732671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.775624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.835763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:14.931702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645114486681372:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.931792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.932336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645114486681377:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:14.937356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:14.970823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645114486681379:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:15.057234Z node 2 :TX_PROXY ERROR: Actor# [2:7491645118781648730:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:16.356011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:16.460453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 62188, MsgBus: 2355 2025-04-10T11:27:55.822595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644776742921197:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:55.830583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001698/r3tmp/tmpEnkYS7/pdisk_1.dat 2025-04-10T11:27:56.425713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:56.461747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:56.461884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:56.464995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62188, node 1 2025-04-10T11:27:56.686792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:56.686835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:56.686843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:56.686970Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2355 TClient is connected to server localhost:2355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:57.506699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:57.524093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:59.856846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644793922790923:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.856860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644793922790913:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.857022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:59.861869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:59.878625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644793922790927:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:59.951279Z node 1 :TX_PROXY ERROR: Actor# [1:7491644793922790978:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:00.347651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.607633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.668118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.738062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:00.786409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644776742921197:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:00.786470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:00.824264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.028368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.071272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.127367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.166351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.222851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.262859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:01.303347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:01.405842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.128213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:02.210761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.291574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.337983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.426760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.474745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.518471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.557058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.593869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.637804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.687239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.727963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.785238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.826543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.863804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.904691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:02.937912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... 76710714; 2025-04-10T11:28:43.266244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.280324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.290855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.296780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.302333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.312915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.315139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.325772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.328395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.340398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.343915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.351511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.361631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.364079Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.377234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.382168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.387798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.391579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.397169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.400895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.410109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.410643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.419868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.423885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.429413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.433604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.443001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.447650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.453203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.462655Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.466290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.472844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.476750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.487358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.491900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.502716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.507091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.519013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.588359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:43.778728Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqa0yaa407kx2hbtrnzrqb", SessionId: ydb://session/3?node_id=1&id=ZTQ1YjZhMGQtYTFjMDFjNi05NzE5ZmZiYi1jNDAyNTRmNA==, Slow query, duration: 39.223662s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:44.418352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:44.418659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:44.419026Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038170;self_id=[1:7491644828282536354:3008];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:28:44.419417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:11.937734Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbnfj9229r55aa0zp2dxj", SessionId: ydb://session/3?node_id=1&id=ZTQ1YjZhMGQtYTFjMDFjNi05NzE5ZmZiYi1jNDAyNTRmNA==, Slow query, duration: 13.582543s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] Test command err: Trying to start YDB, gRPC: 18917, MsgBus: 1872 2025-04-10T11:27:25.419858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644646317880747:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:25.420368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e2/r3tmp/tmpFyjWMe/pdisk_1.dat 2025-04-10T11:27:25.884117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:25.919370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:25.919478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18917, node 1 2025-04-10T11:27:25.923021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:25.970299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:25.970330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:25.970362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:25.970507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:1872 TClient is connected to server localhost:1872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:26.652446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:26.685149Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:28.524081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644659202783164:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:28.524081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644659202783172:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:28.524181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:28.528274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:28.542786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644659202783178:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:28.632935Z node 1 :TX_PROXY ERROR: Actor# [1:7491644659202783230:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:28.963934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:29.215289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:29.215465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:29.215721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:29.215820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:29.215914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:29.216016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:29.216114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:29.216215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:29.216318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:29.216409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:29.216506Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:29.216623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644663497750792:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:29.224778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:29.224840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:29.225050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:29.225179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:29.225335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:29.225472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:29.225607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:29.225723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:29.225853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:29.225980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:29.226098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:29.226213Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644663497750757:2348];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:29.254592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644663497750782:2350];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:29.254653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644663497750782:2350];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstra ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.263669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.269891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.273599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.280334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.285733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.295701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.301941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.304018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.309961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.319704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.322884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.326342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.329099Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.335074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.338038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.347197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.350901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.358360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.362933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.368064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.372774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.373906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.380123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.387585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.390105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.403843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.406573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.413345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.419657Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.420307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.425598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.433369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.435451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.444872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.447247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.457583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.463451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.463765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.469751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.475214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.488434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.490462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.500491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.501607Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.712480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:57.856697Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqab6efwx3j731pcxb015c", SessionId: ydb://session/3?node_id=1&id=NWY2YzJkZGUtNmFkZjU3YWUtNTU5ZDVmM2ItZWUzODIxMDk=, Slow query, duration: 42.801767s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:58.378844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:58.379288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:58.379429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491644998505258409:11530];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:28:58.380626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpIndexLookupJoin::LeftOnly-StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH14 >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> KqpJoin::CrossJoinCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8657, MsgBus: 18886 2025-04-10T11:28:22.518759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644891280675595:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:22.519377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001674/r3tmp/tmpGADdlF/pdisk_1.dat 2025-04-10T11:28:23.182458Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:23.202523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:23.202601Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:23.215255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8657, node 1 2025-04-10T11:28:23.486607Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:23.486630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:23.486640Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:23.486738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18886 TClient is connected to server localhost:18886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:24.714985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:27.180715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644912755512609:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.180849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.181249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644912755512621:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.185425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:27.201025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644912755512623:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:27.290920Z node 1 :TX_PROXY ERROR: Actor# [1:7491644912755512674:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:27.503277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644891280675595:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:27.503355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:27.945540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.124169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.174650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.252615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.334555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.626954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.680330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.724742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.801210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.852294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.897519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.960038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:29.017483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.867963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:29.909559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.987482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.040302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.095930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.139437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.182164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.241070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.284045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.320371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.371817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.459453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.528884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.587988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.633859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.705889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.780491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.841131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... 10714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.396367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.401343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038446;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.401635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.407232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.407456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.412964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.413808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.419271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.426116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.431969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.437759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.442663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.450085Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.456569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.459964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.468407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.481092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.489651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.491320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.496651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.500956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.502249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.508205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.513625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.518730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.523723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.528789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.533509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.542858Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.547429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.550089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.561518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.564679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.577898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.580275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.592598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.597108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.619551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.623958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.629700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.637668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.645725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.647296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.660890Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:12.830278Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqawb756cx3w14mjby8ppt", SessionId: ydb://session/3?node_id=1&id=OTkzN2I0YTItODlkZjRjNWUtNTg3NjJlY2EtMjM1MGM3ZWM=, Slow query, duration: 40.214075s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:13.199365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:13.199778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:13.200333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644947115257244:2823];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:29:13.200658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
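For readability: the KQP_SLOW_LOG entries in these test blocks (durations of roughly 40-43 s each) all record the same statement as an escaped string. Expanded verbatim from the logged text, with only the \n escapes unfolded, the DDL is:

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);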
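The "Unapplied hint: Rows(R T # 1)" warnings just below are emitted by KqpJoinOrder::TestJoinOrderHintsSimple. As a sketch only — the pragma itself is not shown in this log, and the pragma name, the table names R/T, and the join condition here are assumptions for illustration — a cardinality hint of this shape is typically supplied to the optimizer like so:

-- Sketch, not the test's actual query: 'Rows(R T # 1)' asks the cost-based
-- optimizer to assume the join of R and T yields exactly 1 row ('#' sets the estimate).
PRAGMA ydb.OptimizerHints = 'Rows(R T # 1)';

SELECT *
FROM R
JOIN T
  ON R.id = T.id;

Code 4534 in the warnings below indicates the hint was not applied.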
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpJoin::JoinDupColumnRightPure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] Test command err: Trying to start YDB, gRPC: 1276, MsgBus: 10640 2025-04-10T11:28:23.613468Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644898728301398:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:23.638715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001672/r3tmp/tmpIpOcFo/pdisk_1.dat 2025-04-10T11:28:24.309258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:24.316606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:24.316682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:24.321012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1276, node 1 2025-04-10T11:28:24.553689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:24.553720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:24.553731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:24.553837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10640 TClient is connected to server localhost:10640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:25.594354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:27.750746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644915908171071:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.750890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.751766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644915908171083:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:27.756297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:27.771702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644915908171085:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:28:27.858835Z node 1 :TX_PROXY ERROR: Actor# [1:7491644915908171136:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:28.323252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.465058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.498088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.531310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.571194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.575377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644898728301398:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:28.575419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:28.797283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.829798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.900119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:28.956160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.026821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.093983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.148011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:29.212184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.014815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:30.067164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.132348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.182883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.239536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.301722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.340570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.414956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.499740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.582107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.625994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.708263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.746281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.781644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.816413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.872013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.911573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.962002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.063094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.065325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.070035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.072331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.078298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.078546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.083575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.083967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.088908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.093333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.097447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.100823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.105409Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.110093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.114911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.120537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.125036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038460;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.128928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.137944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.138573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038426;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.143300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.149464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.151935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.155015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.158974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.161313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.168026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038458;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.168920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.173496Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.175185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.179689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.180061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.187257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.187277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.192752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.192753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.200067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.206899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.213345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.214920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.220499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.223978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038454;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.227350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.255135Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.265804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715714; 2025-04-10T11:29:12.330500Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqawga2zjv9m3sad6gzqn7", SessionId: ydb://session/3?node_id=1&id=NzEyZjZkOGItMWM2MzE1NWUtY2I5ZWRhNmEtNjhiOGQ5N2I=, Slow query, duration: 39.551571s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:12.707041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; 2025-04-10T11:29:12.707543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; 2025-04-10T11:29:12.712425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491645027577341861:5240];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:29:12.712885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715716; >> KqpJoin::ExclusionJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRightPure [GOOD] Test command err: Trying to start YDB, gRPC: 17681, MsgBus: 1375 2025-04-10T11:29:14.570657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645117113867212:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:14.570923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001623/r3tmp/tmpfKSexr/pdisk_1.dat 2025-04-10T11:29:15.344877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:15.344984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:15.358408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:15.385780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17681, node 1 2025-04-10T11:29:15.622844Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:15.622863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:15.622875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:15.622986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1375 TClient is connected to server localhost:1375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:16.513960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:16.546289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:16.568162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:16.815815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:17.085919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:17.184974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:19.166887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645138588705315:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:19.166987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:19.467026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.543119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.558471Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645117113867212:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:19.560876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:19.635084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.715466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.754823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.821394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.886488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645138588705835:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:19.886533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:19.886763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645138588705840:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:19.894574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:29:19.910302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645138588705842:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:19.983284Z node 1 :TX_PROXY ERROR: Actor# [1:7491645138588705895:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:21.085915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:21.157535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:21.224157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> OlapEstimationRowsCorrectness::TPCH11 >> KqpJoinOrder::TPCDS34+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ExclusionJoin [GOOD] Test command err: Trying to start YDB, gRPC: 16562, MsgBus: 26824 2025-04-10T11:29:14.966232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645116752647829:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:14.966330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001620/r3tmp/tmpNesoQT/pdisk_1.dat 2025-04-10T11:29:15.678874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:15.678946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:15.683846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:15.722815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16562, node 1 2025-04-10T11:29:15.962592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:15.962612Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:15.962618Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:15.962724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26824 TClient is connected to server localhost:26824 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:16.854467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:16.890597Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:16.913965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:17.125984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:29:17.331983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:17.455728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:19.547195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645138227485959:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:19.547312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:19.905281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:19.932073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645116752647829:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:19.932228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:19.945682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.021595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.068337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.155838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.233966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.334371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645142522453781:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:20.334472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:20.334605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645142522453786:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:20.343117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:29:20.358117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645142522453788:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:20.451004Z node 1 :TX_PROXY ERROR: Actor# [1:7491645142522453844:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:21.862060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:21.917272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:21.968321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpFlipJoin::Right_3 >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 6658, MsgBus: 61103 2025-04-10T11:27:08.114359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644574302051489:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:08.116502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e8/r3tmp/tmpO59NGw/pdisk_1.dat 2025-04-10T11:27:08.501108Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6658, node 1 2025-04-10T11:27:08.562855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:08.562958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:08.652774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:08.701368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:08.701395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:08.701408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:08.701532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61103 TClient is connected to server localhost:61103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:09.514406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:09.535595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:11.583437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644587186954046:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:11.583561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:11.583844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644587186954058:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:27:11.592927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:27:11.611538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644587186954060:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:11.688150Z node 1 :TX_PROXY ERROR: Actor# [1:7491644587186954111:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:12.106946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:12.441935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:12.442225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:12.442546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:12.442694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:12.442807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:12.442924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:12.443052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:12.443181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:12.443327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:12.443472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:12.443578Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:12.443682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644591481921685:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:12.451186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:12.451246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:12.451482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:12.451596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:12.451694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:12.451799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:12.451899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:12.452009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:12.452101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:12.452192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:12.452294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:12.452430Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491644591481921683:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:12.516536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644591481921677:2353];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:12.516610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644591481921677:2353];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.468311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.473631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.480649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.485184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.496222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.502548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.503949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.513057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.514797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.521329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.529228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.544380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.545226Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.551312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.555846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.565452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.566256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.576156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.579917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.582876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.590032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.595959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.601594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.605838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.607093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.611888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.612441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.618469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.618921Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.625049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.625282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.632411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.635164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.638778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.646762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.648304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.657746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.660463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.664831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.669314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.683197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.684555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.692308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.698670Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:35.703691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:36.030456Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9mx631rzym34eq6tbbgb", SessionId: ydb://session/3?node_id=1&id=MjQ1NTBjN2ItMzZkYTVlMjMtZDc1MTMwYTctZTI0MGE1NGM=, Slow query, duration: 43.799447s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:36.793901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:36.793987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:36.794560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644771870584761:7849];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:28:36.794990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 3386, MsgBus: 63235 2025-04-10T11:29:22.724820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645151798972584:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:22.725322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001615/r3tmp/tmpGOP2Co/pdisk_1.dat 2025-04-10T11:29:23.477406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:23.489893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.489969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.496516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 3386, node 1 2025-04-10T11:29:23.802623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:23.802642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:23.802649Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:23.802748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63235 TClient is connected to server localhost:63235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:24.625471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:24.684376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:24.984222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:29:25.234375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:25.358896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.458967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173273810735:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:27.459059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.702239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645151798972584:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:27.702301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:27.796120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.846697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.919639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.958033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.999026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.056046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.138756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645177568778550:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:28.138854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:28.139138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645177568778555:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:28.142739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:29:28.159429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645177568778557:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:28.222390Z node 1 :TX_PROXY ERROR: Actor# [1:7491645177568778612:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:29.601184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.635980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 31887, MsgBus: 4372 2025-04-10T11:29:22.874521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645150923925246:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:22.878365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00160d/r3tmp/tmpUYVt3p/pdisk_1.dat 2025-04-10T11:29:23.508929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:23.530941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.531039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.533113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31887, node 1 2025-04-10T11:29:23.750663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:23.750685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:23.750691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:23.750798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4372 TClient is connected to server localhost:4372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:24.693335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:24.720310Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:29:24.736629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:24.985083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.226362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.320741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.299902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645172398763465:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:27.299981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.606756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.658475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.705708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.744185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.786476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.848912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.882251Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645150923925246:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:27.882396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:27.920172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645172398763980:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:27.920243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:27.920549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645172398763985:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:27.925236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:29:27.939644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645172398763987:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:29:28.044986Z node 1 :TX_PROXY ERROR: Actor# [1:7491645176693731338:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:29.044315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.096490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.154958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.198874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.236888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.276298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7919, MsgBus: 29342 2025-04-10T11:28:17.677070Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644873547915135:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:17.677178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00167b/r3tmp/tmpKTQivh/pdisk_1.dat 2025-04-10T11:28:18.379164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:18.418340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:18.418432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:18.419717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7919, node 1 2025-04-10T11:28:18.731071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:18.731101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:18.731108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:18.731234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29342 TClient is connected to server localhost:29342 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:19.942626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:22.678335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644873547915135:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:22.678408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:22.887311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644895022752282:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:22.887394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644895022752293:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:22.887452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:22.891553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:22.910606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644895022752296:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:23.011246Z node 1 :TX_PROXY ERROR: Actor# [1:7491644895022752347:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:23.433291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.581962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.628271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.667067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.776474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.189826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.245053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.308314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.384895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.477050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.545085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.648438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:24.698302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:25.772050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:28:25.842194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:25.947516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.012235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.053683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.094920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.198634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.284448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.345422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.402566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.459983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.536065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.588596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.639073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.680213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.757304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.823948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:28:26.858812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.073049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.087183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.107724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.121852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.135835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.162701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.177026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.198361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.208276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.211659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.221512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.224221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.237826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.238499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.247570Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.251040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.256755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.260259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.265854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.269091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.274054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.279591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.287534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.297064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.303092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.309278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.327481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.330764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.336989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.341215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.351946Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.354795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.361266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.365534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.376740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.379832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.389771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.390796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.401579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.413662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.417074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.427587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.431045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.437662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.541453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:08.729019Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqarjs9jb61c9v2kn4er3p", SessionId: ydb://session/3?node_id=1&id=NjFiNzI1NWYtOGZlMzI4OWMtNTNmNmVkOTEtYjZjMDMwZg==, Slow query, duration: 39.966351s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:09.551462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:09.551944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:09.552485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491644946562368642:3337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:29:09.552855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] Test command err: Trying to start YDB, gRPC: 26450, MsgBus: 25395 2025-04-10T11:27:33.763172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644684662173950:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.763748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016cf/r3tmp/tmpn8VbDy/pdisk_1.dat 2025-04-10T11:27:34.317859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.317935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.317955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.323426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26450, node 1 2025-04-10T11:27:34.526698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.526722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.526731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.526855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25395 TClient is connected to server localhost:25395 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.338991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.352259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:37.477864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701842043794:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.477985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.478099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701842043806:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.484055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.493554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644701842043808:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.573024Z node 1 :TX_PROXY ERROR: Actor# [1:7491644701842043860:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:38.030733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.378199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.378423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.378734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.378874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.378992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:38.379119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.379281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.379391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.379505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.379615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.379732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.379859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644706137011415:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.385422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.385513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.385692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.385819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.385929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:38.386039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.386434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.386568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.386673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.386794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.386894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.387008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491644706137011431:2355];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.429856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644706137011413:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.429856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644706137011478:2362];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.187209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.199003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.202575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.208157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.210577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.220132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.223929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.232194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.248792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.263046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.271774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.284552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.298976Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.314834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.327346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.331229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.339620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.344706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.353860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.358162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.360723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.364652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.371520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.378710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.385856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.389078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.399436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.404493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.410824Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.417972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.419453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.426349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.430076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.439921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.444231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.453166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.456770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.467489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.470646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.477771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.479961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.492174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.494409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.501834Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.503603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.922428Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqav89abex0s7ttqe4rmwh", SessionId: ydb://session/3?node_id=1&id=OThmNzlhMjMtNGFhNTJjZGMtNTNmOWYxNDUtYTkxZjYyMGU=, Slow query, duration: 44.423714s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:16.379990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:16.380454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:16.381013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645006784769281:9622];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:29:16.381379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoin::CrossJoinCount [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] >> KqpJoinOrder::FiveWayJoin+ColumnStore >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 11303, MsgBus: 11075 2025-04-10T11:29:23.133545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645156226826882:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:23.133915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00160b/r3tmp/tmpzWkB2C/pdisk_1.dat 2025-04-10T11:29:23.964473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:23.966442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.966519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.976879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11303, node 1 2025-04-10T11:29:24.212964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:24.212987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:24.213000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:24.213134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11075 TClient is connected to server localhost:11075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:25.385635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.413324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:25.428779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.697595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:26.036444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:26.170287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.126250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645156226826882:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:28.126333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:28.762001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645177701665019:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:28.762128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.160997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.210217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.277166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.308778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.351907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.428943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.498894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645181996632833:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.498980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.499318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645181996632838:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.503134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:29.515992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645181996632840:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:29.622307Z node 1 :TX_PROXY ERROR: Actor# [1:7491645181996632896:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:30.744626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:30.801166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:30.845236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinDupColumnRight ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22764, MsgBus: 31768 2025-04-10T11:28:37.974253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644959597426543:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:37.974776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001666/r3tmp/tmp0MmDFi/pdisk_1.dat 2025-04-10T11:28:38.736412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:38.759707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:38.759809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:38.775977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22764, node 1 2025-04-10T11:28:39.102672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:39.102702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:39.102710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:39.102812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31768 TClient is connected to server localhost:31768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:40.277448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:40.303095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:42.873113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644981072263690:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:42.873249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:42.873645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644981072263702:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:42.878112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:42.889877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644981072263704:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:42.966523Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644959597426543:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:42.966844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:42.972380Z node 1 :TX_PROXY ERROR: Actor# [1:7491644981072263756:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:43.318112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.540293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.583866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.622958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.693645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.877370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.912068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:43.974998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.050166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.128752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.188377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:44.242065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:44.276526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.251811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:45.310062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.341990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.375413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.407177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.439362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.469109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.536900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.575744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.613177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.648374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.697675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.733885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.779329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.817361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.853744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.887073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.862846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.869770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.873818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.884679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.888644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.900020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038454;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.903208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.913706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.915466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.925593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.933916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.949064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.956872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.972438Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.978273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.986716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038498;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:22.992508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.001325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.014710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.024707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.028907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.043401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.047204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.055185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.061615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.069633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.072222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.080691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.081256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.104027Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.111245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.117758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.118960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038458;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.125117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.129592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.136582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.143731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.153225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.158640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.167692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.170136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.182923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.184915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.201449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.219036Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:23.381325Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbavf4evcx9n2mkxsedfr", SessionId: ydb://session/3?node_id=1&id=YzhhZTFhYTctMmRlNGJjYjktNjdjNTA3OWItNTlmYjE0NjA=, Slow query, duration: 35.908928s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:23.667125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:23.667550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:23.668007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491645015432008916:3004];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-04-10T11:29:23.668359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 15286, MsgBus: 28033 2025-04-10T11:28:23.720313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644897347059271:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:23.720365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001673/r3tmp/tmpFZz6z9/pdisk_1.dat 2025-04-10T11:28:24.380872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:24.383472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:24.383730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:24.388106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15286, node 1 2025-04-10T11:28:24.640813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:24.640834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:24.640840Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:24.640980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28033 TClient is connected to server localhost:28033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:25.691797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:25.715435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:28.751606Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644897347059271:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:28.751723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:28.783315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644918821896419:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:28.783448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:28.790314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644918821896431:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:28.796903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:28.812519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644918821896433:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:28.913106Z node 1 :TX_PROXY ERROR: Actor# [1:7491644918821896484:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:29.296212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.488267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.535040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.585771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.630853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.905804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:29.968846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.017435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.094291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.139370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.185933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.254546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:28:30.293568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.376103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:28:31.459360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.502041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.573491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.616463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.666217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.734729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.815931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.867919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.929994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:31.980179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.024106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.085137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.134330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.186776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.239848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:32.311248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.843179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.853706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.857660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.870130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.878578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.880841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.885539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.892219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.903950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.922207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.928790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.943316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.958007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.969057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.980789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.983363Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.991129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:10.995767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.001447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.006229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.013776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.018629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.027817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.031245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.041618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.044185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.051974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.056999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.070010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.074561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.081327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.084802Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.093212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.097070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.110276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.115528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.122124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.124848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.138256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.142348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.148605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.151680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.162126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.163889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.179060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:11.322450Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqaxse50pvxd3mxbhmc098", SessionId: ydb://session/3?node_id=1&id=ZTlhZGY4NzQtMTZiNWIxMmQtY2Y2N2UxZDQtODdkMmIyNjY=, Slow query, duration: 37.227060s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:11.970347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:11.970784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:11.971785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491645034786035616:5526];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:29:11.972260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] Test command err: Trying to start YDB, gRPC: 24897, MsgBus: 18791 2025-04-10T11:27:35.645237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644689730408515:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:35.646644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ca/r3tmp/tmp4eurps/pdisk_1.dat 2025-04-10T11:27:36.260322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:36.261587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:36.261655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:36.270393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24897, node 1 2025-04-10T11:27:36.514673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:36.514698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:36.514705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:36.514807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18791 TClient is connected to server localhost:18791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:37.399400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.427724Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:39.770218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644706910278241:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:39.770321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:39.770399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644706910278253:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:39.774774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:39.786616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644706910278255:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:39.852346Z node 1 :TX_PROXY ERROR: Actor# [1:7491644706910278306:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:40.289377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:40.584148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:40.584520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:40.584830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:40.584960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:40.585090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:40.585404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:40.585533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:40.585676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:40.585794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:40.585895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:40.586011Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:40.586110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644711205245863:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:40.587122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:40.587186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:40.587380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:40.587493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:40.587593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:40.587703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:40.587815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:40.587939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:40.588056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:40.588176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:40.588285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:40.588398Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491644711205245899:2357];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:40.625085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644711205245909:2362];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:40.625154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644711205245909:2362];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... 10714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.768573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.774047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.776592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.785014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.788494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.794069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.797463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.808032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.810363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.817572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.819439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.826114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.829323Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.839638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.841357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.851379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.864677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.874546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.888207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.897490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.911039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.920721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.944440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.966675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.991156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.997253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.013321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.030795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.032566Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.040302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.043845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.054036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.057055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.074493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.078423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.085405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.092539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.107068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.114961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.121180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.129433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.146973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.152059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.331201Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.402588Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqavz65nbh14zp6z1p9q75", SessionId: ydb://session/3?node_id=1&id=NTEwYjc1ZjktYWM5OTE4MzItM2ZjZDBlZmUtMTdjYzgwYWI=, Slow query, duration: 43.171610s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:15.761644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.762033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.762530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645011853003849:9572];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-10T11:29:15.762857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
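For reference, this is the query text embedded in the KQP_SLOW_LOG record above (the 43.171610s slow query), with its \n escapes expanded; the statements are verbatim from the log, only line breaks and indentation are reconstructed. STORE = COLUMN makes these column-oriented tables (hence the TX_COLUMNSHARD tablet activity throughout this log), and AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 asks for at least 240 partitions (column shards) per table.

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);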
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
>> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD]
>> KqpFlipJoin::Right_3 [GOOD]
>> KqpIndexLookupJoin::CheckAllKeyTypesCast
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 29413, MsgBus: 5027 2025-04-10T11:29:22.946631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645152933507868:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:22.946685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001617/r3tmp/tmpTssK2a/pdisk_1.dat 2025-04-10T11:29:23.764062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:23.775665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.775747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.783148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29413, node 1 2025-04-10T11:29:24.006841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:24.006860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:24.006865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:24.006956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5027 TClient is connected to server localhost:5027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:25.021209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.053141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:25.311824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.640927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.752595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.731913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645174408346122:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.732008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.954085Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645152933507868:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:27.963503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:28.028913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.067767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.140815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.178794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.223091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.281801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.365254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645178703313934:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:28.365310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:28.365463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645178703313939:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:28.369357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:28.384186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645178703313941:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:28.479071Z node 1 :TX_PROXY ERROR: Actor# [1:7491645178703313997:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull
>> KqpIndexLookupJoin::Inner+StreamLookup [GOOD]
>> KqpIndexLookupJoin::Inner-StreamLookup
>> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup
>> OlapEstimationRowsCorrectness::TPCH21
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD]
Test command err:
Trying to start YDB, gRPC: 2307, MsgBus: 2097 2025-04-10T11:27:39.088753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644709542555512:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:39.095062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c1/r3tmp/tmpDJUAhu/pdisk_1.dat 2025-04-10T11:27:39.523775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:39.523908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:39.528112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:39.570679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2307, node 1 2025-04-10T11:27:39.846476Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:39.846500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:39.846513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:39.846614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2097 TClient is connected to server localhost:2097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:27:40.788861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:40.842339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:43.168253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644726722425222:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.168394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.168765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644726722425234:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.173234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:43.196830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644726722425236:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:43.291082Z node 1 :TX_PROXY ERROR: Actor# [1:7491644726722425287:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:43.722229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.060403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.060747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:44.061097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:44.061253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:44.061358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:44.061499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:44.061635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:44.061763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:44.061874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:44.061996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:44.062104Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:44.062215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644726722425547:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:44.069275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.069445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:44.069655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:44.069785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:44.069908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:44.070020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:44.070121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:44.070251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:44.070424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:44.070572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:44.070697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:44.070792Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7491644726722425559:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:44.159646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644726722425545:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.159725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644726722425545:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.706781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.712844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.715113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.720468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.726183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.727199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.738365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.745458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.748777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.751280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.754889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.760031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.760871Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.766782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.766981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.774038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.778964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.780048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.785512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.787854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.796709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.797792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.803787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.808781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.809852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.814871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.819884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.820322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.825576Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.830297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.835198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.840508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.844771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.854328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.859132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.859781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.864902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.868380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.871615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.877950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.880903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.889410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.895368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.901323Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:18.086263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:18.174833Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqax7q63xdccd0tva4j40b", SessionId: ydb://session/3?node_id=1&id=MWI2ZTQzMmMtM2M0NDRhOTAtYTI5ZTFiMmMtOGFlNDY3ZmI=, Slow query, duration: 44.647116s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:18.664016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:18.664472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:18.665232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645078909800470:11360];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:29:18.665632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> OlapEstimationRowsCorrectness::TPCH10 [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD]
>> KqpJoinOrder::TPCDS95+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 5311, MsgBus: 27401 2025-04-10T11:27:33.673437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644683914880869:2168];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.673879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d4/r3tmp/tmpDLpYgt/pdisk_1.dat 2025-04-10T11:27:34.457272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.457365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.461263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.475972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 5311, node 1 2025-04-10T11:27:34.598771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.598795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.598806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.598933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27401 TClient is connected to server localhost:27401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.488383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:35.506952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:37.529322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701094750619:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.529400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701094750609:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.529672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.533314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.548791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644701094750623:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.642959Z node 1 :TX_PROXY ERROR: Actor# [1:7491644701094750674:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:37.983296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.303147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.303361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.303655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.303801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.303930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:38.304058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.304183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.304319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.304436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.304548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.304656Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.304761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644705389718236:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.308307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.308407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.308642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.308754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.308858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:38.308963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.309074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.309208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.309333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.309440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.309546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.309646Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644705389718232:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.365851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491644705389718249:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.365851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705389718240:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.670436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.677614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.687321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.693826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.696019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.699440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.707985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.711570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.720970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.722577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.728036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.732141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.736103Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.739952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.744592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.746712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.751852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.755792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.761550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.765010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.769320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.774965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.779786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.783877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.786468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.791262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.792777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.797410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.879864Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.887803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.894014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.900387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.906633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.912544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.918457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.924090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.930028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.936064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.942508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.946886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.949799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.954076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.956454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.962386Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.965214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:29:14.094682Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqas1p6nfp93w1zfq9dym6", SessionId: ydb://session/3?node_id=1&id=OWRhMTM2NmQtOTBiZDM5NS00OGU0NTQ2Yy0yNDg3ZjEwMg==, Slow query, duration: 44.855864s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:29:14.447081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:14.447497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:14.449485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491644997447541171:9528];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-10T11:29:14.449837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> KqpJoin::LeftJoinPushdownPredicate_NoPushdown
>> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD]
Test command err:
Trying to start YDB, gRPC: 20831, MsgBus: 27728
2025-04-10T11:27:33.714876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644684183928150:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:33.715508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ce/r3tmp/tmpvhTz7u/pdisk_1.dat 2025-04-10T11:27:34.146622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:34.146712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:34.148813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20831, node 1 2025-04-10T11:27:34.215260Z node 1
:GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:27:34.216741Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:27:34.274438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:34.350612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:34.350635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:34.350641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:34.350748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27728 TClient is connected to server localhost:27728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:35.293341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:37.415576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701363797858:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.415678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.415761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644701363797868:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:37.420465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:37.436997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644701363797872:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:37.515538Z node 1 :TX_PROXY ERROR: Actor# [1:7491644701363797923:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:37.924822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:38.221935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.222217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.222532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.222583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.222624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:38.222669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.222772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:38.222783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:38.222881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:38.222896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.222973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
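For readability, the DDL behind the two KQP_SLOW_LOG entries in this part of the log (TraceId "01jrfqas1p6nfp93w1zfq9dym6" above, TraceId "01jrfqaqkv8faq6zckpwgc94dt" below) is reproduced here with the \n escapes expanded. The statement text is verbatim from the log; only the indentation is approximated:

  CREATE TABLE t1 (
      id1 Int32 NOT NULL,
      PRIMARY KEY (id1)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t2 (
      id2 Int64 NOT NULL,
      t1_id1 Int64 NOT NULL,
      -- random_field2 Int32
      PRIMARY KEY (id2)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t3 (
      id3 Int16 NOT NULL,
      -- random_field3 Int32
      PRIMARY KEY (id3)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Both slow-query entries (44.855864s and 45.918432s) record this same script. Three column-store tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 each is also consistent with the long runs of per-tablet "event=finished_tx" lines surrounding these entries: one WARN line per column shard participating in the schema transaction.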
2025-04-10T11:27:38.223050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.223091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:38.223168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.223184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:38.223291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.223313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:38.224073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.224203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.224300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491644705658765480:2349];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.227943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:38.228084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:38.228189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:38.228306Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644705658765486:2352];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:38.272729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644705658765521:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:38.272792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644705658765521:2358];tablet_id=72075186224037897;process=TTxInitSchema::Exe ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.183979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.187965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.196500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.199901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.210036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.210947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.219754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.222128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.240663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.244435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.251380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.255117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.264646Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.272111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.292131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.298330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.305504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.308751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.312197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.314494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.318616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.320480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.324565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.329398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.331283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.336864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.339580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.343641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.345692Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.349081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.353477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.356012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.359820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.361728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.366124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.367999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.372424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.373937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.377910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.384141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.389709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.397256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.489089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.552805Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:13.619772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:29:13.690615Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqaqkv8faq6zckpwgc94dt", SessionId: ydb://session/3?node_id=1&id=MzRhNzUwNzEtYWFkZWQzNjQtZmI3YTBhMTQtMmRjODgzM2M=, Slow query, duration: 45.918432s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:29:14.051578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:14.052015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:14.052581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491644993421621853:9604];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-10T11:29:14.052922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore
>> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 63979, MsgBus: 17214
2025-04-10T11:29:22.539311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645151696998626:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:22.539496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161c/r3tmp/tmpFN4O6n/pdisk_1.dat 2025-04-10T11:29:23.387165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:23.393876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.393995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.396792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState:
Connecting -> Connected TServer::EnableGrpc on GrpcPort 63979, node 1 2025-04-10T11:29:23.658696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:23.658733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:23.658743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:23.658957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17214 TClient is connected to server localhost:17214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:24.437085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:24.459839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:24.483602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:24.748266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.016368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:25.139010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.119069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173171836666:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.119168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.548052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645151696998626:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:27.548110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:27.611304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.662125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.704329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.745088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.796769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.841013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:27.933368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173171837184:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.933444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.933759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173171837189:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.937455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:27.955018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:29:27.955302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645173171837191:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:28.013681Z node 1 :TX_PROXY ERROR: Actor# [1:7491645177466804543:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:29.151351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:29.259764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4269, MsgBus: 11760 2025-04-10T11:29:31.224243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645191138069555:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:31.224716Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161c/r3tmp/tmpNFCwSz/pdisk_1.dat 2025-04-10T11:29:31.412554Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:31.445871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:31.445950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:31.455149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4269, node 2 2025-04-10T11:29:31.598636Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:31.598657Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:31.598665Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:31.598767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11760 TClient is connected to server localhost:11760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
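A recurring pattern in these test logs, visible again just below for node 2, is the triple: a NOT_FOUND "Resource pool default not found or you don't have access permissions", a "Scheduled retry for error: ... completed, doublechecking", and a TX_PROXY "path exist, request accepts it" error. Reading the sequence, this appears to be the workload service lazily provisioning the default resource pool on first query: concurrent pool fetchers miss, a creator issues ESchemeOpCreateResourcePool, and a losing concurrent create fails benignly because /Root/.metadata/workload_manager/pools/default already exists. As a purely illustrative sketch of the object being provisioned (the statement and its option names are assumptions for illustration, not taken from this log; the tests themselves rely on the implicit default pool and run nothing like this):

  -- Hypothetical sketch only: option names and values are assumptions.
  -- Models the pool at /Root/.metadata/workload_manager/pools/default
  -- that the TPoolCreatorActor is racing to create in the log below.
  CREATE RESOURCE POOL default WITH (
      CONCURRENT_QUERY_LIMIT = -1,  -- assumed "unlimited" sentinel
      QUEUE_SIZE = -1               -- assumed "unbounded" sentinel
  );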
2025-04-10T11:29:32.363747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:32.386445Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:29:32.407185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:32.543165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:32.900601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:33.029080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.021536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645212612907603:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:36.021644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:36.064644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.116498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.137875Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645191138069555:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:36.137930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:36.168119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.203796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.242983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.322722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:36.393196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645212612908120:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:36.393274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:36.393612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645212612908125:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:36.396604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:36.408178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645212612908127:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:29:36.500328Z node 2 :TX_PROXY ERROR: Actor# [2:7491645212612908183:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:37.497283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:37.607774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 [GOOD]
Test command err:
Trying to start YDB, gRPC: 5403, MsgBus: 27405
2025-04-10T11:27:41.698075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644718198613008:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:41.698128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016b8/r3tmp/tmp6MZBSt/pdisk_1.dat 2025-04-10T11:27:42.282069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:42.317405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:42.317487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:42.323210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5403, node 1 2025-04-10T11:27:42.514385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:42.514412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:42.514423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:42.514545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27405 TClient is connected to server localhost:27405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:43.501578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:43.531085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:45.993940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644735378482867:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.994188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644735378482879:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:45.998655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:46.001420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:46.013517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644735378482881:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:46.090345Z node 1 :TX_PROXY ERROR: Actor# [1:7491644739673450228:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:46.550935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:46.702311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644718198613008:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:46.702373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:46.858054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:46.858415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:46.858743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:46.858915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:46.859173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:46.859303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:46.859418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:46.859525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:46.859656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:46.859765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:46.859863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:46.859955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644739673450491:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:46.915127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:46.926265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:46.928843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:46.929049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:46.929148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:46.929262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:46.929364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:46.929473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:46.929581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:46.929698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:46.929806Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:46.929905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644739673450489:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:46.977568Z node 1 :TX_ ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.619937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.620246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.625718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.625831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.631905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.632368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.638808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.641470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.644627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.648575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.651382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.656260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.657455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.663149Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.663781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.672284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.673512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.684542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.685713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.691947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.693297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.698930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.699158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.705873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.706164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.713114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.717309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.719312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.726188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.731988Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.733534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.741011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.748008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.750572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.754508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.761098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.764875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.775858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.780793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.785763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.790162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.800613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.802686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.821893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:17.831822Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:18.114720Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqb02x8t2bvpfmjd05a7es", SessionId: ydb://session/3?node_id=1&id=ZGU2MjYwZDctZDlkNGQxZGQtOWZhNmM5YjAtNjIzYWU1MDY=, Slow query, duration: 41.668633s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:18.490312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:18.490885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:18.495640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645018846371012:9370];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:29:18.496015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoin::RightSemiJoin_KeyPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 25936, MsgBus: 7186 2025-04-10T11:27:13.128144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644598173526239:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:13.128181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016e6/r3tmp/tmpr7qXhF/pdisk_1.dat 2025-04-10T11:27:13.924396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:13.924501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:13.926022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:13.974280Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25936, node 1 2025-04-10T11:27:14.146977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:14.146998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:14.147006Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:14.147107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7186 TClient is connected to server localhost:7186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:14.813382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:14.838419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:16.967980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644611058428786:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:16.968155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:16.968566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644611058428798:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:16.972752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:16.988347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644611058428800:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:17.074261Z node 1 :TX_PROXY ERROR: Actor# [1:7491644615353396147:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:17.486409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:17.825376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:17.826192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:17.826435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:17.826562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:17.826682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:17.826798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:17.826907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:17.827027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:17.827155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:17.827257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:17.827388Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:17.827535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644615353396457:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:17.831638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:17.831689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:17.831849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:17.831970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:17.832057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:17.832152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:17.832285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:17.832381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:17.832473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:17.832603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:17.832724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:17.833775Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644615353396435:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:17.868207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644615353396474:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:17.868260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644615353396474:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstra ... X WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.831137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.839073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.846945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.852698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.856497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.861368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.862216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.868282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.874099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.874366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.880093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.885378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.889684Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.895029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.903195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.908599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.912850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.918249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.926774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.932744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.938233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.938462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.945666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.950801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.960608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.962696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.972983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:41.976134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:28:41.980178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:42.193423Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfq9vv43p5xw216d8wm469p", SessionId: ydb://session/3?node_id=1&id=ZTE4MjMwM2EtZDk2OTkyNWYtZDU0YjJmNGEtODMxMGRlY2I=, Slow query, duration: 42.860434s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:42.847247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:42.847730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:42.848048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491644873051481714:9503];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:28:42.848774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:28.131350Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbthpa8ph7mxq4gxx3f90", SessionId: ydb://session/3?node_id=1&id=ZTE4MjMwM2EtZDk2OTkyNWYtZDU0YjJmNGEtODMxMGRlY2I=, Slow query, duration: 24.588241s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$ws =\n\n (select date_dim.d_year AS ws_sold_year, web_sales.ws_item_sk ws_item_sk,\n\n web_sales.ws_bill_customer_sk ws_customer_sk,\n\n sum(ws_quantity) ws_qty,\n\n sum(ws_wholesale_cost) ws_wc,\n\n sum(ws_sales_price) ws_sp\n\n from web_sales as web_sales\n\n left join web_returns as web_returns on web_returns.wr_order_number=web_sales.ws_order_number and web_sales.ws_item_sk=web_returns.wr_item_sk\n\n join date_dim as date_dim on web_sales.ws_sold_date_sk = date_dim.d_date_sk\n\n where wr_order_number is null\n\n group by date_dim.d_year, web_sales.ws_item_sk, web_sales.ws_bill_customer_sk\n\n );\n\n$cs =\n\n (select date_dim.d_year AS cs_sold_year, catalog_sales.cs_item_sk cs_item_sk,\n\n catalog_sales.cs_bill_customer_sk cs_customer_sk,\n\n sum(cs_quantity) cs_qty,\n\n sum(cs_wholesale_cost) cs_wc,\n\n sum(cs_sales_price) cs_sp\n\n from catalog_sales as catalog_sales\n\n left join catalog_returns as catalog_returns on catalog_returns.cr_order_number=catalog_sales.cs_order_number and catalog_sales.cs_item_sk=catalog_returns.cr_item_sk\n\n join date_dim as date_dim on catalog_sales.cs_sold_date_sk = 
date_dim.d_date_sk\n\n where cr_order_number is null\n\n group by date_dim.d_year, catalog_sales.cs_item_sk, catalog_sales.cs_bill_customer_sk\n\n );\n\n$ss=\n\n (select date_dim.d_year AS ss_sold_year, store_sales.ss_item_sk ss_item_sk,\n\n store_sales.ss_customer_sk ss_customer_sk,\n\n sum(ss_quantity) ss_qty,\n\n sum(ss_wholesale_cost) ss_wc,\n\n sum(ss_sales_price) ss_sp\n\n from store_sales as store_sales\n\n left join store_returns as store_returns on store_returns.sr_ticket_number=store_sales.ss_ticket_number and store_sales.ss_item_sk=store_returns.sr_item_sk\n\n join date_dim as date_dim on store_sales.ss_sold_date_sk = date_dim.d_date_sk\n\n where sr_ticket_number is null\n\n group by date_dim.d_year, store_sales.ss_item_sk, store_sales.ss_customer_sk\n\n );\n\n-- start query 1 in stream 0 using template query78.tpl and seed 1819994127\n\n select\n\nss_sold_year, ss_item_sk, ss_customer_sk,\n\ncast(ss_qty as double)/(coalesce(ws_qty,0)+coalesce(cs_qty,0)) ratio,\n\nss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price,\n\ncoalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty,\n\ncoalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost,\n\ncoalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price\n\nfrom $ss ss\n\nleft join $ws ws on (ws.ws_sold_year=ss.ss_sold_year and ws.ws_item_sk=ss.ss_item_sk and ws.ws_customer_sk=ss.ss_customer_sk)\n\nleft join $cs cs on (cs.cs_sold_year=ss.ss_sold_year and cs.cs_item_sk=ss.ss_item_sk and cs.cs_customer_sk=ss.ss_customer_sk)\n\nwhere (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001\n\norder by\n\n ss_sold_year, ss_item_sk, ss_customer_sk,\n\n store_qty desc, store_wholesale_cost desc, store_sales_price desc,\n\n other_chan_qty,\n\n other_chan_wholesale_cost,\n\n other_chan_sales_price,\n\n ratio\n\nlimit 100;\n\n\n\n-- end query 1 in stream 0 using template query78.tpl", parameters: 0b >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16131, MsgBus: 1672 2025-04-10T11:28:46.392482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644996134894783:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:46.392514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001652/r3tmp/tmpt1qgU5/pdisk_1.dat 2025-04-10T11:28:47.260836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:47.309068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:47.309161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:47.319256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16131, node 1 2025-04-10T11:28:47.599051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-10T11:28:47.599073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:47.599080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:47.599177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1672 TClient is connected to server localhost:1672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:48.600180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:48.615969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:50.848079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645013314764576:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:50.848213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:50.848613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645013314764588:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:50.852767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:50.886076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645013314764590:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:50.982422Z node 1 :TX_PROXY ERROR: Actor# [1:7491645013314764641:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:51.397849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644996134894783:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:51.398135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:51.409314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.561785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.618613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.655411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.707112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.906956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:51.985045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.027028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.069164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.141246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.215534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:52.266981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:52.316867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.193512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:53.252442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.295117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.339686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.437163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.509973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.553481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.610189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.645236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.678607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.748690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.794395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.835295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.870391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.903550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:53.945834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:54.019560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.340365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.341776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.348081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.349009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.353268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.356410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.360988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.362732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.368211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.369811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.377948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.384174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.384532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.391369Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.397037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.399493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.404402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.404662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.410359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.410444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.415983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.417835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.422383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.423947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.428228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.429870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.433515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.436463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.439233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.441813Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.445921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.449053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.451943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.454901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.457522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.460751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.463917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.466429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.469536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.471805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.475200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.477234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.481413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.487119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.553955Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.570400Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbjsy0temqn000j5p1t62", SessionId: ydb://session/3?node_id=1&id=YjhmMmNiNmYtYjRiN2FjYTAtYTNiMjhiMDItMmM3YzNiMmY=, Slow query, duration: 35.955013s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:31.930434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:31.930575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:31.930782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491645155048711820:5966];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-10T11:29:31.931072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH15 >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> KqpJoin::JoinDupColumnRight [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup >> KqpJoin::JoinAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] Test command err: Trying to start YDB, gRPC: 15654, MsgBus: 8525 2025-04-10T11:27:39.503357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644707502371207:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:39.504435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016bc/r3tmp/tmpMn35H0/pdisk_1.dat 2025-04-10T11:27:40.251431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:40.253385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:40.253455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:40.259507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15654, node 1 2025-04-10T11:27:40.544760Z 
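For readability: the escaped query text in the KQP_SLOW_LOG entry above (duration: 35.955013s) unescapes to the following YQL DDL. This is a verbatim reconstruction from the \n escapes captured in the log, not new output; the same statement text recurs in the later slow-query entries below (durations 44.185549s and 41.800371s), so it is not repeated there.

CREATE TABLE t1 (
 id1 Int32 NOT NULL,
 PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
 id2 Int64 NOT NULL,
 t1_id1 Int64 NOT NULL,
 -- random_field2 Int32
 PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
 id3 Int16 NOT NULL,
 -- random_field3 Int32
 PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);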
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:40.544781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:40.544790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:40.544893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8525 TClient is connected to server localhost:8525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:41.321357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:41.346391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:43.576017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644724682240922:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.576086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644724682240933:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.576140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:43.580308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:43.594783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644724682240936:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:43.694560Z node 1 :TX_PROXY ERROR: Actor# [1:7491644724682240987:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:44.061398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:44.396071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.396277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:44.396543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:44.396644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:44.396738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:44.396875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:44.396994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:44.397094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:44.397212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:44.397317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:44.397424Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:44.397520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491644728977208550:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:44.402069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.402125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:44.410519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:44.410732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:44.410855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:44.410958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:44.411091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:44.411203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:44.411316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:44.411410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:44.411506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:44.411601Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644728977208558:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:44.464024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644728977208552:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:44.464090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491644728977208552:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstra ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:19.979127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:19.984735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:19.987388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.001118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.006403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.012424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.016045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.028304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.032200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.034083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.042573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.052013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.055358Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.069113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.079324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.087120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.088876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.096648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.105378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.114024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.118861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.135134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.136887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.140335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.147219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.152384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.157195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.167706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.169197Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.176835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.182426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.189750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.191592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.197527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.198807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.209311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.211686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.230517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.232896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.237701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.243417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.251602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.257788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.264523Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.267945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:20.546047Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqb008a6je53mqfqfr8jcx", SessionId: ydb://session/3?node_id=1&id=MjY0MDUyODktNTFjODFiMzgtNTVlZjIyZDctZTIyODMxNGM=, Slow query, duration: 44.185549s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:21.138376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:21.138843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:21.139660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644969495413617:7902];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:29:21.140031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 24593, MsgBus: 14114 2025-04-10T11:29:34.984544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645203105481304:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:34.984609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015fa/r3tmp/tmpg074f1/pdisk_1.dat 2025-04-10T11:29:35.511204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:35.542497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:35.542563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:35.546746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24593, node 1 2025-04-10T11:29:35.667809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-10T11:29:35.667831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:35.667839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:35.667959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14114 TClient is connected to server localhost:14114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:36.444962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.473936Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:36.501115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.752527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:37.039103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:37.166885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.816697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645220285352037:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:38.816817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.179743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.227677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.311284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.375510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.480264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.572656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.681194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645224580319856:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.681283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.686312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645224580319861:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.695318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:39.718346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645224580319863:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:39.774397Z node 1 :TX_PROXY ERROR: Actor# [1:7491645224580319918:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:39.970313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645203105481304:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:39.970370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:40.781412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:40.830954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:40.907417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 6291, MsgBus: 21227 2025-04-10T11:27:37.102899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644697745151937:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:37.106808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c7/r3tmp/tmp67XJlj/pdisk_1.dat 2025-04-10T11:27:37.667447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:37.668670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:37.668769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:37.683168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6291, node 1 2025-04-10T11:27:37.914067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:37.914100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:37.914108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:37.914277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21227 TClient is connected to server localhost:21227 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:38.773660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:40.946324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644710630054491:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:40.946425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644710630054483:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:40.946753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:40.952049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:40.969294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644710630054497:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:41.048898Z node 1 :TX_PROXY ERROR: Actor# [1:7491644714925021845:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:41.429356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:41.735427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:41.735677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:41.735969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:41.736109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:41.736249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:41.736385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:41.736504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:41.736614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:41.736758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:41.736865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:41.737002Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:41.737136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644714925022122:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:41.739836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:41.739934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:41.740164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:41.740277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:41.740384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:41.740528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:41.740632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:41.740752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:41.740875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:41.740999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:41.741144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:41.741260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644714925022112:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:41.781304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644714925022124:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:41.781373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644714925022124:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:41.781623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_ ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.794476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.799547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.801427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.805443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.807606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.811168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.813279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.816482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.819282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.822620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.825005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.828285Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.830744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.833635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.835782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.838543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.840827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.847694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.854609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.854996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.860482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.860545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.868005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.873524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.874493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.879621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.880437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.886068Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.886843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.892690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.893015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.898605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.899620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.905037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.905725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.911176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.912126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.917151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.918651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.923056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.924721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.929320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.931432Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.935788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.937281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.161269Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqax2g8qe48qt4y58z6mpp", SessionId: ydb://session/3?node_id=1&id=Yjg2MDA3YzctZTk1NjgxY2EtM2M3MzJiNWYtNDMyMzQ2Ng==, Slow query, duration: 41.800371s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:15.996378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.996799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.997394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645075702333270:11637];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:29:15.997734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10794, MsgBus: 22058 2025-04-10T11:29:27.399201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645172369170779:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:27.399232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001600/r3tmp/tmpI0OSPf/pdisk_1.dat 2025-04-10T11:29:28.121821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-10T11:29:28.121908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:28.131015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:28.164913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10794, node 1 2025-04-10T11:29:28.354638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:28.354657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:28.354663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:28.354760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22058 TClient is connected to server localhost:22058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:29.210141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:29.278356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:29.308534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:29.592899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:29.828458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:29.949730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:31.855443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645189549041737:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:31.855572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.356912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.402324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.406319Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645172369170779:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:32.407672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:32.560457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.625905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.700896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.809230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.948114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645193844009556:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.948234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.948673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645193844009561:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.953643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:32.967820Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:29:32.968020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645193844009563:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:33.050910Z node 1 :TX_PROXY ERROR: Actor# [1:7491645198138976914:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:34.727300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.775808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.836903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.952432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:35.026461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:35.066325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13860, MsgBus: 2934 2025-04-10T11:29:37.230813Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645214404922128:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:37.327467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001600/r3tmp/tmp3quS0e/pdisk_1.dat 2025-04-10T11:29:37.486454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:37.513042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:37.513138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:37.518371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13860, node 2 2025-04-10T11:29:37.758722Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:37.758746Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:37.758752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:37.758898Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2934 TClient is connected to server localhost:2934 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:38.440314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.451412Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:38.464717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.601579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.838559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:38.987446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.578417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645231584792956:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.578516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.700140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.735638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.773011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.809456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.844774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.918841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.995744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645231584793473:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.995825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.996033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645231584793478:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.000225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:42.013266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645231584793480:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:42.084944Z node 2 :TX_PROXY ERROR: Actor# [2:7491645235879760831:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:42.203453Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645214404922128:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:42.203513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:43.222455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.323736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.376786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.429592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.514907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.585559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinLeftPureFull >> KqpJoinOrder::CanonizedJoinOrderTPCH11 >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 31794, MsgBus: 29019 2025-04-10T11:29:39.257202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645222216224421:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:39.257250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ee/r3tmp/tmpsaHQ4j/pdisk_1.dat 2025-04-10T11:29:39.859236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:39.864898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:39.893141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:39.900190Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31794, node 1 2025-04-10T11:29:40.114923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:40.114945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:40.114951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:40.115061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29019 TClient is connected to server localhost:29019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:40.937286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:40.954418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:40.973329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:41.285910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:41.625329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:41.736302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:43.896347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645239396095264:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.896466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:44.178261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645222216224421:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:44.197565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:44.255894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.294329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.364188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.421477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.476325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.520849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.619439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645243691063077:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:44.619554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:44.619923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645243691063082:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:44.624053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:44.639134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645243691063084:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:44.720389Z node 1 :TX_PROXY ERROR: Actor# [1:7491645243691063140:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:45.812431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:45.869859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:45.955457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] >> TStorageBalanceTest::TestScenario3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 26365, MsgBus: 22786 2025-04-10T11:27:52.157607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644764156046148:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:52.157672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a2/r3tmp/tmprVaSHm/pdisk_1.dat 2025-04-10T11:27:52.837308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:52.883237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:52.883565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:52.887731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26365, node 1 2025-04-10T11:27:53.137825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:53.137849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:53.137858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:53.137982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22786 TClient is connected to server localhost:22786 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:54.069967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:54.093204Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:56.668479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644781335915765:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.668575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.668809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644781335915773:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.673044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:56.685569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644781335915779:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:56.789791Z node 1 :TX_PROXY ERROR: Actor# [1:7491644781335915830:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:57.157795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644764156046148:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:57.157856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:57.292708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:57.733273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:57.733699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:57.733950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:57.734065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:57.734175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:57.734285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:57.734386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:57.734494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:57.734591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:57.734685Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:57.734781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:57.734954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644785630883440:2364];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:57.750799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:57.750870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:57.751035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:57.751144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:57.751240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:57.751332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:57.751450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:57.751571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:57.751715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:57.751860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:57.751955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:57.752052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491644785630883442:2365];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:57.779021Z node 1 :T ... tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.040786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.046870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.052390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.054556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.061626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.063830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.073168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.074632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.082090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.083267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.091064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.096455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.101326Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.103946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.112576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.114304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.121809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.123809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.133008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.135161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.143864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.146357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.153030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.155346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.160327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.169145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.170243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.178902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.182846Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.187787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.190980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.192726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.197161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.198006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.203924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.204177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.210484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.210611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.216312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.219154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.223528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.224988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.236027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.236637Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.249913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.255208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.502335Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbd9ee3ymvpp3agy8kz6c", SessionId: ydb://session/3?node_id=1&id=ODY3OTJmNjYtMWRhMzg0ZDItMWI3MThkNS1kODkxMjJhMA==, Slow query, duration: 40.515939s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:30.872857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:30.879712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:30.880028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] 
------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) 2025-04-10T11:26:44.636623Z node 10 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:44.640166Z node 10 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:44.640399Z node 10 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:44.641164Z node 10 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [10:285:2080] ControllerId# 72057594037932033 2025-04-10T11:26:44.641198Z node 10 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:44.641343Z node 10 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:44.641612Z node 10 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:44.643898Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:44.646329Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:44.646458Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:44.647235Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:294:2080] ControllerId# 72057594037932033 2025-04-10T11:26:44.647266Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:44.647324Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:44.647480Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:44.649207Z node 10 
:BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:44.649269Z node 10 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:44.651180Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:299:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651305Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:300:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651403Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:301:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651474Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:302:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651564Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:303:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651716Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:304:2089] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651839Z node 10 :BS_PROXY DEBUG: Group# 0 Actor# [10:284:2079] Create Queue# [10:305:2090] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.651863Z node 10 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:44.651992Z node 10 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [10:285:2080] 2025-04-10T11:26:44.652025Z node 10 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [10:285:2080] 2025-04-10T11:26:44.652082Z node 10 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:44.652286Z node 10 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:44.652759Z node 4 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:44.652798Z node 4 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:44.654213Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:309:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654319Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:310:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654401Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:311:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654486Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:312:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654565Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:313:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654657Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:314:2089] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654776Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:293:2079] Create Queue# [4:315:2090] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.654791Z node 4 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:44.654863Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [4:294:2080] 2025-04-10T11:26:44.654879Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [4:294:2080] 2025-04-10T11:26:44.654924Z node 4 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:44.654989Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 
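Every record in this dump, and in the KQP test dumps above, starts with the same header shape: an ISO-8601 timestamp, "node <id>", a ":<COMPONENT>" tag (BS_NODE, BS_PROXY, TX_COLUMNSHARD_TX, ...), a severity, and the message body. A minimal Python sketch for splitting that header off a record follows; the regex is an assumption inferred from the lines visible here, not an official YDB log schema, and parse_record is a hypothetical helper name.

import re

# Assumed header shape (inferred from this dump, not an official schema):
#   <ISO-8601 timestamp> node <id> :<COMPONENT> <SEVERITY>: <message>
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node\s+(?P<node>\d+)\s+"
    r":(?P<component>[A-Z_0-9]+)\s+"
    r"(?P<severity>[A-Z]+):\s"
    r"(?P<message>.*)"
)

def parse_record(line):
    """Return the header fields and message as a dict, or None on no match."""
    m = RECORD.match(line.strip())
    return m.groupdict() if m else None

rec = parse_record(
    "2025-04-10T11:26:44.654989Z node 4 :BS_NODE DEBUG: "
    "{NWDC00@distconf.cpp:22} Bootstrap"
)
assert rec["component"] == "BS_NODE" and rec["severity"] == "DEBUG"

Records that were wrapped mid-line in this capture (a timestamp at the end of one line, the rest on the next) simply fail to match and return None, which is a reasonable signal to rejoin adjacent lines before retrying.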
2025-04-10T11:26:44.655181Z node 10 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:44.655518Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:44.655581Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:44.657440Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:26:44.657513Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:26:44.657991Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:323:2081] ControllerId# 72057594037932033 2025-04-10T11:26:44.658019Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:26:44.658067Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:26:44.658306Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:26:44.660185Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:26:44.660217Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:26:44.661217Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:329:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661337Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:330:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661437Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:331:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661531Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:332:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661616Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:333:2089] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661706Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:334:2090] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661795Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:322:2080] Create Queue# [5:335:2091] targetNodeId# 1 Marker# DSP01 2025-04-10T11:26:44.661811Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:26:44.661844Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:323:2081] 2025-04-10T11:26:44.661860Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:323:2081] 2025-04-10T11:26:44.661883Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:26:44.661914Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:26:44.662377Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:26:44.662491Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:26:44.664841Z 
node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeI ... Z node 11 :BS_PROXY_PUT DEBUG: [a0fed0545d3aeb5b] Result# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:29:48.132732Z node 11 :BS_PROXY_PUT INFO: [a0fed0545d3aeb5b] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:29:48.132833Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.861 sample PartId# [72057594037927937:2:492:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 5.098 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-04-10T11:29:48.133427Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:29:48.133894Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} commited cookie 1 for step 492 2025-04-10T11:29:48.135198Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-10T11:29:48.135253Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:29:48.135510Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{996, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-10T11:29:48.135558Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:29:48.135669Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1297:2642] 2025-04-10T11:29:48.135701Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1297:2642] 2025-04-10T11:29:48.135748Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1239:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.046) *******--------------------------------------------------------------------------------------------- (0.072) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.054) 
*******--------------------------------------------------------------------------------------------- (0.068) *****----------------------------------------------------------------------------------------------- (0.054) ****------------------------------------------------------------------------------------------------ (0.04) 2025-04-10T11:29:48.239520Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:29:48.239606Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:29:48.239731Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005416896}: tablet 72075186224037965 wasn't changed 2025-04-10T11:29:48.239781Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005416896}: tablet 72075186224037965 skipped channel 0 2025-04-10T11:29:48.239878Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005416896}: tablet 72075186224037965 skipped channel 1 2025-04-10T11:29:48.239910Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005416896}: tablet 72075186224037965 skipped channel 2 2025-04-10T11:29:48.239979Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005416896}(72075186224037965)::Execute - TryToBoot was not successfull 2025-04-10T11:29:48.240054Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{997, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-10T11:29:48.240105Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:29:48.255973Z node 11 :BS_PROXY_PUT INFO: [15e1c368cad2f8bf] bootstrap ActorId# [11:11637:6261] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:493:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:29:48.256123Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] Id# [72057594037927937:2:493:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:29:48.256166Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] restore Id# [72057594037927937:2:493:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:29:48.256221Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:493:0:0:246:1] Marker# BPG33 2025-04-10T11:29:48.256255Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:493:0:0:246:1] Marker# BPG32 2025-04-10T11:29:48.256366Z node 11 :BS_PROXY DEBUG: Send to queueActorId# [11:483:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:493:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:29:48.272270Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] received {EvVPutResult Status# OK ID# [72057594037927937:2:493:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 510 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { 
SequenceId: 1 MsgId: 511 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:29:48.272420Z node 11 :BS_PROXY_PUT DEBUG: [15e1c368cad2f8bf] Result# TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:29:48.272468Z node 11 :BS_PROXY_PUT INFO: [15e1c368cad2f8bf] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:29:48.272575Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.761 sample PartId# [72057594037927937:2:493:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 16.729 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-04-10T11:29:48.273323Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:29:48.273946Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} commited cookie 1 for step 493 2025-04-10T11:29:48.277140Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-10T11:29:48.277202Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:29:48.277454Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{998, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-10T11:29:48.277508Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1492, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:29:48.277631Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1297:2642] 2025-04-10T11:29:48.277661Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1297:2642] 2025-04-10T11:29:48.277706Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1239:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.046) *******--------------------------------------------------------------------------------------------- (0.072) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.054) *******--------------------------------------------------------------------------------------------- (0.068) *****----------------------------------------------------------------------------------------------- (0.054) 
****------------------------------------------------------------------------------------------------ (0.04) 2025-04-10T11:29:48.382872Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:29:48.382960Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:29:48.383084Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005417344}: tablet 72075186224037921 wasn't changed 2025-04-10T11:29:48.383120Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005417344}: tablet 72075186224037921 skipped channel 0 2025-04-10T11:29:48.383206Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005417344}: tablet 72075186224037921 skipped channel 1 2025-04-10T11:29:48.383273Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005417344}: tablet 72075186224037921 skipped channel 2 2025-04-10T11:29:48.383350Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005417344}(72075186224037921)::Execute - TryToBoot was not successfull 2025-04-10T11:29:48.383427Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{999, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-10T11:29:48.383478Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1493, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24355, MsgBus: 10154 2025-04-10T11:28:54.994638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645032269748694:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:55.038229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001648/r3tmp/tmphAf5T7/pdisk_1.dat 2025-04-10T11:28:55.746300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:55.792977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:55.793051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:55.802825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24355, node 1 2025-04-10T11:28:56.096305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:56.096324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:56.096330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:56.096432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:10154 TClient is connected to server localhost:10154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:57.083619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:57.116566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:59.633004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645053744585831:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:59.633134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:59.633367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645053744585843:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:59.637399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:59.650120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645053744585845:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:59.726128Z node 1 :TX_PROXY ERROR: Actor# [1:7491645053744585896:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:59.994370Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645032269748694:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:59.994441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:00.132872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.254715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.308438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.352035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.430944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.669631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.707464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.759521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.833239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.897236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:29:00.959767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:29:01.024045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
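The repeated "Operation part proposed ok, but propose itself is undo unsafe" warnings above are emitted while the test fixture creates its join tables through ESchemeOpCreateTable suboperations. A minimal sketch of the kind of YQL DDL that produces one such suboperation (the table and column names here are hypothetical, not taken from the test fixtures):

    CREATE TABLE `/Root/join_sample` (
        id Int32 NOT NULL,
        payload Utf8,
        PRIMARY KEY (id)
    );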
2025-04-10T11:29:01.106483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.100808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:29:02.153070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.212803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.266721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.319452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.382067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.490826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.567997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.706801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.784133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.832377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.880605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:29:02.961953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.018097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.107108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.201043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.281663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.460192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.465661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.471198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.474285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.478303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.479990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.484110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.485597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.490405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.495585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.496603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.501539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.502589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.507871Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.508183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.513223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.516889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.517700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.522558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.523573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.528264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.531813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.534695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.538240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.540627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.544392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.547781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.553559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.563821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.563990Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.570729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.571086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.577843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.578093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.584955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.585713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.591289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.607316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.638682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.645580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.651841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.658388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.664183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.666775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.685926Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:40.756920Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbw52fsh3yf6ze78pbkbc", SessionId: ydb://session/3?node_id=1&id=ODc0ZmFjMGQtYTdjMWYzZmItYjNjMjU5MDktMzc1MDFiNjE=, Slow query, duration: 35.569710s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:41.063317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:41.063784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:41.064409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491645088104330503:2844];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:29:41.064778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 24328, MsgBus: 27333 2025-04-10T11:29:34.863539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645201056854696:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:34.863748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015fb/r3tmp/tmp0iBOkN/pdisk_1.dat 2025-04-10T11:29:35.448927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:35.449011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:35.450919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:35.481702Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24328, node 1 2025-04-10T11:29:35.650657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:35.650683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:35.650689Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:35.650779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27333 TClient is connected to server localhost:27333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:36.314959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.334115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:36.349904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.626722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:36.906411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:37.006591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.011324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645218236725473:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.011448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.544498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.594822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.697656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.769112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.819053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.851455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645201056854696:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:39.851583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:39.889346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.979971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645222531693283:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.980053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.980338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645222531693288:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:39.986345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:40.035072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645222531693290:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:40.122592Z node 1 :TX_PROXY ERROR: Actor# [1:7491645226826660645:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:41.365702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.413544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11964, MsgBus: 10616 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015fb/r3tmp/tmpWC59iM/pdisk_1.dat 2025-04-10T11:29:43.470774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:29:43.471782Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:43.506769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:43.506865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:43.515321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11964, node 2 2025-04-10T11:29:43.698576Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:43.698597Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:43.698604Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:43.698711Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10616 TClient is connected to server localhost:10616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:44.247449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
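For readability, the query text from the KQP_SLOW_LOG entry in the FiveWayJoinWithConstantFoldOpt-ColumnStore output above, with its escaped newlines expanded (a verbatim quote of the logged DDL, not new code):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);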
2025-04-10T11:29:44.258555Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:44.274748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:44.364580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:44.710031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:44.808487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.467270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645259212902206:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.467360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.540657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.622080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.689850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.787750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.843120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.895022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.966688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645259212902724:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.966803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.968147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645259212902729:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.976120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:48.011315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645259212902731:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:48.113185Z node 2 :TX_PROXY ERROR: Actor# [2:7491645263507870084:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:49.438535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.483592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 2286, MsgBus: 16695 2025-04-10T11:29:41.722652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645234285748002:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:41.722688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e8/r3tmp/tmpupc62m/pdisk_1.dat 2025-04-10T11:29:42.465645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:42.476757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:42.476844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:42.479045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2286, node 1 2025-04-10T11:29:42.710657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:42.710676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:42.710683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:42.710789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16695 TClient is connected to server localhost:16695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:43.595698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:43.627749Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:43.644698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:43.890852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:44.087545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:44.217542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:46.301785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645255760586174:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:46.301912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:46.580966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.615923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.660990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.734711Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645234285748002:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:46.737908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:46.757325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.820814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.913788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.017042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645260055553991:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.017202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.017496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645260055553996:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.021272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:47.046359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645260055553998:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:47.108118Z node 1 :TX_PROXY ERROR: Actor# [1:7491645260055554052:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:48.230594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.283003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.360264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.402632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.482513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7559, MsgBus: 11822 2025-04-10T11:29:43.696464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645242619529935:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:43.696518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e3/r3tmp/tmpTy58eD/pdisk_1.dat 2025-04-10T11:29:44.418969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:44.419054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:44.421561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:44.471355Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7559, node 1 2025-04-10T11:29:44.662965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:44.662997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:44.663008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:44.663170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11822 TClient is connected to server localhost:11822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:45.580118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:45.657886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:45.929473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:46.276481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:46.375304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.395186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645264094368203:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:48.395314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:48.702269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645242619529935:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:48.702329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:48.796990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.856618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.946362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.985218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.013708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.053774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.120721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645268389336014:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.120801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.121523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645268389336019:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.125810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:49.139894Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:29:49.140218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645268389336021:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:49.224892Z node 1 :TX_PROXY ERROR: Actor# [1:7491645268389336075:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:50.253798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.303145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.360761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.421974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.508799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.574903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 32604, MsgBus: 15876 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001638/r3tmp/tmpXrCFal/pdisk_1.dat 2025-04-10T11:29:00.713031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:29:01.153676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:01.172184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:01.172314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:01.183345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32604, node 1 2025-04-10T11:29:01.505239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:01.505263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:01.505272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:01.505401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15876 TClient is connected to server localhost:15876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:02.819562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:02.834364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:05.399896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645078270191830:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.400024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.402505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645078270191842:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:05.407686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:05.426063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645078270191844:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:05.489409Z node 1 :TX_PROXY ERROR: Actor# [1:7491645078270191896:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:05.940137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.102972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.175012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.230857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.315512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.521851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.613956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.683189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.728462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.796303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.847924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.890452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:06.931281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.747008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-04-10T11:29:07.802043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.847441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.890842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:29:07.938854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.030867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.079585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.167626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.297065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.334845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.372444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.413556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.468872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.555337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.605684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.656267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.692305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.787289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.838754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.903193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.785104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.789566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.796367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.796640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.802894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.805348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.809281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.813162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.816734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.820499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.823414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.828609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.832879Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.836488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.839774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.848095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.849105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.856308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.860187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.871748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.871768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.883950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.884676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.890832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.895076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.901626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.905109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.911662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.921708Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.926346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.932277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.936517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.946721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.950656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.957872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.961156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.973356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.974788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.984461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.988101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.994002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:43.999031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.000019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.005195Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.016226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.158463Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqc19qd03fc6kq5e6b1w14", SessionId: ydb://session/3?node_id=1&id=NzVhNDk1MWEtMjYyN2VlN2MtM2ZkMjMzYmYtYTA5YjhkZDM=, Slow query, duration: 33.702857s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:44.624923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:44.625448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:44.626774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491645112629936936:2914];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:29:44.627128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH21 >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] Test command err: Trying to start YDB, gRPC: 9744, MsgBus: 8414 2025-04-10T11:27:51.809894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644761100684394:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:51.812264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016a3/r3tmp/tmp1tfNMv/pdisk_1.dat 2025-04-10T11:27:52.478856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:52.478922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:52.491124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-04-10T11:27:52.518236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9744, node 1 2025-04-10T11:27:52.832444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:52.832462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:52.832474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:52.832566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8414 TClient is connected to server localhost:8414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:53.784879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:53.835847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:56.811754Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644761100684394:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:56.811848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:56.824908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644782575521407:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.825031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.830320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644782575521419:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:56.834762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:56.860012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644782575521421:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:56.956130Z node 1 :TX_PROXY ERROR: Actor# [1:7491644782575521474:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:57.332383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:57.592007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:57.592115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:57.592210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:57.592211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:57.592496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:57.592502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:57.592638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:57.592639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:57.592744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:57.592745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:57.592843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:27:57.592846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:57.592943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:57.592946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:57.593059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:57.593062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:57.593158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:57.593165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:57.593287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:57.593288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:57.593394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:57.593399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:57.593487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644786870489032:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:57.593498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491644786870489044:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:57.629295Z node 1 :TX_COL ... tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.807516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.818576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.823558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.829753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.833152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.840635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.845822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.856639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.860985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.866225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.867098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.873267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.873276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.880268Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.884231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.896732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.897680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.903938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.905704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.913756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.915659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.924613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.925640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.931297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.931579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.937794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.941637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.944618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.950909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.952988Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.958495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.965036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.967409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.975420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.977189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.983533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.995242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:30.997188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.006500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.009190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.018459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.026176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.034426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.038446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:31.051550Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:29:31.052102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:29:31.383893Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbdxz8yzxsbh1xgv7gg1k", SessionId: ydb://session/3?node_id=1&id=MTkxOTNmNTUtYzk4Mjg0ZmEtY2RmMGFiY2YtNTgxMzA0M2E=, Slow query, duration: 40.760082s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:29:31.809501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:29:31.809592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:29:31.810128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD]
Test command err: Trying to start YDB, gRPC: 12286, MsgBus: 2498
2025-04-10T11:29:45.660668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645247837844883:2196];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:45.660872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e1/r3tmp/tmpqRvSvn/pdisk_1.dat
2025-04-10T11:29:46.398990Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:29:46.400849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:29:46.400910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:29:46.407572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12286, node 1
2025-04-10T11:29:46.742036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:29:46.742057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:29:46.742064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:29:46.742210Z node 1 :NET_CLASSIFIER ERROR: got bad
distributable configuration TClient is connected to server localhost:2498 TClient is connected to server localhost:2498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:47.438489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:47.488664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.785779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.049480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.154595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.204713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645269312682994:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.204877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.525218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.582233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.622036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.662595Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645247837844883:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:50.662727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:50.692155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.748610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.834036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.905306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645269312683513:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.905388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.905617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645269312683518:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.909781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:50.926532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:29:50.927017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645269312683520:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:29:51.014864Z node 1 :TX_PROXY ERROR: Actor# [1:7491645273607650872:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:29:52.077944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.114516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.162711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.213690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.261920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.301288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD]
>> KqpJoin::RightSemiJoin_SimpleKey
>> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore
>> KqpJoin::JoinAggregate [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD]
Test command err: Trying to start YDB, gRPC: 17963, MsgBus: 2845
2025-04-10T11:29:37.171533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645213424771174:2261];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:37.171585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f2/r3tmp/tmpRLdKIw/pdisk_1.dat
2025-04-10T11:29:37.976255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:29:37.976337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:29:37.983471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:29:38.002368Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17963, node 1
2025-04-10T11:29:38.304979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:29:38.305010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:29:38.305018Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty
maybe) 2025-04-10T11:29:38.305161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2845 TClient is connected to server localhost:2845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:39.505297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.552116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:39.562857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.832640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:40.106940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:40.240357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:42.174576Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645213424771174:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:42.186026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:42.340348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645234899609234:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.340458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.764408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.851989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.888445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.935786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.982709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.039514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.111967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645239194577045:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.112038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.112262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645239194577050:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.115600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:43.128233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645239194577052:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:43.230072Z node 1 :TX_PROXY ERROR: Actor# [1:7491645239194577110:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:44.317787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.421343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2073, MsgBus: 11934 2025-04-10T11:29:46.757697Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645253560066317:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:46.810690Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f2/r3tmp/tmpyLlD6W/pdisk_1.dat 2025-04-10T11:29:46.955952Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:46.971239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:46.971325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:46.972957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2073, node 2 2025-04-10T11:29:47.078764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:47.078787Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:47.078795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:47.078902Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11934 TClient is connected to server localhost:11934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
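For reference, the slow query recorded earlier in this report (KQP_SLOW_LOG, TraceId "01jrfqbdxz8yzxsbh1xgv7gg1k", duration 40.760082s, in the KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup block above) creates three column tables with 240 minimum partitions each. Expanded from the logged \n escape sequences, with indentation approximated, the statement text reads as follows; this is a restatement of the logged query, not new SQL:

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);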
2025-04-10T11:29:47.975271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.986797Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:48.005019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.142710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.449328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.547222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.914298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645270739937077:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.914400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.964165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.996420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.035818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.084545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.129342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.205924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.277385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645275034904888:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.277502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.278720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645275034904893:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.283237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:51.294944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645275034904895:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:29:51.379629Z node 2 :TX_PROXY ERROR: Actor# [2:7491645275034904951:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:29:51.698923Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645253560066317:2215];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:51.707745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:29:52.928370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:29:53.021053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::CanonizedJoinOrderTPCH8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Left-StreamLookup [GOOD]
Test command err: Trying to start YDB, gRPC: 2475, MsgBus: 29318
2025-04-10T11:29:35.965047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645205040344876:2072];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:35.965247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f9/r3tmp/tmp1sMexB/pdisk_1.dat
2025-04-10T11:29:36.737856Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:29:36.740732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:29:36.740810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:29:36.752029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2475, node 1
2025-04-10T11:29:37.058924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:29:37.058950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:29:37.058969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:29:37.059105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29318
TClient is connected to server localhost:29318
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:38.031120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.085187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.364105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.588265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.684895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:40.971770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645205040344876:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:40.971828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:41.286975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645230810150434:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.287078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.738005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.774793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.856296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.888311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.930112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.012453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.083700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645235105118250:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.083797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.084742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645235105118255:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.088365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:42.105527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645235105118257:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:42.170835Z node 1 :TX_PROXY ERROR: Actor# [1:7491645235105118310:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:43.382603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.434629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.474074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.551949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.592200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.629916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29149, MsgBus: 30256 2025-04-10T11:29:45.859442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645248823524248:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:45.860276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f9/r3tmp/tmp6B43Eo/pdisk_1.dat 2025-04-10T11:29:46.218188Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:46.243742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:46.243826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:46.245719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29149, node 2 2025-04-10T11:29:46.422624Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:46.422646Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:46.422654Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:46.422773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30256 TClient is connected to server localhost:30256 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:47.017472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.027627Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:47.033522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.134206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.309313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.395606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.155212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645270298362456:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.155283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.200268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.240210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.292412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.334198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.377274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.483161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.568932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645270298362971:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.569013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.569238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645270298362976:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.573031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:50.587047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645270298362978:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:29:50.676297Z node 2 :TX_PROXY ERROR: Actor# [2:7491645270298363034:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:29:50.862267Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645248823524248:2083];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:50.862348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:29:52.340259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.421871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.477068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.527743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.632009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:29:52.690951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD]
Test command err: Trying to start YDB, gRPC: 21312, MsgBus: 1292
2025-04-10T11:29:37.367963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645217201718638:2069];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:37.368860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f1/r3tmp/tmpxYE8U6/pdisk_1.dat
2025-04-10T11:29:38.039975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:29:38.040161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:29:38.042330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:29:38.056989Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21312, node 1
2025-04-10T11:29:38.366654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use
file: (empty maybe) 2025-04-10T11:29:38.366672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:38.366677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:38.366769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1292 TClient is connected to server localhost:1292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:39.179441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.199664Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:39.224676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.406947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.755292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:39.873830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:42.307239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645238676556884:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.307373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.374280Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645217201718638:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:42.374361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:42.869328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.928127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.974300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.057822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.116288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.170946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:43.264252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645242971524700:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.264337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.264582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645242971524705:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.269086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:43.288613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645242971524707:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:43.375662Z node 1 :TX_PROXY ERROR: Actor# [1:7491645242971524763:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:44.484348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.529765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.570739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.618732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.699131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.742752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14634, MsgBus: 27436 2025-04-10T11:29:46.797063Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645251850944250:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f1/r3tmp/tmpojxo4h/pdisk_1.dat 2025-04-10T11:29:46.824526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:29:46.901807Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:46.915164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:46.915245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:46.919294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14634, node 2 2025-04-10T11:29:47.094700Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:47.094723Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:47.094733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:47.094865Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27436 TClient is connected to server localhost:27436 WaitRootIsUp 'Root'... 
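Note on the recurring warnings above: on a freshly bootstrapped database the KQP workload service looks up the `default` resource pool before it exists, logs NOT_FOUND, auto-creates it (ESchemeOpCreateResourcePool), retries after the "doublechecking" transaction, and the TX_PROXY "path exist, request accepts it" error is the benign tail of that create race. A minimal YQL sketch of creating a pool explicitly, assuming a YDB build with workload-manager DDL enabled; the pool name and parameter values are illustrative and the exact parameter set depends on the server version, so verify against your build:

-- Hypothetical explicit pool creation (illustrative name and values, not from the test):
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- cap on simultaneously executing queries in the pool
    QUEUE_SIZE = 100              -- how many queries may wait once the limit is reached
);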
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:47.867583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.882791Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:29:47.905062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.039440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.333207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:48.442599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.970311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645269030815028:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:50.970399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.019320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.068576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.155322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.200255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.261083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.323221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:51.422729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645273325782843:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.422860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.423313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645273325782849:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.427622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:51.445446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645273325782851:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:29:51.545162Z node 2 :TX_PROXY ERROR: Actor# [2:7491645273325782906:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:51.714283Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645251850944250:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:51.714355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:52.658521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.720342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.788046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.838787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.885332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.938690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpJoin::JoinLeftPureInnerConverted >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregate [GOOD] Test command err: Trying to start YDB, gRPC: 61691, MsgBus: 27412 2025-04-10T11:29:44.123841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645246997930108:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:44.123985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e2/r3tmp/tmpRqLvSA/pdisk_1.dat 2025-04-10T11:29:44.850102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:44.854129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:44.855455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:44.858300Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61691, node 1 2025-04-10T11:29:45.094662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:45.094687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:45.094698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:45.098305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27412 TClient is connected to server localhost:27412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:45.859567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:45.897502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:46.084079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.351027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:29:46.442484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.836075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645264177800839:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:48.836175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.126260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645246997930108:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:49.126918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:49.246444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.338267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.419930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.482431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.571274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.635384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.703918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645268472768660:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.704006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.704418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645268472768665:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:49.708026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:49.722107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645268472768667:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:49.818026Z node 1 :TX_PROXY ERROR: Actor# [1:7491645268472768725:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:50.860621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.914082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:50.976483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureFull [GOOD] Test command err: Trying to start YDB, gRPC: 7040, MsgBus: 27608 2025-04-10T11:29:47.670297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645256274570443:2090];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015db/r3tmp/tmpa4P4HD/pdisk_1.dat 2025-04-10T11:29:48.035492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:29:48.288821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:48.293285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:48.293386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:48.298320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7040, node 1 2025-04-10T11:29:48.602758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:48.602788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:48.602797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:48.602920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27608 TClient is connected to server localhost:27608 WaitRootIsUp 'Root'... 
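For context on the suite: KqpJoin::JoinAggregate, reported GOOD in the block above, drives join-plus-aggregation plans through KQP. A hypothetical YQL query of that shape — table paths and column names are illustrative assumptions, not taken from the test source:

-- Hypothetical join + aggregation of the flavor KqpJoin::JoinAggregate exercises:
SELECT l.Key, COUNT(*) AS Cnt, SUM(r.Value) AS Total
FROM `/Root/Join1` AS l
INNER JOIN `/Root/Join2` AS r ON l.Key = r.Key
GROUP BY l.Key
ORDER BY l.Key;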
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:49.451964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:49.478745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:49.496768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:29:49.720990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.021822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.149859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:52.475717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645277749408627:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:52.475814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:52.646411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645256274570443:2090];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:52.646473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:53.053782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.105324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.148887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.206178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.299090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.392188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.494047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645282044376452:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.494126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.494638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645282044376457:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.498604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:53.515596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645282044376459:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:53.590448Z node 1 :TX_PROXY ERROR: Actor# [1:7491645282044376513:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH1 >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> KqpJoinOrder::CanonizedJoinOrderTPCC ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD] Test command err: Trying to start YDB, gRPC: 10645, MsgBus: 29723 2025-04-10T11:27:59.258832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644793643120469:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:59.261196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001696/r3tmp/tmp28WmgU/pdisk_1.dat 2025-04-10T11:27:59.914837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:59.945537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:59.945630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:59.949063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10645, node 1 2025-04-10T11:28:00.074642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:00.074661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:00.074686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:00.074799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29723 TClient is connected to server localhost:29723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
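Similarly, KqpJoin::JoinLeftPureFull (GOOD above) covers FULL JOIN handling in KQP. A hypothetical YQL query of that flavor — table paths and columns are illustrative only, not from the test source:

-- Hypothetical FULL JOIN of the kind KqpJoin::JoinLeftPureFull exercises:
SELECT l.Key AS LeftKey, r.Key AS RightKey
FROM `/Root/Left` AS l
FULL JOIN `/Root/Right` AS r ON l.Fk = r.Key
ORDER BY LeftKey, RightKey;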
2025-04-10T11:28:00.861171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:00.885209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:03.181086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644810822990324:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:03.181246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:03.181572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644810822990336:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:03.185590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:03.217586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644810822990338:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:03.301081Z node 1 :TX_PROXY ERROR: Actor# [1:7491644810822990389:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:03.819092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:04.111933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:04.112143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:04.112421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:04.112557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:04.112664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:04.112777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:04.112883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:04.112988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:04.113100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:04.113232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:04.113362Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:04.113455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644810822990632:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:04.120575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:04.120633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:04.120868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:04.120973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:04.121068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:04.121187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:04.121318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:04.121436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:04.121561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:04.121679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:04.121776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:04.121865Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644810822990636:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:04.159201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644815117958005:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:04.159262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644815117958005:2360];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.328126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.329317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.335205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.335805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.342494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.342650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.350060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.350463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.357398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.359925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.372742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.374555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.380413Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.387067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.397384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.398237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.404044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.409938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.418693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.421745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.424850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.427878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.435093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.437605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.448291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.449772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.459567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.463961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.473183Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.474777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.484374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.487312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.492271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.493990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.497791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.499485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.503674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.505224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.509173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.510862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.516995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.520416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.526970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.527180Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.622979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:36.745509Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbks6fmdy80qmwqf2a9t0", SessionId: ydb://session/3?node_id=1&id=Mzc5ZmJlYmEtYmZmNjE3Y2YtMjA4OWNjMjgtMzZhZTYwMzc=, Slow query, duration: 40.130710s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:37.088263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:37.088807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645167305333633:11322];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-10T11:29:37.182451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:37.183191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH6 >> OlapEstimationRowsCorrectness::TPCDS96 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25166, MsgBus: 26390 2025-04-10T11:28:05.689244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644821898069775:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:05.689599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00168a/r3tmp/tmp41e7l4/pdisk_1.dat 2025-04-10T11:28:06.538384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:06.540803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:06.540915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:06.546257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 25166, node 1 2025-04-10T11:28:06.826323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:06.826346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:06.826356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:06.826460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26390 TClient is connected to server localhost:26390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:07.879066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:10.722483Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644821898069775:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:10.722827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:10.778354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644843372906790:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.778490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.778884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644843372906802:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:10.783285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:10.814184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644843372906804:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:10.898804Z node 1 :TX_PROXY ERROR: Actor# [1:7491644843372906858:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:11.370752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:11.673107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:11.674081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:11.674375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:11.674524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:11.674646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:11.674772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:11.674893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:11.675036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:11.675179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:11.675297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:11.675420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:11.675541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644847667874416:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:11.688980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:11.689304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:11.689529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:11.689642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:11.689755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:11.689892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:11.689997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:11.690414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:11.690544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:11.690645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:11.690744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:11.690850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491644847667874398:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:11.720325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644847667874501:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.471810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.476568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.481529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.490923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.495308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.505022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.505502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.511862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.518312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.523495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.528362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.533937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.542173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.547930Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.552942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.553378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.558994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.564411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.575873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.578977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.586038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.588841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.596211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.598517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.604555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.608497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.617430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.622864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.623028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.629337Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.633050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.644477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.645793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.660553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.675636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.689604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.707949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.713263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.723037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.727151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.728204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.733799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.745019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.748975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.763202Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:45.176897Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbw8f75asf34kpemke8dm", SessionId: ydb://session/3?node_id=1&id=YmQ5Yjc5ZTItMTA0OGQ4ODgtOGU3MjUyZmMtYmEzOTk5NGU=, Slow query, duration: 39.881005s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:45.476635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:45.476837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:45.478415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645152610598746:9515];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:29:45.478808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 9840, MsgBus: 32160 2025-04-10T11:29:49.586501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645265288597225:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:49.589711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015d8/r3tmp/tmplgvuGh/pdisk_1.dat 2025-04-10T11:29:50.381036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:50.381137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:50.388040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:50.392798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9840, node 1 2025-04-10T11:29:50.717985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:50.718004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-10T11:29:50.718012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:50.718101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32160 TClient is connected to server localhost:32160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:51.777371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:51.800088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:51.807963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:52.063663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:52.329873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:52.477374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:54.447951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645286763435420:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:54.448050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:54.570284Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645265288597225:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:54.570345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:54.827359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:54.905048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:54.972264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:55.023724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:55.107014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:55.186196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:55.256055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645291058403238:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:55.256114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:55.256516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645291058403243:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:55.260323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:55.284514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645291058403245:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:55.379452Z node 1 :TX_PROXY ERROR: Actor# [1:7491645291058403301:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:57.122508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:57.162908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:57.220980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:57.259537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:57.307825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 8142, MsgBus: 8905 2025-04-10T11:29:48.208010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645263012757516:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:48.208419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015da/r3tmp/tmpLkdQ9d/pdisk_1.dat 2025-04-10T11:29:48.885699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:48.888652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:48.888758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:48.891783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8142, node 1 2025-04-10T11:29:49.098599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:49.098618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:49.098626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:49.098714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8905 TClient is connected to server localhost:8905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:29:49.765664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:49.796252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.029231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.304348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:50.433386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:52.615345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645280192628331:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:52.615447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.135343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.188951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645263012757516:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:53.189002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:53.195156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.232602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.276098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.352211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.419278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:53.485716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645284487596145:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.485787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.486093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645284487596150:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:53.491099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:53.505983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645284487596152:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:53.568952Z node 1 :TX_PROXY ERROR: Actor# [1:7491645284487596206:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:54.632507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:54.747750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2304, MsgBus: 2439 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015da/r3tmp/tmpoeteFl/pdisk_1.dat 2025-04-10T11:29:57.451884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:29:57.463234Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:57.507140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:57.507218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:57.515220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2304, node 2 2025-04-10T11:29:57.714662Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:57.714686Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:57.714692Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:57.714795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2439 TClient is connected to server localhost:2439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:58.713604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:58.728984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:58.821131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.030263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.149987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:01.969642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645316250892532:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.969734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.027082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.123119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.168257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.242989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.287888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.347305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.462297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645320545860347:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.462418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.462881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645320545860354:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.467802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:02.480654Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:30:02.481589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645320545860356:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:02.572915Z node 2 :TX_PROXY ERROR: Actor# [2:7491645320545860411:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:03.937950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.027129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] Test command err: Trying to start YDB, gRPC: 7606, MsgBus: 21799 2025-04-10T11:29:55.639115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645292917205152:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:55.650239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015d0/r3tmp/tmpTrdAqe/pdisk_1.dat 2025-04-10T11:29:56.332835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:56.332926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:56.342487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:56.343879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7606, node 1 2025-04-10T11:29:56.614845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:56.614873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:56.614887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:56.614987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21799 TClient is connected to server localhost:21799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:57.583245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:57.641451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:57.882519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:58.213178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:58.310319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.385497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645314392043272:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.385620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.600096Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645292917205152:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:00.600201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:00.681927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.717791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.751719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.786604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.853647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.903585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.997063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645314392043791:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.997165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.997697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645314392043796:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.001692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:01.011492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645314392043798:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:01.098921Z node 1 :TX_PROXY ERROR: Actor# [1:7491645318687011151:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:02.327884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.372497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.432248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.511809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.554987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 64890, MsgBus: 13136 2025-04-10T11:28:04.290063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644817590304757:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:04.290109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00168e/r3tmp/tmpYR3RXV/pdisk_1.dat 2025-04-10T11:28:04.949623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:04.949701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:04.953145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:05.048864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64890, node 1 2025-04-10T11:28:05.335045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:05.335066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:05.335078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:05.335203Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13136 TClient is connected to server localhost:13136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:06.403676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:06.438316Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:09.099982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644839065141893:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:09.100114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:09.100550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644839065141905:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:09.104654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:09.129021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644839065141907:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:09.223238Z node 1 :TX_PROXY ERROR: Actor# [1:7491644839065141958:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:09.294366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644817590304757:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:09.294453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:09.813560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:10.120909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:10.121446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:10.121690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:10.121811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:10.121907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:10.122023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:10.122123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:10.122236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:10.122335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:10.122434Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:10.122546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:10.122638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644843360109593:2364];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:10.122781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:10.122808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:10.122957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:10.123045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:10.123140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:10.123226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:10.123310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:10.123415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:10.123510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:10.123618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:10.123714Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:10.123831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491644843360109580:2362];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:10.188039Z node 1 :T ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.030498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.033306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.043543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.047531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.053200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.054907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.059985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.060767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.068770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.069004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.075582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.077148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.086688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.086741Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.092973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.095439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.097998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.102209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.104664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.108667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.113540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.115295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.122587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.122822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.130826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.132020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.144177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.144243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.149029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.155601Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.162771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.163305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.169644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.176985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.192642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.194556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.199119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.200578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.205726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.207043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.214036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.216372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.221988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.222097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.346478Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:44.407466Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbtcp3yjt5vjxknkjkqjn", SessionId: ydb://session/3?node_id=1&id=NmYwNTEwY2ItMTdiNjk0MDMtZTBmYzI0Y2QtZmRkNTgyMjY=, Slow query, duration: 41.024401s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:44.801616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:44.802053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:44.802461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645199842452721:11416];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:29:44.802801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInnerConverted [GOOD] Test command err: Trying to start YDB, gRPC: 5567, MsgBus: 8024 2025-04-10T11:29:57.247813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645299420295614:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:57.247860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c9/r3tmp/tmpUIM1s1/pdisk_1.dat 2025-04-10T11:29:58.245379Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:58.258199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:58.258267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:58.260958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:58.261575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 5567, node 1 2025-04-10T11:29:58.552075Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-10T11:29:58.552097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:58.552104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:58.552209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8024 TClient is connected to server localhost:8024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:59.697879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.729848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:59.752711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.968469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.230102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:30:00.339400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.254850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645299420295614:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:02.254907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:02.267946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645320895133645:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.268038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.576932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.637888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.730094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.792484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.829941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.880719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.941744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645320895134159:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.941811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.942115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645320895134164:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.946262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:02.974838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645320895134166:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:03.072311Z node 1 :TX_PROXY ERROR: Actor# [1:7491645325190101518:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:04.339037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.450796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.548433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20474, MsgBus: 15980 2025-04-10T11:28:40.580704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644972072367633:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:40.581067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00165b/r3tmp/tmpeFBxhi/pdisk_1.dat 2025-04-10T11:28:41.483052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:41.513330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:41.513447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:41.516053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20474, node 1 2025-04-10T11:28:41.882767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:41.882792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:41.882798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:41.882922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15980 TClient is connected to server localhost:15980 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:42.673743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:45.209332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644993547204647:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.209493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.209953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644993547204659:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.215653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:45.246471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644993547204661:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:45.330645Z node 1 :TX_PROXY ERROR: Actor# [1:7491644993547204712:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:45.570256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644972072367633:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:45.570320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:45.686078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.836224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.880574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.931175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:45.988246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.253509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.304114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.344312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.431538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.493524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.544234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.597517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:46.658368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.347906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:47.389321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.444592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.499434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.557273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.606976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.644159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.686636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.737882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.784921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.848644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.936795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.004884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.072903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.135865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.218414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.373912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.442981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... 6710714; 2025-04-10T11:29:28.320922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.324467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.326519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.329785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.331886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.335665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.339799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.341411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.345702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.347165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.351940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.353047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.357999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.359644Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.368050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.369408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.373802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.375162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.381165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.382529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.388800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.389598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.394357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.399969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.402650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.405723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.408146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.411508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.413316Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.425590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.430738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.439030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.440012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.445801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.446602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.452093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.493737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.611037Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbdje80pkqmh5ct9xj67n", SessionId: ydb://session/3?node_id=1&id=Yjg0MGE0YjAtZDlkZjQzMmQtMTZkMTRhNGMtNTc3NWIwOTM=, Slow query, duration: 38.356082s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:29.189095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:29.189643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:29.190897Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038331;self_id=[1:7491645083741535731:4636];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-04-10T11:29:29.191331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:01.179470Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqd1ry8h6vxbt0ajaw9f1g", SessionId: ydb://session/3?node_id=1&id=Yjg0MGE0YjAtZDlkZjQzMmQtMTZkMTRhNGMtNTc3NWIwOTM=, Slow query, duration: 17.468522s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> KqpJoin::RightTableKeyPredicate >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> KqpJoinOrder::TPCDS87+ColumnStore >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> KqpJoinOrder::TestJoinHint1-ColumnStore >> KqpJoinOrder::TPCH9_100 [GOOD] >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26166, MsgBus: 22174 2025-04-10T11:29:58.317911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645305852830071:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:58.318026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c3/r3tmp/tmpTg2Xuo/pdisk_1.dat 2025-04-10T11:29:58.787424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:58.790727Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:58.790822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:58.795379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26166, node 1 2025-04-10T11:29:58.926706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:58.926729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:58.926736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:58.926831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22174 TClient is connected to server localhost:22174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:59.944966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.971434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:59.977830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.153860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.392323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.494634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:02.328836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645323032701061:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.328991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.652405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.686197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.720056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.754376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.814790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.875859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.942164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645323032701577:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.942257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.942451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645323032701582:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.946253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:02.959552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645323032701584:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:03.029546Z node 1 :TX_PROXY ERROR: Actor# [1:7491645327327668933:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:03.318241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645305852830071:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:03.318298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22323, MsgBus: 19877 2025-04-10T11:28:00.977483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644797570163220:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:00.979567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001693/r3tmp/tmprBQxGW/pdisk_1.dat 2025-04-10T11:28:01.808407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:01.808487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:01.814754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:01.862633Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22323, node 1 2025-04-10T11:28:02.070826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:02.070847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:02.070964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:02.071073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19877 TClient is connected to server localhost:19877 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
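
For reference, the 17.468522s KQP_SLOW_LOG entry earlier in this output (TraceId 01jrfqd1ry8h6vxbt0ajaw9f1g) records TPC-DS query 61; unescaping the \n and \" sequences of the logged string yields the following YQL (a readability reconstruction of the logged text — indentation assumed, content exactly as logged, including the template's "Subquerys" comment):

pragma TablePathPrefix = "/Root/test/ds/";

-- NB: Subquerys
-- start query 1 in stream 0 using template query61.tpl and seed 1930872976
select promotions,total,cast(promotions as float)/cast(total as float)*100
from
   (select sum(ss_ext_sales_price) promotions
    from store_sales
    cross join store
    cross join promotion
    cross join date_dim
    cross join customer
    cross join customer_address
    cross join item
    where ss_sold_date_sk = d_date_sk
      and ss_store_sk = s_store_sk
      and ss_promo_sk = p_promo_sk
      and ss_customer_sk = c_customer_sk
      and ca_address_sk = c_current_addr_sk
      and ss_item_sk = i_item_sk
      and ca_gmt_offset = -6
      and i_category = 'Sports'
      and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')
      and s_gmt_offset = -6
      and d_year = 2001
      and d_moy = 12) promotional_sales cross join
   (select sum(ss_ext_sales_price) total
    from store_sales
    cross join store
    cross join date_dim
    cross join customer
    cross join customer_address
    cross join item
    where ss_sold_date_sk = d_date_sk
      and ss_store_sk = s_store_sk
      and ss_customer_sk = c_customer_sk
      and ca_address_sk = c_current_addr_sk
      and ss_item_sk = i_item_sk
      and ca_gmt_offset = -6
      and i_category = 'Sports'
      and s_gmt_offset = -6
      and d_year = 2001
      and d_moy = 12) all_sales
order by promotions, total
limit 100;

A 17s duration is plausible for a seven-way cross join over the TPC-DS store_sales schema under this asan release build; the slow-log entry itself records only the statement text, duration, status, and zero-byte result size.
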
2025-04-10T11:28:03.221307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:03.244825Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:05.874287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644819045000332:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:05.874421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:05.881593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644819045000344:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:05.885988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:05.914354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644819045000346:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:05.986253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644797570163220:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:05.986328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:06.019501Z node 1 :TX_PROXY ERROR: Actor# [1:7491644823339967693:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:06.378208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.493912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.551232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.584333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.618137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.877000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:06.935171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:07.003625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:07.090549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:07.142615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:07.181636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:07.216918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:07.261140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.162726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:08.246185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.285612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.318722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.371472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.422439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.471341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.498768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.535940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.616418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.670531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.724577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.767504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.805569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.843857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:08.931589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:09.032509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.889524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.890454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.895293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.903495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.907158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.912096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.915914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.921012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.924450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.929252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.935468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.938648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.940074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.949643Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.952749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.963462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.966918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:47.973817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:28:48.130649Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqa6yb2pedts8r0yt1m0nf", SessionId: ydb://session/3?node_id=1&id=ODg5OWE1MzYtZGMzMzgxNTktYWY0Y2QzODYtMTdkZmU3NjA=, Slow query, duration: 37.430947s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:28:48.871176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:48.871577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:28:48.878488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491644965073915389:6104];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-04-10T11:28:48.878948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:04.893344Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqcgpw6e93gnvv5xn4h612", SessionId: ydb://session/3?node_id=1&id=ODg5OWE1MzYtZGMzMzgxNTktYWY0Y2QzODYtMTdkZmU3NjA=, Slow query, duration: 38.655755s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = 
household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n 
(household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 27684, MsgBus: 21783 2025-04-10T11:28:41.048400Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644975325797999:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:41.048786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001656/r3tmp/tmpiJpX4Q/pdisk_1.dat 2025-04-10T11:28:41.826999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:41.852610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:41.852706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:41.855393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27684, node 1 2025-04-10T11:28:42.072758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:42.072790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:42.072804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:42.072956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21783 TClient is connected to server localhost:21783 WaitRootIsUp 'Root'... 
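
Two slow-query entries in the KqpJoinOrder::TPCDS88-ColumnStore output above are easier to read unescaped. The 37.430947s entry is the column-store schema setup; reconstructed from the logged string (formatting assumed, content as logged):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 38.655755s entry is TPC-DS query 88: eight structurally identical count(*) subqueries, one per half-hour window from 8:30 to 12:30, cross-joined into a single result row. Only the first subquery is reproduced below; per the logged text, s2 through s8 repeat the same block, varying only t_hour, the t_minute comparison, and the result alias (the elision is marked, not part of the original query):

pragma TablePathPrefix = "/Root/test/ds/";
select *
from
   (select count(*) h8_30_to_9
    from store_sales cross join household_demographics cross join time_dim cross join store
    where ss_sold_time_sk = time_dim.t_time_sk
      and ss_hdemo_sk = household_demographics.hd_demo_sk
      and ss_store_sk = s_store_sk
      and time_dim.t_hour = 8
      and time_dim.t_minute >= 30
      and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count <= 2+2) or
           (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count <= 4+2) or
           (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count <= 3+2))
      and store.s_store_name = 'ese') s1 cross join
   -- ... subqueries s2 (t_hour = 9, t_minute < 30) through s8 (t_hour = 12, t_minute < 30)
   -- elided; each is identical in shape to s1 ...
;
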
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:42.941346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:42.965590Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:45.411111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644992505667707:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.411247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.415951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644992505667719:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:45.420245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:45.458384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644992505667721:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:45.562437Z node 1 :TX_PROXY ERROR: Actor# [1:7491644992505667772:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:46.027911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.042441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644975325797999:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:46.042512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:46.176764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.229186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.311047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.398962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.752586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.835591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.885443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:28:46.932940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.011851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.049681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.089589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:28:47.130672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:28:47.913632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:28:48.023108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.129223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.184721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.247248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.307124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.360408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.442860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.528192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.612707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.672798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.737115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.796108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.866035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.942392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:28:48.996197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:28:49.063641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.104820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.106751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038498;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.111579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.113473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.117532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.120684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.124775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.126717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.131690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.137686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.141144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.146856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.152878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.153064Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.158511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.158589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.164274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.166915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.172742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.175174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.181285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.183905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.189598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.192050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.201037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.203301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.212407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.214244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.231022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.231079Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.243119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.246468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.252421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.258686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.266234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.272197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.275285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.280997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.285336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.290089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.293844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.303480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.307236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.310612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.313145Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:27.460984Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqbej22g2815279c3p7xrq", SessionId: ydb://session/3?node_id=1&id=OTA4M2MyYTUtZTc0YWI2MTEtNTg1OTg5ZTctY2I0Njk0N2I=, Slow query, duration: 36.193484s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:28.048273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:28.048689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:28.049312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491645142829551922:6392];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-04-10T11:29:28.049674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] Test command err: Trying to start YDB, gRPC: 12677, MsgBus: 7129 2025-04-10T11:30:05.087002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645336443536098:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:05.089171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b8/r3tmp/tmpNv1JMs/pdisk_1.dat 2025-04-10T11:30:05.804143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:05.804260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:05.824649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:05.824967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12677, node 1 2025-04-10T11:30:06.099532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:06.099553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:06.099559Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:30:06.099675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7129 TClient is connected to server localhost:7129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:07.087896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.130427Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:07.150244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.400733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.662879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.788838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:10.054266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645336443536098:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:10.054347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:10.330280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645357918374211:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:10.330386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:10.638755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.715728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.887937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.930443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.972061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.067361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.187755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645362213342032:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.187836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.188167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645362213342037:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.192684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:11.211960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645362213342039:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:11.314719Z node 1 :TX_PROXY ERROR: Actor# [1:7491645362213342100:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:12.464154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:12.518603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:12.595112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 18524, MsgBus: 18062 2025-04-10T11:30:04.972665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645330545816426:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:04.973144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b9/r3tmp/tmpVoSIy8/pdisk_1.dat 2025-04-10T11:30:05.713012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:05.716740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:05.716827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:05.722334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18524, node 1 2025-04-10T11:30:06.026573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:06.026594Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:06.026602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:06.026743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18062 TClient is connected to server localhost:18062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:06.873551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
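Note on the "Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001" warning above: KQP falls back to heuristic join ordering whenever table statistics are unavailable, which is routine for tests that create tables and query them immediately. A minimal sketch, assuming the ANALYZE statement available in recent YDB releases and a hypothetical table path, of collecting statistics explicitly so the cost-based optimizer can estimate join cardinalities:

    -- Hypothetical sketch: trigger statistics collection for a column table.
    -- The path /Root/test/lineitem is illustrative, not taken from this log.
    ANALYZE `/Root/test/lineitem`;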
2025-04-10T11:30:06.899692Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:06.905882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.184463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.421813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.545402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:09.697964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645352020654546:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:09.698079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:09.953835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645330545816426:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:09.953892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:10.036757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.095270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.143704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.186167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.231139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.301876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:10.366480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645356315622362:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:10.366588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:10.368215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645356315622367:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:10.373035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:10.388055Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:30:10.388551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645356315622369:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:10.499192Z node 1 :TX_PROXY ERROR: Actor# [1:7491645356315622426:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:11.874722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.917542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:12.011484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 [GOOD] Test command err: Trying to start YDB, gRPC: 1559, MsgBus: 10153 2025-04-10T11:28:12.420173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644848638060833:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:12.420646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00167f/r3tmp/tmpiOjOQN/pdisk_1.dat 2025-04-10T11:28:13.191535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:13.207938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:13.208032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:13.213569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1559, node 1 2025-04-10T11:28:13.478672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:13.478693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:13.478703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:13.478826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10153 TClient is connected to server localhost:10153 WaitRootIsUp 'Root'... 
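The recurring sequence above — the KQP_WORKLOAD_SERVICE warning "Resource pool default not found or you don't have access permissions", a TPoolCreatorActor retry with "Transaction ... completed, doublechecking", then a TX_PROXY error "path exist, request accepts it" — is the workload service lazily creating /Root/.metadata/workload_manager/pools/default on first use: the double-check simply finds the pool already created, so the sequence is benign start-up noise rather than a test failure. A minimal sketch, assuming the CREATE RESOURCE POOL syntax of recent YDB versions (the pool name and limits are illustrative assumptions, not taken from this log), of defining a pool explicitly instead of relying on lazy creation:

    -- Illustrative assumption: an explicitly created resource pool.
    -- CONCURRENT_QUERY_LIMIT caps in-flight queries; QUEUE_SIZE bounds
    -- how many queries may wait for a free slot.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );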
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:14.777451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:17.418262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644848638060833:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:17.418325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:18.063028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644874407865147:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:18.063178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:18.063622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644874407865159:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:18.067772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:18.082064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644874407865161:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:18.139422Z node 1 :TX_PROXY ERROR: Actor# [1:7491644874407865212:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:18.514426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:18.848281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:18.848515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:18.848789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:18.848907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:18.849025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:18.849123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:18.849252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:18.849353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:18.849481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:18.849485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:18.849512Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:18.849589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:18.849629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:18.849685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:18.849740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:18.849800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491644874407865475:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:18.849841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:18.849947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:18.850038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:18.850133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:18.850266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:18.850356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:18.850441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:18.850526Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644874407865487:2358];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:18.932854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491644874407865477:2353];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp ... essTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.700991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.705517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.706928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.712474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.714401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.718993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.722932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.725030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.730757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.731283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.736843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.737264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.744431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.748583Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.754303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.754706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.758743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.760664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.765447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.766460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.773083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.774853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.781493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.783841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.787767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.792465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.803332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.806722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.813375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.817580Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.823777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.825627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.839596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.848456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.850953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.855267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.856696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.861388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.862515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.867707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.877365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.879437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.885227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:49.889555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:50.091277Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqc3mf7qgwz3jsmdxg044b", SessionId: ydb://session/3?node_id=1&id=YWY0ZWQxYzctN2U2ODNlZjctNDRiMTJlNTgtNjk3NTQ3Yg==, Slow query, duration: 37.243449s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:50.407373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:50.407761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645175055622872:9564];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:29:50.407848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:50.408186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:50.575913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710717, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12273, MsgBus: 11694 2025-04-10T11:29:56.759890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645298386007250:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:56.759933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015cb/r3tmp/tmpAy1N5m/pdisk_1.dat 2025-04-10T11:29:57.504165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:57.504243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:57.513811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:57.521683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12273, node 1 2025-04-10T11:29:57.813812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:57.813834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:57.813852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:57.813946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11694 TClient is connected to server 
localhost:11694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:58.701222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:58.716598Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:58.725846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:58.945814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.156071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.262303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:01.478649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645319860845502:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.478785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.766315Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645298386007250:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:01.766387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:01.908592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:01.971084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.022176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.104478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.171311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.242966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.322373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645324155813319:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.322486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.330249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645324155813324:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.338446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:02.363334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645324155813326:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:02.453941Z node 1 :TX_PROXY ERROR: Actor# [1:7491645324155813382:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:03.708424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.747350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.784997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.817183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.900386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.957085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5276, MsgBus: 29443 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015cb/r3tmp/tmprPMZBs/pdisk_1.dat 2025-04-10T11:30:06.606499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:06.608555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:06.608619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:06.615202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:06.619312Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5276, node 2 2025-04-10T11:30:06.786680Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:06.786705Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:06.786711Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:06.786816Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29443 TClient is connected to server localhost:29443 WaitRootIsUp 'Root'... 
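For readability, the DDL quoted with escaped newlines in the KQP_SLOW_LOG entry of the TPCH9_100 test above expands to the following statements (the same text the log reports, only unescaped and reindented):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The reported 37.243449s duration covers this DDL batch alone; creating three column tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 each provisions hundreds of tablets, which plausibly dominates that time under the address-sanitized build.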
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:07.583399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.604015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.674517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:07.948450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:08.096248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.087547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645359608142016:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.087640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.133284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.191299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.232456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.309435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.356454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.403756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.518545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645359608142538:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.518630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.519070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645359608142543:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.523327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:11.543082Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:30:11.547549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645359608142545:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:11.622668Z node 2 :TX_PROXY ERROR: Actor# [2:7491645359608142599:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:13.100020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.171762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.252386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.324186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.367111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.428251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11486, MsgBus: 61880 2025-04-10T11:27:37.879979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644700110254495:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:37.886600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c3/r3tmp/tmpj9gmRf/pdisk_1.dat 2025-04-10T11:27:38.427387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:38.427465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:38.434836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:27:38.485847Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11486, node 1 2025-04-10T11:27:38.658721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:27:38.658754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:38.658765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:38.658868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61880 TClient is connected to server localhost:61880 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:39.346439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:39.371617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:27:41.757861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644717290124349:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.758814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644717290124341:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.758977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:41.766098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:41.780203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644717290124355:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:41.860715Z node 1 :TX_PROXY ERROR: Actor# [1:7491644717290124406:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:42.292846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:42.520867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:42.521039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:42.521265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:42.521360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:42.521521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:42.521626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:42.521753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:42.521858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:42.521952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:42.522048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:42.522133Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:42.522585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644721585091930:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:42.552107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:42.552204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:42.552431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:42.552543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:42.552688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:42.552819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:42.552925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:42.553053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:42.553168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:42.553313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:42.553445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:42.553552Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491644721585091935:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:42.556367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644721585091924:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:42.556412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644721585091924:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.498133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.498317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.504629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.504728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.511719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.515355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.519882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.526829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.530241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.541501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.548195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.552372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.559164Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.576743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.578761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.589320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.591533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.606593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.610180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.616880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.620874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.631633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.634274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.638561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.640632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.644431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.646672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.652567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.658815Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.663693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.669360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.678800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.682885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.689756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.691471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.699300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.708507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.711660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.724630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.728539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.739490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.745086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.759390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.766433Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:14.784801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:15.088425Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqavqz2c2qxx34mec0ekp7", SessionId: ydb://session/3?node_id=1&id=NDFiNjY2ZjktNWU4NjljMjMtM2E3NWRiMzAtNDFmM2E1YzI=, Slow query, duration: 43.088301s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:15.815092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.815777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491644957808331727:8259];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:29:15.816159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:15.816791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22954, MsgBus: 16203 2025-04-10T11:29:58.079418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645306418359043:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:58.079550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c5/r3tmp/tmpwY41nu/pdisk_1.dat 2025-04-10T11:29:58.568452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:58.588066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:58.588142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:58.591738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22954, node 1 2025-04-10T11:29:58.732942Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:58.732962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:58.732979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:58.733096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16203 TClient is connected to server localhost:16203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:59.599540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.637339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:59.645472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:59.841348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.084474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.205181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:02.467017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645323598229866:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.467132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:02.827295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.870255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.932702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:02.978355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.011539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.068324Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645306418359043:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:03.068624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:03.081994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:03.180145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645327893197680:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.180232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.185316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645327893197685:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.188806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:03.217153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645327893197687:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:03.318557Z node 1 :TX_PROXY ERROR: Actor# [1:7491645327893197745:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:04.507108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.545778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.628182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.683591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.780589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.853933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1739, MsgBus: 24092 2025-04-10T11:30:07.039932Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645345434407247:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:07.039981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c5/r3tmp/tmpMyizQz/pdisk_1.dat 2025-04-10T11:30:07.404389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:07.430671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:07.430753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:07.439204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1739, node 2 2025-04-10T11:30:07.608288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:07.608308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:07.608315Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:07.608415Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24092 TClient is connected to server localhost:24092 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:08.555572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:08.568132Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:08.580026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:08.670452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:08.873545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:08.985942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.646509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645362614278026:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.646582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:11.697482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.749631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.801853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.840634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.895193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:11.986006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:12.046265Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645345434407247:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:12.047741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:12.097023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645366909245843:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:12.097198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:12.097481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645366909245849:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:12.101008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:12.126374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645366909245851:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:12.220089Z node 2 :TX_PROXY ERROR: Actor# [2:7491645366909245907:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:13.422688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.509308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.545130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.599619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.651334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:13.706578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13824, MsgBus: 7494 2025-04-10T11:28:18.391570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644875514285009:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:18.402547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001676/r3tmp/tmpxMRTEG/pdisk_1.dat 2025-04-10T11:28:19.141364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:19.171746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:19.171815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:19.179144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13824, node 1 2025-04-10T11:28:19.444115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:19.444137Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:19.444145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:19.444242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7494 TClient is connected to server 
localhost:7494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:20.568747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:20.595079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:28:23.099697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644896989122024:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:23.099820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:23.100346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644896989122036:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:23.104328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:23.119812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644896989122038:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:23.205232Z node 1 :TX_PROXY ERROR: Actor# [1:7491644896989122089:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:23.385850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644875514285009:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:23.385910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:23.713141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:23.987100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:23.987273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:23.987629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:23.987734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:23.987845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:23.987967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:23.988068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:23.988167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:23.988277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:23.988391Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:23.988487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:23.988598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644896989122342:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:23.994955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:23.995009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:23.995170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:23.995284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:23.995404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:23.995511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:23.995601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:23.995690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:23.995824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:23.995915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:23.996008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:23.996111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491644896989122348:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:24.058861Z node 1 :TX_C ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.948355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.950698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.957134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.958240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.963942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.964814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.970565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.973627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.976327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.979756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.981843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.985343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.987386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.990875Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.992834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.996750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:56.999006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.002487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.004814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.008196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.011170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.013351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.017360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.018441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.023472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.024241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.030110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.030110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.035244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
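[Editor's rendering, not test output] For readability: the DDL recorded in the KQP_SLOW_LOG entries of this run (logged with escaped newlines at 2025-04-10T11:29:15.088425Z above and again at 2025-04-10T11:29:57.559579Z below) unescapes to the following YQL. This is only a rendering of the logged query text, not additional output:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

STORE = COLUMN with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 requests at least 240 column-store partitions per table, which is consistent with the hundreds of distinct TX_COLUMNSHARD tablet_ids reporting finished_tx in the surrounding entries.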
2025-04-10T11:29:57.040797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.043899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.056300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.062118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.062127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.071295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.084760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.096571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.111946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.122105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.143210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.158907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.169370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.183317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.203053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.414439Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:57.559579Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqc9qpa4zry7fg1qspezpz", SessionId: ydb://session/3?node_id=1&id=MTZlMTMyNzItYWQyMjE5MDktYmEyZjYwNTAtMWIxYmE3ZDQ=, Slow query, duration: 38.464876s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:57.853888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:57.854373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:57.855140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645150392229617:7984];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:29:57.855644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TTicketParserTest::LoginGood >> TTicketParserTest::BulkAuthorizationRetryError >> TTicketParserTest::AuthorizationRetryError >> TTicketParserTest::LoginRefreshGroupsWithError >> TTicketParserTest::NebiusAuthenticationUnavailable >> KqpJoin::RightTableKeyPredicate [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 7096, MsgBus: 30058 2025-04-10T11:29:59.464165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645311662008627:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:59.464204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c0/r3tmp/tmplhx7k4/pdisk_1.dat 2025-04-10T11:30:00.179876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:00.179961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:00.186546Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-10T11:30:00.189668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7096, node 1 2025-04-10T11:30:00.514700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:00.514722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:00.514729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:00.514828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30058 TClient is connected to server localhost:30058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:01.668052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:01.694541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:01.707503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:01.952976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:02.212477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:02.321305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:04.470260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645311662008627:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:04.470326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:04.615769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645333136846743:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:04.616007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.062512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.103023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.171972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.255316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.324116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.402623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.490334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645337431814562:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.490537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.494584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645337431814568:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.503743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:05.574336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645337431814570:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:05.674861Z node 1 :TX_PROXY ERROR: Actor# [1:7491645337431814633:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:07.369911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.483511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2099, MsgBus: 3213 2025-04-10T11:30:10.487872Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645359043673690:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c0/r3tmp/tmp6wQo5G/pdisk_1.dat 2025-04-10T11:30:10.555168Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:10.744814Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:10.749180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:10.749263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:10.751678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2099, node 2 2025-04-10T11:30:10.829982Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:10.830000Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:10.830007Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:10.830143Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3213 TClient is connected to server localhost:3213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
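[editor's note] The startup sequence above repeats for every test server in this run: the workload service first fails to fetch the default resource pool (NOT_FOUND, "Resource pool default not found or you don't have access permissions"), TPoolCreatorActor then creates it and schedules a retry around "Transaction ... completed, doublechecking", and a concurrent creator loses the race benignly — TX_PROXY's "path exist, request accepts it" for /Root/.metadata/workload_manager/pools/default is expected first-use noise, not a test failure. As a rough illustration of what gets materialized at that path, a sketch using YDB's CREATE RESOURCE POOL statement; the pool in the log is the built-in `default`, which the system creates itself, so the sketch uses a stand-in name, and the settings are assumptions rather than values from the run:

    -- Sketch only: the tests create the built-in `default` pool implicitly on
    -- first use. An explicit, user-defined equivalent would look roughly like
    -- this; the limits are illustrative assumptions, not logged values.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- -1 = unlimited (assumed)
        QUEUE_SIZE = -1
    );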
2025-04-10T11:30:11.419969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.476538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.580858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.750514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:11.837550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:14.574295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645376223544495:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:14.574390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:14.630433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:14.712908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:14.802557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:14.881252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:14.928801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:14.995075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.104157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645380518512314:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.104266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.104515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645380518512319:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.108589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:15.126383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645380518512321:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:15.194271Z node 2 :TX_PROXY ERROR: Actor# [2:7491645380518512374:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:15.490174Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645359043673690:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:15.490263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:16.339701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:16.457404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::AccessServiceAuthenticationOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableKeyPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 3512, MsgBus: 12942 2025-04-10T11:30:10.185707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645356575885998:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:10.192058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b7/r3tmp/tmpFC5aiV/pdisk_1.dat 2025-04-10T11:30:10.746203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:10.750402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:10.750491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:10.752098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3512, node 1 2025-04-10T11:30:10.950800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:10.950822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:10.950830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:10.950963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12942 TClient is connected to server localhost:12942 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:12.290673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:12.362477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:12.392090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:12.663107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:12.916060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:13.020035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:15.120250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645378050724118:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.120352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.158607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645356575885998:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:15.171829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:15.575584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.620218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.658089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.694608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.776961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.847995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:15.944501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645378050724638:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.944589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.944845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645378050724643:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.949239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:15.982239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645378050724645:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:16.039094Z node 1 :TX_PROXY ERROR: Actor# [1:7491645382345691997:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:17.386468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] Test command err: Trying to start YDB, gRPC: 15115, MsgBus: 21652 2025-04-10T11:29:26.009778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645162019793086:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:26.010116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001603/r3tmp/tmpPsfDGH/pdisk_1.dat 2025-04-10T11:29:26.650517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:26.650594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:26.654166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:26.728615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15115, node 1 2025-04-10T11:29:26.878831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:26.878853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:26.878859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:26.878951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21652 TClient is connected to server localhost:21652 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:27.835206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.866535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:30.477279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645183494630104:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.477431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.477860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645183494630116:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.482692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:30.497682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645183494630118:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:30.604588Z node 1 :TX_PROXY ERROR: Actor# [1:7491645183494630170:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:30.990814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645162019793086:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:30.990926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:30.998283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.171788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.210205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.290669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.374495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.590645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.663825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.702878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.747593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.801457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.838589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:29:31.886597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:29:31.964421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.228366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:29:33.274052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.323942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.368491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.413773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.468250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.513820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.563683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.633002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.681624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.756541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.811639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.858472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.921571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.979611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.023906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.107176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... ontroller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.048325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.051860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.063293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.069668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.077989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.080381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.088254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.093076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.103482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.108754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.118487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.122809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.128942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.133321Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.143579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.148124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.153867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.158120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.168224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038427;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.172637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.187119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.195019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.201731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.209493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.227862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.232186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.245065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.258704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.272996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.295064Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.309063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.331421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.345761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.372796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.390933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.398019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.405954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.420812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.422946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.427811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.439453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.443222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.453055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.454563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.509545Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:11.666409Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqct1ncp61bdxvjg4d1hvn", SessionId: ydb://session/3?node_id=1&id=ZmE4ZDNhNi0xOGE3MWFlMi02NTFiZWFlLTUyNzg1NzYz, Slow query, duration: 35.868565s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:12.010030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:12.010567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:12.010975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7491645217854375646:2983];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-04-10T11:30:12.011371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14302, MsgBus: 7095 2025-04-10T11:27:46.305750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644737224322833:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:46.306211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ac/r3tmp/tmpK5zxSc/pdisk_1.dat 2025-04-10T11:27:46.963647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:27:46.963740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:27:46.965781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14302, node 1 2025-04-10T11:27:47.007697Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:27:47.007719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:27:47.106804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:27:47.222181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
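[editor's note] Both KQP_SLOW_LOG records in this section (duration 38.464876s earlier, 35.868565s here) carry the same escaped query text. With the \n escapes expanded, the DDL that ran slowly under the ASAN build reads as follows — reproduced verbatim from the log, with only whitespace restored:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting on each column-store table is consistent with the long runs of distinct tablet_ids reporting finished_tx above: the schema transaction fans out across hundreds of column shards, which plausibly accounts for most of the ~36-38s durations.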
2025-04-10T11:27:47.222209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:27:47.222217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:27:47.222335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7095 TClient is connected to server localhost:7095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:27:48.470504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:27:51.302700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644737224322833:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:27:51.302771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:27:51.786903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644758699159909:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.787099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.787463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644758699159921:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:27:51.791814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:27:51.814560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:27:51.814789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644758699159923:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:27:51.901930Z node 1 :TX_PROXY ERROR: Actor# [1:7491644758699159974:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:27:52.268873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:27:52.759692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:52.759919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:52.760207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:52.760326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:52.760426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:52.760643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:52.760760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:52.760889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:52.760994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:52.761122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:52.761273Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:52.761381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644762994127563:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:27:52.761669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:27:52.761699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:27:52.761821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:27:52.761935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:27:52.762053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:27:52.762197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:27:52.762321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:27:52.762422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:27:52.762515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:27:52.762636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:27:52.762733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:27:52.762848Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491644762994127551:2358];tablet_id=720 ... essTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.484269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.484577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.492114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.493224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.498897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.502739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.508901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.509306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.515837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.519614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.525074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.525266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.531736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.538261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.538294Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.547802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.552388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.558107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.560796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.565468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.567042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.572078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.573613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.578842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.582594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.584437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.594489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.596552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.606117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.611724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.614170Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.617390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.619357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.624901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.625089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.634673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.643830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.702445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:29:28.763624Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqb86cd04k5v5xzbkqw1cm", SessionId: ydb://session/3?node_id=1&id=ZDFkNjIxNDctOGU5ZGYyNTUtMWQ3OWFiNzktZmE0ODkzNTE=, Slow query, duration: 44.014885s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:29:29.429677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:29.430118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:29:29.431999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:08.475068Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqd5a654vdden3gm5dj0gf", SessionId: ydb://session/3?node_id=1&id=ZDFkNjIxNDctOGU5ZGYyNTUtMWQ3OWFiNzktZmE0ODkzNTE=, Slow query, duration: 21.139236s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, 
text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> YdbIndexTable::MultiShardTableOneUniqIndex >> YdbIndexTable::MultiShardTableOneIndex >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] 
recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:25:54.390304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:25:54.390403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:54.390437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:25:54.390466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:25:54.390517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:25:54.390541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:25:54.390614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:25:54.390684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:25:54.391030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:54.470341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:25:54.470420Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:25:54.487245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:54.487512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:25:54.487692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:25:54.495909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:25:54.496245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:25:54.496907Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:54.497527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:25:54.500561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:54.501928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:25:54.501983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:54.502163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:25:54.502209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:54.502263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:25:54.502414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:25:54.509471Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:25:54.617380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:25:54.617680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.617946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:25:54.618242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:25:54.618297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.620979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:54.621122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:25:54.621362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.621421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:25:54.621475Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:25:54.621511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:25:54.623836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.623903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:25:54.623947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:25:54.625699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.625763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.625811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:54.625884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:25:54.629208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:25:54.631287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:25:54.631496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:25:54.632550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:25:54.632678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:25:54.632722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:54.633021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:25:54.633093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:25:54.633304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:25:54.633380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:25:54.635663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T11:25:54.635707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:25:54.635883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:25:54.635927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:25:54.636174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:25:54.636237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:25:54.636333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:54.636363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:25:54.636397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:25:54.636446Z no ... uccess 2025-04-10T11:30:34.575526Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 
0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:34.584374Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:30:34.584674Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 331us result status StatusSuccess 2025-04-10T11:30:34.585500Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> TExternalTableTest::ParallelCreateExternalTable >> TExternalTableTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 30261, MsgBus: 10944 2025-04-10T11:29:41.890392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645234449057181:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:41.890431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e7/r3tmp/tmpUdbVew/pdisk_1.dat 
2025-04-10T11:29:42.521466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:42.521585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:42.523700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:42.625883Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30261, node 1 2025-04-10T11:29:42.800165Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:42.800184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:42.800193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:42.800317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10944 TClient is connected to server localhost:10944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:43.633597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:43.646888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:45.728027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645251628926971:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:45.728141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:45.728410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645251628926983:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:45.732350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:45.745653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645251628926985:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:45.817994Z node 1 :TX_PROXY ERROR: Actor# [1:7491645251628927036:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:46.282534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.391422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.440776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.482816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.517340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.719492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.768502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.802430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.841700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.878301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.894304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645234449057181:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:46.894370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:46.930781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:29:46.968974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:29:47.007926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.667777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:29:47.714809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.769368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.820192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.868539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:29:47.939676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.001445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.038320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.090402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.180825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.230065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.269572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.320311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.349193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.381930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.421487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.488152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 10714; 2025-04-10T11:30:24.943712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.947615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.948946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.953735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.955989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.959067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.960972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.966568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.967107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.973516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.975928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.980736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.982179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.987092Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.993205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.995108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:24.999268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.002225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.007836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.012887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.017969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.023578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.024931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.029235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.033014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.035806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.042726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.052586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.054284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.061122Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.065499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.070833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.073948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.076661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.079237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.081653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.085417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.087249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.092946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.096059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.099919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.102316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.105467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:25.218508Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqd80p7vvy0ch7pgc8j52a", SessionId: ydb://session/3?node_id=1&id=NjhkY2RlZTYtMTM3ZDU2ZjItZDdlNmNiMjYtN2EzOGM4YTk=, Slow query, duration: 35.115657s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:25.576952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:25.577323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:25.577809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491645337528291388:4730];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:30:25.578124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527
>> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> TExternalTableTest::DropExternalTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 18474, MsgBus: 17464
2025-04-10T11:28:33.646799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644942403412111:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:33.702519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001669/r3tmp/tmpZ65YvV/pdisk_1.dat 2025-04-10T11:28:34.593252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:34.605953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:34.606059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:34.608296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18474, node 1 2025-04-10T11:28:34.972207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:34.972230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:34.972238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:34.972350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17464 TClient is connected to server localhost:17464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:36.042004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:38.283826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644963878249253:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.283901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.284080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644963878249264:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:38.287861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:38.301499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644963878249267:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:38.394310Z node 1 :TX_PROXY ERROR: Actor# [1:7491644963878249318:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:38.654241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491644942403412111:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:38.654317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:38.910932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:39.310494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:39.310768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:39.311086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:39.311254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:39.311401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:39.311545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:39.311685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:39.311831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:39.311984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:39.312136Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:39.314179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:39.314225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:39.314395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:39.314501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:39.314599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:39.314686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:39.314770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:39.314862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:39.314949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:39.315045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:39.315164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:39.315262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491644968173216873:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:39.319133Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:39.319242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491644968173216889:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:39.368330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491644968173216897:2362];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.646396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.650484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.657154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.660836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.671433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.675398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.681799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.685547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.696144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.699482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.709710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.710315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.720842Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.724148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.735031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.738547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.744632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.749220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.759841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.762991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.780874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.784375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.794778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.794808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.801133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.805446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.821208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.827081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.833615Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.835116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.841294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.852262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.855366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.861670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.864744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.875495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.879128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.885334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.888932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.899230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.904346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.913965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.915318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.925551Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:08.929424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:09.372222Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqcp9j6rqncyy55gccq7kv", SessionId: ydb://session/3?node_id=1&id=YzU3ZTYwZmYtNGY2ZTUzNDYtNDgwZDA3MDEtNGMwM2Q4NDA=, Slow query, duration: 37.417191s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:09.714880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:09.715356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:09.715705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645251641104742:9424];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:30:09.716070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> TExternalTableTest::ReadOnlyMode [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::Decimal [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:30:39.454373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:39.454572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.454615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:39.454664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:39.458233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:39.458313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:39.458452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.458555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:39.466548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:39.745445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:39.745514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:39.799519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:39.799841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:39.800005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:39.855001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:39.855237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:39.855896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:39.856123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:39.862802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.864248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:39.864310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.864472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:39.864535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:39.864584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:39.864676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.887170Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:40.061597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:40.061854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.062091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:40.066955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:40.067063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.075056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:40.075218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:40.075437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.075530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:40.075587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:40.075626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:40.077646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.077708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:40.077742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:40.079634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.079683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.079731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.079786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.083578Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:40.091109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:40.091377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:40.092425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:40.092572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.092635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.092964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:40.093037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.093250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:40.093336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:40.099820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:40.099870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:40.100080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:40.100134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:40.100451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.100498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:40.100598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:40.100634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.100670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:40.100699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.100765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:40.100819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.100854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:40.100889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:40.100951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:40.100987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:40.101020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:40.107014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... satisfy waiter [1:370:2361] 2025-04-10T11:30:40.234575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-10T11:30:40.234640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-10T11:30:40.234664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:370:2361] 2025-04-10T11:30:40.234726Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-10T11:30:40.234807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-10T11:30:40.234836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:370:2361] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-10T11:30:40.235339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:40.235543Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 234us result status StatusSuccess 2025-04-10T11:30:40.235849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.236549Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:40.236764Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 184us result status StatusSuccess 2025-04-10T11:30:40.237038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.237749Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:40.237895Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 147us result status StatusSuccess 2025-04-10T11:30:40.239403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 
CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.242685Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:40.242911Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 246us result status StatusSuccess 2025-04-10T11:30:40.243215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.243770Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:40.243989Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 180us result status StatusSuccess 2025-04-10T11:30:40.244272Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:30:39.454613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:39.454776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.454832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:39.454880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-04-10T11:30:39.458293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:39.458364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:39.458456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.458578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:39.462693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:39.685428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:39.685502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:39.716022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:39.716361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:39.716534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:39.734118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:39.734480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:39.735259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:39.735492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:39.741284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.742734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:39.742806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.742997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:39.743069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:39.743129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:39.743237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.759012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:40.016047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:40.016316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.016556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:40.016800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:40.016886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.023448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:40.023642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:40.023836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.023905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:40.023944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:40.023979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:40.027323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.027398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:40.027443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:40.035112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.035209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.035275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.035345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.039716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:40.047079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:40.047354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:40.048549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:40.048715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.048778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.049153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:40.049227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:40.049432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:40.049539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:40.055684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:40.055742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:40.055959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:40.056009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:40.056358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.056414Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:40.056521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:40.056560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.056602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:40.056636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.056701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:40.056758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:40.056806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:40.056846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:40.056924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:40.056965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:40.057002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:40.063127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:40.889071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:30:40.889180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:30:40.911501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-04-10T11:30:40.911711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-10T11:30:40.912036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:40.912093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:40.912338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-10T11:30:40.912462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:40.912531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-10T11:30:40.912605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-10T11:30:40.912930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.912991Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:40.913055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-04-10T11:30:40.913212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:40.919070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.919249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.919324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 
129 2025-04-10T11:30:40.919384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:30:40.919439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-10T11:30:40.920542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.920628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.920656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-10T11:30:40.920687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-10T11:30:40.920719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:30:40.920794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-04-10T11:30:40.932144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-10T11:30:40.932439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-04-10T11:30:40.935035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-10T11:30:40.935866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:40.936052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:40.936120Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-10T11:30:40.936308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-04-10T11:30:40.936504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:30:40.936587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:30:40.936948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-10T11:30:40.951499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T11:30:40.951584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:40.951774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-10T11:30:40.951883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:40.951925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-10T11:30:40.951979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2025-04-10T11:30:40.952248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-10T11:30:40.952298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-04-10T11:30:40.952431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-10T11:30:40.952470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:30:40.952520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-10T11:30:40.952556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:30:40.952599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-10T11:30:40.952648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-10T11:30:40.952719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-10T11:30:40.952761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-10T11:30:40.952854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:30:40.952899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-10T11:30:40.952934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-10T11:30:40.952990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-04-10T11:30:40.953993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.954114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.961899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-10T11:30:40.962004Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:30:40.962076Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-10T11:30:40.963505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.963622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-10T11:30:40.963677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-10T11:30:40.963740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-10T11:30:40.963776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:30:40.963859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-10T11:30:40.967689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-10T11:30:40.967822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:30:39.597126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:39.597216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.597249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:39.597294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:39.597360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:39.597389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:39.597448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:39.597535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:39.597855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:39.684469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:39.684535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:39.705031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:39.705415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:39.705641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:39.715405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:39.715717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:39.719678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:39.720943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:39.726116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.735586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:39.735669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.736639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:39.736704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:39.736793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:39.736894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.745171Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:39.883981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:39.885636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.887399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 0 2025-04-10T11:30:39.888449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:39.888536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.891677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:39.891822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:39.892001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.892147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:39.892182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:39.892210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:39.894406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.894457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:39.894489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:39.896274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.896321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.896376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:39.896435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:39.900757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:39.902731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:39.903563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:39.904520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:39.904657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 
5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:39.904703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:39.905836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:39.905911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:39.906129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:39.906270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:39.908718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:39.908759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:39.908950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:39.909005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:39.909327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:39.909371Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:39.909454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:39.909486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:39.909522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:39.909565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:39.909614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:39.909649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:39.909693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:39.909725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:39.909777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:39.909812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:39.909841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:39.911501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-10T11:30:41.255301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:41.255409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:41.255459Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-10T11:30:41.255570Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-10T11:30:41.255714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:41.255786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:30:41.255835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:30:41.257075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:30:41.257661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:30:41.258654Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:41.258687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:41.258810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:30:41.258905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:30:41.258970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:30:41.259067Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:41.259099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:30:41.259131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:30:41.259173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-10T11:30:41.259195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:30:41.259358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2025-04-10T11:30:41.259394Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:30:41.259487Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:30:41.259521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:41.259558Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:30:41.259600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:41.259639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:30:41.259675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:41.259710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:30:41.259739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:30:41.259797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:30:41.259830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:30:41.259864Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-10T11:30:41.259895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-10T11:30:41.259922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:30:41.259942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:30:41.260966Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.261037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.261067Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:30:41.261120Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:30:41.261154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:30:41.261991Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.262059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.262086Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046678944, txId: 101 2025-04-10T11:30:41.262111Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:30:41.262135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:30:41.262617Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.262677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:41.262701Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:30:41.262725Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:30:41.262748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:30:41.262803Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:30:41.264196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:30:41.265288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:30:41.265362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:30:41.265554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:30:41.265590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:30:41.265970Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:30:41.266049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:30:41.266082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:333:2324] TestWaitNotification: OK eventTxId 101 2025-04-10T11:30:41.266529Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:41.266721Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 215us result status StatusSuccess 2025-04-10T11:30:41.267007Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:30:41.737575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:41.737680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:41.737719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:41.737750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:41.737806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:41.737835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:41.737901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:41.737974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:41.738303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:41.824223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:41.824281Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:41.839999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:41.840317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:41.840485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:41.848006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:41.848197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:41.848884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:41.849121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:41.851244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:41.852469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:41.852537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:41.852692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:41.852753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:41.852798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:41.852889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:41.859575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:42.007593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:42.007858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.008115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:42.008353Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:42.008421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.019807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:42.019965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:42.020202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.020272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:42.020313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:42.020377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:42.031004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.031075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:42.031112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:42.037747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.037808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.037865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:42.037931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.041829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:42.044451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:42.044665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:42.045703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:42.045841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:42.045895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:42.046209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:42.046277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:42.046471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:42.046569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:42.050169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:42.050230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:42.050440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.050495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:42.050815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.050862Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:42.050953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:42.050998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.051042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:42.051075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.051110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:42.051151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.051183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:42.051215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:42.051279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:42.051315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:42.051346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:42.053279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
2025-04-10T11:30:42.152201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:30:42.152990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:30:42.171735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:42.171789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:42.171971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:30:42.172076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:30:42.172168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.172212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:30:42.172252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T11:30:42.172276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-10T11:30:42.172552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.172599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:30:42.172697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:30:42.172729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:30:42.172767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:30:42.172797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:30:42.172844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-10T11:30:42.172902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:30:42.172938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:30:42.172969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:30:42.173039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:30:42.173104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-10T11:30:42.173141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-04-10T11:30:42.173168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-10T11:30:42.179353Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:30:42.179464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:30:42.179497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:30:42.179539Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-10T11:30:42.179608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:30:42.190854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:30:42.190977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:30:42.191008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:30:42.191040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-10T11:30:42.191085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:30:42.191187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-10T11:30:42.203165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:30:42.211802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:30:42.212099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:30:42.212151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:30:42.212566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:30:42.212668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:30:42.212722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:325:2316] TestWaitNotification: OK eventTxId 102 2025-04-10T11:30:42.213442Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:42.213676Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 268us result status StatusSuccess 2025-04-10T11:30:42.214009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-10T11:30:42.217929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:42.218316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-10T11:30:42.218419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-04-10T11:30:42.218555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-04-10T11:30:42.232722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-10T11:30:42.232931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:30:42.233276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:30:42.233323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:30:42.233795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:30:42.233889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:30:42.233957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:333:2324] TestWaitNotification: OK eventTxId 103 >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> KqpJoinOrder::TPCH12_100 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:30:42.247499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:42.247603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:42.247646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:42.247686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:42.247736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:42.247763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:42.247835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:42.247920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:42.248289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-10T11:30:42.342646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:42.342715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:42.360230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:42.360547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:42.360718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:42.368083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:42.368296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:42.369031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:42.369299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:42.371355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.372678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:42.372740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.372917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:42.373018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:42.373067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:42.373190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.380268Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:42.569193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:42.569458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.569710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:42.569953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:42.570031Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.572681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:42.572849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:42.573066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.573147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:42.573191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:42.573244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:42.575418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.575471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:42.575507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:42.577529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.577591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.577644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:42.577690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.581708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:42.584103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:42.584342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:42.585446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:42.585593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:42.585650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-10T11:30:42.585976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:42.586035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:42.586241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:42.586387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:42.588793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:42.588844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:42.589076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.589167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:42.589544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.589593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:42.589698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:42.589733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.589772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:42.589807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.589859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:42.589912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:42.589947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:42.589978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:42.590048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:42.590088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:42.590123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:42.592140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
d: 2] 2025-04-10T11:30:42.670775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:42.670806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:30:42.670855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:30:42.670885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:30:42.671066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:30:42.671105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:30:42.671233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:30:42.671298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:42.671345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:30:42.671377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:42.671411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:30:42.671453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:30:42.671489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:30:42.671518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:30:42.671585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:30:42.671624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-10T11:30:42.671657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-10T11:30:42.671691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-10T11:30:42.672707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:42.672789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:42.672822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:30:42.672860Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:30:42.672899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:42.673755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:42.673903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:30:42.673938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:30:42.673976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-10T11:30:42.674015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:30:42.674082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:30:42.685294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:30:42.685650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:30:42.685895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:30:42.685940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:30:42.686500Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:30:42.686624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:30:42.686668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:304:2295] TestWaitNotification: OK eventTxId 101 2025-04-10T11:30:42.687185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:42.687466Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 260us result status StatusSuccess 2025-04-10T11:30:42.687840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-10T11:30:42.695564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:42.695993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-10T11:30:42.696086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-10T11:30:42.696133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-10T11:30:42.704009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:42.704251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:30:42.704605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:30:42.704658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:30:42.705458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:30:42.705588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:30:42.705643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:312:2303] 
TestWaitNotification: OK eventTxId 102 2025-04-10T11:30:42.706211Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:42.706440Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 264us result status StatusPathDoesNotExist 2025-04-10T11:30:42.706611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> TExternalTableTest::SchemeErrors >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TTicketParserTest::AuthorizationModify [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH12_100 [GOOD] Test command err: Trying to start YDB, gRPC: 26655, MsgBus: 15082 2025-04-10T11:28:56.810769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645041209173034:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:56.830230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001643/r3tmp/tmpAvrzi9/pdisk_1.dat 2025-04-10T11:28:57.851556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:28:57.903078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:57.903154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:57.910726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:57.911801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 26655, node 1 2025-04-10T11:28:58.214898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:58.214917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:58.214924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-04-10T11:28:58.215023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15082 TClient is connected to server localhost:15082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:59.201298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:59.231162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:01.814585Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645041209173034:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:01.835456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:02.230847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645066978977471:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:02.230965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:02.231250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645066978977483:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:02.235189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:02.260630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645066978977485:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:02.316164Z node 1 :TX_PROXY ERROR: Actor# [1:7491645066978977536:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:02.759352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:03.136117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:03.136320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:03.136574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:03.136717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:03.136815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:03.136922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:03.137057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:03.137179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:03.137307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:03.137417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:03.137518Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:03.137635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645066978977810:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:03.143108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:03.143161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:03.143346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:03.143477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:03.143588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:03.143686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:03.143784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:03.143881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:03.143976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:03.144075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:03.144169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645066978977806:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:03.144262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=7207518622403790 ... 
ller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.489712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.493820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.494893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.500112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.505367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.514122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.516111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.520757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.522755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.526650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.528212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.533368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.533832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.549503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.558393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.564600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.567548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.576935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.578535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.588827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.593862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.600246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.606137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.608808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.617802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.618888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.626960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.630692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.641701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.644941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.649483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.656405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.661573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.663282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.671944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.676042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.686958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.689147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.698757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.703958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.711351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.719224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.728540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.729839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:25.743027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:26.034743Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqd8chdmbnvs0kjje0adsm", SessionId: ydb://session/3?node_id=1&id=MzQ5MjA3NmItZTc5ZDY4ODUtODJjMjVhMzAtYzk3Njc5NjM=, Slow query, duration: 35.553051s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:30:26.329873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:26.330300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:26.330731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645410576424267:12436];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933;
2025-04-10T11:30:26.331107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141]
2025-04-10T11:30:46.427882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:30:46.427989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:30:46.428026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:30:46.428056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:30:46.428098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:30:46.428128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:30:46.428181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:30:46.428259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:30:46.428588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:30:46.513028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources:
"ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:30:46.513164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:46.529522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:46.529827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:46.530006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:46.536152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:46.536354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:46.537052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:46.537293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:46.539165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:46.540409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:46.540463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:46.540636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:46.540695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:46.540732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:46.540826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.547516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:30:46.670941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.671170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.671416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:46.671644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:46.671709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:30:46.674115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:46.674300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:46.674477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.674554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:46.674589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:46.674621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:46.676538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.676588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:46.676620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:46.678368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.678412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.678455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:46.678499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:46.681937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:46.684077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:46.684273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:46.685145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:46.685278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.685329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:46.685623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
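Note on the KQP_SLOW_LOG entry above: the statement text is logged with escaped newlines. Expanded for readability (the DDL below is reproduced verbatim from that log entry, not new material), the slow query was:

CREATE TABLE t1 (
 id1 Int32 NOT NULL,
 PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
 id2 Int64 NOT NULL,
 t1_id1 Int64 NOT NULL,
 -- random_field2 Int32
 PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
 id3 Int16 NOT NULL,
 -- random_field3 Int32
 PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each table is a column store pre-split into at least 240 partitions, which is consistent with the long run of per-tablet TX_COLUMNSHARD_TX "finished_tx" warnings for tx 281474976710714 preceding it.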
2025-04-10T11:30:46.685673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:46.685831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:46.685894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:46.687789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:46.687831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:46.688014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:46.688055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:46.688364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:46.688404Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:46.688494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:46.688528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:46.688561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:46.688590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:46.688639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:46.688678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:46.688720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:46.688753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:46.688810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:46.688845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:46.688874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:46.690773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-04-10T11:30:46.774745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.775072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-04-10T11:30:46.775167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.775462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-04-10T11:30:46.777580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.777758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-10T11:30:46.780541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.780841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-04-10T11:30:46.780927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.781078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2025-04-10T11:30:46.783174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.783353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-10T11:30:46.786426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.786726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-04-10T11:30:46.786817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.786930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-04-10T11:30:46.789012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.789197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-10T11:30:46.791955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.792255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-04-10T11:30:46.792339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.792452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-04-10T11:30:46.794584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.794742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-04-10T11:30:46.797603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.797943Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-04-10T11:30:46.798022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.798479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-04-10T11:30:46.800625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.800789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-04-10T11:30:46.803760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:46.804159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" 
} } 2025-04-10T11:30:46.804234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-04-10T11:30:46.804363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-10T11:30:46.806515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:46.806729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-04-10T11:30:19.773238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645396988312255:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:19.773999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fd/r3tmp/tmpyUFvy6/pdisk_1.dat 2025-04-10T11:30:20.371177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:20.379454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.379548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:20.381269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25373, node 1 2025-04-10T11:30:20.486812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:20.486834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:20.486840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:20.486965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.053582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:21.094442Z node 1 :TICKET_PARSER DEBUG: Ticket 241EDE4A7D926294AEEDBE11516CFA65D58AD2AA660DDA6547DE0BD75B0D93F2 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:30:24.366691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645417797216660:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fd/r3tmp/tmpjL8Tg3/pdisk_1.dat 2025-04-10T11:30:24.444036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:24.600695Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:24.617435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:24.617515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:24.627350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20262, node 2 2025-04-10T11:30:24.851166Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:24.851186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:24.851192Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:24.851290Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:25.215535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:25.295448Z node 2 :TICKET_PARSER DEBUG: Ticket E6E6B6F1FE96B5A9E12F0F9F3C6842533C170042D070C17C91B054ACCDACBC16 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:30:29.756426Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645440308263415:2114];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:29.865481Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fd/r3tmp/tmpfYtUXz/pdisk_1.dat 2025-04-10T11:30:30.131438Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:30.147885Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:30.147986Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:30.150019Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6640, node 3 2025-04-10T11:30:30.362869Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.362891Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.362899Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.363008Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:30.827800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:30.852367Z node 3 :TICKET_PARSER DEBUG: Ticket D69803C94C37CB1AE1E08501B98B0A2B1CFB28399C6D69DC2797133EDCBC730D () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-10T11:30:30.853147Z node 3 :TICKET_PARSER ERROR: Ticket D69803C94C37CB1AE1E08501B98B0A2B1CFB28399C6D69DC2797133EDCBC730D: Cannot create token from certificate. Client certificate failed verification 2025-04-10T11:30:34.802624Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645460825456055:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:34.885909Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fd/r3tmp/tmpwBZCZC/pdisk_1.dat 2025-04-10T11:30:35.074767Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:35.159164Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:35.159253Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:35.165375Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27295, node 4 2025-04-10T11:30:35.356069Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:35.356088Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:35.356096Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:35.356211Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18753 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:35.751071Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:35.774766Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:35.783201Z node 4 :TICKET_PARSER DEBUG: Ticket 763DEA8EA04A618FC92641BC7F86632A222F692D0E19A0C186D1E4580339E007 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:30:41.218308Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645492111735092:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:41.292430Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016fd/r3tmp/tmpCPjVkN/pdisk_1.dat 2025-04-10T11:30:41.452030Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:41.504381Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:41.504486Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:41.511599Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25873, node 5 2025-04-10T11:30:41.687156Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:41.687183Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:41.687194Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:41.687358Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21684 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:42.078760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:42.087222Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:42.092926Z node 5 :TICKET_PARSER DEBUG: Ticket 36E04F202C9AF54D463B402532B01283F8A5A0DB913FE4CE92DFD248FEA1CFF1 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-10T11:30:42.093518Z node 5 :TICKET_PARSER ERROR: Ticket 36E04F202C9AF54D463B402532B01283F8A5A0DB913FE4CE92DFD248FEA1CFF1: Cannot create token from certificate. Client certificate failed verification |94.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-04-10T11:30:20.147241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645398680542456:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:20.147284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170b/r3tmp/tmpq147et/pdisk_1.dat 2025-04-10T11:30:20.992993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.993128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:21.006873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:21.101739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14892, node 1 2025-04-10T11:30:21.289530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:21.289553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:21.289560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:21.289659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.791178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:21.830800Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:21.830852Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:21.830877Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:21.831283Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:21.831386Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:19434 2025-04-10T11:30:21.837017Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-10T11:30:21.860161Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:21.862310Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T11:30:21.867694Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Connect to grpc://localhost:4866 2025-04-10T11:30:21.868678Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-10T11:30:21.927006Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-10T11:30:21.930524Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-10T11:30:25.570204Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645423420519172:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:25.633490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170b/r3tmp/tmpgdcB0v/pdisk_1.dat 2025-04-10T11:30:25.798628Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:25.847057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:25.847129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:25.848279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25229, node 2 2025-04-10T11:30:26.054657Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:26.054682Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:26.054693Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:26.054827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:30:26.602957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:30:26.610393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:26.612227Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-04-10T11:30:30.374737Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645442729930336:2075];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170b/r3tmp/tmpZ2ZMU7/pdisk_1.dat 2025-04-10T11:30:30.566661Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:30.762138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:30.765234Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:30.765912Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:30.778923Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2761, node 3 2025-04-10T11:30:30.959717Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.959737Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.959744Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.959863Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.253592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.263069Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:31.264909Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-04-10T11:30:35.968298Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645463454904925:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170b/r3tmp/tmpTnwWYD/pdisk_1.dat 2025-04-10T11:30:36.148544Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:36.228980Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:36.261400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:36.261731Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:36.263628Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12163, node 4 2025-04-10T11:30:36.422749Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:36.422774Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:36.422784Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (em ... 
nied" retryable:0 2025-04-10T11:30:37.004694Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-04-10T11:30:37.006691Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:37.006720Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:37.006731Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:37.006766Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:37.006958Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:37.012693Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Status 16 Access Denied 2025-04-10T11:30:37.012823Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:37.012849Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-04-10T11:30:37.013372Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:37.013402Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:37.013411Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:37.013440Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:37.013609Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-10T11:30:37.020799Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Status 16 Access Denied 2025-04-10T11:30:37.020932Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:37.020959Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-10T11:30:37.021532Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:37.021555Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:37.021568Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:37.021592Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:37.021758Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:37.028338Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:37.028493Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:37.028568Z node 4 :TICKET_PARSER DEBUG: Ticket **** 
(8E120919) () has now valid token of user1@as 2025-04-10T11:30:37.030728Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:37.030756Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:37.030768Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:37.030798Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:37.030984Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-10T11:30:37.037382Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:37.037513Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:37.037600Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:37.038271Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:37.038292Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:37.038302Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:37.038326Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-04-10T11:30:37.038478Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-04-10T11:30:37.043667Z node 4 :GRPC_CLIENT DEBUG: [517000068d88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:37.046398Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-04-10T11:30:37.046497Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:41.342780Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645491031278890:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:41.342822Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170b/r3tmp/tmpZsdnne/pdisk_1.dat 2025-04-10T11:30:41.722463Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:41.775570Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:41.775653Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:41.783358Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29237, node 5 2025-04-10T11:30:41.939069Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:41.939094Z node 5 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-04-10T11:30:41.939120Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:41.939438Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:42.219809Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:42.235633Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:42.242432Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:42.242470Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:42.242481Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:42.242519Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:42.242601Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Connect to grpc://localhost:17925 2025-04-10T11:30:42.243459Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:42.261287Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:42.261612Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:42.261703Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:42.266429Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:42.266466Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:42.266482Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:42.266519Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:42.266568Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-10T11:30:42.266743Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] 
Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:42.267448Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:42.274981Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:42.275150Z node 5 :GRPC_CLIENT DEBUG: [51700009e708] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:42.275402Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:42.275444Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-10T11:30:42.275543Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as
>> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD]
>> TTicketParserTest::BulkAuthorizationModify
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest
>> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD]
Test command err: 2025-04-10T11:30:20.671044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645401782033751:2081];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c8/r3tmp/tmpLzHO7d/pdisk_1.dat 2025-04-10T11:30:21.090968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:21.276399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:21.284596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:21.284686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:21.287042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25856, node 1 2025-04-10T11:30:21.426725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:21.426763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:21.426773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:21.426878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9199 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.768724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:21.812213Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:21.817281Z node 1 :TICKET_PARSER DEBUG: Ticket 85BEBD9A728C83D8D76FD36A5263D1D02B1ECE2B48D68AD3114E5BE40691F9FD () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-10T11:30:25.502890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645420524234587:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:25.502938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c8/r3tmp/tmpUn9l0F/pdisk_1.dat 2025-04-10T11:30:25.894898Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:25.927469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:25.927547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:25.935459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30966, node 2 2025-04-10T11:30:26.134779Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:26.134801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:26.134806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:26.134897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31289 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:26.424661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:26.433076Z node 2 :TICKET_PARSER DEBUG: Ticket A744696A76A070386F83F0001DDE2C919F4F256ECFC5072055ADEB9470675141 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-04-10T11:30:26.433658Z node 2 :TICKET_PARSER ERROR: Ticket A744696A76A070386F83F0001DDE2C919F4F256ECFC5072055ADEB9470675141: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2025-04-10T11:30:30.358898Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645443823397202:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:30.359647Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c8/r3tmp/tmp7ezfgB/pdisk_1.dat 2025-04-10T11:30:30.546999Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:30.561028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:30.561333Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:30.563409Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2366, node 3 2025-04-10T11:30:30.819045Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.819068Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.819075Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.819203Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17769 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.154919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.169244Z node 3 :TICKET_PARSER DEBUG: Ticket 0C3D5AAE3478F696FD177F0C82FBADA502B38CA83D395D8638BFF784D1206453 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-10T11:30:31.169810Z node 3 :TICKET_PARSER ERROR: Ticket 0C3D5AAE3478F696FD177F0C82FBADA502B38CA83D395D8638BFF784D1206453: Cannot create token from certificate. Client certificate failed verification test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c8/r3tmp/tmp1nT8dv/pdisk_1.dat 2025-04-10T11:30:35.585846Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:35.597819Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:35.653673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:35.653790Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:35.663299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16734, node 4 2025-04-10T11:30:35.838753Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:35.838778Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:35.838786Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:35.838910Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1539 WaitRootIsUp 'Root'... 
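The two certificate rejections above ('Client`s certificate and server`s certificate have different issuers' and 'Client certificate failed verification') are the ticket parser refusing to mint a token from a client certificate that does not chain to the expected issuer. A minimal illustrative sketch of the issuer comparison using OpenSSL; the helper name is invented and this is not YDB's actual verification code:

    #include <openssl/x509.h>

    // Hypothetical helper: accept a client certificate for token creation
    // only when it was issued by the same CA as the server's certificate;
    // otherwise report the "different issuers" error seen in the log.
    bool SameIssuer(X509* clientCert, X509* serverCert) {
        return X509_NAME_cmp(X509_get_issuer_name(clientCert),
                             X509_get_issuer_name(serverCert)) == 0;
    }
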
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:36.293000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:36.307041Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:36.309975Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:36.310017Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:36.310028Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:36.310489Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-10T11:30:36.310556Z node 4 :GRPC_CLIENT DEBUG: [51700002a808] Connect to grpc://localhost:25569 2025-04-10T11:30:36.325344Z node 4 :GRPC_CLIENT DEBUG: [51700002a808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-10T11:30:36.359095Z node 4 :GRPC_CLIENT DEBUG: [51700002a808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-10T11:30:36.359356Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-10T11:30:36.359481Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:36.360020Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:36.360037Z node 4 :TICKET_PARSER TRACE: 
CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:36.360044Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:36.360096Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-10T11:30:36.360313Z node 4 :GRPC_CLIENT DEBUG: [51700002a808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-10T11:30:36.370371Z node 4 :GRPC_CLIENT DEBUG: [51700002a808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-10T11:30:36.370671Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-10T11:30:36.370732Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-04-10T11:30:40.468344Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645485491981221:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:40.468892Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016c8/r3tmp/tmpTBZhT9/pdisk_1.dat 2025-04-10T11:30:40.824774Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:40.835628Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:40.835731Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:40.841122Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12998, node 5 2025-04-10T11:30:40.985893Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:40.985913Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:40.985922Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:40.986039Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29388 WaitRootIsUp 'Root'... 
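NebiusAccessService::Authorize above batches several permission checks into one request and answers with a result per check key (key 0: user account resolved, key 1: PERMISSION_DENIED); whether the ticket stays usable then depends on which permissions were required. A rough sketch of that aggregation under assumed types — the enum, map layout, and required-flag are illustrative, not the actual proto API:

    #include <map>

    enum class ECheckResult { Ok, PermissionDenied, Unavailable };

    // Hypothetical aggregation: a denied optional permission (key 1 in the
    // first request above) still yields a valid token, while a denied
    // required permission turns the whole ticket into a permanent error.
    bool TicketStaysValid(const std::map<int, ECheckResult>& results,
                          const std::map<int, bool>& required) {
        for (const auto& [key, result] : results) {
            if (result != ECheckResult::Ok && required.at(key)) {
                return false; // e.g. 'something.write ... - PERMISSION_DENIED'
            }
        }
        return true; // '() has now valid token of user1@as'
    }
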
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:41.421047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:41.444033Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:41.444063Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:41.444070Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:41.444137Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-10T11:30:41.444178Z node 5 :GRPC_CLIENT DEBUG: [517000110308] Connect to grpc://localhost:19816 2025-04-10T11:30:41.445017Z node 5 :GRPC_CLIENT DEBUG: [517000110308] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-10T11:30:41.478311Z node 5 :GRPC_CLIENT DEBUG: [517000110308] Status 14 Service Unavailable NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 14: "Service Unavailable" 2025-04-10T11:30:41.482389Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:41.482433Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:41.482466Z node 5 
:TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:41.482542Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-10T11:30:41.482873Z node 5 :GRPC_CLIENT DEBUG: [517000110308] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-10T11:30:41.491027Z node 5 :GRPC_CLIENT DEBUG: [517000110308] Status 14 Service Unavailable 2025-04-10T11:30:41.502289Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:41.502314Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:41.502341Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable'
>> TTicketParserTest::AuthenticationRetryError [GOOD]
>> TTicketParserTest::AuthenticationRetryErrorImmediately
>> TTicketParserTest::AuthorizationWithUserAccount [GOOD]
>> TTicketParserTest::AuthorizationUnavailable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD]
Test command err: Trying to start YDB, gRPC: 20176, MsgBus: 8398 2025-04-10T11:28:52.542197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645023839149892:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:52.543337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00164d/r3tmp/tmpodYbES/pdisk_1.dat 2025-04-10T11:28:53.229799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:28:53.229870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:28:53.231248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:28:53.250595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20176, node 1 2025-04-10T11:28:53.551820Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:28:53.551839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:28:53.551852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:28:53.551941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8398 TClient is connected to server localhost:8398 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:28:54.669096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:28:57.016909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645045313987026:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.017019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.017404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645045313987038:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:28:57.021368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:28:57.052380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645045313987040:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:28:57.146007Z node 1 :TX_PROXY ERROR: Actor# [1:7491645045313987091:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:28:57.495759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:28:57.518326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645023839149892:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:28:57.518410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:28:57.913194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:57.913406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:57.913662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:57.913779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:57.913874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:57.913968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:57.914066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:57.915231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:28:57.915271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:28:57.915438Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:28:57.915538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:28:57.915635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:28:57.915718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:28:57.915795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:28:57.915894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:57.916009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:57.916121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:57.916216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:57.916348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645045313987360:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:57.918818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:28:57.918983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:28:57.919080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:28:57.919174Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:28:57.919258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645045313987346:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:28:58.008045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645045313987423:2361];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp: ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.217771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.223300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.224567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.230205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.230646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.236106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.238129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.242504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.243714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.248829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.249050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.254565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.256165Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.260088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.261607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.266006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.268610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.272767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.275114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.279073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.281340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.285031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.289327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.290877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.295548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.298048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.300895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.303533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.306400Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.309492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.315394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.318082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.320806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.324378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.326301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.330192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.331834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.336308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.338181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.341905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.343899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.347849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.349089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.353062Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.354232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:27.550416Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqd6p63t13395wp9cx9zpv", SessionId: ydb://session/3?node_id=1&id=Nzk4NTY3YzYtYmU4OWMzZGEtZDg1ZjczMTMtYzQ0MTZmMjY=, Slow query, duration: 38.807091s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:27.934942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:27.935358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:27.939123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645397501363422:11464];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:30:27.939584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> TTicketParserTest::LoginRefreshGroupsGood [GOOD]
>> TTicketParserTest::LoginCheckRemovedUser
>> TTicketParserTest::NebiusAuthenticationRetryError [GOOD]
>> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest
>> ColumnBuildTest::BaseCase
>> ColumnBuildTest::ValidDefaultValue
>> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest
>> ColumnBuildTest::AlreadyExists
>> ColumnBuildTest::CancelBuild
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest
>> TTicketParserTest::BulkAuthorizationModify [GOOD]
>> TTicketParserTest::NebiusAuthorizationModify [GOOD]
>> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD]
>> TTicketParserTest::AuthorizationUnavailable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest
>> TTicketParserTest::BulkAuthorizationModify [GOOD]
Test command err: 2025-04-10T11:30:19.569068Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645394967460088:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:19.570314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170c/r3tmp/tmp0Wgysg/pdisk_1.dat 2025-04-10T11:30:20.219422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.219498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:20.223631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:20.262363Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7843, node 1 2025-04-10T11:30:20.468511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:20.468535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:20.468546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:20.468650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.052574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
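Every server bootstrap in this log walks the Hive node through the same VolatileState sequence: Unknown -> Disconnected -> Connecting -> Connected. A toy transition function covering only the happy path visible here (illustrative; not Hive's real state machine):

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    // Happy-path transitions exactly as logged by HIVE#72057594037968897.
    EVolatileState Next(EVolatileState state) {
        switch (state) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            default:                           return EVolatileState::Connected;
        }
    }
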
2025-04-10T11:30:21.099440Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:21.099531Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Connect to grpc://localhost:14993 2025-04-10T11:30:21.112096Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:21.143806Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-10T11:30:21.143933Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:21.143963Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:21.150434Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:21.150691Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:21.152973Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-10T11:30:21.153222Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:21.153247Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:22.625216Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-10T11:30:22.625339Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:22.625525Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:22.638267Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-10T11:30:22.640517Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:22.640575Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:24.569394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645394967460088:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:24.569468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:24.630347Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 
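The TICKET_PARSER records around this point show the retry pattern under test: a Status 14 (Service Unavailable) reply is recorded as a retryable error, a later "Refreshing ticket" pass repeats the BulkAuthorize call, and the refresh that follows finally resolves a subject. Below is a minimal, self-contained C++ sketch of that loop; the names (AuthResult, CheckTicket) are hypothetical, and this illustrates the pattern visible in the log rather than YDB's actual implementation.

#include <chrono>
#include <functional>
#include <optional>
#include <string>
#include <thread>

// Hypothetical outcome of one authorization attempt.
struct AuthResult {
    bool ok = false;        // a subject was resolved
    bool retryable = false; // e.g. gRPC UNAVAILABLE
    std::string subject;    // e.g. "user1@as" on success
};

// Retry a retryable check with a fixed refresh delay, as the
// "Refreshing ticket" records suggest. Gives up after maxAttempts.
std::optional<std::string> CheckTicket(
        const std::function<AuthResult()>& attempt,
        int maxAttempts,
        std::chrono::milliseconds refreshDelay) {
    for (int i = 0; i < maxAttempts; ++i) {
        AuthResult r = attempt();
        if (r.ok) {
            return r.subject;    // success: cache the valid token
        }
        if (!r.retryable) {
            return std::nullopt; // permanent error: stop retrying
        }
        std::this_thread::sleep_for(refreshDelay);
    }
    return std::nullopt;
}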
2025-04-10T11:30:24.630422Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:24.630643Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:24.654750Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:24.658478Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-10T11:30:34.032433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645456113830792:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:34.089336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170c/r3tmp/tmpWaGEtq/pdisk_1.dat 2025-04-10T11:30:34.283267Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:34.340707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:34.340804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:34.349244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22292, node 2 2025-04-10T11:30:34.514745Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:34.514764Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:34.514771Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:34.514868Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:34.767438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
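Throughout these tests the parser never logs a secret in full: "AKIAIOSFODNN7EXAMPLE" is shown as AKIA****MPLE (B3EDC139), an API key as ApiK****alid (AB5B5EA8), and short tokens as **** (8E120919). The visible shape is first four characters, "****", last four characters, plus an 8-hex-digit tag. The tag looks like a hash of the full value, but the exact function is not visible in this log; the sketch below uses FNV-1a purely as a stand-in, so both the hash choice and the function names are assumptions.

#include <cstdint>
#include <cstdio>
#include <string>

// Stand-in 32-bit FNV-1a hash; the real tag function behind
// e.g. "AKIA****MPLE (B3EDC139)" is not visible in this log.
static uint32_t Fnv1a(const std::string& s) {
    uint32_t h = 2166136261u;
    for (unsigned char c : s) {
        h = (h ^ c) * 16777619u;
    }
    return h;
}

// Reproduce the display shape seen in the log: first 4 chars,
// "****", last 4 chars, and an 8-hex-digit tag in parentheses.
std::string MaskTicket(const std::string& ticket) {
    char tag[16];
    std::snprintf(tag, sizeof(tag), "%08X", Fnv1a(ticket));
    if (ticket.size() <= 8) {
        return std::string("**** (") + tag + ")";
    }
    return ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4)
         + " (" + tag + ")";
}

// MaskTicket("AKIAIOSFODNN7EXAMPLE") -> "AKIA****MPLE (xxxxxxxx)"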
2025-04-10T11:30:34.777651Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:34.779792Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:34.779836Z node 2 :GRPC_CLIENT DEBUG: [51700000cc08] Connect to grpc://localhost:62243 2025-04-10T11:30:34.780651Z node 2 :GRPC_CLIENT DEBUG: [51700000cc08] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:34.798570Z node 2 :GRPC_CLIENT DEBUG: [51700000cc08] Status 14 Service Unavailable 2025-04-10T11:30:34.798979Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:34.799018Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:34.799092Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:34.799356Z node 2 :GRPC_CLIENT DEBUG: [51700000cc08] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:34.801618Z node 2 :GRPC_CLIENT DEBUG: [51700000cc08] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } ... N: Table profiles were not loaded 2025-04-10T11:30:44.059072Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:44.059167Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:44.067329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14999, node 4 2025-04-10T11:30:44.242171Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:44.242195Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:44.242202Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:44.242325Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:30:44.603618Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:30:44.621174Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:44.621211Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:44.621225Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:44.621282Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:44.621332Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-04-10T11:30:44.621355Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-04-10T11:30:44.621376Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-10T11:30:44.621396Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-04-10T11:30:44.621465Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Connect to grpc://localhost:22826 2025-04-10T11:30:44.624866Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:44.625232Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:44.625355Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:44.625522Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:44.625670Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path 
{ id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:44.644578Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:44.644769Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Status 16 Access Denied 2025-04-10T11:30:44.645216Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Status 16 Access Denied 2025-04-10T11:30:44.645362Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Status 16 Access Denied 2025-04-10T11:30:44.645527Z node 4 :GRPC_CLIENT DEBUG: [51700010d908] Status 16 Access Denied 2025-04-10T11:30:44.646705Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-10T11:30:44.646784Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:44.646806Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:44.646826Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:44.646848Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:44.646876Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T11:30:44.648378Z node 4 :GRPC_CLIENT DEBUG: [51700010e008] Connect to grpc://localhost:8383 2025-04-10T11:30:44.649338Z node 4 :GRPC_CLIENT DEBUG: [51700010e008] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-10T11:30:44.657479Z node 4 :GRPC_CLIENT DEBUG: [51700010e008] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-10T11:30:44.666505Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00170c/r3tmp/tmpWL50Xm/pdisk_1.dat 2025-04-10T11:30:48.744109Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:48.901075Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:48.916241Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:48.916540Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:48.923174Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27285, node 5 2025-04-10T11:30:49.023769Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:49.023790Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:49.023798Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:49.023921Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:49.368978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:49.375207Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:49.383181Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:49.383214Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:49.383226Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:49.383312Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-04-10T11:30:49.383364Z node 5 :GRPC_CLIENT DEBUG: [51700006fd88] Connect to grpc://localhost:27793 2025-04-10T11:30:49.384371Z node 5 :GRPC_CLIENT DEBUG: [51700006fd88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-10T11:30:49.398408Z node 5 :GRPC_CLIENT DEBUG: [51700006fd88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:49.398781Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:49.399348Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:49.399372Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:49.399379Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:49.399473Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-10T11:30:49.399690Z node 5 :GRPC_CLIENT DEBUG: [51700006fd88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-10T11:30:49.406913Z node 5 :GRPC_CLIENT DEBUG: 
[51700006fd88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:49.409493Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-04-10T11:30:19.566993Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645395066142412:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:19.568283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d7/r3tmp/tmpEmZ0dd/pdisk_1.dat 2025-04-10T11:30:20.143483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:20.167793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.167896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:20.178670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22280, node 1 2025-04-10T11:30:20.466298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:20.466330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:20.466339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:20.466441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.057174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
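The bulk requests above are sent with result_filter: ALL_FAILED, so the response carries only the checks that failed; a reply with just a subject and no per-item entries, as seen here for something.read and something.write together, means every requested permission passed. A small sketch of that client-side interpretation follows; the type names are hypothetical.

#include <set>
#include <string>

// Hypothetical parsed form of a BulkAuthorizeResponse requested
// with result_filter: ALL_FAILED.
struct BulkResponse {
    bool hasSubject = false;      // subject { user_account { ... } } present
    std::set<std::string> failed; // permissions reported back as failed
};

// Everything not reported back as failed was granted.
std::set<std::string> GrantedPermissions(
        const std::set<std::string>& requested,
        const BulkResponse& resp) {
    std::set<std::string> granted;
    if (!resp.hasSubject) {
        return granted;           // no subject resolved: nothing usable
    }
    for (const auto& p : requested) {
        if (resp.failed.count(p) == 0) {
            granted.insert(p);
        }
    }
    return granted;
}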
2025-04-10T11:30:21.110421Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:21.110467Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:21.110481Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:21.110941Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:21.112019Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:16448 2025-04-10T11:30:21.114294Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-10T11:30:21.140285Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-10T11:30:21.140693Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:21.140716Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:21.140823Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-10T11:30:21.142317Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-10T11:30:21.142505Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:23.459670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645414250126896:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:23.459713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d7/r3tmp/tmpv3m8H9/pdisk_1.dat 2025-04-10T11:30:23.740665Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15702, node 2 2025-04-10T11:30:23.839077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:23.839194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:23.938541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:23.958668Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:23.958690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:23.958696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:23.958793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63481 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:24.369904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:24.388243Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:24.393070Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:24.393121Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:24.393130Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:24.393214Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:24.393256Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Connect to grpc://localhost:18716 2025-04-10T11:30:24.395701Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-10T11:30:24.410422Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Status 14 Service Unavailable 2025-04-10T11:30:24.410753Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:24.410783Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:24.410856Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:24.411073Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-10T11:30:24.412840Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] 
Status 14 Service Unavailable 2025-04-10T11:30:24.413076Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:24.413114Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:25.474415Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-10T11:30:25.474524Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:25.474737Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-10T11:30:25.482607Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Status 14 Service Unavailable 2025-04-10T11:30:25.482984Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-10T11:30:25.483025Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:27.482620Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-10T11:30:27.482701Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:27.482942Z node 2 :GRPC_CLIENT DEBUG: [517000030d88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessServi ... 
1 2025-04-10T11:30:43.798604Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:43.798614Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:43.798668Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:43.798888Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-10T11:30:43.805407Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-04-10T11:30:43.805516Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "" 2025-04-10T11:30:43.805545Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-10T11:30:43.806242Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:43.806265Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:43.806275Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:43.806321Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:43.806502Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-10T11:30:43.822415Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-10T11:30:43.826337Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:43.829879Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:43.829907Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:43.829918Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:43.830008Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:43.830287Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" 
resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-10T11:30:43.832674Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-10T11:30:43.832961Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:43.833484Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:43.833503Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:43.833513Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:43.833557Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( monitoring.view) 2025-04-10T11:30:43.833720Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "monitoring.view" } container_id: "gizmo" iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "monitoring.view" } container_id: "gizmo" iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-10T11:30:43.835646Z node 4 :GRPC_CLIENT DEBUG: [51700004bc08] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-10T11:30:43.835900Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:48.247993Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645521083386996:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:48.248752Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016d7/r3tmp/tmpkghSp6/pdisk_1.dat 2025-04-10T11:30:48.588773Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:48.643790Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:48.643882Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:48.645289Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13435, node 5 2025-04-10T11:30:48.794873Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:48.794896Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:48.794904Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:48.795037Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:49.113170Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:49.130714Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:49.130753Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:49.130764Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:49.130821Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-10T11:30:49.130866Z node 5 :GRPC_CLIENT DEBUG: [5170000af808] Connect to grpc://localhost:12348 2025-04-10T11:30:49.131750Z node 5 :GRPC_CLIENT DEBUG: [5170000af808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-10T11:30:49.141733Z node 5 :GRPC_CLIENT DEBUG: [5170000af808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-10T11:30:49.142195Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:49.142642Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:49.142664Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:49.142674Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:49.142730Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-10T11:30:49.142947Z node 5 :GRPC_CLIENT DEBUG: [5170000af808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize 
request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-10T11:30:49.144853Z node 5 :GRPC_CLIENT DEBUG: [5170000af808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-04-10T11:30:49.145107Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-04-10T11:30:21.170984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645404068644799:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:21.171743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161e/r3tmp/tmpO7HLZL/pdisk_1.dat 2025-04-10T11:30:21.788289Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:21.815954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:21.816026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:21.817937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24061, node 1 2025-04-10T11:30:21.988774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:21.988797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:21.988803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:21.988907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
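Unlike the five parallel per-permission AuthorizeRequest calls earlier, the Nebius AuthorizeRequest records above batch their checks in a map keyed by an integer (checks { key: 0 ... } checks { key: 1 ... }), and the response echoes the same keys in results { key: N ... }, so each verdict can be matched back to its check without relying on ordering. A minimal sketch of that correlation, with hypothetical types:

#include <map>
#include <string>

// Hypothetical per-key outcome of a Nebius Authorize call.
struct CheckResult {
    bool allowed = false;
    std::string account;  // e.g. "user1" when allowed
};

// Match results back to permissions by key, mirroring the
// checks{key:N}/results{key:N} pairing visible in the log.
std::map<std::string, bool> CorrelateByKey(
        const std::map<int, std::string>& checks,     // key -> permission
        const std::map<int, CheckResult>& results) {  // key -> outcome
    std::map<std::string, bool> verdict;
    for (const auto& [key, permission] : checks) {
        auto it = results.find(key);
        verdict[permission] = (it != results.end() && it->second.allowed);
    }
    return verdict;
}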
2025-04-10T11:30:22.357710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:22.387079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:22.391213Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:22.391268Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:1450 2025-04-10T11:30:22.393907Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-10T11:30:22.424786Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:22.430429Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161e/r3tmp/tmpWY61bZ/pdisk_1.dat 2025-04-10T11:30:25.641152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:25.756849Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:25.803106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:25.803193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:25.808389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11105, node 2 2025-04-10T11:30:26.030747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:26.030767Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:26.030773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:26.030870Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:26.572516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
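Across these tests the parser renders the resolved subject with a suffix naming its source: access-service user accounts become user1@as (and an API key resolves to ApiKey-value-valid@as), while the Yandex Passport lookup earlier mapped user1 to login1@passport. A sketch of that rendering rule as observed in this log; the enum and function are hypothetical.

#include <string>

// Subject sources observed in this log and the suffix each gets.
enum class SubjectSource {
    AccessService,   // user_account { id } -> "@as"
    YandexPassport,  // yandex_passport_user_account { login } -> "@passport"
};

std::string RenderSubject(const std::string& id, SubjectSource src) {
    switch (src) {
        case SubjectSource::AccessService:
            return id + "@as";        // e.g. "user1@as"
        case SubjectSource::YandexPassport:
            return id + "@passport";  // e.g. "login1@passport"
    }
    return id;
}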
2025-04-10T11:30:26.586605Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:26.598317Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-04-10T11:30:26.598390Z node 2 :GRPC_CLIENT DEBUG: [517000041088] Connect to grpc://localhost:30088 2025-04-10T11:30:26.599243Z node 2 :GRPC_CLIENT DEBUG: [517000041088] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-04-10T11:30:26.618412Z node 2 :GRPC_CLIENT DEBUG: [517000041088] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-04-10T11:30:26.619037Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-04-10T11:30:30.378392Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645444896162846:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:30.389706Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161e/r3tmp/tmpD1jd04/pdisk_1.dat 2025-04-10T11:30:30.639366Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20172, node 3 2025-04-10T11:30:30.722436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:30.722521Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:30.806760Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:30.814196Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.814216Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.814225Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.814341Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.188324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
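The node 3 output above adds a second retryable case: Status 1 (CANCELLED) is handled like Status 14 (Service Unavailable), both producing "has now retryable error message", whereas Status 16 (Access Denied) earlier yielded a permanent error with retryable:0. A sketch of that classification, covering only the status codes that actually appear in this log:

// Classification of the gRPC status codes observed in this log.
// Other codes exist; their handling is not shown here.
enum class GrpcStatus {
    Cancelled = 1,          // retryable in the log ("CANCELLED")
    Unavailable = 14,       // retryable ("Service Unavailable")
    PermissionDenied = 16,  // permanent ("Access Denied", retryable:0)
};

constexpr bool IsRetryable(GrpcStatus s) {
    switch (s) {
        case GrpcStatus::Cancelled:
        case GrpcStatus::Unavailable:
            return true;
        case GrpcStatus::PermissionDenied:
            return false;
    }
    return false;  // codes not seen in this log: assumed permanent here
}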
2025-04-10T11:30:31.194909Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:31.201375Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:31.201414Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:31.201428Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:31.201457Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:31.201518Z node 3 :GRPC_CLIENT DEBUG: [517000082e08] Connect to grpc://localhost:3447 2025-04-10T11:30:31.202297Z node 3 :GRPC_CLIENT DEBUG: [517000082e08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-10T11:30:31.212742Z node 3 :GRPC_CLIENT DEBUG: [517000082e08] Status 14 Service Unavailable 2025-04-10T11:30:31.213637Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:31.213672Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:31.213823Z node 3 :GRPC_CLIENT DEBUG: [517000082e08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-10T11:30:31.218050Z node 3 :GRPC_CLIENT DEBUG: [517000082e08] Status 1 CANCELLED 2025-04-10T11:30:31.218389Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161e/r3tmp/tmpx9PcUT/pdisk_1.dat 2025-04-10T11:30:35.246123Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:35.280015Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:35.330404Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:35.330495Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:35.335511Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1787, node 4 2025-04-10T11:30:35.518771Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:35.518793Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:35.518802Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:35.518923Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:35.853817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:35.880970Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:35.886296Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:35.886370Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Connect to grpc://localhost:11142 2025-04-10T11:30:35.887598Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:35.902314Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Status 14 Service Unavailable 2025-04-10T11:30:35.906370Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:35.906404Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:35.906585Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:35.918275Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Status 14 Service Unavailable 2025-04-10T11:30:35.922454Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:36.990288Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-10T11:30:36.990335Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:36.990551Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:36.999419Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Status 14 Service Unavailable 2025-04-10T11:30:36.999756Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:37.992165Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-10T11:30:37.992201Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:37.992357Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Request AuthenticateRequest { signature { access_key_id: 
"AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:37.994641Z node 4 :GRPC_CLIENT DEBUG: [517000110a08] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:37.995440Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-10T11:30:48.875577Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645522202636820:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:48.875680Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161e/r3tmp/tmp7M53Vx/pdisk_1.dat 2025-04-10T11:30:49.233865Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:49.251389Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:49.251482Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:49.255330Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4046, node 5 2025-04-10T11:30:49.414389Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:49.414418Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:49.414425Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:49.414557Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:49.721137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:49.734505Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:49.739897Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:49.739964Z node 5 :GRPC_CLIENT DEBUG: [517000126f08] Connect to grpc://localhost:22270 2025-04-10T11:30:49.740959Z node 5 :GRPC_CLIENT DEBUG: [517000126f08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:49.754833Z node 5 :GRPC_CLIENT DEBUG: [517000126f08] Status 14 Service Unavailable 2025-04-10T11:30:49.754999Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:49.755047Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-10T11:30:49.755364Z node 5 :GRPC_CLIENT DEBUG: [517000126f08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-10T11:30:49.761517Z node 5 :GRPC_CLIENT DEBUG: [517000126f08] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:49.761978Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD]
Test command err: 2025-04-10T11:30:19.612151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645395006018406:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:19.612741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016eb/r3tmp/tmpfdS5OA/pdisk_1.dat 2025-04-10T11:30:20.223651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:20.255375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.255452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:20.267035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22325, node 1 2025-04-10T11:30:20.466659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:20.466678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:20.466686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:20.466791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4905 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.131886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:21.157020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:21.159343Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:21.162771Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:1639 2025-04-10T11:30:21.167709Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:21.188150Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-10T11:30:21.188949Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-10T11:30:21.188980Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:21.189018Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:21.189281Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:21.191898Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-10T11:30:21.192187Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-10T11:30:21.192216Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:22.626304Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-10T11:30:22.626356Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:22.626588Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { 
access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:22.647545Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-10T11:30:22.648734Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-10T11:30:22.648771Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:24.569397Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645395006018406:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:24.569545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:24.634476Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-10T11:30:24.634526Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:24.634900Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:24.642369Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:24.644514Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:24.644730Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-10T11:30:34.143439Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645454968549503:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:34.193237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016eb/r3tmp/tmppkGl3a/pdisk_1.dat 2025-04-10T11:30:34.391693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:34.391786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:34.394835Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:34.395504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14607, node 2 2025-04-10T11:30:34.590735Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:34.590757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:34.590764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:34.590850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3169 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:34.968063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:34.979111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:34.983967Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:34.984056Z node 2 :GRPC_CLIENT DEBUG: [517000034c88] Connect to grpc://localhost:24860 2025-04-10T11:30:34.984881Z node 2 :GRPC_CLIENT DEBUG: [517000034c88] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:35.010623Z node 2 :GRPC_CLIENT DEBUG: [517000034c88] Status 14 Service Unavailable 2025-04-10T11:30:35.011076Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-10T11:30:35.011111Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:35.011149Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:35.018437Z node 2 :GRPC_CLIENT DEBUG: [517000034c88] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:35.020645Z node 2 :GRPC_CLIENT DEBUG: [517000034c88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:35.021084Z node 2 :TI ... CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:45.087580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:45.098715Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:45.101952Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:45.101981Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:45.101988Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:45.102029Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:45.102110Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Connect to grpc://localhost:7891 2025-04-10T11:30:45.103038Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:45.138972Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:45.139309Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:45.139346Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T11:30:45.142142Z node 4 :GRPC_CLIENT DEBUG: [517000100a88] Connect to grpc://localhost:2680 2025-04-10T11:30:45.143128Z node 4 :GRPC_CLIENT DEBUG: [517000100a88] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-10T11:30:45.158624Z node 4 :GRPC_CLIENT DEBUG: [517000100a88] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-10T11:30:45.159074Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-10T11:30:45.159616Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:45.159652Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:45.159659Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:45.159691Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-10T11:30:45.159852Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:45.166725Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Status 16 Access Denied 2025-04-10T11:30:45.167092Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-04-10T11:30:45.167120Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-10T11:30:45.167926Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:45.167947Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:45.167954Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:45.167983Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 
2025-04-10T11:30:45.168033Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-10T11:30:45.168200Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:45.168765Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:45.171535Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:45.171647Z node 4 :GRPC_CLIENT DEBUG: [517000100388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:45.171979Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:45.172035Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-10T11:30:45.172053Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-10T11:30:45.172199Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-10T11:30:49.231159Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645524487537678:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:49.231196Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016eb/r3tmp/tmpoIxMzh/pdisk_1.dat 2025-04-10T11:30:49.465896Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:49.478315Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:49.478406Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:49.479510Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23906, node 5 2025-04-10T11:30:49.550840Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:49.550867Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:49.550902Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:49.551050Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:49.837205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:49.844911Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:49.847030Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:49.847065Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:49.847073Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:49.847106Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:49.847158Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-10T11:30:49.847205Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Connect to grpc://localhost:9984 2025-04-10T11:30:49.848425Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:49.848762Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:49.892298Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-10T11:30:49.892460Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Status 14 Service Unavailable 2025-04-10T11:30:49.897134Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-10T11:30:49.897232Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-04-10T11:30:49.897262Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:49.897291Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-10T11:30:49.897338Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 
2025-04-10T11:30:49.897580Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:49.898265Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-10T11:30:49.902571Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Status 1 CANCELLED 2025-04-10T11:30:49.902787Z node 5 :GRPC_CLIENT DEBUG: [517000004708] Status 1 CANCELLED 2025-04-10T11:30:49.903973Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-04-10T11:30:49.904034Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-04-10T11:30:49.904065Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED'
>> ColumnBuildTest::AlreadyExists [GOOD]
>> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD]
>> TTicketParserTest::NebiusAccessKeySignatureUnsupported
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:30:52.621217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:52.621305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.621364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:52.621409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:52.626549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:52.626618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:52.626710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.626784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:52.627141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:52.753320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:30:52.753370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:52.778287Z node
1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:52.778604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:52.778794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:52.815718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:52.815961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:52.816675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:52.817044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:52.818996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.821724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:52.821780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:52.821893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:52.822120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:52.834126Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:30:53.028638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:53.028893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.029146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:53.029394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:53.029444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.031626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.031795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:53.031988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:30:53.032084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:53.032119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:53.032156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:53.034182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.034240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:53.034282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:53.036003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.036057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.036110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.036167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.039827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:53.041842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:53.042041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:53.043061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.043182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:53.043250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.043555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:53.043608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.043775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:53.043883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-10T11:30:53.050327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:53.050385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:53.050553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:53.050601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:53.050837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.050881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:53.050999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.051052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.051090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.051136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.051180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:53.051220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.051287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:53.051317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:53.051399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:53.051437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:53.051470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:53.053677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.053783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.053823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:780:2661] TestWaitNotification: OK eventTxId 105 2025-04-10T11:30:56.414765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-04-10T11:30:56.415052Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 326us result status StatusSuccess 2025-04-10T11:30:56.415538Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-10T11:30:56.423382Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } 2025-04-10T11:30:56.427143Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 
2025-04-10T11:30:56.427282Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3016], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:56.427482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-04-10T11:30:56.427569Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-04-10T11:30:56.427649Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3016], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:56.435807Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:56.435984Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:56.439185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" 
OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-04-10T11:30:56.439449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-04-10T11:30:56.439805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-04-10T11:30:56.449317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-10T11:30:56.449544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-04-10T11:30:56.449744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-04-10T11:30:56.449842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-10T11:30:56.449922Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-04-10T11:30:56.450054Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-10T11:30:56.450859Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1145:3016] 2025-04-10T11:30:56.451168Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { 
message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 }
>> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD]
>> ColumnBuildTest::CancelBuild [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
Test command err: Trying to start YDB, gRPC: 2347, MsgBus: 15201 2025-04-10T11:29:59.703581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645309216953816:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:59.732781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c2/r3tmp/tmpAQYD4z/pdisk_1.dat 2025-04-10T11:30:00.434404Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:00.438395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:00.438472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:00.445528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2347, node 1 2025-04-10T11:30:00.699965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:00.699998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:00.700010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:00.700138Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15201 TClient is connected to server localhost:15201 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:01.734118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:04.545813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645330691790966:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:04.545958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:04.546257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645330691790978:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:04.550317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:04.565322Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:30:04.565807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645330691790980:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:04.634652Z node 1 :TX_PROXY ERROR: Actor# [1:7491645330691791031:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:04.651555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645309216953816:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:04.651939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:04.998538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.179297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.217733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.256517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.320027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.485977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.542860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.589015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.629780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.693347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.753481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.812849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-04-10T11:30:05.878793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:06.946846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:30:06.990353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.024360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.066927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.114769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.171069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.253905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.312751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.415994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.460115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.495075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.540336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.594268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.628008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.691313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.737579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:30:07.797561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.976114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.980605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.988182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.995421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.002566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.006632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.015043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.021914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.025055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.036108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.039202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.055233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.073812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.077686Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.098024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.111165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.116293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.125725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.137684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.142982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.149484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.164586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.172003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.226990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.233441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.234318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.239798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.241157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.246658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:30:45.248516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.256743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.263456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.267184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.273010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.276692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.282951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.291586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.297006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.303467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.307486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.309495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.316383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.322369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.324772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.327648Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.335964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:45.476543Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdv9qe4m1hvn5sw19qtyc", SessionId: ydb://session/3?node_id=1&id=MmZkYWY5NTMtNTcyMzk3MGUtODVkOTQzNjYtYWEzMzdjYg==, Slow query, duration: 35.625863s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:45.861861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:45.862188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:45.862642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TProxyActorTest::TestCreateSemaphoreInterrupted |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:30:52.628551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:52.628662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.628725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:52.628763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:52.628800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:52.628827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:52.628881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.628940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:52.629296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:52.751642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:30:52.751711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:52.785802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:52.786053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:52.786283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:52.796798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:52.797041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:52.803612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:52.804945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:52.815128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.825045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:52.825125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:52.825184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:52.825370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:52.846972Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:30:53.076598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-04-10T11:30:53.076832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.077047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:53.077298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:53.077356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.079846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.080002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:53.080193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.080269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:53.080302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:53.080332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:53.082266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.082332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:53.082367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:53.084082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.084129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.084173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.084222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.087740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:53.092704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:53.092889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:53.094376Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.094513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:53.094578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.094910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:53.094982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.095199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:53.095292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:53.097769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:53.097815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:53.097985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:53.098026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:53.098454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.098521Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:53.098617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.098650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.098691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.098745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.098784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:53.098823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.098871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:53.098901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:53.098993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:53.099039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:53.099072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:53.101283Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.101396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.101435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:57.411307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-10T11:30:57.411425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:30:57.411575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-10T11:30:57.411606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-10T11:30:57.411638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-10T11:30:57.411929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:57.412015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:57.412065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-04-10T11:30:57.412108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-04-10T11:30:57.413776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-10T11:30:57.413821Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-04-10T11:30:57.413895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-10T11:30:57.413922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:30:57.413953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-10T11:30:57.413975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:30:57.414016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-04-10T11:30:57.414074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify 
send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710761 2025-04-10T11:30:57.414121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-10T11:30:57.414170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-10T11:30:57.414198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-10T11:30:57.414255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-10T11:30:57.415863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-10T11:30:57.415922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-10T11:30:57.415997Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-04-10T11:30:57.416072Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1168:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:57.417631Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:30:57.417722Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1168:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:57.417774Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-10T11:30:57.419654Z node 1 
:BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:30:57.419725Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1168:3024], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:30:57.419772Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T11:30:57.419894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:30:57.419929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1192:3048] TestWaitNotification: OK eventTxId 102 2025-04-10T11:30:57.423877Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-10T11:30:57.424195Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 } 2025-04-10T11:30:57.428025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:30:57.428260Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 265us result status StatusSuccess 2025-04-10T11:30:57.429048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::BaseCase [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD] >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:30:52.621191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:52.621328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.621411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:52.621459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:52.624142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-10T11:30:52.624209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:52.624318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.624438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:52.629755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:52.768716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:30:52.768790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:52.791651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:52.791966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:52.792129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:52.800802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:52.801076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:52.804038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:52.805918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:52.814038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.821773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:52.821865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.822099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:52.822176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:52.822241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:52.822433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:52.829846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:30:52.982322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:52.984599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:52.988542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-04-10T11:30:52.993742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:52.993876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.001378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.001595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:53.001826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.001967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:53.002013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:53.002052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:53.011184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.011274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:53.011316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:53.018044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.018125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.018200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.018296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.026140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:53.034910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:53.036972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:53.038387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.038553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:53.038641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.040681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:30:53.040765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.040987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:53.041129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:53.048502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:53.048570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:53.048798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:53.048855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:53.049319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.049381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:53.049507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.049550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.049595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.049662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.049718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:53.049765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.049822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:53.049853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:53.049967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:53.050022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:53.050062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:53.053236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.053373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.053429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-10T11:30:58.487606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-10T11:30:58.487662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-10T11:30:58.487715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.487828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-10T11:30:58.491295Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.491395Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.491761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-10T11:30:58.491921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-10T11:30:58.492147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, 
txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-10T11:30:58.492184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-10T11:30:58.492220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-10T11:30:58.508958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1830:3693], Recipient [1:761:2650]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1830:3693] ServerId: [1:1832:3695] } 2025-04-10T11:30:58.509029Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:30:58.595821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-10T11:30:58.595965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-10T11:30:58.596031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-10T11:30:58.596078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-10T11:30:58.603256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-10T11:30:58.603356Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-10T11:30:58.603444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-10T11:30:58.603479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.603515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-10T11:30:58.603544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.603577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-10T11:30:58.603647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:571:2510] message: TxId: 281474976725761 2025-04-10T11:30:58.603692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.603725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-10T11:30:58.603753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-10T11:30:58.603841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-10T11:30:58.617840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-10T11:30:58.617944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-10T11:30:58.618022Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-10T11:30:58.618108Z node 1 
:BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.622535Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.622684Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.622776Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:30:58.624980Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.625102Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, 
upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.625148Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-04-10T11:30:58.625301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:30:58.625366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1175:3046] TestWaitNotification: OK eventTxId 106 2025-04-10T11:30:58.628120Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-10T11:30:58.628446Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:30:52.634849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:52.634959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.635020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:52.635056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:30:52.635096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:30:52.635124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:30:52.635183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-10T11:30:52.635270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:30:52.635593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:30:52.750816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:30:52.750892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:52.779807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:30:52.780077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:30:52.780274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:30:52.792243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:30:52.792572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:30:52.803880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:52.805515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:30:52.819103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:52.820417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:52.826339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:30:52.826418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:52.826486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:30:52.826684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:30:52.833433Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:30:53.037981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:30:53.050346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.050627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:30:53.050910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:30:53.050984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.058930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.059095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:30:53.059275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.059359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:30:53.059393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:30:53.059428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:30:53.066929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.066998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:30:53.067035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:30:53.074991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.075047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.075089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.075147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.080542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:30:53.086718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:30:53.086908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:30:53.087886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:30:53.088014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:30:53.088087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.088383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-10T11:30:53.088435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:30:53.088617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:30:53.088708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:30:53.091934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:30:53.091983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:30:53.092159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:30:53.092198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:30:53.092428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:30:53.092481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:30:53.092580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.092611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.092649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:30:53.092692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.092734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:30:53.092771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:30:53.092816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:30:53.092845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:30:53.092923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:30:53.092971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:30:53.093007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:30:53.095547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.095658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:30:53.095698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-10T11:30:58.815531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-10T11:30:58.815591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-10T11:30:58.815659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.815779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-10T11:30:58.819572Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.819671Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.820028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-10T11:30:58.820213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-10T11:30:58.820388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-10T11:30:58.820530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, 
ready parts: 0/1, is published: true 2025-04-10T11:30:58.820572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-10T11:30:58.837747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1830:3693], Recipient [1:761:2650]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1830:3693] ServerId: [1:1832:3695] } 2025-04-10T11:30:58.837817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:30:58.909568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-10T11:30:58.909720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-10T11:30:58.909780Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-10T11:30:58.909832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-10T11:30:58.913384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-10T11:30:58.913478Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-10T11:30:58.913571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-10T11:30:58.913600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.913635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-10T11:30:58.913660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.913693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-10T11:30:58.913767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:571:2510] message: TxId: 281474976725761 2025-04-10T11:30:58.913806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-10T11:30:58.913837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-10T11:30:58.913866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-10T11:30:58.913944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-10T11:30:58.918276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-10T11:30:58.918369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-10T11:30:58.918452Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-10T11:30:58.918536Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: 
, DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.921253Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.921382Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.921462Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:30:58.923633Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-10T11:30:58.923749Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1155:3026], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-10T11:30:58.923798Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo 
SendNotifications: : id# 106, subscribers count# 1 2025-04-10T11:30:58.923960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-10T11:30:58.924010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1175:3046] TestWaitNotification: OK eventTxId 106 2025-04-10T11:30:58.926937Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-10T11:30:58.927254Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 } |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19774, MsgBus: 24111 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00162b/r3tmp/tmp81ICJw/pdisk_1.dat 2025-04-10T11:29:09.908686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645097048889533:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:09.908754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:29:10.334473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:10.451002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:10.451183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:10.459377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19774, node 1 2025-04-10T11:29:10.770731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:10.770752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:10.770758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:10.770866Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24111 TClient is connected to server localhost:24111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:12.227643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:14.414946Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645097048889533:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:14.415011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:15.017448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645122818693841:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:15.017585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:15.018012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645122818693853:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:15.026496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:15.053463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645122818693855:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:15.125569Z node 1 :TX_PROXY ERROR: Actor# [1:7491645122818693906:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:15.510564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:15.854488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:15.854488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:15.854711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:15.854840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:15.855065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:15.855066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:15.855183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:15.855222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:15.855319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:15.855325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:15.855448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:29:15.855571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:15.855598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:15.855736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:15.855747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:15.855855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:15.855880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:15.855999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:15.856133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:15.856146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:15.856243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:15.856244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:15.856377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645122818694120:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:15.856496Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491645122818694116:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:15.902474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645122818694118:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.478673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.484533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.489638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.501559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.506971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.513031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.515619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.524945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.526479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.532042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.533296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.537885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.538380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.544494Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.573525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.579167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.585916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.591450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.596875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.602059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.607018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.612695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.616147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.618882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.622942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.624842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.661994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.663789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.668858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.686072Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.688724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.695413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.701116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.702171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.710813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.712083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.717523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.719351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.722911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.724815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.728233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.729852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.769817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.771613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.785844Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:44.891196Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdkgjbc9j2ceecmbkndjn", SessionId: ydb://session/3?node_id=1&id=ODZmNDU4NzAtODRiYTdjOGUtZDk1NWRkMDItZjliNmYzYg==, Slow query, duration: 43.016108s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:45.298962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:45.299674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645453531233069:11246];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:30:45.300013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:45.304321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TProxyActorTest::TestAttachSession [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:30:52.620713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:30:52.620885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:30:52.620965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:30:52.621002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-04-10T11:30:52.623478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:30:52.623539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:30:52.623630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:30:52.623708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:30:52.624963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:30:52.783617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:30:52.783690Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:30:52.810083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:30:52.810396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:30:52.810601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:30:52.843795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:30:52.844080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:30:52.844837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:30:52.845260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:30:52.850705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:30:52.852068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:30:52.852131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:30:52.852313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:30:52.852360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:30:52.852428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:30:52.852594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-10T11:30:52.876380Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062]
2025-04-10T11:30:53.101927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:30:53.102213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.102462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:30:53.102708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:30:53.102765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.107640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:30:53.107817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:30:53.107992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.108070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:30:53.108107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:30:53.108140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:30:53.110224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.110284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:30:53.110320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:30:53.114689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.114757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.114816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:30:53.114875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:30:53.118889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:30:53.121647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:30:53.121870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:30:53.122833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:30:53.122969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:30:53.123024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:30:53.123305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:30:53.123358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:30:53.123528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:30:53.123643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:30:53.126434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:30:53.126480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:30:53.126681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:30:53.126723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:30:53.126956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:30:53.127001Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:30:53.127107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:30:53.127142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:30:53.127180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:30:53.127223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:30:53.127265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-10T11:30:53.127303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:30:53.127366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-10T11:30:53.127395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-10T11:30:53.127455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-10T11:30:53.127490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-10T11:30:53.127520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-10T11:30:53.129722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:30:53.129872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-10T11:30:53.129915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.439784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.448382Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.456883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.465017Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.473228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3928], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.482322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.490270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.498895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.507221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.515587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.524259Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.532862Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2072:3935], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.541284Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2073:3936], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.550541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2074:3937], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.558894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2075:3938], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.567333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2076:3939], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.576366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2077:3940], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.585621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2078:3941], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.594138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2079:3942], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.616037Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2080:3943], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.634244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2081:3944], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.651556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2082:3945], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
2025-04-10T11:31:00.669197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2083:3946], Recipient [1:761:2650]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } }
|94.6%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|94.6%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TTicketParserTest::LoginCheckRemovedUser [GOOD]
>> TTicketParserTest::LoginEmptyTicketBad
|94.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log}
|94.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 18516, MsgBus: 11662
2025-04-10T11:29:07.028645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645083358649070:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:07.189399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001633/r3tmp/tmpR0aV6H/pdisk_1.dat
2025-04-10T11:29:07.730381Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:29:07.755179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:29:07.755259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:29:07.758070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18516, node 1
2025-04-10T11:29:08.094654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:29:08.094676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:29:08.094683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:29:08.094785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11662
TClient is connected to server localhost:11662
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:29:09.292895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:29:11.974603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645083358649070:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:29:11.989035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:29:12.527417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645109128453373:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:12.527554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:12.528381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645109128453385:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:29:12.532537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:29:12.548682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645109128453387:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:29:12.637496Z node 1 :TX_PROXY ERROR: Actor# [1:7491645109128453438:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:29:13.256003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:29:13.687109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:29:13.687293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:29:13.687575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:29:13.687713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:29:13.687818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:29:13.687925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:29:13.688016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T11:29:13.688108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T11:29:13.688226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T11:29:13.688371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T11:29:13.688514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T11:29:13.688648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645113423421026:2366];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T11:29:13.699659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:29:13.699783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:29:13.700005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:29:13.700120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:29:13.700228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:29:13.700352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:29:13.700479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T11:29:13.700597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T11:29:13.704019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T11:29:13.704269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T11:29:13.704368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T11:29:13.704453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645113423421020:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T11:29:13.781495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645113423421028:2367];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.542194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.542459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.548645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.552449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.555810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.559255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.562967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.565566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.569128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.571705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.581413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.600474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.609868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.616164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.622884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.629768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.643255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.643402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.650662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.650683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.657517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.659846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.664755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.666674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.671787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.672863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.678383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.679051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.687752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.687956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.695873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.696033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.703434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.704381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.713079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.713323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.720681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.721013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.727775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.728112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.735528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.735768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.745193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.748042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.756501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:39.978665Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdjqx6wqmcxq80py6vew6", SessionId: ydb://session/3?node_id=1&id=MmE2OTZmYi0zY2MxNjg2My00ZTgzZDY4YS01MGYzZTAzYw==, Slow query, duration: 38.892658s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:30:40.331554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:40.332108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:40.333004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645439840992975:11281];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933;
2025-04-10T11:30:40.333476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
>> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 29461, MsgBus: 8554
2025-04-10T11:30:11.667702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645362160884617:2058];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:30:11.667787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b0/r3tmp/tmpFfAyxz/pdisk_1.dat
2025-04-10T11:30:12.527567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:30:12.527646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:30:12.527850Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:30:12.536473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29461, node 1
2025-04-10T11:30:12.846079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:30:12.846114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:30:12.846123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:30:12.846254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8554
TClient is connected to server localhost:8554
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:30:13.853537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:30:13.916549Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:30:16.670853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645362160884617:2058];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:30:16.670916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:30:16.889572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645383635721772:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:30:16.889686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:30:16.889963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645383635721784:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:30:16.893847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T11:30:16.913712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645383635721786:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T11:30:17.006895Z node 1 :TX_PROXY ERROR: Actor# [1:7491645383635721837:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:30:17.358181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.522790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.559509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.609406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.651345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.849268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.890836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:30:17.927908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:30:18.001791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:30:18.045508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-04-10T11:30:18.083494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-04-10T11:30:18.118641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:30:18.190140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.284139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480
2025-04-10T11:30:19.337195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.385395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.443236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.481981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.536953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.584074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.669674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.753672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.803244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.896339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-10T11:30:19.964944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480
2025-04-10T11:30:20.021756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-10T11:30:20.088414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-10T11:30:20.172609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
2025-04-10T11:30:20.219892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480
2025-04-10T11:30:20.273774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.508989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.514408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.520134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.526052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.536653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.540437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.551092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.554893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.561520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.565229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.575676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.579982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.586140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.594637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.600902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.604985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.611500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.617636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.624027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.626520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.641841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.647582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.655941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.669904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.670329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.675897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.688136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.706306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.714426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.719942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.728023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.734072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.742100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.752260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.755278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.764496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.767389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.776353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.778409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.787511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.790450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.800522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.804499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.890338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:55.959784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714;
2025-04-10T11:30:56.058610Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqe76qbcddermpv520px66", SessionId: ydb://session/3?node_id=1&id=NWE4MTJkMmYtYTI1OTQzOTUtOGJiZTQyNTItNjQ5NDI5NWE=, Slow query, duration: 34.018521s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-04-10T11:30:56.467345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:56.467865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
2025-04-10T11:30:56.468238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7491645508189799130:5839];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170;
2025-04-10T11:30:56.474988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD]
Test command err:
2025-04-10T11:30:19.581353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645395653190825:2266];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:30:19.581392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ed/r3tmp/tmpJxRuiZ/pdisk_1.dat
2025-04-10T11:30:20.175622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:30:20.175714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:30:20.179464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:30:20.209739Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19981, node 1
2025-04-10T11:30:20.474240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:30:20.474262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:30:20.474267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:30:20.478302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26649
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.101300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:21.200629Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 2025-04-10T11:30:21.209252Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:21.209294Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:21.209909Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jKkg (D81F784E) () has now retryable error message 'Security state is empty' 2025-04-10T11:30:21.210086Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:21.210103Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:21.210322Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jKkg (D81F784E) () has now retryable error message 'Security state is empty' 2025-04-10T11:30:21.210333Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-10T11:30:21.210344Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-10T11:30:21.210369Z node 1 :TICKET_PARSER ERROR: Ticket eyJh****jKkg (D81F784E): Security state is empty 2025-04-10T11:30:22.586683Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****jKkg (D81F784E) 2025-04-10T11:30:22.586993Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:22.587017Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:22.587387Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jKkg (D81F784E) () has now retryable error message 'Security state is empty' 2025-04-10T11:30:22.587410Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-10T11:30:24.214937Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:24.586308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645395653190825:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:24.586544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:25.599008Z node 
1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****jKkg (D81F784E) 2025-04-10T11:30:25.599275Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:25.599292Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:25.600285Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jKkg (D81F784E) () has now valid token of user1 2025-04-10T11:30:25.600302Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-10T11:30:29.607967Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****jKkg (D81F784E) 2025-04-10T11:30:29.608334Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jKkg (D81F784E) () has now valid token of user1 2025-04-10T11:30:32.190642Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645451953434576:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:32.191216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ed/r3tmp/tmpuWsxdg/pdisk_1.dat 2025-04-10T11:30:32.590725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:32.643412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:32.643485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:32.651277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8863, node 2 2025-04-10T11:30:32.874526Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:32.874556Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:32.874565Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:32.874668Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:33.277284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:33.287273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:33.293572Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:33.293653Z node 2 :GRPC_CLIENT DEBUG: [517000024608] Connect to grpc://localhost:21972 2025-04-10T11:30:33.296002Z node 2 :GRPC_CLIENT DEBUG: [517000024608] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-10T11:30:33.320860Z node 2 :GRPC_CLIENT DEBUG: [517000024608] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-10T11:30:33.322305Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:37.380742Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645473846303039:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:37.380777Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ed/r3tmp/tmps2Nrsa/pdisk_1.dat 2025-04-10T11:30:37.709824Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:37.732686Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:37.738569Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:37.747859Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6660, node 3 2025-04-10T11:30:37.890749Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:37.890771Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:37.890781Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:37.890901Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubD ... thTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:38.239645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
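The node 2 block above shows the happy path of the mock NebiusAccessService exchange: an AuthenticateRequest carrying the IAM token comes back with an AuthenticateResponse containing account { user_account { id: "user1" } }, which the ticket parser records as "valid token of user1@as". A minimal sketch of that mapping follows; the struct and function names are illustrative stand-ins, not the real gRPC/protobuf types from ydb/core/security.

    #include <iostream>
    #include <map>
    #include <optional>
    #include <string>

    // Illustrative stand-ins for the request/response shapes seen in the log.
    struct TAuthenticateRequest { std::string IamToken; };
    struct TUserAccount { std::string Id; };
    struct TAuthenticateResponse { std::optional<TUserAccount> UserAccount; };

    // Mock access service: knows exactly one valid token, as in this test.
    TAuthenticateResponse Authenticate(const TAuthenticateRequest& req) {
        static const std::map<std::string, std::string> knownTokens{{"user1", "user1"}};
        if (auto it = knownTokens.find(req.IamToken); it != knownTokens.end())
            return {TUserAccount{it->second}};
        return {std::nullopt};  // would surface as a gRPC error status in the real flow
    }

    int main() {
        auto resp = Authenticate({"user1"});
        if (resp.UserAccount)
            // The "@as" suffix matches the "valid token of user1@as" entries above.
            std::cout << "valid token of " << resp.UserAccount->Id << "@as\n";
    }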
2025-04-10T11:30:38.250499Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:38.253411Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:38.253438Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:38.253451Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:38.253481Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:38.253526Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Connect to grpc://localhost:22740 2025-04-10T11:30:38.254550Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-10T11:30:38.274879Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-04-10T11:30:38.278342Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:38.278379Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:38.278537Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-10T11:30:38.284019Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-04-10T11:30:38.284315Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:39.402293Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-10T11:30:39.402343Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:39.402686Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-10T11:30:39.414720Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-04-10T11:30:39.415067Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:40.402298Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-10T11:30:40.402334Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:40.402679Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-10T11:30:40.409211Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-10T11:30:40.409366Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:42.399487Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645473846303039:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:42.399618Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:51.440509Z node 4 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645534244937583:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:51.440559Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ed/r3tmp/tmpiYxtCX/pdisk_1.dat 2025-04-10T11:30:51.932342Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:52.011034Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:52.011143Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:52.019714Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6844, node 4 2025-04-10T11:30:52.242935Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:52.242970Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:52.242979Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:52.243120Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:53.041097Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
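Node 3 above (and node 4 below) exercises the failure path: Authenticate returns gRPC status 14 "Service Unavailable", the ticket parser records a *retryable* error, and periodic "Refreshing ticket" attempts repeat until the backend recovers and the token becomes valid. The sketch below models only that loop; in the real parser refreshes are scheduled on actor timers rather than run inline, and the backend here is simulated to fail three times, mirroring node 3's sequence.

    #include <iostream>
    #include <string>

    // Simulated backend: fails with "Service Unavailable" three times, then
    // succeeds -- the same sequence node 3 sees in the log.
    struct TMockBackend {
        int Calls = 0;
        bool Authenticate(std::string& err) {
            if (++Calls <= 3) { err = "Service Unavailable"; return false; }
            return true;
        }
    };

    int main() {
        TMockBackend backend;
        std::string err;
        // A retryable error keeps the cache entry alive and triggers another
        // refresh; a permanent error would stop the loop instead.
        for (int attempt = 1;; ++attempt) {
            if (backend.Authenticate(err)) {
                std::cout << "attempt " << attempt << ": has now valid token\n";
                break;
            }
            std::cout << "attempt " << attempt
                      << ": has now retryable error message '" << err << "'\n";
        }
    }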
2025-04-10T11:30:53.055990Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:53.060674Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-10T11:30:53.060708Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:53.060718Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-10T11:30:53.060754Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:53.060807Z node 4 :GRPC_CLIENT DEBUG: [517000111b88] Connect to grpc://localhost:27973 2025-04-10T11:30:53.061814Z node 4 :GRPC_CLIENT DEBUG: [517000111b88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-10T11:30:53.090398Z node 4 :GRPC_CLIENT DEBUG: [517000111b88] Status 14 Service Unavailable 2025-04-10T11:30:53.091673Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-10T11:30:53.091703Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-10T11:30:53.091871Z node 4 :GRPC_CLIENT DEBUG: [517000111b88] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-10T11:30:53.119002Z node 4 :GRPC_CLIENT DEBUG: [517000111b88] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-10T11:30:53.119439Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-10T11:30:57.852744Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491645558141887897:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:57.852810Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016ed/r3tmp/tmpdMaF4k/pdisk_1.dat 2025-04-10T11:30:58.206700Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:58.289128Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:58.289262Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:58.295907Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18278, node 5 2025-04-10T11:30:58.490947Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:58.490986Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:58.490998Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:58.491178Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:58.992175Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:58.999699Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:59.001872Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |94.7%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] >> TTicketParserTest::LoginEmptyTicketBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-04-10T11:30:19.547004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645394608514742:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:19.547047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016db/r3tmp/tmpvHd73G/pdisk_1.dat 2025-04-10T11:30:20.198165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:20.208620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:20.208762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:20.213684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18511, node 1 2025-04-10T11:30:20.469543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:20.469562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:20.469567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:20.469654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:21.148119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:21.422046Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:21.438550Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:21.438590Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:21.439721Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****B9xQ (255B2289) () has now valid token of user1 2025-04-10T11:30:21.439737Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-10T11:30:23.908583Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645413334993274:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:23.908884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016db/r3tmp/tmpNjEZsK/pdisk_1.dat 2025-04-10T11:30:24.258701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:24.260932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:24.260996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:24.271239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1167, node 2 2025-04-10T11:30:24.430078Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:24.430095Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:24.430107Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:24.430233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:24.821603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
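Login tickets in this output are never logged verbatim: entries like eyJh****B9xQ (255B2289) keep only the first and last four characters of the token, plus what appears to be a short hash in parentheses (its algorithm is not visible in the log, so it is omitted below). The eyJh prefix is the base64 encoding of '{"a', i.e. the opening of a JWT header such as {"alg":...}, which is why every login ticket here shares it. A plausible reconstruction of the masking, assuming simple first-4/last-4 truncation:

    #include <iostream>
    #include <string>

    // Masks a ticket the way this log does: first 4 chars, "****", last 4 chars.
    // The real log also appends a short parenthesized hash, not reproduced here.
    std::string MaskTicket(const std::string& ticket) {
        if (ticket.size() <= 8)
            return "****";
        return ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4);
    }

    int main() {
        // 'eyJh' is base64 for '{"a' -- the start of a JWT header like {"alg":...}.
        std::string jwt = "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.payload.sig";
        std::cout << MaskTicket(jwt) << "\n";  // prints eyJh****<last 4 chars>
    }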
2025-04-10T11:30:24.835705Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:24.982242Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:24.990360Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:24.990387Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:24.991064Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****cGBg (48A39513) () has now valid token of user1 2025-04-10T11:30:24.991077Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-10T11:30:28.790377Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645435761297404:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016db/r3tmp/tmpKOPCNu/pdisk_1.dat 2025-04-10T11:30:28.832625Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:30:28.966237Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:29.000641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:29.000715Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:29.005751Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10119, node 3 2025-04-10T11:30:29.274693Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:29.274712Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:29.274719Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:29.274832Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:30:29.603469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:30:29.916691Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:29.931372Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:29.931402Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:29.932255Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****JhRg (3C067DE7) () has now valid token of user1 2025-04-10T11:30:29.932277Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-10T11:30:29.934684Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:33.722488Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645435761297404:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:33.722617Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:34.710317Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****JhRg (3C067DE7) 2025-04-10T11:30:34.710844Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****JhRg (3C067DE7) () has now valid token of user1 2025-04-10T11:30:38.726282Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****JhRg (3C067DE7) 2025-04-10T11:30:38.726616Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****JhRg (3C067DE7) () has now valid token of user1 2025-04-10T11:30:39.940559Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:41.730412Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****JhRg (3C067DE7) 2025-04-10T11:30:41.730799Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****JhRg (3C067DE7) () has now valid token of user1 2025-04-10T11:30:43.900717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:30:43.900745Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:45.745198Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****JhRg (3C067DE7) 2025-04-10T11:30:45.745647Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****JhRg (3C067DE7) () has now valid token of user1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016db/r3tmp/tmp47zYg1/pdisk_1.dat 2025-04-10T11:30:50.965678Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:30:51.097328Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:51.128620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:51.128700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:51.135218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9713, node 4 2025-04-10T11:30:51.366567Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:51.366587Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:51.366595Z node 4 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-04-10T11:30:51.366705Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:51.836260Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:51.969824Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:51.994760Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-10T11:30:51.994807Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-10T11:30:51.995892Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****rJ_Q (109128E2) () has now valid token of user1 2025-04-10T11:30:51.995922Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-10T11:30:51.998436Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:30:55.834575Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****rJ_Q (109128E2) 2025-04-10T11:30:55.834945Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****rJ_Q (109128E2) () has now permanent error message 'User not found' 2025-04-10T11:30:59.846336Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****rJ_Q (109128E2) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0016db/r3tmp/tmpxryhfp/pdisk_1.dat 2025-04-10T11:31:03.224263Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:03.320299Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:03.369415Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:03.369500Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:03.371926Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23490, node 5 2025-04-10T11:31:03.574773Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:03.574794Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:03.574809Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
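Node 4 above shows the other half of the ticket parser's error model: after the user is removed, refresh yields "has now permanent error message 'User not found'", in contrast to the *retryable* 'Service Unavailable' and 'Security state is empty' errors seen earlier. A hedged sketch of such a classification follows; the actual criteria live inside the ticket parser and are richer than this two-way split.

    #include <iostream>
    #include <string>

    enum class ETicketError { None, Retryable, Permanent };

    // Illustrative classification only, based on the messages visible in this log.
    ETicketError Classify(const std::string& message) {
        if (message.empty()) return ETicketError::None;
        if (message == "Service Unavailable" ||
            message == "Security state is empty")
            return ETicketError::Retryable;   // keep refreshing, as in the log
        return ETicketError::Permanent;       // e.g. 'User not found': stop refreshing
    }

    int main() {
        for (const std::string msg : {"Service Unavailable", "User not found"})
            std::cout << msg << " -> "
                      << (Classify(msg) == ETicketError::Retryable ? "retryable"
                                                                   : "permanent")
                      << "\n";
    }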
2025-04-10T11:31:03.574927Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:03.896583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:03.974312Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-10T11:31:03.984563Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10122, MsgBus: 27016 2025-04-10T11:30:11.474275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645362154170808:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:11.475378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b6/r3tmp/tmpOcspBG/pdisk_1.dat 2025-04-10T11:30:12.244899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:12.245011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:12.248002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:12.327533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10122, node 1 2025-04-10T11:30:12.654654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:12.654676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:12.654686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:12.654776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27016 TClient is connected to server localhost:27016 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:13.671660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:13.708857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:16.420361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645383629007953:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:16.420495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:16.421015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645383629007965:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:16.425341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:16.454346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645383629007967:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:16.478269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645362154170808:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:16.482318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:16.523557Z node 1 :TX_PROXY ERROR: Actor# [1:7491645383629008018:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:16.934203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.100000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.139965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.188737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.244161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.438369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.532386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.589384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.629620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.682045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.751358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T11:30:17.807023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
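The sequence above is a benign create-if-missing race: several actors fetch the default resource pool, get NOT_FOUND, and race to create it; the pool creator retries on "Transaction ... completed, doublechecking", and the loser's TX_PROXY error explicitly says "path exist, request accepts it", i.e. already-exists is treated as success. A minimal sketch of that idempotent pattern, with the scheme board stood in by an in-memory set (names hypothetical):

    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>
    #include <thread>

    std::mutex g_lock;
    std::set<std::string> g_paths;  // stands in for the scheme board

    // Creation succeeds exactly once; later callers observe "path exist".
    bool CreateResourcePool(const std::string& path) {
        std::lock_guard<std::mutex> guard(g_lock);
        return g_paths.insert(path).second;
    }

    int main() {
        auto worker = [](int id) {
            // Treat "already exists" as success: the pool is there either way,
            // which is why the TX_PROXY error above is harmless.
            bool created = CreateResourcePool(".metadata/workload_manager/pools/default");
            std::lock_guard<std::mutex> guard(g_lock);
            std::cout << "actor " << id << ": "
                      << (created ? "created pool" : "path exist, request accepts it")
                      << "\n";
        };
        std::thread a(worker, 1), b(worker, 2);
        a.join(); b.join();
    }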
2025-04-10T11:30:17.888311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.564222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-04-10T11:30:18.648523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.688838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.769177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.857451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.944039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T11:30:18.998707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.055136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.115570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.156714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.195080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.230489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.295353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.335425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.369220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.431206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-10T11:30:19.524754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.422135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.428299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.429359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.434235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.434808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.439848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.441199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.445655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.446911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.452141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.452909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.459358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.461896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:30:58.467930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.472875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.473584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.481555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.485631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.487211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.493545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.499237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.501414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.505102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.510502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.510697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.515918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.519663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.524878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.525097Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.530655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.539986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.545332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.546913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.551040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.558285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.561752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.569426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.570064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.575554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.580066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.581206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.586129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:58.681165Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqe67fc2d4xz3b7956r9hh", SessionId: ydb://session/3?node_id=1&id=ODNhOTA4MTAtNTc0YzA2ZGYtMTZhMGYzYjItNTZmN2FiNTY=, Slow query, duration: 37.641373s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 
Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:59.026438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:59.026903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:59.028463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7491645486708245030:5496];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-04-10T11:30:59.029014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
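For readability, the DDL captured in the KQP_SLOW_LOG entry above (duration 37.641373s), with its \n escape sequences expanded — the statements are quoted verbatim from the log, nothing added:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);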
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
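The code-4534 warnings above report optimizer hints that matched nothing in the executed plan: the JoinOrder and Rows hints name relations (Unused1..Unused4, Unused, R, T) that do not correspond to tables in the query, so the optimizer leaves them unapplied. As a hedged sketch of where such hints live — assuming YDB's ydb.OptimizerHints pragma, which does not itself appear in this log, and with R and T as hypothetical tables — the shape is:

-- Hypothetical illustration only: R and T are stand-in tables, and the hint
-- strings are copied from the warnings above. A hint is applied only when its
-- labels match relations in the optimized plan; with matching labels,
-- Rows(R T # 1) would tell the optimizer to assume the R-T join yields 1 row.
PRAGMA ydb.OptimizerHints =
    'JoinOrder( (Unused1 Unused2) (Unused3 Unused4) )
     Rows(Unused # 10e8)
     Rows(R T # 1)';

SELECT R.id, T.r_id
FROM R
JOIN T ON R.id = T.r_id;

Since none of the labels here match a relation in the test's actual plan, each hint is echoed back as "Unapplied hint" with code 4534, exactly as in the warnings above.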
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedUpsert-UseSink |94.7%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] >> BackupRestoreS3::RestoreViewQueryText >> BackupRestore::RestoreTablePartitioningSettings >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestore::RestoreViewQueryText >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] Test command err: Trying to start YDB, gRPC: 30182, MsgBus: 28044 2025-04-10T11:29:22.981516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645152496240708:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:22.981891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00160c/r3tmp/tmprkUi96/pdisk_1.dat 2025-04-10T11:29:23.945568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:23.945644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:23.954739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:24.007327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:24.018278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 30182, node 1 2025-04-10T11:29:24.213194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:24.213212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:24.213218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:24.213321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28044 TClient is connected to server localhost:28044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:25.084858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.515223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173971077725:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.515325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.515414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645173971077737:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:27.519870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:27.534412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645173971077739:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:27.611265Z node 1 :TX_PROXY ERROR: Actor# [1:7491645173971077790:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:28.001083Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645152496240708:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:28.026519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:28.082868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:28.378967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:28.379311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:28.379596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:28.379733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:28.379844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:28.379969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:28.380078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:28.380209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:28.380620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:28.380750Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:28.380872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:28.380996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645178266045353:2363];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:28.383720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:28.383800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:28.383991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:28.384119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:28.384265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:28.384381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:28.384504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:28.384625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:28.384952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:28.385092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:28.385231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:28.385336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645178266045339:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.211894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.220579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.218140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.230393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.234863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.241970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.248442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.251487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.256642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.266958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.270420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.276119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.280460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.290136Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.290289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.301142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.306654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.312765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.324968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.327333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.337210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.340483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.352358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.353516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.363598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.368410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.376288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.380904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.390273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.393935Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.400105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.403522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.411460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.416257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.428430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.430509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.440747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.442898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.456732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.471018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.477996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.495257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.509333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.521905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.565686Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.875385Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdxgv6ng8yxa9ekx6c1p4", SessionId: ydb://session/3?node_id=1&id=ZmQ4Y2Y0NWUtYTA5ZWYzOTMtNmZjNjM5ZmYtYzc5Y2NhNTc=, Slow query, duration: 38.751161s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:51.323634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:51.324033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645440259098009:9541];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:30:51.324191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:51.324671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::ShowCreateTable >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::DdlGroup >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpQueryService::Ddl >> KqpQueryService::AlterTempTable >> KqpQueryService::TableSink_OlapUpdate >> KqpQueryServiceScripts::TestPaging >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryService::TableSink_OltpUpsert >> KqpQueryService::ExecStats >> KqpQueryService::ExecuteQueryPg >> KqpService::Shutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 15319, MsgBus: 28469 2025-04-10T11:29:03.023149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645064539464624:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:03.023191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001637/r3tmp/tmpNZ18bv/pdisk_1.dat 2025-04-10T11:29:03.802537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:03.807727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:03.807818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-10T11:29:03.815195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15319, node 1 2025-04-10T11:29:04.054609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:04.054629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:04.054635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:04.054781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28469 TClient is connected to server localhost:28469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:05.399213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:07.981123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645086014301759:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:07.981289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:07.981791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645086014301771:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:07.986921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:08.003185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645086014301773:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:08.025394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645064539464624:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:08.025462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:08.091152Z node 1 :TX_PROXY ERROR: Actor# [1:7491645090309269123:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:08.498993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:08.842691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:08.842962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:08.843303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:08.843429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:08.843553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:08.843681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:08.843802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:08.843923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:08.844079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:08.844206Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:08.844312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:08.844424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645090309269408:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:08.847712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:08.847779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:08.848016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:08.848171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:08.848293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:08.848404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:08.848526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:08.848651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:08.848766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:08.848874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:08.849018Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:08.849123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645090309269390:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:08.887657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645090309269416:2364];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.c ... Tx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.842099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.847566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.864285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.864736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.870225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.870768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.876638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.880932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.884802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.887086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.892503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.900866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.907463Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.909383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.921461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.925398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.936351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.945277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.948575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.956205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.968525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.973001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.983349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.989911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:36.996770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.007454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.012453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.013321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.019962Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.019968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.028138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.028925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.035178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.036058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.046395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.052934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.059306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.136897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:37.364217Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdg36bkdrffdxdrpmhypp", SessionId: ydb://session/3?node_id=1&id=NGYzYTRiYWYtYzg1ZTY3NGItZTRhNDBiMzEtZTk3YzQxOGY=, Slow query, duration: 38.988653s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:37.999277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:37.999756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:38.000397Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;self_id=[1:7491645425316775969:11333];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-10T11:30:38.000772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:58.594842Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf0ckb14nfxk62ng6q6a4", SessionId: ydb://session/3?node_id=1&id=NGYzYTRiYWYtYzg1ZTY3NGItZTRhNDBiMzEtZTk3YzQxOGY=, Slow query, duration: 10.767186s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query90.tpl and seed 2031708268\nselect cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio\n from ( select count(*) amc\n from web_sales cross join household_demographics cross join time_dim cross join web_page\n where ws_sold_time_sk = time_dim.t_time_sk\n and ws_ship_hdemo_sk = household_demographics.hd_demo_sk\n and ws_web_page_sk = web_page.wp_web_page_sk\n and time_dim.t_hour between 9 and 9+1\n and household_demographics.hd_dep_count = 3\n and web_page.wp_char_count between 5000 and 5200) at cross join\n ( select count(*) pmc\n from web_sales cross join household_demographics cross join time_dim cross join web_page\n where ws_sold_time_sk = time_dim.t_time_sk\n and ws_ship_hdemo_sk = household_demographics.hd_demo_sk\n and ws_web_page_sk = web_page.wp_web_page_sk\n and time_dim.t_hour between 16 and 16+1\n and household_demographics.hd_dep_count = 3\n and web_page.wp_char_count between 5000 and 5200) pt\n order by am_pm_ratio\n limit 100;\n ", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] Test command err: Trying to start YDB, gRPC: 62907, MsgBus: 9826 2025-04-10T11:29:14.015754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645118516601755:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:14.015934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00162a/r3tmp/tmpBqH4fj/pdisk_1.dat 2025-04-10T11:29:14.936348Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:14.969682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:14.969790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:14.991372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62907, node 1 2025-04-10T11:29:15.280747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:15.280766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:15.280773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:15.280891Z node 1 :NET_CLASSIFIER ERROR: 
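For readability, the TPC-DS query 90 text from the slow-query entry above, with the escaped newlines expanded; reproduced verbatim from the entry's text field:

pragma TablePathPrefix = "/Root/test/ds/";
-- NB: Subquerys
-- start query 1 in stream 0 using template query90.tpl and seed 2031708268
select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio
 from ( select count(*) amc
    from web_sales cross join household_demographics cross join time_dim cross join web_page
    where ws_sold_time_sk = time_dim.t_time_sk
      and ws_ship_hdemo_sk = household_demographics.hd_demo_sk
      and ws_web_page_sk = web_page.wp_web_page_sk
      and time_dim.t_hour between 9 and 9+1
      and household_demographics.hd_dep_count = 3
      and web_page.wp_char_count between 5000 and 5200) at cross join
      ( select count(*) pmc
    from web_sales cross join household_demographics cross join time_dim cross join web_page
    where ws_sold_time_sk = time_dim.t_time_sk
      and ws_ship_hdemo_sk = household_demographics.hd_demo_sk
      and ws_web_page_sk = web_page.wp_web_page_sk
      and time_dim.t_hour between 16 and 16+1
      and household_demographics.hd_dep_count = 3
      and web_page.wp_char_count between 5000 and 5200) pt
 order by am_pm_ratio
 limit 100;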
got bad distributable configuration TClient is connected to server localhost:9826 TClient is connected to server localhost:9826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:16.377018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:19.015098Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645118516601755:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:19.037435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:19.155155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645139991438739:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:19.155294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:19.162285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645139991438751:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:19.169940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:19.198573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:29:19.199688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645139991438753:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:19.298925Z node 1 :TX_PROXY ERROR: Actor# [1:7491645139991438804:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:19.691047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:20.057760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:20.057961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:20.058229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:20.058346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:20.058447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:20.058582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:20.058693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:20.058825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:20.058948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:20.059066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:20.059165Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:20.059261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645139991439064:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:20.081915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:20.082010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:20.086554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:20.086772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:20.086886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:20.087000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:20.087097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:20.087205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:20.087320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:20.087420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:20.087526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:20.087633Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645139991439066:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:20.108641Z node 1 :TX_C ... tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.699859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.705255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.705415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.711434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.711479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.719076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.719086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.725403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.725661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.731913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.732331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.738120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.738558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.744430Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.745007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.749335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.753568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.758498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.763439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.764190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.772319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.775881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.780375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.786034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.786117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.792350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.800134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.802543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.812017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.816152Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.822317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.830024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.836299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.840236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.846298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.854225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.860280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.864090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.871149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.880837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.884603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.895684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.903674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.917350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:50.935187Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:51.091101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:51.240170Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdttm7b3cvcc47f4gyqr2", SessionId: ydb://session/3?node_id=1&id=ZDQ3MzcwZDgtZjU0YTFiNS02OWI3YWU4Mi05MWQ3MzZjYQ==, Slow query, duration: 41.873517s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:51.988499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:51.988911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:51.989919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] Test command err: Trying to start YDB, gRPC: 22160, MsgBus: 27675 2025-04-10T11:29:15.461952Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645121792297784:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:15.466867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00161f/r3tmp/tmpYYmBmg/pdisk_1.dat 2025-04-10T11:29:16.249167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:16.249467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:16.249556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:16.263026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22160, node 1 2025-04-10T11:29:16.475227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:16.475253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:16.475260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:16.475367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:27675 TClient is connected to server localhost:27675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:17.509370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:17.538641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:20.176609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645143267134784:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:20.176749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:20.177233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645143267134796:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:20.186429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:20.208757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645143267134798:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:20.297405Z node 1 :TX_PROXY ERROR: Actor# [1:7491645143267134849:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:20.446329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645121792297784:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:20.446407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:20.955610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:21.358459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:21.358715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:21.359019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:21.359148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:21.359252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:21.359363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:21.360322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:21.362104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:21.362141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:21.362351Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:21.362456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:21.362554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:21.362642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:21.362728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:21.362829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:21.362936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:21.363058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:21.363166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:21.363273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645147562102415:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:21.367321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:21.367468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:21.367573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:21.367712Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:21.367822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645147562102435:2365];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:21.463487Z node 1 :T ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.801238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.802797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.808774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.809514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.817072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.817135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.828441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.828477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.835100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.836742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.842506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.842758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.851042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.851111Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.858327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.859021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.864405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.865293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.871052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.872141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.878201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.880356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.892255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.898494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.898610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.905172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.908431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.911542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.914613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.917488Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.923649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.924826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.929657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.929809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.935177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.941755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.942436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.948654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.949686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.958401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.960772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.966759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.967044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.976643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:48.977535Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:49.164972Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqdthg5w6zxp5s3qz2sncw", SessionId: ydb://session/3?node_id=1&id=Yjc2Y2E3MzYtYmYzYmJhNjYtYjZmMTlkZGUtODU4MjQxMmI=, Slow query, duration: 40.091689s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:49.517950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:49.518399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:49.519053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645362310503458:7790];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:30:49.519412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::ReadManyRanges >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryService::TableSink_Htap+withOltpSink >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-04-10T11:31:13.248547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:31:13.248780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:13.248947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aa4/r3tmp/tmpuweqkh/pdisk_1.dat 2025-04-10T11:31:13.758434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:31:13.796044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:13.843885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:13.844019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:13.859227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:13.964116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> BackupRestore::RestoreViewQueryText [GOOD] >> BackupRestore::RestoreViewReferenceTable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement >> BackupRestoreS3::RestoreViewQueryText [GOOD] >> BackupRestoreS3::RestoreViewReferenceTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-04-10T11:31:12.890081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:31:12.890402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:12.890567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a72/r3tmp/tmpertBMU/pdisk_1.dat 2025-04-10T11:31:13.602923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:31:13.645007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:13.702897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:13.704397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:13.720816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:13.848225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.248895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.249019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:944:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.249122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.258388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:16.287793Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:31:16.507217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:31:16.615758Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.232021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqfw4n3cbdv6ma2fb6ket3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhhYWNkM2EtMzYzYTE4MjUtMTFmODQ1NjgtOTY0MTgyYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> TDataShardTrace::TestTraceDistributedSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-04-10T11:31:12.884462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
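For readability, a condensed, indented rendering of the span tree from the Trace: line above (the two identical Datashard.WriteTransaction subtrees and repeated Datashard.Unit children are collapsed with xN counts; same content as the single-line output):

Session.query.QUERY_ACTION_EXECUTE
  CompileService
    CompileActor
  DataExecuter
    WaitForTableResolve
    ComputeActor
      ForwardWriteActor
    RunTasks
    Commit
      Datashard.WriteTransaction x2, each:
        Tablet.Transaction
          Tablet.Transaction.Execute
            Datashard.Unit x3
          Tablet.Transaction.Complete
        Datashard.SendWithConfirmedReadOnlyLease
        Tablet.Transaction
          Tablet.Transaction.Execute
            Datashard.Unit x6
          Tablet.WriteLog
            Tablet.WriteLog.LogEntry
          Tablet.Transaction.Complete
        Datashard.SendWriteResult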
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:31:12.884841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:12.885129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a86/r3tmp/tmpd0Cf5y/pdisk_1.dat 2025-04-10T11:31:13.610464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:31:13.649048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:13.703597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:13.703767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:13.719737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:13.847487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.252808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.252936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:944:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.253030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.264566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:16.306822Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:31:16.531899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:31:16.607055Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.323456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqfw4n7phrcx6dagf57kzb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA3MzU1MjYtODVhYTRiMGUtY2M3YjVmYzQtYWQzYjYyMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> KqpQueryService::StreamExecuteQueryPure >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> KqpQueryService::ShowCreateTable [GOOD] >> KqpQueryService::ShowCreateTableDisable >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::DdlPermission >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] >> KqpQueryService::Ddl [GOOD] >> KqpQueryService::DdlColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-04-10T11:31:13.101957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:31:13.102282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:13.102498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a8c/r3tmp/tmpS2CP8p/pdisk_1.dat 2025-04-10T11:31:13.737562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:31:13.792104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:13.840996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:13.841167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:13.857130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:13.967759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.315620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.315766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:944:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.315855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.321912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:16.362889Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:31:16.591325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:31:16.750708Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.231304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqfw6rcja0w8frj8dfd0b7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTg4MTQyNzUtZTgyNzQxZjUtYmYyNmNjZDQtNDZhZThhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:18.416428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqfy5m9adhq24dkcbwedq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTVkYzhkZjItZTQ0YmVjZDEtZGMwYzIzMTgtMTRkNTg2MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:19.699217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqfyh86706cccvtkqtgeyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZhNTcwMzYtNGYyNTUyZjAtMTcyY2RkMWMtZWM3YzhkOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |94.7%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::TableSink_Olap_Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-04-10T11:31:10.475423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645614948792451:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:10.475468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d1d/r3tmp/tmp5K9udo/pdisk_1.dat 2025-04-10T11:31:11.384007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:11.384095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:11.414707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:11.417685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8271, node 1 2025-04-10T11:31:11.875026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:11.875060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:11.875068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:11.875227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64676 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:12.753591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.478315Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645614948792451:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:15.478420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:15.998955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645636423630016:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.999074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.311404Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Handle TEvProposeTransaction 2025-04-10T11:31:16.311448Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:31:16.311506Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491645640718597333:2645] 2025-04-10T11:31:16.423316Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-10T11:31:16.423377Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:31:16.423734Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:31:16.423804Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:31:16.423955Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:16.424065Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:31:16.424128Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:31:16.424267Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:31:16.425878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.432769Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-10T11:31:16.432845Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597333:2645] txid# 281474976710658 SEND to# [1:7491645640718597332:2345] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-10T11:31:16.630343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645640718597491:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.630469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.696948Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Handle TEvProposeTransaction 2025-04-10T11:31:16.696978Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:31:16.697014Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491645640718597503:2774] 2025-04-10T11:31:16.700509Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-04-10T11:31:16.700547Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:31:16.700620Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:31:16.700881Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:16.700964Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:31:16.701062Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-10T11:31:16.701210Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 HANDLE EvClientConnected 2025-04-10T11:31:16.712237Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:31:16.712288Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597503:2774] txid# 281474976710659 SEND to# [1:7491645640718597502:2359] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-04-10T11:31:16.869640Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7491645640718597690:2366] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T11:31:16.920024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645640718597797:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.920096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.932843Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Handle TEvProposeTransaction 2025-04-10T11:31:16.932896Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] TxId# 281474976710660 ProcessProposeTransaction 2025-04-10T11:31:16.932938Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7491645640718597809:2985] 2025-04-10T11:31:16.935061Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597809:2985] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-04-10T11:31:16.935116Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645640718597809:2985] txid# 281474976710660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdmin ... port::TTxProgress: DoComplete 2025-04-10T11:31:19.247837Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.247854Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715766, id# 281474976710664 2025-04-10T11:31:19.247906Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715766 2025-04-10T11:31:19.247992Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.254332Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.254362Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715766, status# StatusAccepted 2025-04-10T11:31:19.254453Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715766 Issue: '' } 2025-04-10T11:31:19.261691Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.321417Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][1:7491645653603501886:2499] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:16:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-10T11:31:19.395562Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.395589Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715766 2025-04-10T11:31:19.395704Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, 
LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-10T11:31:19.399440Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.399539Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.399552Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715767, id# 281474976710664 2025-04-10T11:31:19.399599Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateConsumers propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715767 2025-04-10T11:31:19.400574Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.401279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-10T11:31:19.408652Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.408680Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715767, status# StatusAccepted 2025-04-10T11:31:19.408776Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715767 Issue: '' } 2025-04-10T11:31:19.428592Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.472432Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-10T11:31:19.472459Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715767 2025-04-10T11:31:19.475050Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:19.633565Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7491645653603502073:2513] [0] Resolve database: name# /Root 2025-04-10T11:31:19.638398Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7491645653603502073:2513] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:31:19.638441Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7491645653603502073:2513] [0] Send request: schemeShardId# 72057594046644480 2025-04-10T11:31:19.643237Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7491645653603502073:2513] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:14605" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } 
StartTime { seconds: 1744284678 } EndTime { seconds: 1744284679 } } 2025-04-10T11:31:19.654783Z node 1 :TX_PROXY DEBUG: actor# [1:7491645614948792686:2141] Handle TEvNavigate describe path /Root/table 2025-04-10T11:31:19.654826Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645653603502079:4879] HANDLE EvNavigateScheme /Root/table 2025-04-10T11:31:19.655040Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645653603502079:4879] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:19.655143Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645653603502079:4879] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-10T11:31:19.656436Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645653603502079:4879] Handle TEvDescribeSchemeResult Forward to# [1:7491645653603502077:2514] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1744284678759 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 4 IsBackup: false CdcStreams { Name: "a" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 14 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "b" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 16 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "c" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 12 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::ReadManyShardsRange >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 61956, MsgBus: 19268 2025-04-10T11:29:25.339697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645164822559577:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:25.339742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001607/r3tmp/tmpPUeb2h/pdisk_1.dat 2025-04-10T11:29:26.077260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:26.090793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:26.090876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:26.093236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61956, node 1 2025-04-10T11:29:26.363463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:26.363487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:26.363494Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:26.363610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19268 TClient is connected to server localhost:19268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:27.349424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.407309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:29.524095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645182002429410:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.524239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.524592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645182002429422:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:29.528555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:29.542630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645182002429424:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:29.622117Z node 1 :TX_PROXY ERROR: Actor# [1:7491645182002429475:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:29.953473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:30.257573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:30.257573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:30.257780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:30.257793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:30.258029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:30.258032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:30.258165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:30.258213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:30.258295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:30.258317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:30.258450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:29:30.258561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:30.258565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:30.258662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:30.258689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:30.258756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:30.258818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:30.258912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:30.258993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:30.259079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645186297396990:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:30.259274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:30.259425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:30.259541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:30.259638Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645186297397000:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:30.293459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645186297397040:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:30.293520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645186297397040:2357];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.932113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.937552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.939772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.944025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.945391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.950020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.950598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.956314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.956336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.961432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.965115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.968447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.971050Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.974668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.978479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.980242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.984286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.985783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.989775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.993475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.998588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:00.998855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.005407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.005970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.011741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.012037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.017885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.019081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.023278Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.024047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.028763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.030645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.035301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.038328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.041383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.047014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.047205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.053418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.055909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.059434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.062452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.064899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.069619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.071080Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.077574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:01.265673Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqe38e1w7qt9ge8mrybchw", SessionId: ydb://session/3?node_id=1&id=MTVhMTRmMWUtNDBjNGE5NzAtMzBkMTYwYTYtOGIwN2Q5OQ==, Slow query, duration: 43.266740s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:01.573944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:01.574418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:01.574833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645405340764908:7714];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:31:01.575217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19225, MsgBus: 17227 2025-04-10T11:29:34.594491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645201155393578:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:34.594939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015fd/r3tmp/tmpv3ybBn/pdisk_1.dat 2025-04-10T11:29:35.216962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:35.217065Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:35.223575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:35.238383Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19225, node 1 2025-04-10T11:29:35.378714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:35.378750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:35.378759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:35.378865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17227 TClient is connected to server localhost:17227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:36.124645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:38.458636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645218335263285:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:38.458716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:38.461468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645218335263297:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:38.466115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:38.488876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645218335263299:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:38.595722Z node 1 :TX_PROXY ERROR: Actor# [1:7491645218335263350:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:38.995448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:39.295544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:39.295577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:39.295784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:39.296029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:39.296154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:39.296267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:39.296389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:39.296497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:39.296599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:39.296716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:39.296834Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:39.296976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:39.297084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645222630230935:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:39.297528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:39.297699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:39.297817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:39.297911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:39.298003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:39.298188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:39.298299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:39.298416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:39.298524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:39.298641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:39.298734Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7491645222630231038:2364];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:39.341298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645222630230889:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:39.341352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645222630230889:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:39.341558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.275868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.275869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.281996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.282084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.288411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.288446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.295836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.296070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.303723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.305016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.312175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.387460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:03.400448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.405543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.413734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.419402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.420612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.428088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.430300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.438299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.439408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.452535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.455878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.464463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.468465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.480216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.481014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.492216Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.500211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.500993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.518934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.522791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.534081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.534916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.541519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.549231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.559075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.562283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.565669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.569460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.573845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.575982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.582475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.582509Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.591523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:03.749976Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqeahc29dbgwydg19m7x5w", SessionId: ydb://session/3?node_id=1&id=Y2IzMmEyMmMtNjRlMWZhZmMtODQyMDQwNDItYTk5MmEzNTY=, Slow query, duration: 38.297276s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:04.020424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:04.021049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645536162900334:11226];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-10T11:31:04.021748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:04.022492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpService::SwitchCache-UseCache >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> KqpQueryServiceScripts::ExecuteScript >> KqpJoinOrder::Chain65Nodes [FAIL] >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreTableSplitBoundaries >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder >> BackupRestore::RestoreViewReferenceTable [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18307, MsgBus: 11597 2025-04-10T11:29:25.670804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645163369036262:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:25.671348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001606/r3tmp/tmpX3ykS4/pdisk_1.dat 2025-04-10T11:29:26.490700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:26.511700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:26.511776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:26.517568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18307, node 1 2025-04-10T11:29:26.802261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:26.802284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:26.802291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:26.802440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11597 TClient is connected to server localhost:11597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:27.569004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:27.618940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:30.097762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645184843873411:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.097883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.098290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645184843873423:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:30.102071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:30.119371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645184843873425:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:30.178283Z node 1 :TX_PROXY ERROR: Actor# [1:7491645184843873476:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:30.560440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:30.674257Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645163369036262:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:30.674322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:30.901644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:30.901859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:30.902109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:30.909317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:30.909377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:30.909582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:30.909697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:30.909797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:30.909898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:30.910003Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:30.910110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:30.914339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:30.914512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:30.914632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:30.914732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:30.914819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:30.914923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:30.915070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:30.915178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:30.915277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645184843873783:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:30.918315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:30.918476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:30.918607Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:30.918737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645184843873758:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:30.975911Z node 1 :T ... LUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.512530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.517802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.520457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.525970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.531614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.537400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.542926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.547344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.548847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.553539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.558791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.560017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.565995Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.568253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.577992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.579939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.587471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.589728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.599032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.601428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.608556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.614714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.619904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.625434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.626481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.631883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.641861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.643808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.653802Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.656130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.665565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.667847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.677039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.679262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.690471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:30:53.942402Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqe08eat1btchrc2jchsz1", SessionId: ydb://session/3?node_id=1&id=MjAzM2EyZC1jNzNjYTM4ZC03MmQ1OTU2NS1mZjFiNjg2Nw==, Slow query, duration: 39.014972s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:30:54.608461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:54.608765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645386707373152:7853];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:30:54.609373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:30:54.610229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:13.925764Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqfft026t1v3j1j3gcnzt1", SessionId: 
ydb://session/3?node_id=1&id=MjAzM2EyZC1jNzNjYTM4ZC03MmQ1OTU2NS1mZjFiNjg2Nw==, Slow query, duration: 10.308404s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query34.tpl and seed 1971067816\nselect c_last_name\n ,c_first_name\n ,c_salutation\n ,c_preferred_cust_flag\n ,ss_ticket_number\n ,cnt from\n (select store_sales.ss_ticket_number ss_ticket_number\n ,store_sales.ss_customer_sk ss_customer_sk\n ,count(*) cnt\n from store_sales as store_sales \n cross join date_dim as date_dim\n cross join store as store\n cross join household_demographics as household_demographics\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_store_sk = store.s_store_sk\n and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk\n and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28)\n and (household_demographics.hd_buy_potential = '>10000' or\n household_demographics.hd_buy_potential = 'Unknown')\n and household_demographics.hd_vehicle_count > 0\n and (case when household_demographics.hd_vehicle_count > 0\n\t then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count\n\t else null\n\t end) > 1.2\n and date_dim.d_year in (2000,2000+1,2000+2)\n and store.s_county in ('Salem County','Terrell County','Arthur County','Oglethorpe County',\n 'Lunenburg County','Perry County','Halifax County','Sumner County')\n group by store_sales.ss_ticket_number,store_sales.ss_customer_sk) dn \n cross join customer as customer\n where ss_customer_sk = c_customer_sk\n and cnt between 15 and 20\n order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number;\n\n-- end query 1 in stream 0 using template query34.tpl", parameters: 0b >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst >> KqpQueryService::ShowCreateTableDisable [GOOD] >> KqpQueryService::ShowCreateSysView >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpOrder >> KqpQueryService::ReadManyShardsRange [GOOD] >> KqpQueryService::ReadManyRangesAndPoints >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] Test command err: Trying to start YDB, gRPC: 21392, MsgBus: 24101 2025-04-10T11:29:35.104395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645207671948240:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:35.111081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f6/r3tmp/tmpJW4s5k/pdisk_1.dat 2025-04-10T11:29:36.206630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:36.208325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:36.208390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:36.215695Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21392, node 1 2025-04-10T11:29:36.482261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:36.482284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:36.482295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:36.482406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24101 TClient is connected to server localhost:24101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:37.672889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:37.690950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:40.104768Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645207671948240:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:40.104829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:40.317568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645229146785306:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:40.317699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:40.318025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645229146785318:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:40.322702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:40.337648Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:29:40.337910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645229146785320:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:40.414663Z node 1 :TX_PROXY ERROR: Actor# [1:7491645229146785371:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:40.835408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:41.238920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:41.239563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:41.239884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:41.240013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:41.240124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:41.240489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:41.240647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:41.240769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:41.241048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:41.241192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:41.241311Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:41.241619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645233441752916:2352];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:41.244770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:41.244836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:41.244997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:41.245141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:41.245248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:41.245372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:41.245478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:41.245587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:41.245707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:41.245799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:41.245894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:41.246004Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645233441752926:2357];tablet_id=72075186224037 ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.391516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.400714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.403843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.413696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.414722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.423064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.428524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.436052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.441998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.450816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.455891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.461171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.462288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.467676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.468559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.474950Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.481556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.487039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.487652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.493670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.493921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.501576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.503193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.507791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.510539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.513572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.516955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.519912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.523653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.525613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.532035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:09.537620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.543251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.548583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.554166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.560002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.565947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.572133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.578514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.584644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.590881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.595331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.596967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.602556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.603381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.782397Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqecz00hvexb6d9bpn830k", SessionId: ydb://session/3?node_id=1&id=NGEwYzZhNS03N2I5ZGU1MC1mNDc1YTU3NS02NTg0MjVjMA==, Slow query, duration: 41.845755s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT 
= 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:10.073572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:10.074377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645564154292285:11366];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:31:10.074499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:10.075124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlSecret >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle >> BackupRestoreS3::RestoreViewReferenceTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes [FAIL] Test command err: Trying to start YDB, gRPC: 16073, MsgBus: 26379 2025-04-10T11:25:52.427664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491644250864832771:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:25:52.428355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172e/r3tmp/tmpgecTsU/pdisk_1.dat 2025-04-10T11:25:52.771589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16073, node 1 2025-04-10T11:25:52.834229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:52.834315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:52.835423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:52.932370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
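For readability, the 41.845755s KQP_SLOW_LOG record above embeds the slow statement as escaped text; with the \n escapes expanded (content verbatim from the log, only the whitespace is reflowed), the DDL reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The identical DDL reappears below as a 39.291150s slow query in the KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore output; in both runs the roughly 40-second duration most plausibly reflects creating 240 column-store partitions per table, not the parsing of three CREATE TABLE statements.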
2025-04-10T11:25:52.932406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:25:52.932416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:25:52.932545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26379 TClient is connected to server localhost:26379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:25:53.596323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:25:55.390651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735312:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.390747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.603201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.719238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735416:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.719308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.734534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.780320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735491:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.780430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.786505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.827016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735566:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.827097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.843659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.883644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735643:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.883736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.899094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.938252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735719:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.938330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.952640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:25:55.993957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644263749735796:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:55.994058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.007422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.055118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644268044703169:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.055205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.063494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.099595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644268044703246:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.099705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.115693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.161778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644268044703323:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.161871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.176564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.230476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644268044703401:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.230642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:56.237902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:25:56.312718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644268044703481:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T ... info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:58.878657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:58.888668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-10T11:25:58.927529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644276634641698:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:58.927595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:58.939897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-10T11:25:58.987779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644276634641777:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:58.987887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.002018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.059579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609155:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.059683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.072737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.130247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609236:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.130453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.137924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.240735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609324:2878], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.240873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.255343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.297172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609404:2887], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.297236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.312343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.358224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609482:2896], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.358319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.369326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.430738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609565:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.430881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.443804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.527597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609653:2917], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.527702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.543576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-10T11:25:59.597084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609734:2926], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.597197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.597400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491644280929609739:2929], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:25:59.601412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710723:3, at schemeshard: 72057594046644480 2025-04-10T11:25:59.613415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491644280929609741:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710723 completed, doublechecking } 2025-04-10T11:25:59.705196Z node 1 :TX_PROXY ERROR: Actor# [1:7491644280929609800:5784] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 70], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:26:07.768334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:26:07.768370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:59.598552Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTM5ZTFjYzktMzRmMDk4MDctODliM2RlOWMtOGEwOTBjNDU=, ActorId: [1:7491644263749735294:2328], ActorState: ExecuteState, TraceId: 01jrfq66xcfhx8nkfqrzz0qrzv, Create QueryResponse for error on request, msg:
: Error: Request timeout 300000ms exceeded
: Error: Cancelling after 300001ms during compilation
assertion failed at ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:379, void NKikimr::NKqp::TChainTester::JoinTables(): (result.GetStatus() == EStatus::SUCCESS) failed: (TIMEOUT != SUCCESS) , with diff: (TIM|SUCC)E(OUT|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193A6ECB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19878CFF
2. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:379: JoinTables @ 0x18FB72E1
3. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:325: Test @ 0x18F6F600
4. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:546: Execute_ @ 0x18F6F600
5. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544: operator() @ 0x18FB3937
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544:1) &> @ 0x18FB3937
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544:1) &> @ 0x18FB3937
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FB3937
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FB3937
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198AFD25
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198AFD25
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198AFD25
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1987F878
14. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544: Execute @ 0x18FB2B03
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19881145
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198AA29C
17. ??:0: ?? @ 0x7FE5F9EC4D8F
18. ??:0: ?? @ 0x7FE5F9EC4E3F
19. ??:0: ?? @ 0x16572028
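The assertion above is a status check: TChainTester::JoinTables() required EStatus::SUCCESS but received TIMEOUT after the query exhausted its 300000ms budget during compilation. Judging by the test name and the long run of ESchemeOpCreateTable operations in this log, Chain65Nodes presumably joins 65 tables in one chain; a sketch of that query shape is shown below, where every table and column name is an illustrative assumption rather than a quote from the test source:

    -- Hypothetical shape of the chain join that timed out in compilation;
    -- t1..t65 and the t{k}_id{k-1} key columns are assumed names.
    SELECT COUNT(*)
    FROM t1
    JOIN t2  ON t2.t1_id1 = t1.id1
    JOIN t3  ON t3.t2_id2 = t2.id2
    -- ... JOINs for t4 through t64 omitted ...
    JOIN t65 ON t65.t64_id64 = t64.id64;

Each extra relation multiplies the join orders a cost-based optimizer may enumerate, so with 65 relations it is the planning phase, not execution, that exceeded the limit and produced the (TIMEOUT != SUCCESS) diff.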
>> KqpService::SessionBusyRetryOperationSync [GOOD]
>> KqpService::SwitchCache+UseCache
>> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD]
>> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD]
>> KqpQueryService::TableSink_OlapInsert
>> KqpQueryService::ExecuteQueryMultiScalar [GOOD]
>> YdbYqlClient::SimpleColumnFamilies [GOOD]
>> YdbYqlClient::TableKeyRangesSinglePartition
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 11418, MsgBus: 15122 2025-04-10T11:29:40.083053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645229609826605:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:40.083114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015eb/r3tmp/tmpxymL0B/pdisk_1.dat 2025-04-10T11:29:40.743989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:40.744070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:40.755050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:40.790320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11418, node 1 2025-04-10T11:29:41.054831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:41.054852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:41.054860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:41.054970Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15122 TClient is connected to server localhost:15122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:41.869238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:29:41.881333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:43.918141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645242494728923:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.918296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.922523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645242494728935:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:43.930882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:43.958403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645242494728937:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:44.042980Z node 1 :TX_PROXY ERROR: Actor# [1:7491645246789696284:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:44.518923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:44.824246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:44.824530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:44.824770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:44.824885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:44.824990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:44.825107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:44.825249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:44.825375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:44.825499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:44.825603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:44.825705Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:44.825813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645246789696565:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:44.877734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:44.877800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:44.878062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:44.878188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:44.878297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:44.878402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:44.878500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:44.878606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:44.878739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:44.878856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:44.878984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:44.879094Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645246789696567:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:44.896905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491645246789696561:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:44.903113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491645246789696561:2357];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.529583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.533247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.534694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.539055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.540267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.544271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.545705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.550214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.558372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.563861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.566440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.575669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.579255Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.584478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.587234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.596012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.599328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.603271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.608225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.619076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.622866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.624720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.629199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.630519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.635839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.641375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.643250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.649478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.650812Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.664522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.668458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.670059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.674008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.675994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.681733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.688937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.694902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.695029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.703572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.709410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.710650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.715210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.716303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.724347Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.824211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.910102Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqekft2fndaz8nq0bny3jy", SessionId: ydb://session/3?node_id=1&id=NzJhZmFmNjYtMmJmOTE4ZTctODhhMWNhMDQtZGJjYWRmNA==, Slow query, duration: 39.291150s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:14.152735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:14.153214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645525962616718:9309];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:31:14.153258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:14.154274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 19420, MsgBus: 6192 2025-04-10T11:31:11.809495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645620540750175:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.812330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001574/r3tmp/tmpFfNjG4/pdisk_1.dat 2025-04-10T11:31:12.639074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.702532Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.702628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.707620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19420, node 1 2025-04-10T11:31:12.999495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.999516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.999522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.999623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6192 TClient is connected to server localhost:6192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.172491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.214584Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:14.228784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.445283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.724099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.854312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:16.806619Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645620540750175:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.806697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:16.889899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645642015588359:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.890005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.321535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.378632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.414185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.459127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.535453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.623383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.703709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646310556174:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.703791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.704017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646310556179:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.714772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.738024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645646310556181:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:17.843409Z node 1 :TX_PROXY ERROR: Actor# [1:7491645646310556240:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14831, MsgBus: 6623 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001574/r3tmp/tmpCcmu7i/pdisk_1.dat 2025-04-10T11:31:20.587490Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645656813031937:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:20.671809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:20.737094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:20.796701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:20.796794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:20.803604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14831, node 2 2025-04-10T11:31:21.065647Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:21.065751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:21.065759Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:21.065877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6623 TClient is connected to server localhost:6623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:21.789956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:31:21.801321Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:24.911393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645673992901598:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:24.911489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:24.929316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:25.039255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645678287869000:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:25.039331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:25.039668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645678287869006:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:25.044087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:25.057409Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:31:25.057691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645678287869008:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:25.145267Z node 2 :TX_PROXY ERROR: Actor# [2:7491645678287869059:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:25.474261Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645656813031937:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:25.477895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24245, MsgBus: 2267 2025-04-10T11:31:26.264574Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645681288958292:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:26.264632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001574/r3tmp/tmpzoozc0/pdisk_1.dat 2025-04-10T11:31:26.527124Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:26.613363Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:26.613467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:26.615534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24245, node 3 2025-04-10T11:31:26.786717Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:26.786741Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:26.786753Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:26.786855Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2267 TClient is connected to server localhost:2267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-10T11:31:27.627580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.638904Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:27.660459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.806648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:28.090976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:28.188131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.283280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645698468829242:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.283376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.333262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.413106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.502639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.562062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.609168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.708601Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.795662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645698468829767:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.795764Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.796222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645698468829772:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.800939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:30.816993Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645698468829774:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:30.873107Z node 3 :TX_PROXY ERROR: Actor# [3:7491645698468829828:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:31.290518Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645681288958292:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:31.290609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> KqpQueryService::ExecStatsAst [GOOD] >> KqpQueryService::DmlNoTx >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> KqpQueryService::TableSink_OltpUpdate [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors >> KqpQueryService::ShowCreateSysView [GOOD] >> TGroupMapperTest::SanitizeGroupTest3dc >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> KqpQueryService::ReadManyRangesAndPoints [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD] Test command err: Trying to start YDB, gRPC: 3654, MsgBus: 28563 2025-04-10T11:29:43.372977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645241781497096:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:43.373521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015e5/r3tmp/tmpd2awm5/pdisk_1.dat 2025-04-10T11:29:44.040073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:44.040169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:44.042035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:44.063727Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3654, node 1 2025-04-10T11:29:44.299004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:44.299026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:44.299043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:29:44.299159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28563 TClient is connected to server localhost:28563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:45.281832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:47.758769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645258961366807:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.758936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.759306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645258961366819:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:47.763812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:47.789004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645258961366821:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:47.847377Z node 1 :TX_PROXY ERROR: Actor# [1:7491645258961366872:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:48.349850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645241781497096:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:48.358721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:48.390922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:48.693630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:48.702433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:48.702683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:48.702786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:48.702869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:48.702961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:48.703065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:48.703190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:48.703288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:48.703392Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:48.703502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:48.703587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645263256334441:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:48.708910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:48.708951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:48.709095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:48.709221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:48.709324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:48.709411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:48.709508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:48.709600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:48.709700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:48.709815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:48.709909Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:48.709993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645263256334435:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:48.761623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645263256334457:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.491289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.496155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.501301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.505588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.511998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.518205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.524134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.535093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.536502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.543285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.546168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.557105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.560190Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.565385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.569858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.579087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.583586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.584402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.589638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.593984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.603806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.609664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.615358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.615678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.620968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.624689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.633968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.638393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.642890Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.649752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.659357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.668488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.669009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.675715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.676446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.683726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.685705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.690012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.695787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.699778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.704370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.709426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.711133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.716445Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.720917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:13.984673Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqeng39pqnvggfrp12crv6", SessionId: ydb://session/3?node_id=1&id=NjJiYzkwY2MtZDdiNTEzMTgtNjM4ZjAxNi0xNWYyYmZkMw==, Slow query, duration: 37.306168s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:14.443273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:14.443801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:14.444438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645482299701041:7648];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:31:14.444840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateSysView [GOOD] Test command err: Trying to start YDB, gRPC: 15772, MsgBus: 5722 2025-04-10T11:31:11.840309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645620360573342:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.840352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156b/r3tmp/tmpSVMlR4/pdisk_1.dat 2025-04-10T11:31:12.820368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.888235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.888336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.891558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:12.891923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15772, node 1 2025-04-10T11:31:13.074764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:13.074794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:13.074804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:13.074930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5722 TClient is connected to server localhost:5722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.288198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.322460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:14.329846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.559178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.815327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.954172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:16.841467Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645620360573342:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.841592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.176453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646130378842:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.176572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.655856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.695049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.726946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.803613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.846816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.924595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.987988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646130379367:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.988058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.988244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646130379372:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.992559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:18.016154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645646130379374:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:18.085104Z node 1 :TX_PROXY ERROR: Actor# [1:7491645650425346724:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:19.373165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23227, MsgBus: 24900 2025-04-10T11:31:20.795342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645656276676775:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:20.795382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156b/r3tmp/tmpXGXj3t/pdisk_1.dat 2025-04-10T11:31:21.156852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:21.156950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:21.178061Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:21.179952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23227, node 2 2025-04-10T11:31:21.350727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:21.350761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:21.350769Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:21.350895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24900 TClient is connected to server localhost:24900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:22.146634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:31:22.155752Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:22.165135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:22.3077 ... rId: [2:7491645682046482838:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:26.051731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:26.077478Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:31:26.078489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645682046482840:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:26.150090Z node 2 :TX_PROXY ERROR: Actor# [2:7491645682046482896:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:27.330739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:27.403520Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645686341450573:2513], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: SHOW CREATE statement is not supported 2025-04-10T11:31:27.405467Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDQ3ODk4YzktMjJmNjg3ZWQtZGI1ODE0MDEtYmRlZjY3ZDM=, ActorId: [2:7491645686341450489:2499], ActorState: ExecuteState, TraceId: 01jrfqg70k7jn1mdrwcfzz59x5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 3879, MsgBus: 17622 2025-04-10T11:31:28.529179Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645693170436473:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:28.529254Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156b/r3tmp/tmpctpRAY/pdisk_1.dat 2025-04-10T11:31:28.841116Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:28.875880Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:28.875981Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:28.881021Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3879, node 3 2025-04-10T11:31:29.058835Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:29.058870Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:29.058880Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:29.059042Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17622 TClient is connected to server localhost:17622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-10T11:31:29.777728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:31:29.793203Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:29.816588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:29.921483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.131663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.254186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:32.825718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645710350307436:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:32.825845Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:32.936443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.990063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.042872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.121273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.162681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.209769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.299784Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645714645275253:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.299892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.300362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645714645275258:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.305797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:33.322802Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645714645275260:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:33.378439Z node 3 :TX_PROXY ERROR: Actor# [3:7491645714645275316:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:33.525622Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645693170436473:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:33.525703Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:34.544418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:34.696225Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491645718940243017:2513], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:34.696540Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjgyYTk4ZDgtOTU1NGFjMzktNzYwZTAxNWEtY2RkMTk2NjA=, ActorId: [3:7491645718940242907:2497], ActorState: ExecuteState, TraceId: 01jrfqge3t51qj9mvrf9gb3hm9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:31:34.748735Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491645718940243030:2516], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:34.750599Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjgyYTk4ZDgtOTU1NGFjMzktNzYwZTAxNWEtY2RkMTk2NjA=, ActorId: [3:7491645718940242907:2497], ActorState: ExecuteState, TraceId: 01jrfqge5pbhqbnfy9yr6kss0g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TGroupMapperTest::MapperSequentialCalls >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: Trying to start YDB, gRPC: 24768, MsgBus: 3164 2025-04-10T11:31:15.847696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645635541941361:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:15.847727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001562/r3tmp/tmpAQ6WM0/pdisk_1.dat 2025-04-10T11:31:16.480340Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:16.486310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:16.486434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:16.493058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24768, node 1 2025-04-10T11:31:16.662906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:16.662932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:16.662939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:16.663090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3164 TClient is connected to server localhost:3164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:17.662650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:17.701710Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:20.310968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645657016778512:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.311087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.848347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645635541941361:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:20.848415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:20.912852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:21.332426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645661311746382:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:21.332529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:21.337282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645661311746387:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:21.342874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:21.360422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645661311746389:2384], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:21.450492Z node 1 :TX_PROXY ERROR: Actor# [1:7491645661311746444:2709] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23860, MsgBus: 14205 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001562/r3tmp/tmptawkcq/pdisk_1.dat 2025-04-10T11:31:24.438338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:24.483312Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:24.499654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:24.499745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:24.500840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23860, node 2 2025-04-10T11:31:24.730758Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:24.730790Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:24.730804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:24.730938Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14205 TClient is connected to server localhost:14205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:25.431432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.450799Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:28.340598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691838091802:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.340692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.372415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.486036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691838092026:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.486141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.486437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691838092031:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.491741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:28.509451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645691838092033:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:28.581372Z node 2 :TX_PROXY ERROR: Actor# [2:7491645691838092084:2472] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21791, MsgBus: 4348 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001562/r3tmp/tmpAth0ux/pdisk_1.dat 2025-04-10T11:31:30.498367Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:30.507763Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:30.525441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:30.525539Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:30.528777Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21791, node 3 2025-04-10T11:31:30.645190Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:30.645215Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:30.645225Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:30.645354Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4348 TClient is connected to server localhost:4348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:31.455736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:31.464358Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:34.434320Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645716872748178:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:34.434450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:34.462086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:34.657392Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645716872748631:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:34.657482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:34.657894Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645716872748636:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:34.666134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:34.680453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645716872748638:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:31:34.765114Z node 3 :TX_PROXY ERROR: Actor# [3:7491645716872748689:2620] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TGroupMapperTest::MakeDisksUnusable [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 25969, MsgBus: 28895 2025-04-10T11:29:37.690682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645214646125552:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:37.731131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015f4/r3tmp/tmpXUaMPT/pdisk_1.dat 2025-04-10T11:29:38.351712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:38.351783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:38.355750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:38.390334Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25969, node 1 2025-04-10T11:29:38.626709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:38.626735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:38.626746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:38.626864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28895 TClient is connected to server localhost:28895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:39.589627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:41.947925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645231825995275:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.948012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645231825995267:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.948152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:41.952284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:41.969233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645231825995281:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:42.068670Z node 1 :TX_PROXY ERROR: Actor# [1:7491645236120962629:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:42.528238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.670313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645214646125552:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:42.670386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:42.760751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:42.760959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:42.761322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:42.761444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:42.761574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:42.761715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:42.761821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:42.761924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:42.762022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:42.762111Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:42.762245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:42.762394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7491645236120962893:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:42.776403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:42.776471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:42.776721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:42.776843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:42.776946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:42.777038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:42.777143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:42.777242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:42.777338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:42.777430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:42.777519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:42.777616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645236120962891:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:42.838631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7491645236120962907:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.106219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.112551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.113003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.120122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.120153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.131700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.138121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.139464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.147142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.147288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.153954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.155129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.161260Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.161716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.170564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.171220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.178038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.184194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.189525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.192980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.207107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.207414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.214039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.214877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.223994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.225580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.231119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.237649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.238774Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.246060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.247289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.268316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.279576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.289832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.299237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.309652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.310637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.321129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.324706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.331518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.333770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.337577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.340381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.346393Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.510975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:12.714325Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqeg6t83w8052zf7zeba00", SessionId: ydb://session/3?node_id=1&id=M2QxODZjZDgtY2MyMjBkNDItZmMwODlmZDYtNDFiNjY1NjQ=, Slow query, duration: 41.455265s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:13.092709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:13.092709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:13.093056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645579718403766:11339];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-04-10T11:31:13.093806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TGroupMapperTest::NonUniformCluster >> BackupRestore::RestoreViewToDifferentDatabase [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TMultiversionObjectMap::MonteCarlo |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> TGroupMapperTest::Block42_1disk >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> 
TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> TGroupMapperTest::NonUniformCluster2 >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 9238, MsgBus: 13897 2025-04-10T11:31:11.874324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645617370227983:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.874406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156f/r3tmp/tmpSTSpBi/pdisk_1.dat 2025-04-10T11:31:12.650459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.701090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.701221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.707795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9238, node 1 2025-04-10T11:31:13.067797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:13.067821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:13.067827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:13.067969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13897 TClient is connected to server localhost:13897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.198620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:16.547113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645638845064991:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.547286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.547997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645638845065003:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.562522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:16.577203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645638845065005:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:31:16.651338Z node 1 :TX_PROXY ERROR: Actor# [1:7491645638845065056:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:16.862407Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645617370227983:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.870820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.118967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-10T11:31:17.123745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-10T11:31:17.423291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.508852Z node 1 :TX_PROXY ERROR: Actor# [1:7491645643140032602:2489] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:17.532159Z node 1 :TX_PROXY ERROR: Actor# [1:7491645643140032610:2495] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZDIwMDZiNjUtYjhmYmMwNjMtNjA2OGJlMDMtYWYyNzMxMTc=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:17.588857Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:31:17.598436Z node 1 :TX_PROXY ERROR: Actor# [1:7491645643140032670:2542] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:17.602290Z node 1 :TX_PROXY ERROR: Actor# [1:7491645643140032677:2547] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZDIwMDZiNjUtYjhmYmMwNjMtNjA2OGJlMDMtYWYyNzMxMTc=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:17.605205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.445402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.684206Z node 1 :TX_PROXY ERROR: Actor# [1:7491645647435000164:2661] txid# 
281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.686783Z node 1 :TX_PROXY ERROR: Actor# [1:7491645647435000171:2666] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZDIwMDZiNjUtYjhmYmMwNjMtNjA2OGJlMDMtYWYyNzMxMTc=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.730563Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T11:31:18.742594Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645647435000222:2412], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:18.743904Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk1Mzg0NTctYzkyMjg3Yi0xNmFiMzU1Ny05OTcxODhiYg==, ActorId: [1:7491645647435000216:2411], ActorState: ExecuteState, TraceId: 01jrfqfyj32mtrzpehfgnn85rd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:31:18.786678Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645647435000235:2417], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:18.787987Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjE4YzU4NDUtNDdjYmU2MzMtZTUzOTQzZGItNTg3MDEyNDc=, ActorId: [1:7491645647435000233:2416], ActorState: ExecuteState, TraceId: 01jrfqfykm27ab76zatzca1mmj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 23800, MsgBus: 14980 2025-04-10T11:31:19.875497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645654705637093:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:19.876165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156f/r3tmp/tmpMd88NZ/pdisk_1.dat 2025-04-10T11:31:20.157521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:20.171916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:20.172009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:20.177927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23800, node 2 2025-04-10T11:31:20.394671Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:20.394696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:20.394701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:20.394811Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14980 TClient is connected to server localhost:14980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: t ... 
on type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.315320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.354574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.439369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.503406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.588500Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645701823630231:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.588594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.588983Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645701823630236:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.592786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:30.614455Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645701823630238:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:30.717454Z node 3 :TX_PROXY ERROR: Actor# [3:7491645701823630294:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:31.166250Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645684643758763:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:31.166318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:31.982285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.142800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.255008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.801166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.000495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.193681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.385219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.632127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.889223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3029, MsgBus: 4944 2025-04-10T11:31:35.119089Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645721486140828:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:35.119131Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156f/r3tmp/tmpgJprbh/pdisk_1.dat 2025-04-10T11:31:35.394706Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3029, node 4 
2025-04-10T11:31:35.530501Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:35.530602Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:35.536901Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:35.566674Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:35.566699Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:35.566706Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:35.566822Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4944 TClient is connected to server localhost:4944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:36.291779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:39.145380Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645738666010665:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.145442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645738666010691:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.145488Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.151015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:39.164966Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:31:39.165264Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491645738666010695:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:31:39.242876Z node 4 :TX_PROXY ERROR: Actor# [4:7491645738666010746:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:39.348147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-10T11:31:39.650687Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491645738666010900:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-10T11:31:39.652685Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzdkMzcwNDQtZGQzNjcyMWItNTM1NmE5N2EtNDgxNWQ4MzU=, ActorId: [4:7491645738666010898:2355], ActorState: ExecuteState, TraceId: 01jrfqgjyx95ab1yeyztvkj0ss, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:31:39.709776Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.760399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:40.122280Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491645721486140828:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:40.122373Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TGroupMapperTest::ReassignGroupTest3dc >> KqpQueryService::DmlNoTx [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteRetryQuery >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-04-10T11:23:36.071834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491643666544294502:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:36.071992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce7/r3tmp/tmp3MC7hN/pdisk_1.dat 2025-04-10T11:23:36.420678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24343, node 1 2025-04-10T11:23:36.460092Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:36.460150Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:23:36.482974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:36.483124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:36.489726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:23:36.489931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:36.489950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:36.489966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:36.490087Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16841 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:36.808163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:38.882757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643675134230118:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:38.882869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:39.170566Z node 1 :TX_PROXY ERROR: Actor# [1:7491643679429197443:2631] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-10T11:23:39.170756Z node 1 :TX_PROXY ERROR: Actor# [1:7491643679429197443:2631] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-10T11:23:39.269950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491643679429197455:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:39.270078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:39.298891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:40.960552Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491643681046335989:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:40.961740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce7/r3tmp/tmpyHZy4P/pdisk_1.dat 2025-04-10T11:23:41.127087Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:41.163845Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:41.163944Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:41.169681Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22178, node 4 2025-04-10T11:23:41.249026Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:41.249063Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:41.249077Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:41.249203Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:23:41.436017Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:23:44.062250Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643698226206207:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:44.062369Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:44.075355Z node 4 :TX_PROXY ERROR: Actor# [4:7491643698226206228:2633] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-10T11:23:44.075457Z node 4 :TX_PROXY ERROR: Actor# [4:7491643698226206228:2633] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-10T11:23:44.145587Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491643698226206240:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:44.145673Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:23:44.161048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:23:45.883570Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491643704425571103:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:23:45.883711Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce7/r3tmp/tmplvI5gm/pdisk_1.dat 2025-04-10T11:23:46.037496Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:23:46.076846Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:23:46.076950Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:23:46.080361Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24109, node 7 2025-04-10T11:23:46.147993Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:23:46.148015Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:23:46.148027Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:23:46.148150Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Proc ... tabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:28.570358Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719645. Ctx: { TraceId: 01jrfqg82a9ft5qfvpqx61rmtp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:28.710794Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01jrfqg86r53ed3bp1kyr384kp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:31:28.865461Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01jrfqg8ba29t1vhmj9he6a7nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.023871Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01jrfqg8fra4dvjzk10spcknt9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.151606Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719649. Ctx: { TraceId: 01jrfqg8n36gswbzmmy5t7x8dw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.295462Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01jrfqg8rrf5c27s8qnp1xmkbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.415521Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01jrfqg8xa8b9qzw7fxys1r09j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.536163Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01jrfqg91a2bpdmw9bjdzvvr5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.686258Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01jrfqg95j64d86svgrf3v7m3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.827124Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01jrfqg99p7jd2j9gd6fgmftnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:29.981148Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719655. Ctx: { TraceId: 01jrfqg9e87a3m9xjvqmtanmqf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.142941Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01jrfqg9k575nxtv4k710f9y1x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.335590Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719657. 
Ctx: { TraceId: 01jrfqg9qte3f1vt1z9b5te4dm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.443745Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01jrfqg9xx8mc86xqrtmsazssn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.579803Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719659. Ctx: { TraceId: 01jrfqga1bck0vv31tcz3gfdw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.707103Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01jrfqga5jbydjt709hsw40xpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.845267Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01jrfqga9k6xpp7r91f5jtkgg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:30.978855Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01jrfqgae224ngatekg6rqqsm6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.091999Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01jrfqgahx36qx3hn5nbcya6hn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.251983Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01jrfqgap27gp9kqxp768qjrz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.367622Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719665. Ctx: { TraceId: 01jrfqgatv7c1pkht46wnhzn8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.491570Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01jrfqgay24ppmen4t0vnqycc5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.615575Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719667. 
Ctx: { TraceId: 01jrfqgb2ed8j6mhn8vv11zx1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.777837Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01jrfqgb6admqkec4b8acqx510, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI4ZTQ3NDAtOWVlNmQ2ZDAtY2Y2ZjdjZTUtNWRiODNlNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:31:31.807518Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-10T11:31:31.808507Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:31:34.788586Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491645715765006039:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ce7/r3tmp/tmp9hHyx7/pdisk_1.dat 2025-04-10T11:31:34.971162Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:35.156552Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:35.227115Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:35.227237Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:35.240641Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11849, node 10 2025-04-10T11:31:35.486983Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:35.487019Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:35.487034Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:35.487204Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:35.949014Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:39.803219Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491645715765006039:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:39.804060Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:39.829898Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10975, MsgBus: 25535 2025-04-10T11:29:55.838300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645291796706075:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:55.843222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015cc/r3tmp/tmpT8yQEU/pdisk_1.dat 2025-04-10T11:29:56.498127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:56.498241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:56.507363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:56.542570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10975, node 1 2025-04-10T11:29:56.863835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:56.863858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:56.863865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:56.863999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25535 TClient is connected to server localhost:25535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:57.493782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:57.520657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:59.759101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645308976575919:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:59.759243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:59.766293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645308976575931:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:59.771874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:59.786301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645308976575933:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:59.877099Z node 1 :TX_PROXY ERROR: Actor# [1:7491645308976575984:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:00.259858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:00.541815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:00.542048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:00.542478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:00.542605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:00.542696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:00.542794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:00.542899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:00.543004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:00.543152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:00.543196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:00.543278Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:00.543279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:00.543697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:00.544066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645313271543559:2358];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:00.547134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:00.547288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:00.547374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:00.547464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:00.547543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:00.547628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:00.547723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:00.547873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:00.547967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:00.548065Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491645313271543557:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:00.596405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491645313271543545:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:00.596463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7491645313271543545:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.511284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.520156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.520345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.533641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.535867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.544161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.550625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.557841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.566602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.572004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.577900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.584913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.588474Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.602543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.603867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.610331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.613137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.617582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.621768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.625242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.631800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.632506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.639483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.642714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.651122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.653287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.658125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.659307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.670592Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.676369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.678787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.688255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.692821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.695524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.701740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.704277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.711316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.716168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.719777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.723809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.731035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.733491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.737414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.742432Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.881868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.956369Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf0qr53dry3z0p3nx8w5m", SessionId: ydb://session/3?node_id=1&id=ZjUzMjZkMTktOTQ2ZjU4OTAtNWE5YjQyNmQtNGMxNzM0OWM=, Slow query, duration: 35.771692s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:24.271098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.271789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645532314911775:7832];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:31:24.272192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.272991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::TableSink_BadTransactions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8569, MsgBus: 3603 2025-04-10T11:29:51.729631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645273545056284:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:51.730044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015d5/r3tmp/tmpvVwa4M/pdisk_1.dat 2025-04-10T11:29:52.694117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:52.701820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:52.701901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:52.713134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8569, node 1 2025-04-10T11:29:53.014551Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:53.014577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:53.014592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:53.014719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3603 TClient is connected to server localhost:3603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:54.213499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:54.242309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:56.718225Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645273545056284:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:56.718293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:57.109801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645299314860577:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.109901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645299314860588:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.109954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.114593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:57.133395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645299314860591:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:57.231325Z node 1 :TX_PROXY ERROR: Actor# [1:7491645299314860643:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:57.637943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:57.985526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:57.985759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:57.985993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:57.986103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:57.986401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:57.986534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:57.986651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:57.986753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:57.986849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:57.986942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:57.987032Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:57.987212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645299314860879:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:57.989606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:57.989659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:57.989818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:57.989912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:57.990032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:57.998422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:57.998539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:57.998635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:57.998768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:57.998882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:57.999013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:57.999144Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491645299314860860:2355];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:58.023172Z node 1 :TX_COL ... 10714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.315680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.316113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.321724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.335428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.338505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.345163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.347709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.356682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.358542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.362727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.364307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.369262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.373935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.382644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.388220Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.396835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.398346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.407417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.412778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.417574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.419339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.425682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.431545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.438127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.444996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.455081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.460770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.465826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.479328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.482509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.489272Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.501003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.506655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.512088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.519920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.521986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.532754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.536861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.547632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.551370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.569148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.580571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.581126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.594226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:27.810502Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf17n4w2qchtfja249vbt", SessionId: ydb://session/3?node_id=1&id=MjZmMGJlODUtNWM1MThjMzUtZGZkNDQ5YmQtNWE5NmE4YjU=, Slow query, duration: 39.116009s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:28.266437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:28.266595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:28.266861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645535538098371:7798];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:31:28.267225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: Trying to start YDB, gRPC: 7392, MsgBus: 23483 2025-04-10T11:31:11.841485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619049543577:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.841526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001571/r3tmp/tmp80b23T/pdisk_1.dat 2025-04-10T11:31:12.698942Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.701717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.701805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.707101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7392, node 1 2025-04-10T11:31:12.966797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.966830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.966839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.966965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23483 TClient is connected to server localhost:23483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.086240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
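For reference, the DDL embedded (escaped) in the two KQP_SLOW_LOG entries above — both TestJoinOrderHintsSimple runs execute the same setup script, taking 35.77 s and 39.12 s respectively under asan — reads as follows when unescaped:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The "Unapplied hint: Rows(R T # 1), code: 4534" warnings presumably refer to a Rows cardinality hint injected by the join-order test that the optimizer could not match to any join in the resulting plan; the test still passes ([GOOD]) since unapplied hints are reported as warnings, not errors.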
2025-04-10T11:31:14.102395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:14.136753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.338615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.560404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.665056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:16.586666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645640524381612:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.586827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.846274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619049543577:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.846352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.094075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.174702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.250865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.295075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.330544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.367033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.430023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645644819349426:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.430110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.430650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645644819349431:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.435737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.451695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:31:17.452041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645644819349433:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:17.521464Z node 1 :TX_PROXY ERROR: Actor# [1:7491645644819349487:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27432, MsgBus: 14649 2025-04-10T11:31:20.253996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645656343784581:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:20.254069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001571/r3tmp/tmpSAPma3/pdisk_1.dat 2025-04-10T11:31:20.529793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:20.529887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:20.532961Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:20.542407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27432, node 2 2025-04-10T11:31:20.774696Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:20.774721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:20.774730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:20.774845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14649 TClient is connected to server localhost:14649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:21.640698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:21.652099Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:21.669089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:21.755342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:21.963697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation par ... ose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.035605Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645689819181797:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:33.036063Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:33.036698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645715588987687:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.036771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.036934Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645715588987692:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.040736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:33.054812Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645715588987694:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:33.109335Z node 3 :TX_PROXY ERROR: Actor# [3:7491645715588987748:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:34.273512Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491645719883955375:2505], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-04-10T11:31:34.274417Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5NmMwN2YtZjkwMDU0ODgtZWY1N2VjMDYtNDVkZmMwNzU=, ActorId: [3:7491645719883955373:2504], ActorState: ExecuteState, TraceId: 01jrfqgdpp2fmhec16sk8efmz6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:31:34.400542Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645719883955393:2510], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jrfqgdrm1g88gw98z7ydbanb. SessionId : ydb://session/3?node_id=3&id=NjgwNDI5Y2QtODU0ODA2ZTQtNWVmZTc3MGMtNTczY2M4NDg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-04-10T11:31:34.402254Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjgwNDI5Y2QtODU0ODA2ZTQtNWVmZTc3MGMtNTczY2M4NDg=, ActorId: [3:7491645719883955386:2510], ActorState: ExecuteState, TraceId: 01jrfqgdrm1g88gw98z7ydbanb, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 65474, MsgBus: 3419 2025-04-10T11:31:35.615477Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645720470276951:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:35.615536Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001571/r3tmp/tmpeX5KF2/pdisk_1.dat 2025-04-10T11:31:35.895644Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:35.915232Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:35.915409Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:35.917962Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65474, node 4 2025-04-10T11:31:36.081406Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:36.081436Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:36.081444Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:36.081576Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3419 TClient is connected to server localhost:3419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:36.908346Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.926802Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:36.944863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
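The node-3 failures above are the test's negative cases: a compile-time failure (Type annotation, code 1030, ending in "Member not found: test_ast_column") and a runtime abort ("Terminate was called ... Failed to unwrap empty optional", PRECONDITION_FAILED). A hypothetical pair of YQL statements that would raise the same diagnostics — not taken from the test source; the table name t and the missing column are assumptions for illustration:

    -- Compile-time: selecting a column that does not exist in table `t`
    -- fails type annotation with "Member not found: test_ast_column".
    SELECT test_ast_column FROM t;

    -- Runtime: Unwrap() over an empty optional terminates query execution
    -- with "Failed to unwrap empty optional".
    SELECT Unwrap(Nothing(Int32?));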
2025-04-10T11:31:37.044372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:37.265688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:37.412066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:40.079362Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645741945115208:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.079497Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.133439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.183532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.246447Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.291264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.336687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.392721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.461673Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645741945115722:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.461796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.462017Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645741945115727:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.467041Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:31:40.481574Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491645741945115729:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:40.559944Z node 4 :TX_PROXY ERROR: Actor# [4:7491645741945115784:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:40.627691Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491645720470276951:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:40.627866Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5393, MsgBus: 10223 2025-04-10T11:31:11.818835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619340994507:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.818880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001570/r3tmp/tmpPV0jIp/pdisk_1.dat 2025-04-10T11:31:12.683657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.683756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.688777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:12.695311Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5393, node 1 2025-04-10T11:31:12.966809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.966845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.966855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.967003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10223 TClient is connected to server localhost:10223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.207173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.240011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:16.544404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645640815831659:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:16.544509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:16.823020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619340994507:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:31:16.823099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:31:17.095399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.248301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645110799062:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.248388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.248809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645110799067:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.253154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T11:31:17.276824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645645110799069:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-10T11:31:17.378556Z node 1 :TX_PROXY ERROR: Actor# [1:7491645645110799121:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:31:18.572794Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645649405766522:2379], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-04-10T11:31:18.574422Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjljMGFlMDYtM2RjMDY0NDYtYWEwMDY4ZTctNTNjZjM4MzA=, ActorId: [1:7491645649405766520:2378], ActorState: ExecuteState, TraceId: 01jrfqfycg36rntctfq8js1vp7, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 20245, MsgBus: 28645 2025-04-10T11:31:24.646523Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645674485394662:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:24.739364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001570/r3tmp/tmpGiG4Kj/pdisk_1.dat 2025-04-10T11:31:24.959301Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:24.992549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:24.992630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:24.995695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20245, node 2 2025-04-10T11:31:25.238814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:25.238839Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:25.238845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:25.238968Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28645 TClient is connected to server localhost:28645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:25.969475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:25.975139Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:28.714899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691665264366:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:28.714984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:28.748962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T11:31:28.823268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691665264469:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:28.823377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:28.823634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645691665264474:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:28.827819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-10T11:31:28.847437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645691665264476:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:31:28.931040Z node 2 :TX_PROXY ERROR: Actor# [2:7491645691665264527:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:29.598428Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645674485394662:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:29.598499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 21767, MsgBus: 7144 2025-04-10T11:31:35.754895Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645719986516458:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:35.754932Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001570/r3tmp/tmpIOyrs0/pdisk_1.dat 2025-04-10T11:31:36.078377Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:36.107019Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:36.107114Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:36.115496Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21767, node 3 2025-04-10T11:31:36.318641Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:36.318665Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:36.318671Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:36.318812Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7144 TClient is connected to server localhost:7144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:37.167523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:37.182602Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:39.986045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645737166386307:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:39.986260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.010592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.259689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.591018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645741461354964:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.591150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.591456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645741461354969:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.598445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-10T11:31:40.609882Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645741461354971:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:31:40.681414Z node 3 :TX_PROXY ERROR: Actor# [3:7491645741461355038:3246] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:40.758260Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645719986516458:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:40.758360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] >> BackupRestore::ImportDataShouldHandleErrors [GOOD] >> BackupRestore::RestoreKesusResources >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TGroupMapperTest::MakeDisksForbidden [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD] Test command err: Trying to start YDB, gRPC: 3514, MsgBus: 19634 2025-04-10T11:31:20.646469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645658275857616:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:20.646578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155d/r3tmp/tmpX4Z36P/pdisk_1.dat 2025-04-10T11:31:21.390694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:21.402457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:21.402544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:21.407787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3514, node 1 2025-04-10T11:31:21.630747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:21.630769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:21.630776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:21.630880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19634 TClient is connected to server localhost:19634 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:22.838807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:22.894749Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:22.916401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:23.081507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:23.306276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:23.407252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.625204Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645658275857616:2289];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:25.625280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:25.784289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645679750695608:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:25.784393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:26.236674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.273560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.310862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.353497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.400577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.470303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:31:26.610028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645684045663422:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:26.610116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:26.610485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645684045663427:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:26.614773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:31:26.630903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645684045663429:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:26.711389Z node 1 :TX_PROXY ERROR: Actor# [1:7491645684045663487:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24993, MsgBus: 63391 2025-04-10T11:31:28.782076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645693513479738:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:28.782339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155d/r3tmp/tmpQEbKHF/pdisk_1.dat 2025-04-10T11:31:29.023482Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:29.041434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:29.041511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:29.043156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24993, node 2 2025-04-10T11:31:29.149305Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:29.149328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:29.149336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:29.149465Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63391 TClient is connected to server localhost:63391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:29.755695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:29.846807Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:29.869089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:29.991600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.246662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting.. ... nt=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:40.692338Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:40.692370Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:40.692460Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:40.692485Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:40.741578Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.741677Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.749283Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.749283Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.754956Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.756011Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.760481Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:40.761190Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658;
2025-04-10T11:31:40.766406Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658;
2025-04-10T11:31:40.766493Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658;
2025-04-10T11:31:40.773538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T11:31:40.872827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645743865711479:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.872917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.873131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645743865711484:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:40.877109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-04-10T11:31:40.886810Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645743865711486:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T11:31:40.984786Z node 3 :TX_PROXY ERROR: Actor# [3:7491645743865711537:2686] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:41.099305Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645726685840970:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:41.100487Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:41.152138Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710663;tx_id=281474976710663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710663; 2025-04-10T11:31:41.152208Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976710663;tx_id=281474976710663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710663; 2025-04-10T11:31:41.152698Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893; 2025-04-10T11:31:41.152758Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037889; 2025-04-10T11:31:41.152812Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893; 2025-04-10T11:31:41.152899Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037889; 2025-04-10T11:31:41.153247Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976710663;tx_id=281474976710663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710663; 2025-04-10T11:31:41.154528Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710663;tx_id=281474976710663;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710663; 2025-04-10T11:31:41.545419Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTA1MGY1MGQtOWIxODBmMTEtNDIwN2M3NDgtMjc1Y2QyM2Q=, ActorId: [3:7491645748160678965:2490], ActorState: ExecuteState, TraceId: 01jrfqgmk7b7fz5cakbswr9v3h, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 
2025-04-10T11:31:41.665397Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2U3ZmI3OWMtMWYwMWI0MzgtZjA2YmJjNDQtYzYwOThhYjE=, ActorId: [3:7491645748160678987:2497], ActorState: ExecuteState, TraceId: 01jrfqgmvs57sy9fmgbk8rafn8, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-10T11:31:41.851196Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDMwYjY0Y2MtOTgyY2EyZTAtZjQ0YjczMGYtOWYwMjc3NWU=, ActorId: [3:7491645748160679006:2504], ActorState: ExecuteState, TraceId: 01jrfqgmzhf0y2zcv01naz0fhw, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-10T11:31:42.174647Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODc3ZjlmNTMtODk3ODI1MDctNjhjZWU5ZmEtZGQxYzU2MmI=, ActorId: [3:7491645748160679027:2513], ActorState: ExecuteState, TraceId: 01jrfqgn5cc83850p4w8q77kt1, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-10T11:31:42.389616Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjY1MmI2YWUtOTdkYzdmOTMtMWUwNTE2NTMtMzgyMWUxYjE=, ActorId: [3:7491645752455646347:2523], ActorState: ExecuteState, TraceId: 01jrfqgnfvdt2rxbnv7a3sfde2, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-04-10T11:31:42.579333Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710671;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:31:42.580197Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037889; 2025-04-10T11:31:42.580271Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037889; 2025-04-10T11:31:42.580740Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976710671;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:31:42.581276Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976710671;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T11:31:42.582181Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893; 2025-04-10T11:31:42.582293Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;self_id=[3:7491645743865710995:2344];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=35;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893;
2025-04-10T11:31:42.582858Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710671;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD]
Test command err:
Trying to start YDB, gRPC: 18271, MsgBus: 7134
2025-04-10T11:31:11.830673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619927170749:2202];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:31:11.831047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001566/r3tmp/tmptXdXe6/pdisk_1.dat
2025-04-10T11:31:12.694872Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:31:12.704121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:31:12.704237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:31:12.718130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18271, node 1
2025-04-10T11:31:12.958911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:31:12.958930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:31:12.958944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:31:12.959058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7134
TClient is connected to server localhost:7134
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:31:14.215315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:31:14.237851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.424602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.728538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.858706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:16.380304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645641402008866:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:16.380488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:16.825215Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619927170749:2202];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:31:16.825290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:31:17.093830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.191987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.251045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.317025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.371839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.440543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:31:17.541408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645696976686:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.541478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.541850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645696976691:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:31:17.545374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:31:17.560208Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-10T11:31:17.560462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645645696976693:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:31:17.650316Z node 1 :TX_PROXY ERROR: Actor# [1:7491645645696976747:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:31:18.800894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:31:18.802834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-10T11:31:18.804854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-10T11:31:20.026101Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.055716Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.098037Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.144549Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.167887Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.195927Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.239126Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.285802Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.334604Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.375021Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.408712Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.443516Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.467952Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 851af339-8971b57a-456163a7-ebf42c04, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running }
2025-04-10T11:31:20.516409Z node 1 :K ... found or you don't have access permissions }
2025-04-10T11:31:28.957042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.957278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645693822285249:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.961425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:28.979303Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:31:28.979568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645693822285251:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:29.060936Z node 2 :TX_PROXY ERROR: Actor# [2:7491645698117252601:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:30.160464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.167327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.169216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.202811Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 691870c5-df372c1-884fd080-b98afff6, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=Njg1YzAwMjEtMTY2OGZjYzAtMzRhZjgwNGItY2FkNzMyN2I=, TxId: 2025-04-10T11:31:33.231133Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 691870c5-df372c1-884fd080-b98afff6, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NTg5Zjk4OWItYTY1ZjVkOGYtOWYxYTg4MjMtZGYzMDJlMTE=, TxId: 2025-04-10T11:31:33.245974Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 691870c5-df372c1-884fd080-b98afff6, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 19572, MsgBus: 11702 2025-04-10T11:31:34.922256Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645718705376810:2258];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:34.934384Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001566/r3tmp/tmpbZ60Gi/pdisk_1.dat 2025-04-10T11:31:35.092421Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:35.092518Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:35.093786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:35.100838Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19572, node 3 2025-04-10T11:31:35.214884Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:35.214919Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:35.214928Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:35.215089Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11702 TClient is connected to server localhost:11702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:35.861242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:35.896354Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:35.926123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.010387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:36.268915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.379153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:39.103733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645740180214847:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.103846Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.168853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.212558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.291654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.335182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.380454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.434996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.492659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645740180215357:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.492757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.492967Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645740180215362:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.497047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:39.518393Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645740180215364:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:39.602673Z node 3 :TX_PROXY ERROR: Actor# [3:7491645740180215420:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:39.902258Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645718705376810:2258];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:39.902339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:40.574581Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:40.577349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:40.579124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 64246, MsgBus: 63612 2025-04-10T11:31:11.839385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619959833619:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.840015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001577/r3tmp/tmpN5PUGP/pdisk_1.dat 2025-04-10T11:31:12.656291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.705202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.705295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.715470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64246, node 1 2025-04-10T11:31:12.982729Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.982757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.982770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.982902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63612 TClient is connected to server localhost:63612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.387387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.433177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.632999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.857729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:31:14.939058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.831149Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619959833619:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.831303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:16.875101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645641434671725:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.875221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.520950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.566265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.641629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.679439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.715908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.798034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.931846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645729639551:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.931927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.932290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645729639556:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.936684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.956920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645645729639558:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:18.047277Z node 1 :TX_PROXY ERROR: Actor# [1:7491645650024606910:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:19.158347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:19.159966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:19.161299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:22.537290Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-10T11:31:22.537315Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491645667204477564:2788] TxId: 281474976710702. Ctx: { TraceId: 01jrfqg1t4dmzk98c4a1tkfa51, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcyM2RkZTgtYmUyYWFjMDItZmU3ZGFkNTctMTgxOTFiNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T11:31:22.537607Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcyM2RkZTgtYmUyYWFjMDItZmU3ZGFkNTctMTgxOTFiNDg=, ActorId: [1:7491645667204477530:2788], ActorState: ExecuteState, TraceId: 01jrfqg1t4dmzk98c4a1tkfa51, Create QueryResponse for error on request, msg: 2025-04-10T11:31:22.538161Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491645667204477569:2795], TxId: 281474976710702, task: 2. Ctx: { TraceId : 01jrfqg1t4dmzk98c4a1tkfa51. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTcyM2RkZTgtYmUyYWFjMDItZmU3ZGFkNTctMTgxOTFiNDg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491645667204477564:2788], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:31:22.538240Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284682560, txId: 281474976710701] shutting down 2025-04-10T11:31:22.933217Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-10T11:31:22.933424Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491645667204477643:2806] TxId: 281474976710706. Ctx: { TraceId: 01jrfqg2a8es5c0y06d12d5yyj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM0M2Q4OWUtYmNkM2QyNDEtNzNhMDU1YjQtOWFiODgxYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-10T11:31:22.933559Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTM0M2Q4OWUtYmNkM2QyNDEtNzNhMDU1YjQtOWFiODgxYzQ=, ActorId: [1:7491645667204477610:2806], ActorState: ExecuteState, TraceId: 01jrfqg2a8es5c0y06d12d5yyj, Create QueryResponse for error on request, msg: 2025-04-10T11:31:22.933975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284682966, txId: 281474976710705] shutting down 2025-04-10T11:31:23.197271Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284683211, txId: 281474976710709] shutting down Trying to start YDB, gRPC: 30256, MsgBus: 15728 2025-04-10T11:31:24.302474Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645673379288174:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001577/r3tmp/tmpeErdB7/pdisk_1.dat 2025-04-10T11:31:24.388509Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:24.525323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:24.525417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) Vo ... 480 2025-04-10T11:31:29.198567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645694854126752:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:29.198696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:29.198956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645694854126757:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:29.203414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:29.215526Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-10T11:31:29.218408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645694854126759:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:29.278276Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645673379288174:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:29.278372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:29.320752Z node 2 :TX_PROXY ERROR: Actor# [2:7491645694854126814:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:30.388368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.390981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.397297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.872087Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284693907, txId: 281474976710709] shutting down Trying to start YDB, gRPC: 10937, MsgBus: 12732 2025-04-10T11:31:34.995135Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645719906418711:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:34.995202Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001577/r3tmp/tmph0hCoC/pdisk_1.dat 2025-04-10T11:31:35.252299Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:35.274561Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:35.274658Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:35.278635Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10937, node 3 2025-04-10T11:31:35.370842Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:35.370873Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:35.370881Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:35.371026Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12732 TClient is connected to server localhost:12732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:36.027662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.036864Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:36.041647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.131363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.350960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.433149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:39.338317Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645741381256963:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.338432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.387330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.434990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.502691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.617450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.676655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.752785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.859646Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645741381257492:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.859757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.860310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645741381257497:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:39.864853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:39.879860Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645741381257499:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:39.940566Z node 3 :TX_PROXY ERROR: Actor# [3:7491645741381257554:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:39.998283Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645719906418711:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:39.998368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:41.062438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:41.064132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:41.072203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] >> TColumnShardTestReadWrite::CompactionGCFailingBs >> Backup::ProposeBackup >> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert >> TColumnShardTestReadWrite::WriteReadZSTD >> Normalizers::SchemaVersionsNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable >> TColumnShardTestReadWrite::WriteReadDuplicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] Test command err: Trying to start YDB, gRPC: 14207, MsgBus: 29726 2025-04-10T11:30:00.366333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645315573904687:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:00.367485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015bd/r3tmp/tmp6x2h11/pdisk_1.dat 2025-04-10T11:30:01.180581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:01.198827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-10T11:30:01.198914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:01.201598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14207, node 1 2025-04-10T11:30:01.509706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:01.509729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:01.509737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:01.509840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29726 TClient is connected to server localhost:29726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:02.508419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:02.538772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:05.029317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645337048741689:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.029442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.030283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645337048741701:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.038831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:05.054667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645337048741703:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:05.117496Z node 1 :TX_PROXY ERROR: Actor# [1:7491645337048741754:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:05.354290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645315573904687:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:05.372888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:05.492163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:05.804132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:05.804318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:05.804607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:05.804741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:05.804853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:05.804961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:05.805057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:05.805194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:05.805314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:05.805438Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:05.805557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:05.805655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645337048742042:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:05.809028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:05.809126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:05.809354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:05.809479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:05.809579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:05.809671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:05.809772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:05.809867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:05.809966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:05.810064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:05.813574Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:05.818706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491645337048742022:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:05.889411Z node 1 :T ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:32.427842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; [... 43 more near-identical TX_COLUMNSHARD_TX WARN "finished_tx" entries for tx_id=281474976710714 (2025-04-10T11:31:32.434744Z through 11:31:32.616964Z, one per column shard) omitted ...] 2025-04-10T11:31:32.619860Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:32.844339Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf7r47syk3aqcnt4332b4", SessionId: ydb://session/3?node_id=1&id=YTFkZWQxYjEtNGRiODc5YzktZGQyN2RlZjQtMmU3YWE1ZGU=, Slow query, duration: 37.477454s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:33.157703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:33.158256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:33.158670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645637696505056:10649];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-04-10T11:31:33.159080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestReadWrite::ReadWithProgram |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5710, MsgBus: 12276 2025-04-10T11:30:02.453315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645322885014268:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:02.453342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015bb/r3tmp/tmp3mB0UM/pdisk_1.dat 2025-04-10T11:30:03.425496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:03.425576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5710, node 1 2025-04-10T11:30:03.444928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:03.679189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:03.679740Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:03.679748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:03.679761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:03.679847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:30:03.686542Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:30:03.686569Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:30:03.710554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:12276 TClient is connected to server localhost:12276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:04.621568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:04.658329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:07.152616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645344359851428:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:07.152732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:07.152994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645344359851440:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:07.156882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:07.181697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645344359851442:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:07.273472Z node 1 :TX_PROXY ERROR: Actor# [1:7491645344359851493:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:07.462252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645322885014268:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:07.462314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:07.820074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:08.159377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:08.159579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:08.159824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:08.159959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:08.160118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:08.160255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:08.160357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:08.160500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:08.160681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:08.160913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491645348654819048:2361];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; [... matching normalizer_register entries for RestoreV1Chunks_V2 and RestoreV2Chunks on tablet 72075186224037891, then normalization_start and the same Granules-through-SyncMinSnapshotFromChunks normalizer_register chain for tablet 72075186224037900 (2025-04-10T11:30:08.161038Z through 11:30:08.162908Z) omitted ...] 2025-04-10T11:30:08.163007Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[1:7491645344359851723:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fl ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.220635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.226091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.228955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.234121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.236131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.242964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.249819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.257084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.272168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.285166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.300940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.309207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.324861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.334424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.337292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
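The run of warnings above is a single template repeated with different tablet_id values. A minimal sketch for condensing such dumps offline (an editorial aid, not part of the test output; it assumes Python 3 with only the standard library and the exact key=value layout shown in these entries) that tallies TX_COLUMNSHARD_TX "finished_tx" entries per transaction:

import re
import sys
from collections import defaultdict

# Matches "tablet_id=<id>;...;tx_id=<id>;fline=tx_controller.cpp" within one
# log entry; [^ ]*? keeps the lazy match from crossing into the next entry,
# since entries in this dump are separated by spaces.
PATTERN = re.compile(r"tablet_id=(\d+);[^ ]*?tx_id=(\d+);fline=tx_controller\.cpp")

def tally(stream):
    tablets_per_tx = defaultdict(set)
    for line in stream:
        for tablet_id, tx_id in PATTERN.findall(line):
            tablets_per_tx[tx_id].add(tablet_id)
    return tablets_per_tx

if __name__ == "__main__":
    for tx_id, tablets in sorted(tally(sys.stdin).items()):
        print(f"tx_id={tx_id}: finished_tx on {len(tablets)} tablets")

Usage would be along the lines of: python3 tally_finished_tx.py < ya_log.txt (the script name is hypothetical).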
[... 29 more near-identical TX_COLUMNSHARD_TX WARN "finished_tx" entries for tx_id=281474976710714 (2025-04-10T11:31:30.346944Z through 11:31:30.470312Z, one per column shard) omitted ...] 2025-04-10T11:31:30.476000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:30.565923Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf5jpbshez2rgwy0ab4bn", SessionId: ydb://session/3?node_id=1&id=OWNmN2UzZjktODFjZjAwNmEtOTg4ODdjZmYtYjk4ZjMyMTM=, Slow query, duration: 37.422945s, status: STATUS_CODE_UNSPECIFIED,
user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:30.890994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:30.891499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:30.898603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645550518317501:7653];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:31:30.899145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 13395, MsgBus: 12748 2025-04-10T11:29:47.646352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645256241225074:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:47.651431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015de/r3tmp/tmpvzSELB/pdisk_1.dat 2025-04-10T11:29:48.348955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:48.380097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:48.380186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:48.382643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13395, node 1 2025-04-10T11:29:48.581315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:48.581337Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:48.581343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:48.581456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12748 TClient is connected to server localhost:12748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:49.359626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:49.378869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:51.500392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645273421094792:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.500549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.501043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645273421094804:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:51.505717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:51.528256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645273421094806:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:51.606671Z node 1 :TX_PROXY ERROR: Actor# [1:7491645273421094857:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:52.004875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:52.312539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645277716062409:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:52.312539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:52.312774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:52.313093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:52.313190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645277716062409:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:52.313228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:52.313334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:52.313361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645277716062409:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:52.313483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645277716062406:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:52.313486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645277716062409:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:52.313592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645277716062409:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
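For readability, the DDL that the KQP_SLOW_LOG entries above and below quote with escaped newlines (durations 37.477454s, 37.422945s and 41.103734s): the statements are verbatim from the log, only the \n escapes and indentation are expanded. Three column-store tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 each is also consistent with the hundreds of per-tablet "finished_tx" warnings surrounding these entries, though the log does not state that connection explicitly.

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);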
[... continuation of the interleaved normalizer_register chain for tablets 72075186224037888 and 72075186224037895 (GCCountersNormalizer through RestoreV2Chunks, 2025-04-10T11:29:52.313607Z through 11:29:52.314333Z) omitted ...] 2025-04-10T11:29:52.352780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645277716062471:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:52.352852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645277716062471:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.131235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; [... 11 more near-identical "finished_tx" warnings (2025-04-10T11:31:24.135621Z through 11:31:24.166485Z) omitted ...] 2025-04-10T11:31:24.167667Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; [... 30 more near-identical TX_COLUMNSHARD_TX WARN "finished_tx" entries for tx_id=281474976710714 (2025-04-10T11:31:24.173092Z through 11:31:24.293218Z, one per column shard) omitted ...] 2025-04-10T11:31:24.295498Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.299104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.502630Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqew243fsbz1war204wm80", SessionId: ydb://session/3?node_id=1&id=ZTc0ZWM2MzQtOTBmMmJhNjQtZjkzNGJlZDMtOTczNDI0ZjM=, Slow query, duration: 41.103734s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:24.835470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645629903438294:11428];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:31:24.835515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.835819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.836694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpQueryService::ReadDatashardAndColumnshard >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> Normalizers::ColumnChunkNormalizer >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryScalar >> KqpQueryService::ExecuteRetryQuery [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-04-10T11:31:48.848842Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.957634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.979454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.979738Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.987541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.987762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; [... 10 more normalizer_register entries for tablet 9437184 (Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks; 2025-04-10T11:31:48.988004Z through 11:31:48.988993Z) omitted ...] 2025-04-10T11:31:49.019666Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:49.020145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:49.020215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:49.020534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:49.020723Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:49.020820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:49.020873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:49.021013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:49.021115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:49.021169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:49.021209Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:49.021428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:49.021510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:49.021556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:49.021594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:49.021706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:49.021809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:49.021873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:49.021907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:49.021989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:49.022033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:49.022071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-10T11:31:49.022132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:49.022200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:49.022236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:49.022723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=78; 2025-04-10T11:31:49.022815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:31:49.022921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-10T11:31:49.023042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T11:31:49.023249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:49.023333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:49.023373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:49.023614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:49.023671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:49.023705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:49.023882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:49.023933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:49.023990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:49.024217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:49.024269Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:49.024305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:49.024444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:49.024489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:49.024544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... or.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:31:50.006412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.006454Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:31:50.006489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:50.006558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:31:50.006591Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-10T11:31:50.006634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-10T11:31:50.006710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8401426;count=1; 2025-04-10T11:31:50.007175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-10T11:31:50.014738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-04-10T11:31:50.014974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.015074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:31:50.015125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:50.015376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:31:50.015421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:31:50.015464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-10T11:31:50.015527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-04-10T11:31:50.015579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:31:50.015715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.015896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.016118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:50.016248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.016369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.016429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:284:2302] finished for tablet 9437184 2025-04-10T11:31:50.017035Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:283:2301];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.02},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1744284709995398,"name":"_full_task","f":1744284709995398,"d_finished":0,"c":0,"l":1744284710016538,"d":21140},"events":[{"name":"bootstrap","f":1744284709995677,"d_finished":4231,"c":1,"l":1744284709999908,"d":4231},{"a":1744284710016092,"name":"ack","f":1744284710016092,"d_finished":0,"c":0,"l":1744284710016538,"d":446},{"a":1744284710016078,"name":"processing","f":1744284710000006,"d_finished":11566,"c":9,"l":1744284710015979,"d":12026},{"name":"ProduceResults","f":1744284709997155,"d_finished":2702,"c":11,"l":1744284710016408,"d":2702},{"a":1744284710016412,"name":"Finish","f":1744284710016412,"d_finished":0,"c":0,"l":1744284710016538,"d":126},{"name":"task_result","f":1744284710000029,"d_finished":11388,"c":9,"l":1744284710015976,"d":11388}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.017183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:283:2301];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:50.017706Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:283:2301];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.02},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1744284709995398,"name":"_full_task","f":1744284709995398,"d_finished":0,"c":0,"l":1744284710017245,"d":21847},"events":[{"name":"bootstrap","f":1744284709995677,"d_finished":4231,"c":1,"l":1744284709999908,"d":4231},{"a":1744284710016092,"name":"ack","f":1744284710016092,"d_finished":0,"c":0,"l":1744284710017245,"d":1153},{"a":1744284710016078,"name":"processing","f":1744284710000006,"d_finished":11566,"c":9,"l":1744284710015979,"d":12733},{"name":"ProduceResults","f":1744284709997155,"d_finished":2702,"c":11,"l":1744284710016408,"d":2702},{"a":1744284710016412,"name":"Finish","f":1744284710016412,"d_finished":0,"c":0,"l":1744284710017245,"d":833},{"name":"task_result","f":1744284710000029,"d_finished":11388,"c":9,"l":1744284710015976,"d":11388}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-10T11:31:50.017826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:49.994648Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:31:50.017884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:50.018424Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4915, MsgBus: 9373 2025-04-10T11:29:38.171035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645219087337691:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:38.171161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ef/r3tmp/tmpkDpzaj/pdisk_1.dat 2025-04-10T11:29:38.909210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:38.915146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:38.915237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:38.922917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4915, node 1 2025-04-10T11:29:39.082642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:39.082662Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:39.082667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:39.082777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9373 TClient is connected to server localhost:9373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:40.022997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:40.032396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:42.307778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645236267207405:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.307868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.308184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645236267207417:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:42.311057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:42.320430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645236267207419:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:42.379768Z node 1 :TX_PROXY ERROR: Actor# [1:7491645236267207470:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:42.662978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:42.751336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:42.751573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:42.751819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:42.752021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:42.752168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:42.752298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:42.755001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:42.755127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:42.755247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:42.755361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:42.755458Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:42.755547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645236267207600:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:42.786247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:29:42.786309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:29:42.786397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:29:42.786415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:29:42.786569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:29:42.786586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:29:42.786668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:29:42.786710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:29:42.786762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:29:42.786775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:29:42.786801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:29:42.786817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:29:42.787308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:29:42.787349Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:29:42.787477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:29:42.787494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:29:42.787622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=a ... TxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.261163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.268137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.268146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.275582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.275600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.282734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.286932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.293585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.297606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.303497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.309388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.314357Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.316606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.321261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.323737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.328881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.331136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.337957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.341177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.347502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.347551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.353847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.358627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.366009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.366024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.372823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.373150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.380234Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.385316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.386533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.391533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.392223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.397653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.397749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.404315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.405225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.411353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.418784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.467442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:09.601142Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqeefzan5rqh6cdsda0wgm", SessionId: ydb://session/3?node_id=1&id=ZGY4OTdkMjUtODk2YzY3MjktZGNkOTdkNDItZWM1OWFiNDY=, Slow query, duration: 40.097532s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:10.119633Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:10.119639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:10.120431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:39.468054Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqg4b6apk3rfhswczv9rxf", SessionId: ydb://session/3?node_id=1&id=ZGY4OTdkMjUtODk2YzY3MjktZGNkOTdkNDItZWM1OWFiNDY=, Slow query, duration: 14.821136s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 |94.9%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> EvWrite::AbortInTransaction [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-04-10T11:31:48.352778Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.524944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.552082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.552436Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.564137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.564361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.564617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.564747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.564879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.565047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.565277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.565415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.565543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.565650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.565787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.565916Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.607378Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.608422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.608487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.608674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.608968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.609017Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.609156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.609278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.609311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.609508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.609633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.609664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.609779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.609949Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.609980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.610044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.610084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.610117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.610210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.610263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.610294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.610706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-10T11:31:48.610785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:31:48.611003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=165; 2025-04-10T11:31:48.611100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-10T11:31:48.611278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.611350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.611393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.611589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.611641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611671Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.611893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.611945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.612169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.612201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.612343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.612386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.612491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:31:51.101758Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-10T11:31:51.101822Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-10T11:31:51.102462Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-10T11:31:51.102650Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:212;event=finished_tx;tx_id=10; 2025-04-10T11:31:51.129379Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-10T11:31:51.129568Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=229592;columns=2; 2025-04-10T11:31:51.151891Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[2:108:2140];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-04-10T11:31:51.155688Z node 2 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-10T11:31:51.157739Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:31:51.172533Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:31:51.172682Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:51.173186Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-04-10T11:31:51.173269Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-04-10T11:31:51.173495Z node 2 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-10T11:31:51.173559Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-04-10T11:31:51.173614Z node 2 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 10 last planned step 10 at tablet 9437184 2025-04-10T11:31:51.173673Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-10T11:31:51.174027Z node 2 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {10:max} readable: {10:max} at tablet 9437184 2025-04-10T11:31:51.174143Z node 2 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:31:51.181366Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { 
Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-10T11:31:51.181493Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-10T11:31:51.182436Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-04-10T11:31:51.182603Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:31:51.183302Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[2:169:2187];trace_detailed=; 2025-04-10T11:31:51.183932Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-04-10T11:31:51.184124Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-04-10T11:31:51.184437Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:51.184566Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:31:51.184690Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:31:51.184792Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:169:2187] finished for tablet 9437184 2025-04-10T11:31:51.185250Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:168:2186];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1744284711183208,"name":"_full_task","f":1744284711183208,"d_finished":0,"c":0,"l":1744284711184859,"d":1651},"events":[{"name":"bootstrap","f":1744284711183424,"d_finished":861,"c":1,"l":1744284711184285,"d":861},{"a":1744284711184408,"name":"ack","f":1744284711184408,"d_finished":0,"c":0,"l":1744284711184859,"d":451},{"a":1744284711184382,"name":"processing","f":1744284711184382,"d_finished":0,"c":0,"l":1744284711184859,"d":477},{"name":"ProduceResults","f":1744284711184265,"d_finished":304,"c":2,"l":1744284711184758,"d":304},{"a":1744284711184765,"name":"Finish","f":1744284711184765,"d_finished":0,"c":0,"l":1744284711184859,"d":94}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:31:51.185341Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:168:2186];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:51.185713Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:168:2186];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1744284711183208,"name":"_full_task","f":1744284711183208,"d_finished":0,"c":0,"l":1744284711185389,"d":2181},"events":[{"name":"bootstrap","f":1744284711183424,"d_finished":861,"c":1,"l":1744284711184285,"d":861},{"a":1744284711184408,"name":"ack","f":1744284711184408,"d_finished":0,"c":0,"l":1744284711185389,"d":981},{"a":1744284711184382,"name":"processing","f":1744284711184382,"d_finished":0,"c":0,"l":1744284711185389,"d":1007},{"name":"ProduceResults","f":1744284711184265,"d_finished":304,"c":2,"l":1744284711184758,"d":304},{"a":1744284711184765,"name":"Finish","f":1744284711184765,"d_finished":0,"c":0,"l":1744284711185389,"d":624}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:31:51.185869Z 
node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:51.182555Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:31:51.185925Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:51.186031Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:169:2187];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=100; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-04-10T11:31:51.113658Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:51.219391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:51.243939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:51.244262Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:51.252826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:51.253036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:51.253296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:51.253422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:51.253530Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:51.253640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:51.253753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:51.253876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:51.253983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:51.254078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.254211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:51.254316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:51.288138Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:51.288391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:51.288457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:51.288709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.288893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:51.288983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:51.289029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:51.289142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:51.289214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:51.289261Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:51.289315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:51.289524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.289604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:51.289646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:51.289679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:51.289766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:51.289833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:51.289893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:51.289923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:51.290014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:51.290055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:51.290088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:51.290168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:51.290214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:51.290250Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:51.290691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-10T11:31:51.290783Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:31:51.290862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-10T11:31:51.290968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:31:51.291148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:51.291208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:51.291243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:51.291468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:51.291520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.291552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.291716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:51.291764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:51.291797Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:51.291986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:51.292028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:51.292079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:51.292208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:51.292253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:51.292318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
RegisterTable;path_id=1; 2025-04-10T11:31:51.901486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-10T11:31:51.908773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-10T11:31:51.909015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:212;event=finished_tx;tx_id=10; 2025-04-10T11:31:51.941302Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-10T11:31:51.941518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-04-10T11:31:51.959973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=3200;count=1; 2025-04-10T11:31:51.965447Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-10T11:31:51.965866Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:31:51.978741Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:31:51.978917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:51.993743Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 100 at tablet 9437184, mediator 0 2025-04-10T11:31:51.993842Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-04-10T11:31:51.994240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-04-10T11:31:51.994507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:212;event=finished_tx;tx_id=100; 2025-04-10T11:31:52.009328Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-10T11:31:52.009462Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-04-10T11:31:52.009621Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=3384; 2025-04-10T11:31:52.014500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.014613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-10T11:31:52.014746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.015075Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.015559Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;type=CS::INDEXATION;priority=0;; 2025-04-10T11:31:52.015806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;type=CS::INDEXATION;priority=0;; 2025-04-10T11:31:52.015867Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;mem=3035;cpu=0; 2025-04-10T11:31:52.016105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;task_id=1;mem=3035;cpu=0; 2025-04-10T11:31:52.016257Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.023012Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-10T11:31:52.023211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-10T11:31:52.026088Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:31:52.026557Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:31:52.027434Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:31:52.027552Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:31:52.028099Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:31:52.028267Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:31:52.028360Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=65f83f56-15ff11f0-86f0a40f-d35590d3,;}; 
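Editorial note: the CS::INDEXATION records above show a resource-broker handshake: the task asks for resources (cpu=0;mem=3035), the broker answers with result_resources, the task runs OnDataReady, and the memory is later returned via free_resources. A hedged sketch of that ask/grant/release cycle follows; this is an illustration only, not the NKikimr::NResourceBroker API.

// Toy broker: tasks queue until enough memory is free, then run OnDataReady.
#include <cstdint>
#include <deque>
#include <functional>
#include <iostream>

struct TTask {
    uint64_t Mem;                       // memory the task asks for
    std::function<void()> OnDataReady;  // runs once resources are granted
};

class TToyResourceBroker {
    uint64_t Free_;
    std::deque<TTask> Queue_;
public:
    explicit TToyResourceBroker(uint64_t total) : Free_(total) {}

    void Ask(TTask task) {              // cf. "event=ask_resources"
        Queue_.push_back(std::move(task));
        TryGrant();
    }
    void Release(uint64_t mem) {        // cf. "event=free_resources"
        Free_ += mem;
        TryGrant();
    }
private:
    void TryGrant() {
        while (!Queue_.empty() && Queue_.front().Mem <= Free_) {
            TTask task = std::move(Queue_.front());
            Queue_.pop_front();
            Free_ -= task.Mem;          // cf. "event=result_resources"
            task.OnDataReady();         // cf. "event=OnDataReady"
        }
    }
};

int main() {
    TToyResourceBroker broker(4096);
    broker.Ask({3035, [] { std::cout << "indexation task started\n"; }});
    broker.Release(3035);  // after WriteIndexComplete the memory comes back
}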
2025-04-10T11:31:52.028461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:31:52.028768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:31:52.028844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:31:52.028892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:31:52.028997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:31:52.029478Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {100:100} readable: {100:max} at tablet 9437184 2025-04-10T11:31:52.041756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-10T11:31:52.041834Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;fline=with_appended.cpp:65;portions=1,;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.042054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::65f83f56-15ff11f0-86f0a40f-d35590d3; 2025-04-10T11:31:52.042142Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:31:52.042288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:52.042364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:31:52.042478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:31:52.042551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:31:52.042689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:31:52.042797Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998500s; 2025-04-10T11:31:52.042870Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:31:52.042982Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-04-10T11:31:52.043046Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:31:52.043190Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=65f83f56-15ff11f0-86f0a40f-d35590d3;mem=3035;cpu=0; 2025-04-10T11:31:52.043344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:52.043536Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-10T11:31:52.043676Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={100:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 26829, MsgBus: 24702 2025-04-10T11:31:22.776954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645665669108047:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:22.777537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155c/r3tmp/tmpCUAuWU/pdisk_1.dat 2025-04-10T11:31:23.533972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:23.556428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:23.556515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:23.562281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26829, node 1 2025-04-10T11:31:23.793930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:23.793967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:23.793986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:23.794189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24702 TClient is connected to server localhost:24702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:24.737324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:24.771748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:24.791667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:24.993462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.266096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.377956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.483122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645687143946159:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:27.483208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:27.768877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645665669108047:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:27.768959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:27.865007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:27.901863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:27.973056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.011879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.096772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.156195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.227770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645691438913973:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.227889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.228114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645691438913978:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.237867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:28.252350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645691438913980:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:28.369113Z node 1 :TX_PROXY ERROR: Actor# [1:7491645691438914036:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:29.836576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645695733881705:2515], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-10T11:31:29.836647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645695733881707:2517], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-10T11:31:29.836702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-10T11:31:29.836782Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491645695733881706:2516], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YTliZDZmNzAtNGNkODAyNmMtZGFlZDQ0ZGUtM2NmN2YzNzY=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-10T11:31:29.836860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491645695733881706:2516], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YTliZDZmNzAtNGNkODAyNmMtZGFlZDQ0ZGUtM2NmN2YzNzY=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-10T11:31:29.836957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7491645695733881703:2514]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-10T11:31:29.837038Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTliZDZmNzAtNGNkODAyNmMtZGFlZDQ0ZGUtM2NmN2YzNzY=, ActorId: [1:7491645695733881703:2514], ActorState: ExecuteState, TraceId: 01jrfqg9d58yjpnkrq2x73pwfv, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-10T11:31:29.837280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7491645695733881703:2514]: Pool another_pool_id not found Trying to start YDB, gRPC: 9405, MsgBus: 5063 2025-04-10T11:31:30.799399Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645701475708409:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:30.799451Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155c/r3tmp/tmpbwVhqX/pdisk_1.dat 2025-04-10T11:31:31.079603Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:31.088037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:31.088124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:31.089914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9405, node 2 2025-04-10T11:31:31.290787Z node 2 :NET_CLASSIFIER WARN: distributable config is ... 47107:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:35.199293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:35.215202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:31:35.215494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645722950547109:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:35.278818Z node 2 :TX_PROXY ERROR: Actor# [2:7491645722950547163:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:35.802314Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645701475708409:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:35.802390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:36.519758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.534116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-04-10T11:31:37.253213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-10T11:31:37.852424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:1, at schemeshard: 72057594046644480 2025-04-10T11:31:38.453016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.060789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-10T11:31:39.539527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-10T11:31:41.358079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715710:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.104349s: status = SUCCESS, issues = 2025-04-10T11:31:42.487530Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDI4ODdlZGMtNTdiMDAzZTEtNWI0ODI0OTEtYTgwMWMwNzY=, ActorId: [2:7491645753015319864:2806], ActorState: ExecuteState, TraceId: 01jrfqgnrm39xmbt742083qf2v, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 27849, MsgBus: 14260 2025-04-10T11:31:43.690800Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645756107862450:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:43.690929Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/00155c/r3tmp/tmpShKyxg/pdisk_1.dat 2025-04-10T11:31:43.841748Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:43.905232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:43.905337Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:43.907175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27849, node 3 2025-04-10T11:31:44.078433Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:44.078466Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:44.078480Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:44.078622Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14260 TClient is connected to server localhost:14260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:44.759907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:44.847296Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:44.868693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:44.949965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:45.234868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:45.334745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
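Editorial note: the KQP_WORKLOAD_SERVICE warnings in this test follow a fetch-or-create-then-recheck pattern: the pool lookup returns NOT_FOUND, a create is issued, a concurrent creator may win ("path exist, request accepts it"), and the fetch is retried ("Scheduled retry ... doublechecking"). The sketch below models that flow; the status codes and helper names are invented for illustration and are not YDB APIs.

#include <iostream>
#include <string>

enum class EStatus { Success, NotFound, AlreadyExists };

// Stubbed lookups: the first fetch fails, mimicking NOT_FOUND in the log.
static int g_fetchCalls = 0;
EStatus FetchPool(const std::string&) {
    return ++g_fetchCalls > 1 ? EStatus::Success : EStatus::NotFound;
}
EStatus CreatePool(const std::string&) { return EStatus::AlreadyExists; }

bool EnsurePool(const std::string& id, int maxRetries = 3) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (FetchPool(id) == EStatus::Success) return true;  // pool resolved
        // NOT_FOUND: try to create it; losing the creation race is tolerated,
        // as in the TX_PROXY "path exist, request accepts it" line above.
        EStatus st = CreatePool(id);
        if (st != EStatus::Success && st != EStatus::AlreadyExists) return false;
        // "doublechecking": the loop re-fetches after the create settles.
    }
    return false;
}

int main() {
    std::cout << (EnsurePool("default") ? "pool ready\n" : "gave up\n");
}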
2025-04-10T11:31:47.754185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645773287733399:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.754274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.821625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:47.868192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:47.905242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:47.985289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.060454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.110968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.162659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645777582701208:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.162750Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.162771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645777582701213:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.167273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:48.184241Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645777582701215:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:48.269379Z node 3 :TX_PROXY ERROR: Actor# [3:7491645777582701269:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:48.692880Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645756107862450:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:48.756344Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-04-10T11:31:48.352761Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.522461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.558290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.558607Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.569844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-04-10T11:31:48.570115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-10T11:31:48.570382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.570576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.571239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.571403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.571560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.571704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.571843Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.571981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.572128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.572305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.572446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.614033Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.614237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-04-10T11:31:48.614298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-10T11:31:48.614606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-04-10T11:31:48.614715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-10T11:31:48.614766Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-10T11:31:48.614932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.615025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.615090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.615124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-10T11:31:48.615631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.615713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.615763Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.615810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.616014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.616092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.616167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.616204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.616393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.616457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.616509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.616543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.616622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.616666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.616699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.616752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.616843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.616879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.617379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-10T11:31:48.617485Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T11:31:48.617596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=64; 2025-04-10T11:31:48.617681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-10T11:31:48.617876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.617955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.617996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.618391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.618453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.618498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.618702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.618765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.618803Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.618999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... 
;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:52.460579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:31:52.460643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:31:52.460709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-10T11:31:52.460777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-10T11:31:52.460835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:31:52.460962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.461016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-10T11:31:52.461101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:52.461404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:52.461598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.461651Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:31:52.461800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-10T11:31:52.461885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-10T11:31:52.462024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-10T11:31:52.462212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.462349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.462489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.463504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:52.463695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.463852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.463914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:457:2463] finished for tablet 9437184 2025-04-10T11:31:52.464591Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:455:2462];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack","l_task_result"],"t":0.311},{"events":["l_ProduceResults","f_Finish"],"t":0.313},{"events":["l_ack","l_processing","l_Finish"],"t":0.314}],"full":{"a":1744284712149986,"name":"_full_task","f":1744284712149986,"d_finished":0,"c":0,"l":1744284712464001,"d":314015},"events":[{"name":"bootstrap","f":1744284712150455,"d_finished":7133,"c":1,"l":1744284712157588,"d":7133},{"a":1744284712463456,"name":"ack","f":1744284712461362,"d_finished":1159,"c":1,"l":1744284712462521,"d":1704},{"a":1744284712463434,"name":"processing","f":1744284712161573,"d_finished":165381,"c":9,"l":1744284712462524,"d":165948},{"name":"ProduceResults","f":1744284712152340,"d_finished":3304,"c":12,"l":1744284712463886,"d":3304},{"a":1744284712463890,"name":"Finish","f":1744284712463890,"d_finished":0,"c":0,"l":1744284712464001,"d":111},{"name":"task_result","f":1744284712161601,"d_finished":164006,"c":8,"l":1744284712461168,"d":164006}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.464716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:455:2462];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:52.465319Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:455:2462];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack","l_task_result"],"t":0.311},{"events":["l_ProduceResults","f_Finish"],"t":0.313},{"events":["l_ack","l_processing","l_Finish"],"t":0.314}],"full":{"a":1744284712149986,"name":"_full_task","f":1744284712149986,"d_finished":0,"c":0,"l":1744284712464787,"d":314801},"events":[{"name":"bootstrap","f":1744284712150455,"d_finished":7133,"c":1,"l":1744284712157588,"d":7133},{"a":1744284712463456,"name":"ack","f":1744284712461362,"d_finished":1159,"c":1,"l":1744284712462521,"d":2490},{"a":1744284712463434,"name":"processing","f":1744284712161573,"d_finished":165381,"c":9,"l":1744284712462524,"d":166734},{"name":"ProduceResults","f":1744284712152340,"d_finished":3304,"c":12,"l":1744284712463886,"d":3304},{"a":1744284712463890,"name":"Finish","f":1744284712463890,"d_finished":0,"c":0,"l":1744284712464787,"d":897},{"name":"task_result","f":1744284712161601,"d_finished":164006,"c":8,"l":1744284712461168,"d":164006}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:52.465472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:52.149220Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-04-10T11:31:52.465528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:52.465858Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:457:2463];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] >> KqpQueryService::TableSink_OltpDelete [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> Normalizers::ColumnChunkNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> EvWrite::WriteWithSplit >> KqpService::SwitchCache-UseCache [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> KqpService::ToDictCache+UseCache |94.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-04-10T11:31:50.505347Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:50.610251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:50.637065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:50.637413Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:50.646755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-10T11:31:50.647034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:50.647296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:50.647427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:50.647537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:50.647679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:50.647833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:50.647968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:50.648112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:50.648236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:50.648346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:50.648473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:50.687653Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:50.687874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:50.687950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-10T11:31:50.688255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:50.688440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:50.688539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:50.688604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-10T11:31:50.688704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:50.688780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:50.688834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:50.688870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:50.689091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:50.689182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:50.689241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:50.689274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:50.689388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:50.689465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:50.689518Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:50.689546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:50.689637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:50.689684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:50.689713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:50.689761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:50.689850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:50.689904Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:50.690449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=78; 2025-04-10T11:31:50.690567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-10T11:31:50.690671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-10T11:31:50.690768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-10T11:31:50.690983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:50.691065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:50.691109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:50.691362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:50.691425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:50.691457Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:50.691650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:50.691736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:50.691771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:50.691985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:50.692040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:50.692100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:50.692253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:50.692307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normaliza ... 
bletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:54.359905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:31:54.359973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:31:54.360025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-10T11:31:54.360095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-10T11:31:54.360147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:31:54.360257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.360295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-10T11:31:54.360362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:31:54.360626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:54.360816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.360928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:31:54.361137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-10T11:31:54.361210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-10T11:31:54.361333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:477:2483];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-10T11:31:54.361469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.361602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.361713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.362868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:54.363017Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.363161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.363212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:479:2484] finished for tablet 9437184 2025-04-10T11:31:54.363724Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:477:2483];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.242},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.245}],"full":{"a":1744284714117620,"name":"_full_task","f":1744284714117620,"d_finished":0,"c":0,"l":1744284714363297,"d":245677},"events":[{"name":"bootstrap","f":1744284714117857,"d_finished":2842,"c":1,"l":1744284714120699,"d":2842},{"a":1744284714362825,"name":"ack","f":1744284714360599,"d_finished":1145,"c":1,"l":1744284714361744,"d":1617},{"a":1744284714362792,"name":"processing","f":1744284714124033,"d_finished":126481,"c":9,"l":1744284714361746,"d":126986},{"name":"ProduceResults","f":1744284714119544,"d_finished":3043,"c":12,"l":1744284714363191,"d":3043},{"a":1744284714363197,"name":"Finish","f":1744284714363197,"d_finished":0,"c":0,"l":1744284714363297,"d":100},{"name":"task_result","f":1744284714124057,"d_finished":125154,"c":8,"l":1744284714360426,"d":125154}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.363831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:477:2483];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:54.364309Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:477:2483];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.242},{"events":["l_ProduceResults","f_Finish"],"t":0.245},{"events":["l_ack","l_processing","l_Finish"],"t":0.246}],"full":{"a":1744284714117620,"name":"_full_task","f":1744284714117620,"d_finished":0,"c":0,"l":1744284714363874,"d":246254},"events":[{"name":"bootstrap","f":1744284714117857,"d_finished":2842,"c":1,"l":1744284714120699,"d":2842},{"a":1744284714362825,"name":"ack","f":1744284714360599,"d_finished":1145,"c":1,"l":1744284714361744,"d":2194},{"a":1744284714362792,"name":"processing","f":1744284714124033,"d_finished":126481,"c":9,"l":1744284714361746,"d":127563},{"name":"ProduceResults","f":1744284714119544,"d_finished":3043,"c":12,"l":1744284714363191,"d":3043},{"a":1744284714363197,"name":"Finish","f":1744284714363197,"d_finished":0,"c":0,"l":1744284714363874,"d":677},{"name":"task_result","f":1744284714124057,"d_finished":125154,"c":8,"l":1744284714360426,"d":125154}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:31:54.364400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:54.116916Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-04-10T11:31:54.364443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:54.364728Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:479:2484];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-04-10T11:31:48.354399Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.522555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.549983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.550333Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.564118Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.564351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.564587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.564746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.564854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.564958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.565086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.565205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.565311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.565441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.565559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.565669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.607630Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.608173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.608248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.608476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608678Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.608780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.608830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.608948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.609148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.609186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.609415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.609558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.609594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.609710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.609833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.609864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.609941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.609994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.610024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-10T11:31:48.610121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.610187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.610226Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.610667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-10T11:31:48.610770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-10T11:31:48.611040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=201; 2025-04-10T11:31:48.611189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=59; 2025-04-10T11:31:48.611428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.611488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.611526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.611734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.611784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.612046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.612329Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.612359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.612516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.612566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.612622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.416105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:31:54.416276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:31:54.416380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:31:54.416552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-10T11:31:54.416723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.416892Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.417072Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.417332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:54.417907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.418096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.418264Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:425:2440] finished for tablet 9437184 2025-04-10T11:31:54.418823Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:424:2439];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.02},{"events":["l_ack","l_processing","l_Finish"],"t":0.021}],"full":{"a":1744284714397279,"name":"_full_task","f":1744284714397279,"d_finished":0,"c":0,"l":1744284714418336,"d":21057},"events":[{"name":"bootstrap","f":1744284714397534,"d_finished":3769,"c":1,"l":1744284714401303,"d":3769},{"a":1744284714417303,"name":"ack","f":1744284714415731,"d_finished":1391,"c":1,"l":1744284714417122,"d":2424},{"a":1744284714417287,"name":"processing","f":1744284714402907,"d_finished":7997,"c":10,"l":1744284714417125,"d":9046},{"name":"ProduceResults","f":1744284714399648,"d_finished":4251,"c":13,"l":1744284714418244,"d":4251},{"a":1744284714418249,"name":"Finish","f":1744284714418249,"d_finished":0,"c":0,"l":1744284714418336,"d":87},{"name":"task_result","f":1744284714402931,"d_finished":6444,"c":9,"l":1744284714415534,"d":6444}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.418911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:54.419448Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:424:2439];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.02},{"events":["l_ack","l_processing","l_Finish"],"t":0.021}],"full":{"a":1744284714397279,"name":"_full_task","f":1744284714397279,"d_finished":0,"c":0,"l":1744284714418964,"d":21685},"events":[{"name":"bootstrap","f":1744284714397534,"d_finished":3769,"c":1,"l":1744284714401303,"d":3769},{"a":1744284714417303,"name":"ack","f":1744284714415731,"d_finished":1391,"c":1,"l":1744284714417122,"d":3052},{"a":1744284714417287,"name":"processing","f":1744284714402907,"d_finished":7997,"c":10,"l":1744284714417125,"d":9674},{"name":"ProduceResults","f":1744284714399648,"d_finished":4251,"c":13,"l":1744284714418244,"d":4251},{"a":1744284714418249,"name":"Finish","f":1744284714418249,"d_finished":0,"c":0,"l":1744284714418964,"d":715},{"name":"task_result","f":1744284714402931,"d_finished":6444,"c":9,"l":1744284714415534,"d":6444}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:54.419542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:54.396507Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-10T11:31:54.419596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:54.420028Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] Test command err: Trying to start YDB, gRPC: 22532, MsgBus: 1790 2025-04-10T11:29:58.564449Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645305254302115:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:58.564812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015c6/r3tmp/tmp3z0mz5/pdisk_1.dat 2025-04-10T11:29:59.398973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:59.399069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:59.407626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:29:59.438860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22532, node 1 2025-04-10T11:29:59.758673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:59.758707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:59.758715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:59.758827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1790 TClient is connected to server localhost:1790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:00.859108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:03.200505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645326729139125:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.200627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.200982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645326729139137:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:03.205393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:03.220048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:30:03.220644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645326729139139:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:03.306993Z node 1 :TX_PROXY ERROR: Actor# [1:7491645326729139190:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:03.562265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645305254302115:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:03.562341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:03.860627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:04.177034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:04.177243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:04.177507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:04.177651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:04.177764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:04.177862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:04.177961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:04.178061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:04.180699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:04.180765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:04.180936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:04.181046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:04.181162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:04.181251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:04.181336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:04.181424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:04.185865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:04.186082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:04.186223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:04.186318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645331024106745:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:04.186919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:04.187034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:04.187140Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:04.187231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645326729139435:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:04.256717Z node 1 :TX_C ... current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.247354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.250570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.254548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.257900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.261872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.268017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.268243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.274828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.274828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.281579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.281578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.288108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.288106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:33.295091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.295091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.305230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.305231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.311333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.311333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.317529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.317529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.323793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.323794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.328603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.333835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.334041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.337998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.342483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.342937Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.347357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.351312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.354539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.357921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.360474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.364167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.366706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.369634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.372199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.375761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.378029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.380939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.397588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.592433Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf9bj7tt3cvvxnxmx6pjv", SessionId: ydb://session/3?node_id=1&id=Y2Q5NjJkZGYtNjZjZmQ4MC05NzU2Yzg2Zi05ZmYyOWE4MA==, Slow query, duration: 36.581593s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 
Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:33.918691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:33.919510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:33.919648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:34.782396Z node 1 :KQP_YQL WARN: TraceId: 01jrfqgdjs6c1yz41c1sq68pyc, SessionId: CompileActor 2025-04-10 11:31:34.769 WARN ydb-core-kqp-ut-join(pid=1718675, tid=0x00007F99C3344640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2025-04-10T11:31:37.171363Z node 1 :KQP_YQL WARN: TraceId: 01jrfqgg0qacq9s0ys0cx39p3w, SessionId: CompileActor 2025-04-10 11:31:37.170 WARN ydb-core-kqp-ut-join(pid=1718675, tid=0x00007F99C2B35640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. 
Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] Test command err: Trying to start YDB, gRPC: 6751, MsgBus: 8513 2025-04-10T11:29:56.504856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645296392589154:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:56.505395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015cf/r3tmp/tmps8Kj05/pdisk_1.dat 2025-04-10T11:29:57.310676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:57.334966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:57.335042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:57.351170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6751, node 1 2025-04-10T11:29:57.624989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:57.625009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:57.625015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:57.625102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8513 TClient is connected to server localhost:8513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:58.563909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:01.070509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645317867426170:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.070666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.074596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645317867426182:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:01.079495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:01.095201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:30:01.095485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645317867426184:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:01.162864Z node 1 :TX_PROXY ERROR: Actor# [1:7491645317867426235:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:01.461810Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645296392589154:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:01.461915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:01.657295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:01.999762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:01.999994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:02.000310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:02.000467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:02.000590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:02.000742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:02.000868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:02.000980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:02.001160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:02.001317Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:02.001456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:02.001570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645317867426523:2360];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:02.052415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:02.052523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:02.052787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:02.052895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:02.053005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:02.053121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:02.053223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:02.053330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:02.053441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:02.053575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:02.053697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:02.053790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645317867426519:2358];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:02.057025Z node 1 :TX_COL ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.638405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.640504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.644651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.647891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.653802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.654670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.660577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.664230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.669138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.673621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.674910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.684918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.693078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.699556Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.709340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.711585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.717379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.723502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.730660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.730660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.737451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.738903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.748155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.751275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.758067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.761432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.771655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.774919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.780250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:24.783129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.792606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.795969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.800368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.801396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.805732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.807743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.811863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.813058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.818741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.818777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.824664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.825295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.858358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.886682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:24.930276Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:25.167849Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf2194rxgrvbanzw5mwyr", SessionId: ydb://session/3?node_id=1&id=NGQxYzIxYjUtNGRhZTA2MWItYzFjZDAxY2UtOTAyN2YyMDQ=, Slow query, duration: 35.653853s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:25.474991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:25.475420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:25.476151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645532615827733:7899];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-04-10T11:31:25.476488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 16148, MsgBus: 30189 2025-04-10T11:31:23.243456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645668792380707:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:23.243956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155b/r3tmp/tmpSp8ikE/pdisk_1.dat 2025-04-10T11:31:24.036713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:24.043501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:24.043640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:24.052313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16148, node 1 2025-04-10T11:31:24.338626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:24.338646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:24.338651Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:31:24.338749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30189 TClient is connected to server localhost:30189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:25.567900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.602859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:27.847781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645685972250405:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:27.847889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.178711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.212800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645668792380707:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:28.212873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:28.443049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:28.443072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:28.443267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:28.443560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:28.443668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:28.443774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:28.443829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:28.443877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:28.443982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:28.444046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:28.444118Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:28.444173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:28.444242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:28.444273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:28.444345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:28.444395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:28.444461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:28.444516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:28.444596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645690267217851:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:28.444635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:28.444741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:28.444846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:28.444949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:28.445044Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:7491645690267217847:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:28.502886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645690267217849:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:28.502968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645690267217849:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:28.503263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645690267217849:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:28.503400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645690267217849:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:28.503509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645690267217849:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;flin ... PeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.911489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7491645722941323021:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.931427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7491645722941322979:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.932353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7491645722941322979:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.942935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7491645722941323037:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.943661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7491645722941323016:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.944348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7491645722941323037:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.944505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7491645722941323016:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.950597Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[2:7491645722941323045:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.971016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491645722941323042:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.971637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7491645722941323045:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.971896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7491645722941323019:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.972546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7491645722941323122:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.976316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7491645722941323042:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.976642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7491645722941323019:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:36.976856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7491645722941323122:2345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:37.044923Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715667; 2025-04-10T11:31:37.045703Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715667; 2025-04-10T11:31:37.050099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:37.050348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:37.101905Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645731531258408:2576], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-04-10T11:31:37.102331Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjA2MjE3NzctNWUwZGRlZDAtZTdiYmQwZTktNzBhZGM3YmQ=, ActorId: [2:7491645731531258406:2575], ActorState: ExecuteState, TraceId: 01jrfqggfm9d2f3m4cgkgst9ed, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 30744, MsgBus: 23429 2025-04-10T11:31:43.324061Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645756691173860:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:43.328149Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155b/r3tmp/tmpRpL9ZM/pdisk_1.dat 2025-04-10T11:31:43.583420Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:43.631108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:43.631217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:43.636112Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30744, node 3 2025-04-10T11:31:43.842813Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:43.842843Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:43.842852Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:43.843023Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23429 TClient is connected to server localhost:23429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:44.515675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
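The PRECONDITION_FAILED compile error above ("Missing key column in input: Col1") is produced when a write to /Root/DataShard omits the table's primary key column from the input column list. A minimal sketch of a statement that would trigger this error class follows; only the table path and the key column name Col1 come from the log, so the rest of the schema and the second column are assumptions:

    -- Assumed schema; only `/Root/DataShard` and key column Col1 appear in the log.
    CREATE TABLE `/Root/DataShard` (
        Col1 Uint64 NOT NULL,
        Col2 String,
        PRIMARY KEY (Col1)
    );

    -- Omitting the key column from the input column list fails compilation with
    -- ":2:29: Error: Missing key column in input: Col1", code 2029, at KiWriteTable.
    UPSERT INTO `/Root/DataShard` (Col2) VALUES ("value");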
2025-04-10T11:31:44.524233Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:47.606976Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645773871043707:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.607097Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.633074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:47.738113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645773871043811:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.738211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.738421Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645773871043816:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.743339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:47.769030Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645773871043818:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:47.845448Z node 3 :TX_PROXY ERROR: Actor# [3:7491645773871043869:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:48.325845Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645756691173860:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:48.325941Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::TableSink_OltpOrder [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 23989, MsgBus: 6055 2025-04-10T11:30:01.027544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645318487599530:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:01.027920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015bc/r3tmp/tmp4GKqwm/pdisk_1.dat 2025-04-10T11:30:01.717327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:01.718581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:01.718667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:01.733957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23989, node 1 2025-04-10T11:30:01.926747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:01.926771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:01.926777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:01.926922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6055 TClient is connected to server localhost:6055 WaitRootIsUp 'Root'... 
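The recurring "Resource pool default not found" warnings, the ESchemeOpCreateResourcePool suboperation, and the benign TX_PROXY "path exist, request accepts it" error above all come from the workload service lazily creating the default resource pool, with two actors racing to create the same path. For orientation, a minimal sketch of creating a pool explicitly, assuming YDB's CREATE RESOURCE POOL syntax with illustrative settings (the tests rely on the implicitly created default pool instead):

    -- Illustrative pool name and settings; syntax assumed, not taken from this log.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );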
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:03.026093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:03.082289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:05.434430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645335667469245:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.434571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.438280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645335667469255:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:05.442907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:05.458322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645335667469259:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:05.534733Z node 1 :TX_PROXY ERROR: Actor# [1:7491645335667469310:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:05.977708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:06.022281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645318487599530:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:06.022372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:06.403871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:06.404121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:06.404440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:06.404578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:06.404687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:06.404807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:06.404929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:06.405034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:06.405163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:06.405307Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:06.405446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:06.405552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491645339962436880:2361];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:06.407667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:06.407727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:06.407976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:06.408093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:06.408199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:06.408317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:06.408450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:06.408559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:06.408673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:06.408786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:06.408904Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:06.409013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7491645339962436876:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:06.471976Z node 1 :TX_C ... oller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.973392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.975985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.979191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.981692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.984917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.987994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.991328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.995099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:33.996462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.001878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.001929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.008286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.015513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.015512Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.023237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.028582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.030068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.036589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.036799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.043427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.043465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.048972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.049346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.054688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.054843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.061453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.061603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.069467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.069816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:34.079201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.081215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.085085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.086331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.089582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.094213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.094492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.100396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.100396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.106407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.106407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.113093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.113643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.119144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.119432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.124761Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:34.288771Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf6y28dnbhz2dr2zqcvke", SessionId: ydb://session/3?node_id=1&id=YzcxY2EyMjgtZGU0YmI0Y2ItOGZjMzk2MDEtYzU1MzgwYw==, Slow query, duration: 39.757852s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:34.889944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:34.890456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7491645559005805153:7785];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-04-10T11:31:34.899262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:34.899795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: Trying to start YDB, gRPC: 24929, MsgBus: 12074 2025-04-10T11:31:16.150859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645640516495763:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.150907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001561/r3tmp/tmpF5uSfm/pdisk_1.dat 2025-04-10T11:31:16.750821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:16.752891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:16.752973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:16.755707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24929, node 1 2025-04-10T11:31:16.968590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:16.968624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
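Both KQP_SLOW_LOG entries above (durations 35.653853s and 39.757852s) record the same DDL as a single escaped string; expanded verbatim here for readability:

    -- Quoted from the slow-query log text above, with \n escapes expanded.
    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);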
2025-04-10T11:31:16.968643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:16.968785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12074 TClient is connected to server localhost:12074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:17.758351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:20.005156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645657696365524:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.005269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.345330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:20.708607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645657696365679:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.708729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.709142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645657696365684:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.713262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:20.727029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645657696365686:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:20.813496Z node 1 :TX_PROXY ERROR: Actor# [1:7491645657696365739:2437] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:21.152442Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645640516495763:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:21.152515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31570, MsgBus: 4658 2025-04-10T11:31:24.378655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645676802785970:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:24.378786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001561/r3tmp/tmpUyayIA/pdisk_1.dat 2025-04-10T11:31:24.761489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:24.761564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:24.787134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:24.788543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31570, node 2 2025-04-10T11:31:24.922711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:24.922738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:24.922746Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:24.922866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4658 TClient is connected to server localhost:4658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:25.558513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.567336Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:28.387170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645693982655736:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.387247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.413117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:28.482600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645693982655837:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.482710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.482946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645693982655842:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:28.486671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:28.503089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645693982655844:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:28.597310Z node 2 :TX_PROXY ERROR: Actor# [2:7491645693982655895:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11660, MsgBus: 32581 2025-04-10T11:31:29.927346Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645698325823166:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:29.927397Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/i ... 424697Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645784225173181:2341] TxId: 281474976715755. Ctx: { TraceId: 01jrfqgwf7exfqrq0x5pfhdmnh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjNkMzhhMGEtNDFlNzNiM2EtNzA4NzViYjUtMjYwY2NhMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:49.424860Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjNkMzhhMGEtNDFlNzNiM2EtNzA4NzViYjUtMjYwY2NhMzQ=, ActorId: [3:7491645719800660322:2341], ActorState: ExecuteState, TraceId: 01jrfqgwf7exfqrq0x5pfhdmnh, Create QueryResponse for error on request, msg: 2025-04-10T11:31:49.591722Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=97; 2025-04-10T11:31:49.592029Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173225:2388], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7491645719800660516:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7491645784225173225:2388].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:31:49.592101Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173215:2388], SessionActorId: [3:7491645719800660516:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7491645719800660516:2388]. isRollback=0 2025-04-10T11:31:49.592290Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwmvfhvg3ddxqfg7vw24, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7491645784225173216:2388] from: [3:7491645784225173215:2388] 2025-04-10T11:31:49.592353Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645784225173216:2388] TxId: 281474976715756. Ctx: { TraceId: 01jrfqgwmvfhvg3ddxqfg7vw24, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:49.592515Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwmvfhvg3ddxqfg7vw24, Create QueryResponse for error on request, msg: 2025-04-10T11:31:49.759833Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=98; 2025-04-10T11:31:49.760120Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173259:2388], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7491645719800660516:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7491645784225173259:2388].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:31:49.760187Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173249:2388], SessionActorId: [3:7491645719800660516:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7491645719800660516:2388]. isRollback=0 2025-04-10T11:31:49.760361Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwspdrp312tygpkbbx8k, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7491645784225173250:2388] from: [3:7491645784225173249:2388] 2025-04-10T11:31:49.760436Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645784225173250:2388] TxId: 281474976715757. Ctx: { TraceId: 01jrfqgwspdrp312tygpkbbx8k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:49.760585Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwspdrp312tygpkbbx8k, Create QueryResponse for error on request, msg: 2025-04-10T11:31:49.953463Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=99; 2025-04-10T11:31:49.953846Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173295:2388], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7491645719800660516:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7491645784225173295:2388].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:31:49.953920Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645784225173287:2388], SessionActorId: [3:7491645719800660516:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7491645719800660516:2388]. isRollback=0 2025-04-10T11:31:49.954135Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwz2514qjhvh5c2bf3a0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7491645784225173288:2388] from: [3:7491645784225173287:2388] 2025-04-10T11:31:49.954230Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645784225173288:2388] TxId: 281474976715758. Ctx: { TraceId: 01jrfqgwz2514qjhvh5c2bf3a0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:49.954416Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgwz2514qjhvh5c2bf3a0, Create QueryResponse for error on request, msg: 2025-04-10T11:31:50.131148Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=100; 2025-04-10T11:31:50.131511Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645788520140628:2388], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7491645719800660516:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7491645788520140628:2388].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:31:50.131571Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645788520140620:2388], SessionActorId: [3:7491645719800660516:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7491645719800660516:2388]. isRollback=0 2025-04-10T11:31:50.131767Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgx5achrv1e3b6bbxczjf, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7491645788520140621:2388] from: [3:7491645788520140620:2388] 2025-04-10T11:31:50.131836Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645788520140621:2388] TxId: 281474976715759. Ctx: { TraceId: 01jrfqgx5achrv1e3b6bbxczjf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:50.131996Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgx5achrv1e3b6bbxczjf, Create QueryResponse for error on request, msg: 2025-04-10T11:31:50.362396Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=101; 2025-04-10T11:31:50.362585Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 101 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:31:50.362710Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 101 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-10T11:31:50.362909Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645788520140664:2388], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7491645719800660516:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7491645788520140664:2388].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-10T11:31:50.362976Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645788520140654:2388], SessionActorId: [3:7491645719800660516:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7491645719800660516:2388]. isRollback=0 2025-04-10T11:31:50.363169Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgxaf8kdkea0r9a495x18, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7491645788520140655:2388] from: [3:7491645788520140654:2388] 2025-04-10T11:31:50.363236Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645788520140655:2388] TxId: 281474976715760. Ctx: { TraceId: 01jrfqgxaf8kdkea0r9a495x18, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-10T11:31:50.363399Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTI3Yzk2MGUtOGZjZTk2ZDgtZTA0ZmQ0YjYtZGMyZjhiNmM=, ActorId: [3:7491645719800660516:2388], ActorState: ExecuteState, TraceId: 01jrfqgxaf8kdkea0r9a495x18, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> KqpQueryService::TableSink_OlapDelete [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TColumnShardTestReadWrite::Write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 64724, MsgBus: 65093 2025-04-10T11:29:55.167181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645294023657861:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:55.167313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015d2/r3tmp/tmpeHphp1/pdisk_1.dat 2025-04-10T11:29:55.893230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:55.893322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:55.927785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:55.928908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64724, node 1 2025-04-10T11:29:56.206613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:56.206631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:56.206637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:56.206734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65093 TClient is connected to server localhost:65093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:29:57.205657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:00.170321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645294023657861:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:00.170406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:00.546025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645315498494886:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.546194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.546561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645315498494898:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:00.550503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:00.564025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:30:00.564714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645315498494900:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:00.650842Z node 1 :TX_PROXY ERROR: Actor# [1:7491645315498494952:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:01.187292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:01.606245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:01.606449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:01.606698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:01.606813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:01.606947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:01.607049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:01.607169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:01.607995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:01.608086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:01.618378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:01.618554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-10T11:30:01.618644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:01.618729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:01.618815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:01.618961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:01.619077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:01.619185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:01.619291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:01.619392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645319793462527:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:01.622411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:01.622576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:01.622699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:01.622833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:01.622926Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645319793462515:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:01.673087Z node 1 :T ... ontroller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.836128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.838473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.842965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.845331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.850191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.852367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.859311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.862352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.864756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.867642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.871106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.874017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.876582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.879668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.882071Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.885831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.888671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.891167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.894384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.896552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.900353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.902649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.907319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.908752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.914069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.915106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.920657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.920999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.927702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.927728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.934772Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.934777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.941721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.941720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.948766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.948766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.955515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.955817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.962786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.962785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.969665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.969793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.976358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.976359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:31.983597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:32.174445Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqf31v8yj2gk9zn6ex2ah5", SessionId: ydb://session/3?node_id=1&id=Y2MzZDAzYTUtYTMwMTcxNC0zMDc1MDVlLTRlYjIyM2Vm, Slow query, duration: 41.618788s, status: 
STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:32.631972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:32.632531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:32.633254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7491645603261350512:9381];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-04-10T11:31:32.633672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TColumnShardTestReadWrite::CompactionInGranule_PKString ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d30/r3tmp/tmpKaWhh1/pdisk_1.dat 2025-04-10T11:31:11.386043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:11.848962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:11.849063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:11.864103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:11.925778Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10533, node 1 2025-04-10T11:31:12.178493Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:31:12.178516Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:31:12.258822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.258847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.258854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.258985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16010 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:12.830516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.810018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645637565962827:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.810171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.810680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645637565962839:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.811089Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] Handle TEvProposeTransaction 2025-04-10T11:31:15.811112Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T11:31:15.811156Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491645637565962842:2642] 2025-04-10T11:31:15.949530Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-10T11:31:15.949622Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:31:15.949651Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-10T11:31:15.951294Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:31:15.951420Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:31:15.951620Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:15.951812Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:31:15.951874Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T11:31:15.952014Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T11:31:15.953427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:15.964420Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-10T11:31:15.964483Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645637565962842:2642] txid# 281474976710658 SEND to# [1:7491645637565962841:2345] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-10T11:31:15.992249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645637565962841:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:31:16.082583Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] Handle TEvProposeTransaction 2025-04-10T11:31:16.082612Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T11:31:16.082659Z node 1 :TX_PROXY DEBUG: actor# [1:7491645616091125468:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491645641860930213:2697] 2025-04-10T11:31:16.085301Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-10T11:31:16.085346Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:31:16.085360Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-10T11:31:16.085920Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:31:16.085981Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:31:16.086125Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:16.086757Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:31:16.086805Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-10T11:31:16.086964Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 HANDLE EvClientConnected 2025-04-10T11:31:16.091405Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T11:31:16.091601Z node 1 :TX_PROXY ERROR: Actor# [1:7491645641860930213:2697] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:16.091629Z node 1 :TX_PROXY DEBUG: Actor# [1:7491645641860930213:2697] txid# 281474976710659 SEND to# [1:7491645637565962841:2345] Source {TEvProposeTransactionStatus txid# 281474976710659 Sta ... node 13 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710762 2025-04-10T11:31:54.190704Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-10T11:31:54.735733Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7491645802834603401:2426] [0] Resolve database: name# /Root 2025-04-10T11:31:54.736404Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7491645802834603401:2426] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:31:54.736447Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7491645802834603401:2426] [0] Send request: schemeShardId# 72057594046644480 2025-04-10T11:31:54.737345Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7491645802834603401:2426] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:63779" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1744284713 } EndTime { seconds: 1744284714 } } 2025-04-10T11:31:54.750059Z node 13 :TX_PROXY DEBUG: actor# [13:7491645777064796504:2110] Handle TEvNavigate describe path /Root/table 2025-04-10T11:31:54.750123Z node 13 :TX_PROXY DEBUG: Actor# [13:7491645802834603407:4531] HANDLE EvNavigateScheme /Root/table 2025-04-10T11:31:54.750979Z node 13 :TX_PROXY DEBUG: Actor# [13:7491645802834603407:4531] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:31:54.751144Z node 13 :TX_PROXY DEBUG: Actor# [13:7491645802834603407:4531] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-10T11:31:54.753219Z node 13 :TX_PROXY DEBUG: Actor# [13:7491645802834603407:4531] Handle TEvDescribeSchemeResult Forward to# [13:7491645802834603405:2427] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1744284713584 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 9 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046644480 >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] >> KqpQueryService::ExecuteQueryScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-04-10T11:31:48.352788Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.521352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.549465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.550477Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.564142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.564443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.564680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.564811Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.564911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.565011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.565150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.565343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.565492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.565599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.565712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.565822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.605980Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.606452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.606523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.606792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.610027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.610092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.610234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-10T11:31:48.610302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.610348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.610380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.610569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.610648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.610692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.610722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.610829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.610882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.610930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.610962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.611026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.611095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.611130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.611190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.611249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.611277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.611694Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-10T11:31:48.611784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:31:48.611864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-10T11:31:48.611977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T11:31:48.612179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.612235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.612270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.612457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.612501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.612532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.612694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.612738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.613014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.613043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.613217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.613257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.613305Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... t_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.220861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:31:57.221019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:31:57.221158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:31:57.221319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T11:31:57.221473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.221609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.221754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.222014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:31:57.222215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.222378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.222428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1057:2928] finished for tablet 9437184 2025-04-10T11:31:57.222982Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1056:2927];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1744284717205205,"name":"_full_task","f":1744284717205205,"d_finished":0,"c":0,"l":1744284717222495,"d":17290},"events":[{"name":"bootstrap","f":1744284717205431,"d_finished":3527,"c":1,"l":1744284717208958,"d":3527},{"a":1744284717221987,"name":"ack","f":1744284717220534,"d_finished":1263,"c":1,"l":1744284717221797,"d":1771},{"a":1744284717221972,"name":"processing","f":1744284717210507,"d_finished":6530,"c":10,"l":1744284717221799,"d":7053},{"name":"ProduceResults","f":1744284717207377,"d_finished":3415,"c":13,"l":1744284717222411,"d":3415},{"a":1744284717222414,"name":"Finish","f":1744284717222414,"d_finished":0,"c":0,"l":1744284717222495,"d":81},{"name":"task_result","f":1744284717210534,"d_finished":5119,"c":9,"l":1744284717220328,"d":5119}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.223082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:31:57.223568Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1056:2927];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1744284717205205,"name":"_full_task","f":1744284717205205,"d_finished":0,"c":0,"l":1744284717223130,"d":17925},"events":[{"name":"bootstrap","f":1744284717205431,"d_finished":3527,"c":1,"l":1744284717208958,"d":3527},{"a":1744284717221987,"name":"ack","f":1744284717220534,"d_finished":1263,"c":1,"l":1744284717221797,"d":2406},{"a":1744284717221972,"name":"processing","f":1744284717210507,"d_finished":6530,"c":10,"l":1744284717221799,"d":7688},{"name":"ProduceResults","f":1744284717207377,"d_finished":3415,"c":13,"l":1744284717222411,"d":3415},{"a":1744284717222414,"name":"Finish","f":1744284717222414,"d_finished":0,"c":0,"l":1744284717223130,"d":716},{"name":"task_result","f":1744284717210534,"d_finished":5119,"c":9,"l":1744284717220328,"d":5119}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:31:57.223651Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:31:57.204498Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:31:57.223695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:31:57.224070Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpQueryService::TableSink_OlapOrder [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> KqpQueryService::TableSink_DisableSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 29295, MsgBus: 7336 2025-04-10T11:31:24.824275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645672695571195:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:24.824355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001558/r3tmp/tmpL4TTcL/pdisk_1.dat 2025-04-10T11:31:25.511050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:25.515219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:25.515320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:25.519999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29295, node 1 2025-04-10T11:31:25.838768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:25.838795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:25.838802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:25.838921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7336 TClient is connected to server localhost:7336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:26.868902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:26.899506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:29.443519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645694170408122:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:29.443643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:29.782674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:29.825082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645672695571195:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:29.825228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:30.071435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:30.071659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:30.071838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:30.071927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:30.071936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:30.072491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:30.072600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:30.072701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:30.072801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:30.072931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:30.073059Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:30.073201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:30.073569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:30.073665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:30.073759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:30.073790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:30.073850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:30.073944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645694170408290:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:30.073961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:30.074056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:30.074189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:30.074294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:30.074383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:30.074470Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491645694170408296:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:30.112715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645694170408286:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:30.112770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645694170408286:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:30.112962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645694170408286:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:30.113085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645694170408286:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:30.113188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645694170408286:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=a ... id=72075186224037890;self_id=[3:7491645782604627217:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.337213Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7491645782604627237:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.337698Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645782604627217:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.340605Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7491645782604627215:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.342737Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7491645778309659917:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.346486Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7491645782604627237:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.346850Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7491645782604627233:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.352630Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[3:7491645782604627235:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.352707Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7491645778309659915:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.352996Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7491645782604627239:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.353174Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7491645782604627245:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.353253Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7491645782604627215:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.353429Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7491645778309659917:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.353843Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7491645782604627245:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.354016Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7491645782604627352:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.354175Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7491645782604627352:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.355645Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7491645782604627235:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.355827Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7491645778309659915:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.356018Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7491645782604627239:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.374911Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=6593286e-15ff11f0-a26388cb-7ddb33a5;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.375356Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T11:31:51.380379Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7491645778309659917:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380447Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7491645782604627235:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380459Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7491645778309659917:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380483Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7491645782604627235:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380566Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7491645782604627233:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380581Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7491645782604627217:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380598Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7491645782604627233:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380603Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7491645782604627217:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380671Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7491645778309659915:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380692Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7491645778309659915:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380710Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7491645782604627215:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380732Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7491645782604627215:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 
2025-04-10T11:31:51.380755Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7491645782604627352:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380776Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7491645782604627352:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380801Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7491645782604627245:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380837Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7491645782604627245:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380869Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7491645782604627237:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380892Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7491645782604627237:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380921Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645782604627239:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.380942Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7491645782604627239:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-10T11:31:51.479091Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715672; 2025-04-10T11:31:51.479636Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715672;commit_lock_id=281474976715671;fline=manager.cpp:94;broken_lock_id=281474976715669; 2025-04-10T11:31:51.479872Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> Normalizers::EmptyTablesNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 4451, MsgBus: 28332 2025-04-10T11:31:26.784963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645684480506826:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:26.785505Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00154d/r3tmp/tmpC7cIL0/pdisk_1.dat 2025-04-10T11:31:27.471456Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4451, node 1 2025-04-10T11:31:27.614701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:27.614801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:27.638915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:27.734933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:27.734961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:27.734973Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:27.735095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28332 TClient is connected to server localhost:28332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:28.813884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:28.898588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:28.912659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:29.107458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:29.321733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:29.446583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:31.678861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645705955345064:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:31.678963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:31.788208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645684480506826:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:31.788294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:32.132001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.192195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.226541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.303707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.343946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.425746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:32.526595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645710250312887:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:32.526796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:32.527139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645710250312892:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:32.538020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:32.579466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645710250312894:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:32.683209Z node 1 :TX_PROXY ERROR: Actor# [1:7491645710250312954:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:33.665780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.667231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.669157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.302864Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284696315, txId: 281474976710694] shutting down Trying to start YDB, gRPC: 4906, MsgBus: 9970 2025-04-10T11:31:37.233444Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645732187710769:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:37.233512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00154d/r3tmp/tmpv92eRe/pdisk_1.dat 2025-04-10T11:31:37.392482Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:37.425867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:37.425958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:37.431416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4906, node 2 2025-04-10T11:31:37.517017Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:37.517042Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:37.517049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:37.517233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9970 TClient is connected to server localhost:9970 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:38.096207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:38 ... h pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:41.187913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:41.188308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645749367582238:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:41.191525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:41.200540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645749367582240:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:41.285689Z node 2 :TX_PROXY ERROR: Actor# [2:7491645749367582296:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:42.239738Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645732187710769:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:42.243461Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:42.282132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:42.283991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:42.286437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:44.994953Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284705023, txId: 281474976715700] shutting down 2025-04-10T11:31:45.291718Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284705324, txId: 281474976715703] shutting down Trying to start YDB, gRPC: 12046, MsgBus: 26505 2025-04-10T11:31:47.626627Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645774268861475:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:47.626685Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00154d/r3tmp/tmpjdIlgq/pdisk_1.dat 2025-04-10T11:31:47.844137Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:47.844681Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:47.844772Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:47.858475Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12046, node 3 2025-04-10T11:31:47.986730Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:47.986755Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:47.986763Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:47.986903Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26505 TClient is connected to server localhost:26505 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:48.609498Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:48.621576Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:31:48.650906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:48.739463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:48.931773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:49.031137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:51.677667Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645791448732437:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:51.677790Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:51.740656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:51.787578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:51.869410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:51.914288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:51.962441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:52.047871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:52.130381Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645795743700256:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:52.130488Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:52.130748Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645795743700261:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:52.136522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:52.154198Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645795743700263:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:52.238402Z node 3 :TX_PROXY ERROR: Actor# [3:7491645795743700317:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:52.626774Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645774268861475:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:52.626861Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:53.417203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:53.420590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:31:53.422893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:31:56.244570Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284716279, txId: 281474976715704] shutting down >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert >> EvWrite::WriteWithLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 4723, MsgBus: 26512 2025-04-10T11:31:11.978727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645620821725439:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.979329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001572/r3tmp/tmphNwadD/pdisk_1.dat 2025-04-10T11:31:12.788280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.800103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.800222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.807541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4723, node 1 2025-04-10T11:31:12.962662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.962684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.962690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.962796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26512 TClient is connected to server 
localhost:26512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.262299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.282286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:14.291374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.472816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.743082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.822340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:16.855478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645642296563528:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.855633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.962275Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645620821725439:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.962357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.315703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.350124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.394904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.448420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.526101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.613149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.686005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646591531349:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.686087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.686692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645646591531354:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.690747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.700410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645646591531357:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:17.792307Z node 1 :TX_PROXY ERROR: Actor# [1:7491645646591531414:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:27.706292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:31:27.706330Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 4852, MsgBus: 10172 2025-04-10T11:31:31.007259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645698799421739:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001572/r3tmp/tmpjhcj8T/pdisk_1.dat 2025-04-10T11:31:31.155609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:31.283720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:31.332665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:31.332787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:31.343209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4852, node 2 2025-04-10T11:31:31.551759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:31.551789Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:31.551798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:31.551966Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10172 TClient is connected to server localhost:10172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:32.235703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:32.257363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:32.347249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:32.611438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 wa ... TxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:54.719286Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:54.719743Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:54.719795Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:54.719912Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:54.719941Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:54.720007Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:54.720031Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:54.720070Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:54.720095Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:54.720701Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:54.720750Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:54.720981Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:54.721012Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:54.721190Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:54.721217Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:54.721388Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:54.721414Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:54.721540Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:54.721575Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:54.729229Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:54.729293Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:54.729411Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:54.729442Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:54.729631Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:54.729671Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:54.729773Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:54.729816Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:54.729905Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:54.729949Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:54.730025Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:54.730062Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:54.730682Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:54.730736Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:54.730960Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:54.730994Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:54.731154Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:54.731201Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:54.731444Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:54.731606Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:54.731749Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:54.731768Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:54.771573Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.773309Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.777159Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.780237Z node 3 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.783405Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.786811Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.787892Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.791999Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.793131Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:54.799143Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710661; 2025-04-10T11:31:55.007547Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645788356875709:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:55.007626Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:55.024723Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710664; >> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 15647, MsgBus: 18753 2025-04-10T11:31:28.990296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645690596300128:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:28.990379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001548/r3tmp/tmpx0YdoQ/pdisk_1.dat 2025-04-10T11:31:29.608936Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:29.642528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:29.642642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:29.646715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15647, node 1 2025-04-10T11:31:29.878584Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:29.878610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:29.878619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:29.878745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18753 TClient is connected to server localhost:18753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:30.692527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.754400Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:30.773781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:31.001147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:31.204200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:31.299991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:33.120405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645712071138404:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.120589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.510432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.546303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.597986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.629485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.669016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.749623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:33.842503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645712071138925:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.842608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.847029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645712071138930:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:33.852266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:33.870028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645712071138932:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:33.947232Z node 1 :TX_PROXY ERROR: Actor# [1:7491645712071138987:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:33.993115Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645690596300128:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:33.993174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:35.048330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63591, MsgBus: 9021 2025-04-10T11:31:44.438404Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645759803275476:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:44.438483Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001548/r3tmp/tmpStnTau/pdisk_1.dat 2025-04-10T11:31:44.572500Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:44.599904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:44.599984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:44.603217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63591, node 2 2025-04-10T11:31:44.786734Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:44.786761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:44.786767Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:44.786881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9021 TClient is connected to server localhost:9021 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:45.355750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:45.374489Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:45.385550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:45.481913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... ... : [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645772688179128:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:47.976051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.039449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.075387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.110719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.186440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.230596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.271541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:48.362607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645776983146943:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.362709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.363179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645776983146948:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:48.367689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:48.385387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645776983146950:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:48.465148Z node 2 :TX_PROXY ERROR: Actor# [2:7491645776983147007:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:49.439806Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645759803275476:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:49.439872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21185, MsgBus: 5207 2025-04-10T11:31:50.705786Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645787480626747:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:50.705869Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001548/r3tmp/tmpYtiycf/pdisk_1.dat 2025-04-10T11:31:50.888728Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:50.919507Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:50.919617Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:50.921095Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21185, node 3 2025-04-10T11:31:51.078862Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:51.078891Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:51.078900Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:51.079055Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5207 TClient is connected to server localhost:5207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:51.691605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:51.709149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:51.795910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:52.001679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:52.104833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:55.023923Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645808955465005:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:55.024063Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:55.111266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.167622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.249740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.281874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.317038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.361385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:55.419419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645808955465515:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:55.419528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:55.419776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645808955465520:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:55.423752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:55.435197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645808955465522:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:55.529091Z node 3 :TX_PROXY ERROR: Actor# [3:7491645808955465578:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:55.706065Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645787480626747:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:55.706173Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 3092, MsgBus: 15856 2025-04-10T11:31:16.540488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645638916524855:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.540642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00155e/r3tmp/tmpurnGqp/pdisk_1.dat 2025-04-10T11:31:17.154030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:17.155217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:17.158768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:17.230649Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3092, node 1 2025-04-10T11:31:17.363054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:17.363094Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:17.363103Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:17.363260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15856 TClient is connected to server localhost:15856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:18.491040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:18.516215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:20.792042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645656096394549:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:20.792246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:21.132139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:21.345915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:21.346159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:21.346434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:21.346546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:21.346650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:21.346759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:21.346857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:21.346965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:21.347080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:21.347208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:21.347311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:21.347403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645660391362008:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-10T11:31:21.353329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:21.353420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:21.353582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:21.353687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:21.353781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:21.353873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:21.353957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:21.354047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:21.354177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:21.354280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:21.356087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:21.356235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491645660391361976:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:21.396705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:21.396770Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:21.397012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:21.397141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:21.397245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:21.397375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645660391361983:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:21.397472Z node 1 :TX_COLUMN ... eSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:56.647507Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:56.647549Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:56.647794Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:56.647830Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:56.655171Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:56.655248Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:56.655369Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:56.655403Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:56.655681Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:56.655722Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-04-10T11:31:56.655831Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:56.655867Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:56.655946Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:56.655976Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:56.656022Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:56.656058Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:56.656615Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:56.656650Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:56.656846Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:56.656878Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:56.657013Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:56.657040Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:56.657268Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:56.657294Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:56.657406Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:56.657441Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 
2025-04-10T11:31:56.664873Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:56.716675Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.717516Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.724538Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.726325Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.733688Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.733917Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.742290Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.742290Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.750772Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.750772Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.759678Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.760258Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.768578Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.768578Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.776726Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.778275Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:31:56.799955Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645812339624383:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:56.800096Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:56.800372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645812339624388:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:56.806375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:31:56.826332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645812339624390:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:31:56.885945Z node 3 :TX_PROXY ERROR: Actor# [3:7491645812339624443:2717] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:56.991066Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491645812339624468:2437] TxId: 281474976710661. Ctx: { TraceId: 01jrfqh3qvaja2jg3krvh3pxyy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjEyYWFmYWMtNDYzZDUwYzYtMTIzM2FkZjYtYTAxZTVlMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-04-10T11:31:56.991347Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjEyYWFmYWMtNDYzZDUwYzYtMTIzM2FkZjYtYTAxZTVlMzM=, ActorId: [3:7491645812339624381:2437], ActorState: ExecuteState, TraceId: 01jrfqh3qvaja2jg3krvh3pxyy, Create QueryResponse for error on request, msg: >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore-WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-10T11:31:48.355668Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.523171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.553651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.554015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.564673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.564952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.565288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.565441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.565568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.565702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.565835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-10T11:31:48.565974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.566106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.566252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.566426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.566561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.612335Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.612685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.612754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.612947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.613204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.613314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.613365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.613464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.613546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.613602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.613637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.613891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.613979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.614028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.614067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.614195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.614259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.614323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.614356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.614472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.614520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.614559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.614637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.614688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.614719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.615180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-10T11:31:48.615274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:31:48.615399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=74; 2025-04-10T11:31:48.615509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-10T11:31:48.615727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.615793Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.615830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.616043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.616097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.616154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.616992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.617108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.617146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.617373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.617429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.617473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.617618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.617670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.617747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
11:31:58.549388Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 4 at tablet 9437184 2025-04-10T11:31:58.549674Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:31:58.567251Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:31:58.567411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.569099Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-10T11:31:58.569388Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:31:58.582723Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:31:58.582887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.584733Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-10T11:31:58.585004Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:31:58.597209Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:31:58.597354Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.606138Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-10T11:31:58.606457Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:31:58.620315Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:31:58.620448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.622233Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-10T11:31:58.622500Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:31:58.635644Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:31:58.635808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.637625Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-10T11:31:58.637917Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:31:58.656539Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:31:58.656709Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.669480Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-10T11:31:58.669786Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:31:58.683167Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:31:58.683331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.685065Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-10T11:31:58.685348Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T11:31:58.698049Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T11:31:58.698226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.699984Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-10T11:31:58.700256Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:31:58.712546Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:31:58.712696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.725579Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-10T11:31:58.725903Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:31:58.740563Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:31:58.740717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.742435Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-10T11:31:58.742732Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:31:58.754897Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:31:58.755041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.756630Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-10T11:31:58.756898Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 
Blob count: 1 2025-04-10T11:31:58.769257Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T11:31:58.769417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.780736Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-10T11:31:58.781028Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:31:58.801187Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:31:58.801322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.803036Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-10T11:31:58.803291Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:31:58.815838Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:31:58.815987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.817693Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-10T11:31:58.817978Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:31:58.830587Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:31:58.830724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.850973Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-10T11:31:58.851289Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:31:58.864036Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:31:58.864175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.865965Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-10T11:31:58.866282Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:31:58.879973Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:31:58.880117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:58.881816Z node 1 
:TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-10T11:31:58.882083Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:31:58.896154Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:31:58.896307Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:31:59.216574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; 2025-04-10T11:31:59.282669Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-10T11:31:59.282973Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T11:31:59.295615Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T11:31:59.295784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert >> TColumnShardTestReadWrite::WriteReadModifications >> EvWrite::WriteWithLock [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TColumnShardTestReadWrite::WriteStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-04-10T11:31:59.751261Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:59.868216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:59.892856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:59.893188Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:59.901516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:59.901756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:59.901980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:59.902095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:59.902266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:59.902437Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:59.902557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:59.902664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:59.902780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:59.902896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.903009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:59.903128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:59.933226Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:59.933739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:59.933802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:59.933976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.934126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:59.934238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:59.934306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:59.934397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:59.934456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:59.934500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:59.934530Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:59.934728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.934789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:59.934827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:59.934858Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:59.934938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:59.934984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:59.935027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:59.935053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:59.935136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:59.935174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:59.935203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:59.935257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:59.935290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:59.935315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:59.935695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-10T11:31:59.935777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:31:59.935860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-10T11:31:59.935973Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=61; 2025-04-10T11:31:59.936136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:59.936201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:59.936236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:59.936424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:59.936473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.936506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.936640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:59.936700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:59.936731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:59.936926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:59.936972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:59.937000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:59.937129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:59.937182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:59.937227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
;;;); 2025-04-10T11:32:01.148682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:32:01.148711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:01.149181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:01.149229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:01.149275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-10T11:32:01.149334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=1; 2025-04-10T11:32:01.149402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:01.149481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.149512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-04-10T11:32:01.149580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:01.149861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:01.150032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.150085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:01.150219Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-04-10T11:32:01.150306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-04-10T11:32:01.150422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:301:2319];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-10T11:32:01.150547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.150657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.150784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.151193Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:01.151335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.151439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.151478Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:305:2323] finished for tablet 9437184 2025-04-10T11:32:01.151961Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:301:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.072},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.074}],"full":{"a":1744284721077170,"name":"_full_task","f":1744284721077170,"d_finished":0,"c":0,"l":1744284721151531,"d":74361},"events":[{"name":"bootstrap","f":1744284721077510,"d_finished":3504,"c":1,"l":1744284721081014,"d":3504},{"a":1744284721151168,"name":"ack","f":1744284721149830,"d_finished":997,"c":1,"l":1744284721150827,"d":1360},{"a":1744284721151150,"name":"processing","f":1744284721081371,"d_finished":34773,"c":9,"l":1744284721150829,"d":35154},{"name":"ProduceResults","f":1744284721079650,"d_finished":2763,"c":12,"l":1744284721151464,"d":2763},{"a":1744284721151468,"name":"Finish","f":1744284721151468,"d_finished":0,"c":0,"l":1744284721151531,"d":63},{"name":"task_result","f":1744284721081390,"d_finished":33527,"c":8,"l":1744284721149628,"d":33527}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.152048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:301:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:01.152541Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:301:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.072},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.074}],"full":{"a":1744284721077170,"name":"_full_task","f":1744284721077170,"d_finished":0,"c":0,"l":1744284721152103,"d":74933},"events":[{"name":"bootstrap","f":1744284721077510,"d_finished":3504,"c":1,"l":1744284721081014,"d":3504},{"a":1744284721151168,"name":"ack","f":1744284721149830,"d_finished":997,"c":1,"l":1744284721150827,"d":1932},{"a":1744284721151150,"name":"processing","f":1744284721081371,"d_finished":34773,"c":9,"l":1744284721150829,"d":35726},{"name":"ProduceResults","f":1744284721079650,"d_finished":2763,"c":12,"l":1744284721151464,"d":2763},{"a":1744284721151468,"name":"Finish","f":1744284721151468,"d_finished":0,"c":0,"l":1744284721152103,"d":635},{"name":"task_result","f":1744284721081390,"d_finished":33527,"c":8,"l":1744284721149628,"d":33527}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:01.152658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:01.076545Z;index_granules=0;index_portions=1;index_batches=176;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=494016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=494016;selected_rows=0; 2025-04-10T11:32:01.152703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:01.152943Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:305:2323];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;
>> TColumnShardTestReadWrite::WriteReadStandalone
>> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD]
>> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD]
>> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime
>> KqpQueryService::DdlSecret [GOOD]
>> KqpQueryService::DdlMixedDml
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD]
Test command err:
2025-04-10T11:31:55.450505Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:55.545329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:55.564696Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:55.564911Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:55.572202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:55.572452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:55.572695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:55.572837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:55.572945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:55.573079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:55.573186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:55.573300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:55.573421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:55.573572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.573678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:55.573796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:55.602628Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:55.602941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:55.603020Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-10T11:31:55.603229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.603434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:55.603533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:55.603583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:55.603689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:55.603762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:55.603810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:55.603843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:55.604081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.604180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:55.604235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:55.604275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:55.604370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:55.604431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:55.604503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:55.604545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:55.604625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:55.604666Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:55.604695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:55.604756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:55.604804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:55.604836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:55.605354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:31:55.605474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-10T11:31:55.605563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-10T11:31:55.605680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:31:55.605914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:55.605996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:55.606031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:55.606310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:55.606376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.606412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.606606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:55.606658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:55.606690Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:55.606904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:55.606952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:55.606991Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:55.607145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:55.607200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:55.607280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... umn_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.800819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:01.800991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:32:01.801105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:32:01.801269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary 
json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-10T11:32:01.801447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.801612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.801759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.802021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:01.803925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.804137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.804200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:425:2440] finished for tablet 9437184 2025-04-10T11:32:01.804738Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:424:2439];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1744284721787447,"name":"_full_task","f":1744284721787447,"d_finished":0,"c":0,"l":1744284721804265,"d":16818},"events":[{"name":"bootstrap","f":1744284721787668,"d_finished":2597,"c":1,"l":1744284721790265,"d":2597},{"a":1744284721801990,"name":"ack","f":1744284721800463,"d_finished":1340,"c":1,"l":1744284721801803,"d":3615},{"a":1744284721801974,"name":"processing","f":1744284721791468,"d_finished":6477,"c":10,"l":1744284721801807,"d":8768},{"name":"ProduceResults","f":1744284721789067,"d_finished":5308,"c":13,"l":1744284721804179,"d":5308},{"a":1744284721804184,"name":"Finish","f":1744284721804184,"d_finished":0,"c":0,"l":1744284721804265,"d":81},{"name":"task_result","f":1744284721791482,"d_finished":5010,"c":9,"l":1744284721800257,"d":5010}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.804819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:01.805364Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:424:2439];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1744284721787447,"name":"_full_task","f":1744284721787447,"d_finished":0,"c":0,"l":1744284721804866,"d":17419},"events":[{"name":"bootstrap","f":1744284721787668,"d_finished":2597,"c":1,"l":1744284721790265,"d":2597},{"a":1744284721801990,"name":"ack","f":1744284721800463,"d_finished":1340,"c":1,"l":1744284721801803,"d":4216},{"a":1744284721801974,"name":"processing","f":1744284721791468,"d_finished":6477,"c":10,"l":1744284721801807,"d":9369},{"name":"ProduceResults","f":1744284721789067,"d_finished":5308,"c":13,"l":1744284721804179,"d":5308},{"a":1744284721804184,"name":"Finish","f":1744284721804184,"d_finished":0,"c":0,"l":1744284721804866,"d":682},{"name":"task_result","f":1744284721791482,"d_finished":5010,"c":9,"l":1744284721800257,"d":5010}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.805449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:01.786824Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-10T11:32:01.805499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:01.805899Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD]
>> KqpService::SwitchCache+UseCache [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD]
Test command err:
2025-04-10T11:31:52.818568Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:52.911386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:52.935999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:52.936406Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:52.944551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:52.944779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:52.945002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:52.945121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:52.945193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:52.945270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:52.945360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:52.945478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:52.945565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:52.945676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:52.945743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:52.945826Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:52.972633Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:52.972892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:52.972944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:52.973138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:52.973298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:52.973384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:52.973423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:52.973523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:52.973610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:52.973667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:52.973702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:52.973886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:52.973956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:52.973996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:52.974033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:52.974133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:52.974219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:52.974265Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:52.974294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:52.974369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:52.974406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:52.974436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:52.974487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:52.974537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:52.974590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:52.974990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-10T11:31:52.975070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:31:52.975168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-10T11:31:52.975275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-10T11:31:52.975417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:52.975459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:52.975481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:52.975649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:52.975682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:52.975701Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:52.975810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:52.975840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:52.975868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:52.975982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:52.976010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:52.976035Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:52.976125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:52.976158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:52.976207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ata:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.811473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:01.811646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:32:01.811761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:32:01.811940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T11:32:01.812117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.812262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.812449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.812723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:01.812890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.813065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.813123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1057:2928] finished for tablet 9437184 2025-04-10T11:32:01.813772Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1056:2927];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1744284721794285,"name":"_full_task","f":1744284721794285,"d_finished":0,"c":0,"l":1744284721813213,"d":18928},"events":[{"name":"bootstrap","f":1744284721794539,"d_finished":3810,"c":1,"l":1744284721798349,"d":3810},{"a":1744284721812690,"name":"ack","f":1744284721811079,"d_finished":1409,"c":1,"l":1744284721812488,"d":1932},{"a":1744284721812674,"name":"processing","f":1744284721800040,"d_finished":7224,"c":10,"l":1744284721812491,"d":7763},{"name":"ProduceResults","f":1744284721796618,"d_finished":3772,"c":13,"l":1744284721813104,"d":3772},{"a":1744284721813108,"name":"Finish","f":1744284721813108,"d_finished":0,"c":0,"l":1744284721813213,"d":105},{"name":"task_result","f":1744284721800062,"d_finished":5645,"c":9,"l":1744284721810849,"d":5645}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.813890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:01.814445Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1056:2927];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1744284721794285,"name":"_full_task","f":1744284721794285,"d_finished":0,"c":0,"l":1744284721813945,"d":19660},"events":[{"name":"bootstrap","f":1744284721794539,"d_finished":3810,"c":1,"l":1744284721798349,"d":3810},{"a":1744284721812690,"name":"ack","f":1744284721811079,"d_finished":1409,"c":1,"l":1744284721812488,"d":2664},{"a":1744284721812674,"name":"processing","f":1744284721800040,"d_finished":7224,"c":10,"l":1744284721812491,"d":8495},{"name":"ProduceResults","f":1744284721796618,"d_finished":3772,"c":13,"l":1744284721813104,"d":3772},{"a":1744284721813108,"name":"Finish","f":1744284721813108,"d_finished":0,"c":0,"l":1744284721813945,"d":837},{"name":"task_result","f":1744284721800062,"d_finished":5645,"c":9,"l":1744284721810849,"d":5645}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:01.814553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:01.793466Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:32:01.814609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:01.815041Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> Normalizers::EmptyTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-04-10T11:32:01.263118Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:01.405958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:01.433727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:01.434060Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:01.441977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:01.442214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:01.442485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:01.442597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:01.442725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:01.442898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:01.443003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:01.443102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:01.443224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:01.443340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.443473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:01.443582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:01.473999Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:01.474305Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:01.474385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:01.474576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.474785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:01.474890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:01.474929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:01.475014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:01.475082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:01.475138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:01.475166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:01.475372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.475445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:01.475490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:01.475525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:01.475607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:01.475663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:01.475713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:01.475744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
2025-04-10T11:32:01.475854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:01.475898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:01.475930Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:01.475987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:01.476039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:01.476073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:01.476460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-10T11:32:01.476595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=77; 2025-04-10T11:32:01.476715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-10T11:32:01.476837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-10T11:32:01.477031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:01.477108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:01.477142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:01.477376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:01.477440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.477485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.477677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:01.477727Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:01.477762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:01.477941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:01.477990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:01.478019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:01.478215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:01.478271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:01.478317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... tails={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-10T11:32:02.981991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-10T11:32:02.982411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4; 2025-04-10T11:32:02.982690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=5; 2025-04-10T11:32:02.982913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:02.982958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-10T11:32:02.983018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-10T11:32:02.983078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8391908;count=2; 2025-04-10T11:32:02.983477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-10T11:32:02.984005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-04-10T11:32:02.984135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.984181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T11:32:02.984227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:02.984455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:02.984495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:02.984551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-10T11:32:02.984605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-04-10T11:32:02.984665Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:02.984791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.984959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.985252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:02.985402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.985590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.985650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:419:2437] finished for tablet 9437184 2025-04-10T11:32:02.986255Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:415:2433];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.022}],"full":{"a":1744284722962912,"name":"_full_task","f":1744284722962912,"d_finished":0,"c":0,"l":1744284722985710,"d":22798},"events":[{"name":"bootstrap","f":1744284722963381,"d_finished":6036,"c":1,"l":1744284722969417,"d":6036},{"a":1744284722985215,"name":"ack","f":1744284722985215,"d_finished":0,"c":0,"l":1744284722985710,"d":495},{"a":1744284722985195,"name":"processing","f":1744284722971403,"d_finished":4567,"c":10,"l":1744284722985032,"d":5082},{"name":"ProduceResults","f":1744284722966925,"d_finished":2534,"c":12,"l":1744284722985631,"d":2534},{"a":1744284722985635,"name":"Finish","f":1744284722985635,"d_finished":0,"c":0,"l":1744284722985710,"d":75},{"name":"task_result","f":1744284722971441,"d_finished":4358,"c":10,"l":1744284722985029,"d":4358}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.986370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:415:2433];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:02.986893Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:415:2433];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.022},{"events":["l_ack","l_processing","l_Finish"],"t":0.023}],"full":{"a":1744284722962912,"name":"_full_task","f":1744284722962912,"d_finished":0,"c":0,"l":1744284722986431,"d":23519},"events":[{"name":"bootstrap","f":1744284722963381,"d_finished":6036,"c":1,"l":1744284722969417,"d":6036},{"a":1744284722985215,"name":"ack","f":1744284722985215,"d_finished":0,"c":0,"l":1744284722986431,"d":1216},{"a":1744284722985195,"name":"processing","f":1744284722971403,"d_finished":4567,"c":10,"l":1744284722985032,"d":5803},{"name":"ProduceResults","f":1744284722966925,"d_finished":2534,"c":12,"l":1744284722985631,"d":2534},{"a":1744284722985635,"name":"Finish","f":1744284722985635,"d_finished":0,"c":0,"l":1744284722986431,"d":796},{"name":"task_result","f":1744284722971441,"d_finished":4358,"c":10,"l":1744284722985029,"d":4358}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:02.987024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:02.961928Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-04-10T11:32:02.987082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:02.987908Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:419:2437];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TColumnShardTestReadWrite::Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-04-10T11:31:10.508520Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645613722048108:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:10.508717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpuZeJ5a/pdisk_1.dat 2025-04-10T11:31:11.286757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:11.306025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:11.306607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:11.337326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24908, node 1 2025-04-10T11:31:11.894929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:11.894967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:11.894976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:11.895175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:12.734007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.111598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645635196885523:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.111756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.112889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645635196885535:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.117281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:15.163843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645635196885537:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:31:15.271545Z node 1 :TX_PROXY ERROR: Actor# [1:7491645635196885604:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:15.506250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645613722048108:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:15.506356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/"Create temporary directory "/Root/~backup_20250410T113116" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic"Backup topic "/Root/topic" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic"Write topic into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic/create_topic.pb"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic/permissions.pb"Remove temporary directory "/Root/~backup_20250410T113116" in database2025-04-10T11:31:16.380028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:31:16.466818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.505672Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T11:31:16.514847Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T11:31:16.516301Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-10T11:31:16.535930Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-10T11:31:16.536008Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-10T11:31:16.536042Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found Restore "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic"Restore topic "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic" to "/Root/topic"Read topic from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic/create_topic.pb"Created "/Root/topic"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic" to "/Root/topic"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpM6PQjT/topic/permissions.pb"2025-04-10T11:31:16.736757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T11:31:19.123391Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645655397743386:2089];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpQLlRCN/pdisk_1.dat 2025-04-10T11:31:19.283650Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:19.580807Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:19.616006Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:19.616101Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:19.625759Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30131, node 4 2025-04-10T11:31:19.889291Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:19.889329Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:19.889337Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:19.889500Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:20.292226Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:20.473510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpUwitmh/"Create temporary directory "/Root/~backup_20250410T113120" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpUwitmh/kesus"Backup coordination node "/Root/kesus" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpUwitmh/kesus"Write coordination node into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpUwitmh/kesus/create_coordination_node.pb"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpUwitmh/kesus/permissions.pb"Remove temporary directory "/Root/~backup_20250410T113120" in database2025-04-10T11:31:20.836699Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710660:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:31:20.911376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part ... :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/"Create temporary directory "/Root/~backup_20250410T113153" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable"Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable"Write external table into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable/create_external_table.sql"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable/permissions.pb"Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250410T113153" in database2025-04-10T11:31:53.637916Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:31:53.738635Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7491645777325900002:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:53.738716Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Restore "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource"Restore external data source 
"/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource/create_external_data_source.sql"2025-04-10T11:31:53.897923Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalDataSource/permissions.pb"2025-04-10T11:31:53.943986Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable"Restore external table "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable" to "/Root/externalTable"Read external table from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable/create_external_table.sql"2025-04-10T11:31:53.991001Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 Created "/Root/externalTable"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable" to "/Root/externalTable"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpkaAEZp/externalTable/permissions.pb"2025-04-10T11:31:54.035588Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T11:31:55.979275Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7491645808418219427:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:55.980937Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmpGwyYTB/pdisk_1.dat 2025-04-10T11:31:56.234382Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18943, node 13 2025-04-10T11:31:56.342404Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:56.342630Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:56.389443Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:56.434929Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:56.434961Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:56.434969Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:56.435151Z node 13 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:6383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:56.847442Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:00.699744Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491645829893057006:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:00.699857Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7491645829893056998:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:00.700056Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:00.705627Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:32:00.744717Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7491645829893057012:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:32:00.808159Z node 13 :TX_PROXY ERROR: Actor# [13:7491645829893057085:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:00.836703Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710660:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/"Create temporary directory "/Root/~backup_20250410T113200" in database2025-04-10T11:32:00.962397Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7491645808418219427:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:00.962482Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250410T113200" in database2025-04-10T11:32:01.046004Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource/create_external_data_source.sql"2025-04-10T11:32:01.266711Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710664:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d3d/r3tmp/tmp64Jc9W/externalDataSource/permissions.pb"2025-04-10T11:32:01.325698Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-04-10T11:31:59.470525Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:59.584321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:59.614466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:59.614795Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:59.625298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-04-10T11:31:59.625573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-10T11:31:59.625783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:59.625970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:59.626077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:59.626380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:59.626561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:59.626687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:59.626815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:59.626944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:59.627073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.627201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:59.627455Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:59.674889Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:59.675107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-04-10T11:31:59.675170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-10T11:31:59.675759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=83; 2025-04-10T11:31:59.675866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-10T11:31:59.675969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-10T11:31:59.676064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:31:59.676258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-10T11:31:59.676363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-10T11:31:59.676412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-10T11:31:59.676571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.676669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:59.676720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:59.676754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-10T11:31:59.676884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:59.676953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:59.676999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:59.677042Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:59.677258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.677327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:59.677380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:59.677413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:59.677538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:59.677615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:59.677660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:59.677693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:59.677778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:59.677832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:59.677873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:59.677934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:59.677985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:59.678014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:59.678603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-10T11:31:59.678698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-10T11:31:59.678793Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-10T11:31:59.678879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-10T11:31:59.679042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:59.679095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:59.679134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:59.679389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:59.679451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.679488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.679657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpda ... 2025-04-10T11:32:03.139576Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:32:03.139935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:32:03.140038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:32:03.140076Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
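[Editor's note] For readers post-processing this dump: entries share a fixed prefix — an ISO-8601 timestamp with microseconds and a trailing Z, "node <id>", a ":COMPONENT SEVERITY:" tag, then the message — but the raw text wraps one logical stream across physical lines, often mid-entry. A minimal Python sketch for splitting entries back out; the regex is an assumption inferred from the visible lines above, not a documented log format:

    import re

    # Inferred from entries like:
    # 2025-04-10T11:32:03.140076Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
    ENTRY = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
        r"node\s+(?P<node>\d+)\s+"
        r":(?P<component>[A-Z0-9_]+)\s+(?P<severity>[A-Z]+):"
    )

    def split_entries(text):
        """Yield (timestamp, node, component, severity, message) tuples.

        Everything between one timestamp prefix and the next is treated
        as the current entry's message, which undoes the line wrapping."""
        matches = list(ENTRY.finditer(text))
        for cur, nxt in zip(matches, matches[1:] + [None]):
            end = nxt.start() if nxt else len(text)
            yield (cur["ts"], int(cur["node"]), cur["component"],
                   cur["severity"], text[cur.end():end].strip())

Feeding this dump through split_entries makes it straightforward to, for example, count WARN entries per component or diff the normalizer sequences between tests.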
2025-04-10T11:32:03.140107Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:32:03.140163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:03.140234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:32:03.140302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:32:03.140381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:32:03.140448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:03.140521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:03.140645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:32:03.318645Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-10T11:32:03.318863Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-10T11:32:03.319127Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-10T11:32:03.319273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-10T11:32:03.320228Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-04-10T11:32:03.320368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:32:03.321117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:402:2415];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:463:2468];trace_detailed=; 2025-04-10T11:32:03.321906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-10T11:32:03.328628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-10T11:32:03.329194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:03.329373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:03.329531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:03.329583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:463:2468] finished for tablet 9437184 2025-04-10T11:32:03.330069Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:461:2467];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.007},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":1744284723320996,"name":"_full_task","f":1744284723320996,"d_finished":0,"c":0,"l":1744284723329651,"d":8655},"events":[{"name":"bootstrap","f":1744284723321249,"d_finished":7671,"c":1,"l":1744284723328920,"d":7671},{"a":1744284723329158,"name":"ack","f":1744284723329158,"d_finished":0,"c":0,"l":1744284723329651,"d":493},{"a":1744284723329132,"name":"processing","f":1744284723329132,"d_finished":0,"c":0,"l":1744284723329651,"d":519},{"name":"ProduceResults","f":1744284723328897,"d_finished":348,"c":2,"l":1744284723329566,"d":348},{"a":1744284723329570,"name":"Finish","f":1744284723329570,"d_finished":0,"c":0,"l":1744284723329651,"d":81}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:03.330170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:461:2467];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:03.330615Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:461:2467];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.007},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":1744284723320996,"name":"_full_task","f":1744284723320996,"d_finished":0,"c":0,"l":1744284723330229,"d":9233},"events":[{"name":"bootstrap","f":1744284723321249,"d_finished":7671,"c":1,"l":1744284723328920,"d":7671},{"a":1744284723329158,"name":"ack","f":1744284723329158,"d_finished":0,"c":0,"l":1744284723330229,"d":1071},{"a":1744284723329132,"name":"processing","f":1744284723329132,"d_finished":0,"c":0,"l":1744284723330229,"d":1097},{"name":"ProduceResults","f":1744284723328897,"d_finished":348,"c":2,"l":1744284723329566,"d":348},{"a":1744284723329570,"name":"Finish","f":1744284723329570,"d_finished":0,"c":0,"l":1744284723330229,"d":659}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:03.330737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:03.320328Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:32:03.330794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:03.330941Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:463:2468];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 15507, MsgBus: 20316 2025-04-10T11:31:13.098655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645626261463034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:13.098701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001563/r3tmp/tmpwJu3BV/pdisk_1.dat 2025-04-10T11:31:13.783912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:13.786215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:13.786287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:13.793285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15507, node 1 2025-04-10T11:31:14.123423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:14.123457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:14.123464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:14.123597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20316 TClient is connected to server localhost:20316 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:15.008020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.055044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.272805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.459758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.564056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:17.450675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645643441334007:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.450869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.830880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.907552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.962941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.004631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.051059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.105690Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645626261463034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:18.106625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:18.117402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.204343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645647736301817:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:18.204483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:18.207612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645647736301822:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:18.212534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:18.235233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645647736301824:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:18.340276Z node 1 :TX_PROXY ERROR: Actor# [1:7491645647736301881:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15610, MsgBus: 32496 2025-04-10T11:31:23.500442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645671304689714:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001563/r3tmp/tmpTFYANp/pdisk_1.dat 2025-04-10T11:31:23.650693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:31:23.823894Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:23.839093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:23.839173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:23.843416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15610, node 2 2025-04-10T11:31:24.046694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:24.046722Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:24.046732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:24.046848Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32496 TClient is connected to server localhost:32496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:24.803480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:24.833269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
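[Editor's note] The recurring sequence above — "Resource pool default not found" (NOT_FOUND) from the pool fetcher, a scheduled retry after "Transaction ... completed, doublechecking", and the TX_PROXY "path exist, request accepts it" message — traces a fetch-or-create race on the default resource pool that resolves idempotently. A minimal sketch of that pattern, assuming hypothetical fetch/create callables rather than the actual KQP workload-service API:

    import time

    class NotFound(Exception):
        pass

    class AlreadyExists(Exception):
        pass

    def ensure_default_pool(fetch, create, retries=5, delay=0.1):
        """Fetch-or-create with a double-check.

        A NOT_FOUND on fetch triggers a create; if a concurrent session
        created the pool first (the 'path exist, request accepts it'
        outcome in the log), that is treated as success and we re-fetch."""
        for _ in range(retries):
            try:
                return fetch("default")
            except NotFound:
                try:
                    create("default")
                except AlreadyExists:
                    pass  # lost the race; the pool exists, re-fetch below
            time.sleep(delay)
        raise TimeoutError("resource pool 'default' did not become visible")

The design point the log illustrates is that creation is not treated as exclusive: a "path exist" error on create is accepted, so many sessions can bootstrap the same pool concurrently without any one of them failing.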
2025-04-10T11:31:24.991602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.227931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:25.327558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.999628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... GUtNmIzM2JhZWMtNzE1MmJmZGE=, ActorId: [2:7491645701369463472:2561], ActorState: ExecuteState, TraceId: 01jrfqg9sk0d74qgey28j712f4, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-04-10T11:31:30.398666Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNhYzlhN2ItYzdkNjAyMDctNTg4MjUwY2QtODJkNTJmMTE=, ActorId: [2:7491645701369463548:2573], ActorState: ExecuteState, TraceId: 01jrfqg9yx79xaf08gdw9aw272, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-10T11:31:30.398746Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNhYzlhN2ItYzdkNjAyMDctNTg4MjUwY2QtODJkNTJmMTE=, ActorId: [2:7491645701369463548:2573], ActorState: ExecuteState, TraceId: 01jrfqg9yx79xaf08gdw9aw272, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-10T11:31:30.398773Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNhYzlhN2ItYzdkNjAyMDctNTg4MjUwY2QtODJkNTJmMTE=, ActorId: [2:7491645701369463548:2573], ActorState: ExecuteState, TraceId: 01jrfqg9yx79xaf08gdw9aw272, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-10T11:31:30.398809Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNhYzlhN2ItYzdkNjAyMDctNTg4MjUwY2QtODJkNTJmMTE=, ActorId: [2:7491645701369463548:2573], ActorState: ExecuteState, TraceId: 01jrfqg9yx79xaf08gdw9aw272, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-10T11:31:30.522050Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTZhM2NiYzUtMjcyNGM2NGEtYjRiMThiZGItYTRkODY4MmQ=, ActorId: [2:7491645701369463577:2584], ActorState: ExecuteState, TraceId: 01jrfqga2pbgdwxsn50dz509y7, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-10T11:31:30.522123Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTZhM2NiYzUtMjcyNGM2NGEtYjRiMThiZGItYTRkODY4MmQ=, ActorId: [2:7491645701369463577:2584], ActorState: ExecuteState, TraceId: 01jrfqga2pbgdwxsn50dz509y7, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-10T11:31:30.522176Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTZhM2NiYzUtMjcyNGM2NGEtYjRiMThiZGItYTRkODY4MmQ=, ActorId: [2:7491645701369463577:2584], ActorState: ExecuteState, TraceId: 01jrfqga2pbgdwxsn50dz509y7, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-04-10T11:31:30.657984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODMzYWFkOWYtNjhiZDA3LTQ2MmYwZDVjLTIxMWYwN2Jh, ActorId: [2:7491645701369463632:2595], ActorState: ExecuteState, TraceId: 01jrfqga716awjtn7z9az3x7zz, Reply query error, msg: Pending previous query 
completion proxyRequestId: 63 2025-04-10T11:31:30.662469Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODMzYWFkOWYtNjhiZDA3LTQ2MmYwZDVjLTIxMWYwN2Jh, ActorId: [2:7491645701369463632:2595], ActorState: ExecuteState, TraceId: 01jrfqga716awjtn7z9az3x7zz, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-10T11:31:30.768537Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTBmNjY5NTktZmYxMDg0ZjgtMjRlYzA3MmItNmE4NWQzMDc=, ActorId: [2:7491645701369463660:2605], ActorState: ExecuteState, TraceId: 01jrfqgaaf6937vak87hs1agry, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 24405, MsgBus: 20519 2025-04-10T11:31:32.051373Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645705815782956:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:32.079811Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001563/r3tmp/tmp4RbNCv/pdisk_1.dat 2025-04-10T11:31:32.245658Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:32.266462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:32.266543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:32.268274Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24405, node 3 2025-04-10T11:31:32.478704Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:32.478727Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:32.478737Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:32.478854Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20519 TClient is connected to server localhost:20519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:33.060228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:33.076893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:33.161730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:33.376777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:33.481872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:36.049230Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645727290621048:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:36.049349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:36.121340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.176931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.253174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.343087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.396686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.472277Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:36.557384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645727290621567:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:36.557517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:36.557922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645727290621573:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:36.562726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:36.577421Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645727290621575:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:31:36.661827Z node 3 :TX_PROXY ERROR: Actor# [3:7491645727290621629:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:36.966269Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645705815782956:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:36.966425Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:38.134102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:47.214320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:31:47.214354Z node 3 :IMPORT WARN: Table profiles were not loaded took: 23.470028s took: 23.470866s took: 23.473850s took: 23.479274s took: 23.485159s took: 23.483810s took: 23.484481s took: 23.484656s took: 23.484910s took: 23.485501s >> TColumnShardTestReadWrite::ReadAggregate >> TGroupMapperTest::NonUniformCluster [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-04-10T11:31:57.947344Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:58.098409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:58.124530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:58.124898Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:58.134910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:58.135152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:58.135456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:58.135577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:58.135704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:58.135868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:31:58.135994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:58.136121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:58.136251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:58.136359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.136498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:58.136618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:58.171096Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:58.171438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:58.171508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:58.171738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.171931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:58.172031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:58.172098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:58.172200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:58.172278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:58.172330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:58.172363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:58.172584Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.172666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:58.172710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:58.172748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:58.172865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:58.172951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:58.173011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:58.173064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:58.173137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:58.173185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:58.173215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:58.173272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:58.173342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:58.173378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:58.173832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-10T11:31:58.173938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T11:31:58.174027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-10T11:31:58.174139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T11:31:58.174386Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:58.174470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:58.174517Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:58.174744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:58.174797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.174834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.175040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:58.175110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:58.175152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:58.175372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:58.175428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:58.175476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:58.175637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:58.175701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:58.175771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-10T11:32:03.580339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::ReadWithProgramLike >> TColumnShardTestReadWrite::ReadStale ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 6744, MsgBus: 6569 2025-04-10T11:31:11.880262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645618018945890:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.880307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001573/r3tmp/tmpXvBr14/pdisk_1.dat 2025-04-10T11:31:12.703024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.709106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.709206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.711732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6744, node 1 
2025-04-10T11:31:12.962795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.962811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.962817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.962927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6569 TClient is connected to server localhost:6569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:13.971967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:16.202204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645639493783045:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.202361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.881137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645618018945890:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.881249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.118865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.365713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:17.365913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:17.366461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:17.366652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:17.366771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:17.366894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:17.367021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:17.367149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:17.367258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:17.367359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:17.367456Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:17.367548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491645643788750469:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:17.397397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:17.402365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:17.402611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:17.402703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:17.402798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:17.402885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:17.402969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:17.403060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:17.403151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:17.403244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:17.403333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:17.403422Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491645643788750479:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:17.470798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645643788750473:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:17.470857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645643788750473:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:17.471056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645643788750473:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:17.471152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645643788750473:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:17.471249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491645643788750473:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:17.471339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... =CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:03.204781Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:03.204834Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:03.237161Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:03.237243Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:03.237564Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:03.237672Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:03.237787Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:03.237903Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:03.237991Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:03.238076Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:03.238237Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:03.238384Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:03.238529Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:03.238667Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7491645842070804590:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:03.242552Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:03.242632Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:03.242739Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:03.242764Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:03.242924Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:03.242955Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:03.243053Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:03.243077Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:03.243162Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:03.243186Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:03.243220Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:03.243241Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:03.243973Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:03.244023Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:03.244226Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:03.244250Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:03.244413Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:03.244444Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:03.244620Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:03.244642Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:03.244753Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:03.244777Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:03.265334Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:32:03.265720Z node 3 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:32:03.274294Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710658; 2025-04-10T11:32:03.302708Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645842070804692:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:03.302841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:03.302962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645842070804697:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:03.308158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:32:03.334862Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645842070804699:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:32:03.395725Z node 3 :TX_PROXY ERROR: Actor# [3:7491645842070804750:2438] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:03.597667Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645820595967367:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:03.597770Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:32:04.669152Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710665; 2025-04-10T11:32:04.669152Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710665; 2025-04-10T11:32:04.670090Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710665; >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-04-10T11:31:57.997395Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:58.121471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:58.149938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:58.150323Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:58.159465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:58.159712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:58.159957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:58.160103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:58.160233Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:58.160373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:58.160503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:58.160652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:58.160778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:58.160930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.161046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:58.161187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:58.194295Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:58.194531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:58.194587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:58.194764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.194941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:58.195051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:58.195091Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:58.195201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:58.195286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:58.195337Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:58.195374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:58.195564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.195632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:58.195673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:58.195711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:58.195824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:58.195886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:58.195936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:58.195966Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:58.196043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:58.196079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:58.196111Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:58.196159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:58.196198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:58.196224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:58.196626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T11:31:58.196711Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-10T11:31:58.196795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-10T11:31:58.196926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=74; 2025-04-10T11:31:58.197122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:58.197178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:58.197214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:58.197419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:58.197468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.197498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.197655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:58.197702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:58.197734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:58.197913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:58.197952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:58.197979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:58.198105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:58.198188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:58.198249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.952214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:06.952333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:32:06.952397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:32:06.952507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T11:32:06.952673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.952798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.952929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.953134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:06.953253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.953367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.953401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1057:2928] finished for tablet 9437184 2025-04-10T11:32:06.953771Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:1056:2927];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1744284726937681,"name":"_full_task","f":1744284726937681,"d_finished":0,"c":0,"l":1744284726953444,"d":15763},"events":[{"name":"bootstrap","f":1744284726937885,"d_finished":3287,"c":1,"l":1744284726941172,"d":3287},{"a":1744284726953110,"name":"ack","f":1744284726951909,"d_finished":1047,"c":1,"l":1744284726952956,"d":1381},{"a":1744284726953099,"name":"processing","f":1744284726942564,"d_finished":6009,"c":10,"l":1744284726952959,"d":6354},{"name":"ProduceResults","f":1744284726939667,"d_finished":3061,"c":13,"l":1744284726953391,"d":3061},{"a":1744284726953393,"name":"Finish","f":1744284726953393,"d_finished":0,"c":0,"l":1744284726953444,"d":51},{"name":"task_result","f":1744284726942583,"d_finished":4813,"c":9,"l":1744284726951757,"d":4813}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.953870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:1056:2927];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:06.954272Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:1056:2927];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":1744284726937681,"name":"_full_task","f":1744284726937681,"d_finished":0,"c":0,"l":1744284726953948,"d":16267},"events":[{"name":"bootstrap","f":1744284726937885,"d_finished":3287,"c":1,"l":1744284726941172,"d":3287},{"a":1744284726953110,"name":"ack","f":1744284726951909,"d_finished":1047,"c":1,"l":1744284726952956,"d":1885},{"a":1744284726953099,"name":"processing","f":1744284726942564,"d_finished":6009,"c":10,"l":1744284726952959,"d":6858},{"name":"ProduceResults","f":1744284726939667,"d_finished":3061,"c":13,"l":1744284726953391,"d":3061},{"a":1744284726953393,"name":"Finish","f":1744284726953393,"d_finished":0,"c":0,"l":1744284726953948,"d":555},{"name":"task_result","f":1744284726942583,"d_finished":4813,"c":9,"l":1744284726951757,"d":4813}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:06.954334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:06.937028Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:32:06.954370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:06.954656Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1057:2928];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] 
Test command err: 2025-04-10T11:32:01.884662Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:01.981897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:02.006999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:02.007289Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:02.016219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:02.016454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:02.016713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:02.016865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:02.016996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:02.017174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:02.017311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:02.017437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:02.017563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:02.017668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.017798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:02.017926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:02.054050Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:02.054713Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:02.054801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:02.055032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.055227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:02.055329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:02.055379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:02.055480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:02.055573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:02.055628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:02.055664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:02.055902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.055984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:02.056033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:02.056066Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:02.056172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:02.056240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:02.056325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:02.056365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:02.056458Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:02.056507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:02.056540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:02.056606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:02.056656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:02.056692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:02.057221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-10T11:32:02.057338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-04-10T11:32:02.057429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionsLoadingTime=40; 2025-04-10T11:32:02.057560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-10T11:32:02.057812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:02.057890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:02.057941Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:02.058201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:02.058260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.058296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.058475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:02.058528Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:02.058571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:02.058805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:02.058865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:02.058903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:02.059054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:02.059113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:02.059172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-10T11:32:07.059334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> EvWrite::WriteInTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-04-10T11:32:07.003446Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:07.115880Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:07.142204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:07.142549Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:07.151964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:07.152211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:07.152475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:07.152597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:07.152729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:07.152865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:07.152992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:07.153156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:07.153282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:07.153395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.153515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:07.153626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:07.186908Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:07.187331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:07.187413Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:07.187679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.187851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:07.187957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:07.188014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:07.188111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:07.188178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:07.188221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:07.188257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:07.188500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.188575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:07.188622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:07.188672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:07.188781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:07.188846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:07.188900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:07.188931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:07.189007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:07.189068Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:07.189098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:07.189154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:07.189201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:07.189235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:07.189695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-10T11:32:07.189800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T11:32:07.189880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionsLoadingTime=35; 2025-04-10T11:32:07.189998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:32:07.190253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:07.190328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:07.190379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:07.190612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:07.190670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.190707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.190914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:07.190968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:07.191010Z node 1 :TX_COLUMNSHARD NOTICE:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:07.191219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:07.191293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:07.191328Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:07.191467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:07.191518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:07.191571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ethod=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:08.126175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:08.126204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:08.126253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-10T11:32:08.126301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-04-10T11:32:08.126335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:08.126402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.126438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-10T11:32:08.126462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:08.126618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:08.126752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.126791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:08.126890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-04-10T11:32:08.126947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-04-10T11:32:08.127071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:301:2319];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-04-10T11:32:08.127181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.127259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.127313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.127427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:08.127515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.127589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.127617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:302:2320] finished for tablet 9437184 2025-04-10T11:32:08.127947Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:301:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744284728116481,"name":"_full_task","f":1744284728116481,"d_finished":0,"c":0,"l":1744284728127650,"d":11169},"events":[{"name":"bootstrap","f":1744284728116640,"d_finished":2000,"c":1,"l":1744284728118640,"d":2000},{"a":1744284728127407,"name":"ack","f":1744284728126595,"d_finished":737,"c":1,"l":1744284728127332,"d":980},{"a":1744284728127399,"name":"processing","f":1744284728119628,"d_finished":5510,"c":9,"l":1744284728127334,"d":5761},{"name":"ProduceResults","f":1744284728117722,"d_finished":2138,"c":12,"l":1744284728127607,"d":2138},{"a":1744284728127609,"name":"Finish","f":1744284728127609,"d_finished":0,"c":0,"l":1744284728127650,"d":41},{"name":"task_result","f":1744284728119637,"d_finished":4673,"c":8,"l":1744284728126494,"d":4673}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.128010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:301:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:08.128290Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:301:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1744284728116481,"name":"_full_task","f":1744284728116481,"d_finished":0,"c":0,"l":1744284728128038,"d":11557},"events":[{"name":"bootstrap","f":1744284728116640,"d_finished":2000,"c":1,"l":1744284728118640,"d":2000},{"a":1744284728127407,"name":"ack","f":1744284728126595,"d_finished":737,"c":1,"l":1744284728127332,"d":1368},{"a":1744284728127399,"name":"processing","f":1744284728119628,"d_finished":5510,"c":9,"l":1744284728127334,"d":6149},{"name":"ProduceResults","f":1744284728117722,"d_finished":2138,"c":12,"l":1744284728127607,"d":2138},{"a":1744284728127609,"name":"Finish","f":1744284728127609,"d_finished":0,"c":0,"l":1744284728128038,"d":429},{"name":"task_result","f":1744284728119637,"d_finished":4673,"c":8,"l":1744284728126494,"d":4673}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-10T11:32:08.128353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:08.116003Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:32:08.128391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:08.128650Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-04-10T11:32:02.294974Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:02.407850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:02.436728Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:02.437148Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:02.447391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:02.447688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:02.447975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:02.448131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:02.448283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:02.448403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:02.448545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:02.448686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:02.448825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:02.448982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.449129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:02.449277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:02.484295Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:02.484804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:02.484870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-04-10T11:32:02.485169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.485415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:02.485521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:02.485581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:02.485695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:02.485777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:02.485832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:02.485908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:02.486180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.486275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:02.486332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:02.486372Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:02.486506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:02.486587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:02.486645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:02.486684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:02.486766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:02.486819Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:02.486853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:02.486949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:02.487007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:02.487045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:02.487528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-10T11:32:02.487652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-04-10T11:32:02.487773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-10T11:32:02.487927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=71; 2025-04-10T11:32:02.488145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:02.488214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:02.488259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:02.488525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:02.488585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.488625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.488834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:02.488894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:02.488942Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:02.489207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:02.489272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:02.489317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:02.489466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:02.489523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:02.489580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... _id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.852163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:07.852352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:32:07.852465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:32:07.852656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string 
uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T11:32:07.852835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.853015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.853210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.853514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:07.853729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.853900Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.853955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:425:2440] finished for tablet 9437184 2025-04-10T11:32:07.854589Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:424:2439];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":1744284727834193,"name":"_full_task","f":1744284727834193,"d_finished":0,"c":0,"l":1744284727854027,"d":19834},"events":[{"name":"bootstrap","f":1744284727834440,"d_finished":3850,"c":1,"l":1744284727838290,"d":3850},{"a":1744284727853484,"name":"ack","f":1744284727851757,"d_finished":1496,"c":1,"l":1744284727853253,"d":2039},{"a":1744284727853464,"name":"processing","f":1744284727839845,"d_finished":8046,"c":10,"l":1744284727853256,"d":8609},{"name":"ProduceResults","f":1744284727836504,"d_finished":4239,"c":13,"l":1744284727853934,"d":4239},{"a":1744284727853941,"name":"Finish","f":1744284727853941,"d_finished":0,"c":0,"l":1744284727854027,"d":86},{"name":"task_result","f":1744284727839866,"d_finished":6358,"c":9,"l":1744284727851511,"d":6358}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.854690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:07.855237Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:424:2439];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1744284727834193,"name":"_full_task","f":1744284727834193,"d_finished":0,"c":0,"l":1744284727854752,"d":20559},"events":[{"name":"bootstrap","f":1744284727834440,"d_finished":3850,"c":1,"l":1744284727838290,"d":3850},{"a":1744284727853484,"name":"ack","f":1744284727851757,"d_finished":1496,"c":1,"l":1744284727853253,"d":2764},{"a":1744284727853464,"name":"processing","f":1744284727839845,"d_finished":8046,"c":10,"l":1744284727853256,"d":9334},{"name":"ProduceResults","f":1744284727836504,"d_finished":4239,"c":13,"l":1744284727853934,"d":4239},{"a":1744284727853941,"name":"Finish","f":1744284727853941,"d_finished":0,"c":0,"l":1744284727854752,"d":811},{"name":"task_result","f":1744284727839866,"d_finished":6358,"c":9,"l":1744284727851511,"d":6358}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:07.855341Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:07.833384Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:32:07.855395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:07.855836Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-04-10T11:32:07.050042Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 
InFlightDataSize: 0 2025-04-10T11:32:07.165685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:07.190922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:07.191324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:07.201027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:07.201355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:07.201684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:07.201848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:07.201992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:07.202170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:07.202328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:07.202469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:07.202614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:07.202743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.202875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:07.203017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:07.243077Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:07.243611Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:07.243716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:07.243934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.244113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:07.244250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:07.244309Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:07.244417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:07.244495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:07.244546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:07.244584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:07.244825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.244924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:07.244975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:07.245015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:07.245188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:07.245259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:07.245345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:07.245394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:07.245492Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:07.245548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:07.245587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:07.245658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:07.245705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:07.245736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:07.246229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=88; 2025-04-10T11:32:07.246368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-10T11:32:07.246487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-04-10T11:32:07.246641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-10T11:32:07.246902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:07.246983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:07.247046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:07.247339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:07.247408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.247449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.247662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:07.247719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:07.247771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:07.248001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:07.248052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:07.248089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:07.248290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:07.248349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:07.248406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 4-10T11:32:07.985900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=77304; 2025-04-10T11:32:07.989946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:07.990013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-10T11:32:07.990136Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=77304;blobs_count=9;max_limit=251658240;has_more=0;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:07.990504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:07.990928Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69691;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;type=CS::INDEXATION;priority=0;; 2025-04-10T11:32:07.991193Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69691;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;type=CS::INDEXATION;priority=0;; 2025-04-10T11:32:07.991241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;mem=69691;cpu=0; 2025-04-10T11:32:07.991428Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;task_id=1;mem=69691;cpu=0; 2025-04-10T11:32:07.991613Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:08.016053Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-10T11:32:08.016259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-10T11:32:08.016939Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:32:08.017022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:08.017156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=77304;indexing_debug={task_ids=6f7e0844-15ff11f0-9b8e6544-1d7c4f67,;}; 2025-04-10T11:32:08.017259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:32:08.017536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:32:08.017614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:08.017698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:08.017808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:32:08.018336Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-10T11:32:08.035231Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-04-10T11:32:08.035392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-04-10T11:32:08.035505Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. now: 2025-04-10T11:32:08.035473Z; 2025-04-10T11:32:08.050293Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:1:6824:0]; 2025-04-10T11:32:08.050450Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:2:6824:0]; 2025-04-10T11:32:08.056616Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:32:08.056927Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:32:08.058114Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-10T11:32:08.058332Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:32:08.058946Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-10T11:32:08.071584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-10T11:32:08.071653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;fline=with_appended.cpp:65;portions=1,;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:08.071871Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::6f7e0844-15ff11f0-9b8e6544-1d7c4f67; 2025-04-10T11:32:08.071945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:08.072012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:32:08.072076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:32:08.072144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:32:08.072229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 
2025-04-10T11:32:08.072280Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:08.072359Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.993000s; 2025-04-10T11:32:08.072432Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:32:08.072589Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-10T11:32:08.072787Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=6f7e0844-15ff11f0-9b8e6544-1d7c4f67;mem=69691;cpu=0; 2025-04-10T11:32:08.072917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:32:08.072990Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-10T11:32:08.075438Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-10T11:32:08.075562Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-10T11:32:08.076370Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-04-10T11:32:08.076488Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. 
now: 2025-04-10T11:32:08.076451Z; >> TColumnShardTestReadWrite::ReadAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2025-04-10T11:32:07.744048Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:07.844759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:07.871282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:07.871640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:07.881682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:07.881979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:07.882310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:07.882456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:07.882600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:07.882745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:07.882890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:07.883019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:07.883147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:07.883268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.883393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:07.883524Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:07.922101Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:07.922334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:07.922404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:07.922661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.922837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:07.922937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:07.922988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:07.923079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:07.923146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:07.923190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:07.923222Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:07.923429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:07.923508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:07.923551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:07.923583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:07.923681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:07.923736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:07.923786Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:07.923815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:07.923889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:07.923934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:07.923975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:07.924026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:07.924075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:07.924108Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:07.924557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-10T11:32:07.924652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:32:07.924748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-04-10T11:32:07.924853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-10T11:32:07.925066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:07.925126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:07.925175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:07.925373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:07.925428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.925480Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:07.925654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:07.925699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:07.925735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:07.925952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:07.926000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:07.926042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:07.926205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:07.926260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:07.926323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
X_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:08.895219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:08.895457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-10T11:32:08.895756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-04-10T11:32:08.895986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:08.896294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.896351Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-04-10T11:32:08.896415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:08.897166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:08.897901Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.898086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:08.898710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-04-10T11:32:08.899034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-04-10T11:32:08.899362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:283:2301];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-10T11:32:08.899879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.900105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.900446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.901119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:08.901524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.902757Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.902896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:287:2305] finished for tablet 9437184 2025-04-10T11:32:08.903934Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:283:2301];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.054},{"events":["f_ack"],"t":0.055},{"events":["l_ProduceResults","f_Finish"],"t":0.06},{"events":["l_ack","l_processing","l_Finish"],"t":0.061}],"full":{"a":1744284728841960,"name":"_full_task","f":1744284728841960,"d_finished":0,"c":0,"l":1744284728903066,"d":61106},"events":[{"name":"bootstrap","f":1744284728842333,"d_finished":4180,"c":1,"l":1744284728846513,"d":4180},{"a":1744284728901001,"name":"ack","f":1744284728897066,"d_finished":3455,"c":1,"l":1744284728900521,"d":5520},{"a":1744284728900961,"name":"processing","f":1744284728850703,"d_finished":32794,"c":9,"l":1744284728900540,"d":34899},{"name":"ProduceResults","f":1744284728844868,"d_finished":10036,"c":12,"l":1744284728902836,"d":10036},{"a":1744284728902857,"name":"Finish","f":1744284728902857,"d_finished":0,"c":0,"l":1744284728903066,"d":209},{"name":"task_result","f":1744284728850741,"d_finished":28927,"c":8,"l":1744284728896529,"d":28927}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.904095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:283:2301];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:08.904932Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:283:2301];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.054},{"events":["f_ack"],"t":0.055},{"events":["l_ProduceResults","f_Finish"],"t":0.06},{"events":["l_ack","l_processing","l_Finish"],"t":0.062}],"full":{"a":1744284728841960,"name":"_full_task","f":1744284728841960,"d_finished":0,"c":0,"l":1744284728904183,"d":62223},"events":[{"name":"bootstrap","f":1744284728842333,"d_finished":4180,"c":1,"l":1744284728846513,"d":4180},{"a":1744284728901001,"name":"ack","f":1744284728897066,"d_finished":3455,"c":1,"l":1744284728900521,"d":6637},{"a":1744284728900961,"name":"processing","f":1744284728850703,"d_finished":32794,"c":9,"l":1744284728900540,"d":36016},{"name":"ProduceResults","f":1744284728844868,"d_finished":10036,"c":12,"l":1744284728902836,"d":10036},{"a":1744284728902857,"name":"Finish","f":1744284728902857,"d_finished":0,"c":0,"l":1744284728904183,"d":1326},{"name":"task_result","f":1744284728850741,"d_finished":28927,"c":8,"l":1744284728896529,"d":28927}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:08.905137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:08.841393Z;index_granules=0;index_portions=1;index_batches=82;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=238056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=238056;selected_rows=0; 2025-04-10T11:32:08.905233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:08.905803Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 >> TColumnShardTestReadWrite::WriteRead >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-04-10T11:32:05.161233Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2025-04-10T11:32:05.254212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:05.275521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:05.275816Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:05.282730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:05.282941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:05.283188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:05.283342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:05.283458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:05.283590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:05.283712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:05.283839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:05.283979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:05.284125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.284242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:05.284372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:05.311441Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:05.311842Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:05.311914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:05.312112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:05.312284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:05.312364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:05.312437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:05.312540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:05.312621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:05.312669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:05.312700Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:05.312890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:05.312974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:05.313022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:05.313072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:05.313186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:05.313258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:05.313323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:05.313364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:05.313437Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:05.313479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:05.313520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:05.313578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:05.313620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:05.313650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:05.314057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:32:05.314168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-10T11:32:05.314285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T11:32:05.314394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-10T11:32:05.314592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:05.314648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:05.314684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:05.314912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:05.314959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.314995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.315195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:05.315259Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:05.315300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:05.315496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:05.315545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:05.315577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:05.315729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:05.315782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:05.315835Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... Id=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:09.177283Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:09.177318Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:09.177358Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-04-10T11:32:09.177404Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-04-10T11:32:09.177441Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:09.177524Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.177558Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-10T11:32:09.177592Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:09.178071Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:09.178241Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.178283Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:09.178379Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-04-10T11:32:09.178440Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-10T11:32:09.178561Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-10T11:32:09.178646Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.178729Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.178794Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.179012Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:09.179085Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.179144Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.179170Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:435:2453] finished for tablet 9437184 2025-04-10T11:32:09.179604Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[2:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1744284729165473,"name":"_full_task","f":1744284729165473,"d_finished":0,"c":0,"l":1744284729179212,"d":13739},"events":[{"name":"bootstrap","f":1744284729165704,"d_finished":2249,"c":1,"l":1744284729167953,"d":2249},{"a":1744284729178997,"name":"ack","f":1744284729178044,"d_finished":767,"c":1,"l":1744284729178811,"d":982},{"a":1744284729178987,"name":"processing","f":1744284729169132,"d_finished":6725,"c":10,"l":1744284729178813,"d":6950},{"name":"ProduceResults","f":1744284729166955,"d_finished":2323,"c":13,"l":1744284729179160,"d":2323},{"a":1744284729179162,"name":"Finish","f":1744284729179162,"d_finished":0,"c":0,"l":1744284729179212,"d":50},{"name":"task_result","f":1744284729169149,"d_finished":5816,"c":9,"l":1744284729177640,"d":5816}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.179681Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[2:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
2025-04-10T11:32:09.180058Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[2:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1744284729165473,"name":"_full_task","f":1744284729165473,"d_finished":0,"c":0,"l":1744284729179719,"d":14246},"events":[{"name":"bootstrap","f":1744284729165704,"d_finished":2249,"c":1,"l":1744284729167953,"d":2249},{"a":1744284729178997,"name":"ack","f":1744284729178044,"d_finished":767,"c":1,"l":1744284729178811,"d":1489},{"a":1744284729178987,"name":"processing","f":1744284729169132,"d_finished":6725,"c":10,"l":1744284729178813,"d":7457},{"name":"ProduceResults","f":1744284729166955,"d_finished":2323,"c":13,"l":1744284729179160,"d":2323},{"a":1744284729179162,"name":"Finish","f":1744284729179162,"d_finished":0,"c":0,"l":1744284729179719,"d":557},{"name":"task_result","f":1744284729169149,"d_finished":5816,"c":9,"l":1744284729177640,"d":5816}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-10T11:32:09.180148Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:09.164933Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-10T11:32:09.180187Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:09.180429Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> KqpQueryService::DdlMixedDml [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-04-10T11:31:48.560779Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.668619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.690922Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.691229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.699763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.700047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.700296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.700412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.700586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.700690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.700800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.700923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.701043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.701192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.701306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.701403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.731725Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.732081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.732168Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.732395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.732559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.732641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.732681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.732768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.732824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.732878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.732913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.733139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.733204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.733242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.733273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.733365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.733408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.733453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.733477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.733550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.733584Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.733608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.733660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.733698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.733723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.734125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-10T11:31:48.734239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-10T11:31:48.734332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T11:31:48.734453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-10T11:31:48.734631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.734683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.734726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.734924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.734969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.735000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.735166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.735210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.735240Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.735434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.735477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.735505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.735626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.735658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.735702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-04-10T11:32:09.835290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:09.835396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.835436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-10T11:32:09.835482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:09.836824Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:09.836985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.837028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:09.837146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-04-10T11:32:09.837200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-04-10T11:32:09.837320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:3520:5532];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-10T11:32:09.837426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.837551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.837641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.838188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:09.838307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.838410Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.838451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:3525:5537] finished for tablet 9437184 2025-04-10T11:32:09.838915Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:3520:5532];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.022},{"events":["f_ack"],"t":0.023},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.025}],"full":{"a":1744284729812900,"name":"_full_task","f":1744284729812900,"d_finished":0,"c":0,"l":1744284729838503,"d":25603},"events":[{"name":"bootstrap","f":1744284729813593,"d_finished":1978,"c":1,"l":1744284729815571,"d":1978},{"a":1744284729838142,"name":"ack","f":1744284729836789,"d_finished":881,"c":1,"l":1744284729837670,"d":1242},{"a":1744284729838123,"name":"processing","f":1744284729816860,"d_finished":3702,"c":8,"l":1744284729837673,"d":4082},{"name":"ProduceResults","f":1744284729814692,"d_finished":2323,"c":11,"l":1744284729838435,"d":2323},{"a":1744284729838438,"name":"Finish","f":1744284729838438,"d_finished":0,"c":0,"l":1744284729838503,"d":65},{"name":"task_result","f":1744284729816880,"d_finished":2631,"c":7,"l":1744284729835545,"d":2631}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.838984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:3520:5532];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:09.839388Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:3520:5532];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.022},{"events":["f_ack"],"t":0.023},{"events":["l_ProduceResults","f_Finish"],"t":0.025},{"events":["l_ack","l_processing","l_Finish"],"t":0.026}],"full":{"a":1744284729812900,"name":"_full_task","f":1744284729812900,"d_finished":0,"c":0,"l":1744284729839027,"d":26127},"events":[{"name":"bootstrap","f":1744284729813593,"d_finished":1978,"c":1,"l":1744284729815571,"d":1978},{"a":1744284729838142,"name":"ack","f":1744284729836789,"d_finished":881,"c":1,"l":1744284729837670,"d":1766},{"a":1744284729838123,"name":"processing","f":1744284729816860,"d_finished":3702,"c":8,"l":1744284729837673,"d":4606},{"name":"ProduceResults","f":1744284729814692,"d_finished":2323,"c":11,"l":1744284729838435,"d":2323},{"a":1744284729838438,"name":"Finish","f":1744284729838438,"d_finished":0,"c":0,"l":1744284729839027,"d":589},{"name":"task_result","f":1744284729816880,"d_finished":2631,"c":7,"l":1744284729835545,"d":2631}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-10T11:32:09.839461Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:09.812389Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-04-10T11:32:09.839500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:09.839689Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:3525:5537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-04-10T11:32:04.029708Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:04.152129Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:04.178372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:04.178652Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:04.187474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:04.187689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:04.187933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:04.188062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:04.188201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:04.188337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:04.188462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:04.188586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:04.188709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:04.188814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:04.188965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:04.189101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:04.222580Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:04.222983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:04.223058Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:04.223254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:04.223432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:04.223515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:04.223560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:04.223654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:04.223722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:04.223766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:04.223796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:04.224001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:04.224073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:04.224116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:04.224150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:04.224288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:04.224370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:04.224470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:04.224503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:04.224580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:04.224627Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:04.224661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:04.224718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:04.224755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:04.224783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:04.225191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-10T11:32:04.225299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-10T11:32:04.225385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-10T11:32:04.225486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-10T11:32:04.225707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:04.225785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:04.225826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:04.226058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:04.226113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:04.226165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:04.226348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:04.226396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:04.226437Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:04.226640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:04.226681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:04.226711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:04.226861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:04.226907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:04.226954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-10T11:32:09.794195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] >> Normalizers::PortionsNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 62773, MsgBus: 4518 2025-04-10T11:31:11.869791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645620101626337:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.869849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156c/r3tmp/tmpLBrK7E/pdisk_1.dat 2025-04-10T11:31:12.768482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.775742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.775850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.779485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62773, node 1 2025-04-10T11:31:12.995635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.995665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.995675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.995797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4518 TClient is connected to server localhost:4518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.098378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.115600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:14.133790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.337167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.567692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.692272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:31:16.575602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645641576464353:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.575813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.862516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645620101626337:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.862617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:17.093089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.184578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.246818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.293319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.375153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.444923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.521196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645871432171:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.521265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.521528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645871432176:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.526516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.546951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645645871432178:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:17.610745Z node 1 :TX_PROXY ERROR: Actor# [1:7491645645871432234:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:18.904169Z node 1 :TX_PROXY ERROR: Actor# [1:7491645650166399872:3691] txid# 281474976710672, issues: { message: "Group already exists" severity: 1 } 2025-04-10T11:31:18.916318Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI3ZWUwMy1hNGQ0YTk1ZC03MjM3ZDI3NS05ZmNjOGExYw==, ActorId: [1:7491645650166399849:2507], ActorState: ExecuteState, TraceId: 01jrfqfypz8qgmzaq9732j3s5r, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.027980Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367217:3716] txid# 281474976710676, issues: { message: "Group not found" severity: 1 } 2025-04-10T11:31:19.028172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2ZmMGRiNzMtYjI2MTY3YzItNTBjMGFkM2QtNWUyNmI0Mg==, ActorId: [1:7491645654461367211:2517], ActorState: ExecuteState, TraceId: 01jrfqfyv62t6p6jf3j67gk2ns, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.135050Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367294:3763] txid# 281474976710681, issues: { message: "Group already exists" severity: 1 } 2025-04-10T11:31:19.135781Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRlYmYwOS03ZjBlODY4Ni0xOGJjZDU2YS0yMGM4MTI0Mw==, ActorId: [1:7491645654461367288:2529], ActorState: ExecuteState, TraceId: 01jrfqfyybb6z5bhw13svef9qy, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.328276Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367373:3810] txid# 281474976710686, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-04-10T11:31:19.355784Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367387:3817] txid# 281474976710687, issues: { message: "Member account not found" severity: 1 } 2025-04-10T11:31:19.356014Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJiNTViOGQtMzQ4NDI1NmMtMjY5OTRkOTItMWQ2YjFjYg==, ActorId: [1:7491645654461367381:2544], ActorState: ExecuteState, TraceId: 01jrfqfz5cb912gvgx3k2cvsc1, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.399463Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367417:3831] txid# 281474976710690, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-10T11:31:19.425410Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367450:3857] txid# 281474976710691, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-10T11:31:19.480398Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367478:3873] txid# 281474976710693, issues: { message: "Member account not found" severity: 1 } 2025-04-10T11:31:19.480812Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTA0YTNmMzktNjM2NWZiNzctZmRiOGFiN2EtYTJkODJhMDI=, ActorId: [1:7491645654461367458:2556], ActorState: ExecuteState, TraceId: 01jrfqfz8b0vbergrwzrb0kq6z, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.510140Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367494:3880] txid# 281474976710695, issues: { message: "Role \"user1\" is already a member of role 
\"group1\"" issue_code: 2 severity: 3 } 2025-04-10T11:31:19.591541Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367564:3922] txid# 281474976710698, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-10T11:31:19.643754Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367586:3934] txid# 281474976710700, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-10T11:31:19.678057Z node 1 :TX_PROXY ERROR: Actor# [1:7491645654461367604:3942] txid# 281474976710701, issues: { message: "Role \"user2\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-10T11:31:19.767633Z node 1 :TX_PROXY ERROR: Actor ... 491645824700363456:4005], TxId: 281474976715842, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqh5tj68ytmytpf2x9v72g. SessionId : ydb://session/3?node_id=3&id=NjQ5Njg1YmUtOTdjMzRlMTktYTlhODg4NDUtYTVmYjdhY2Y=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-10T11:31:59.371120Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491645824700363457:4006], TxId: 281474976715842, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NjQ5Njg1YmUtOTdjMzRlMTktYTlhODg4NDUtYTVmYjdhY2Y=. CustomerSuppliedId : . TraceId : 01jrfqh5tj68ytmytpf2x9v72g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491645824700363453:3899], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:31:59.371434Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjQ5Njg1YmUtOTdjMzRlMTktYTlhODg4NDUtYTVmYjdhY2Y=, ActorId: [3:7491645816110428538:3899], ActorState: ExecuteState, TraceId: 01jrfqh5tj68ytmytpf2x9v72g, Create QueryResponse for error on request, msg: 2025-04-10T11:31:59.373265Z node 3 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jrfqh5b185cf1m4rgew4r312" } } } } ;request=session_id: "ydb://session/3?node_id=3&id=NjQ5Njg1YmUtOTdjMzRlMTktYTlhODg4NDUtYTVmYjdhY2Y=" tx_control { tx_id: "01jrfqh5b185cf1m4rgew4r312" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-04-10T11:31:59.374067Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDNmYTIzMC05ZmJkYzU4ZS0xNmEyYTdjYS03ZGNlYjQ1OA==, ActorId: [3:7491645816110428526:3893], ActorState: ExecuteState, TraceId: 01jrfqh4rhbvz951wvc7q9amqh, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 65112, MsgBus: 1617 2025-04-10T11:32:02.761009Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645836241839300:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:02.761094Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00156c/r3tmp/tmpjIVPlH/pdisk_1.dat 2025-04-10T11:32:02.972737Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:03.008682Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:03.008822Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:03.011496Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65112, node 4 2025-04-10T11:32:03.086783Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:03.086820Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:03.086830Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:03.087010Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1617 TClient is connected to server localhost:1617 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:03.737257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:03.747175Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:32:03.766899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:03.888414Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:04.127537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:04.234584Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:07.307799Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645857716677563:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:07.307910Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:07.372729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.448586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.492011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.570369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.627779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.693279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:32:07.753640Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645857716678077:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:07.753738Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:07.753804Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645857716678082:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:07.758266Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:32:07.761689Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491645836241839300:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:07.761848Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:32:07.779087Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491645857716678084:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:32:07.872438Z node 4 :TX_PROXY ERROR: Actor# [4:7491645857716678143:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:09.511653Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491645866306613064:2504], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-04-10T11:32:09.514971Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTA1M2RiNTQtYTYxMjJiNDMtNWE2Njk4MmYtNWU5NjhjMA==, ActorId: [4:7491645866306613057:2500], ActorState: ExecuteState, TraceId: 01jrfqhg3xdvr0ey5c9k44e1nz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-10T11:31:59.707481Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:59.830990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:59.855980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:59.856285Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:59.865192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:59.865415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:59.865679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:59.865788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:59.865905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:59.866027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:59.866138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:59.866661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:59.866783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:59.866897Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.867036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:59.867154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:59.910265Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:59.910552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:59.910612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:59.910804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.910972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:59.911072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:59.911115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:59.911261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:59.911342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:59.911392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:59.911430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:59.911619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.911696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:59.911737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:59.911767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:59.911857Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:59.911905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:59.911955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:59.911982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:59.912055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:59.912125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:59.912167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:59.912219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:59.912257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:59.912288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:59.912719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-10T11:31:59.912831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-04-10T11:31:59.912937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T11:31:59.913065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-10T11:31:59.913239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:59.913325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:59.913364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:59.913572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-10T11:31:59.913643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.913678Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.913862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:59.913920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:59.913954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:59.914125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:59.914272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:59.914322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:59.914484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:59.914539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:59.914609Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... =9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:32:00.246883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:32:00.246936Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:32:00.246976Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:32:00.247038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:00.247087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-10T11:32:00.545171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=88923003853600;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-10T11:32:00.563884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024960;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-04-10T11:32:00.563980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024960;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-10T11:32:00.564051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024960;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-10T11:32:00.564516Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-10T11:32:00.564722Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-10T11:32:00.564786Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-10T11:32:00.565155Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-10T11:32:00.575547Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:32:00.575707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-10T11:32:00.575763Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 
2025-04-10T11:32:00.583226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-10T11:32:00.583429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:212;event=finished_tx;tx_id=10; 2025-04-10T11:32:00.607742Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-10T11:32:00.607943Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6330728;columns=10; CATCH TEvWritePortionResult, status OK 2025-04-10T11:32:09.460308Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=columnshard__write.cpp:96;event=shard_overload;reason=writes_size_in_fly;sum=132945288;limit=128000000; 2025-04-10T11:32:09.462426Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=overload data error;tx_id=0; CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-10T11:32:10.574727Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:32:10.598004Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:32:10.618893Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:32:10.635296Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:32:10.638089Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-10T11:32:10.651016Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-10T11:32:10.653974Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:32:10.666353Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:32:10.677872Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:32:10.691138Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:32:10.693943Z node 1 :TX_COLUMNSHARD DEBUG: 
BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:32:10.706828Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:32:10.709816Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:32:10.722020Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:32:10.736252Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:32:10.752657Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:32:10.755561Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:32:10.767921Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:32:10.770648Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:32:10.782699Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:32:10.794374Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T11:32:10.810046Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T11:32:10.813386Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:32:10.826302Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:32:10.829207Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:32:10.843286Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:32:10.866507Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:32:10.879341Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:32:10.882347Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T11:32:10.894981Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T11:32:10.898298Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:32:10.911996Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:32:10.928303Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:32:10.943275Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:32:10.946414Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:32:10.961545Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:32:10.964705Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:32:10.986802Z node 1 
:TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:32:11.001590Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:32:11.014776Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:32:11.017937Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:32:11.039087Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:32:11.528408Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T11:32:11.540677Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 >> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-04-10T11:31:10.487041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645612791892381:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:10.487496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpKF6Uow/pdisk_1.dat 2025-04-10T11:31:11.231638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:11.330560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:11.330651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:11.340999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25409, node 1 2025-04-10T11:31:11.882864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:11.882895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:11.882904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:11.883034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27309 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:12.753649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.714365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645629971762573:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:14.714483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645629971762585:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:14.714551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:14.742613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:14.800619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645629971762587:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:31:14.912628Z node 1 :TX_PROXY ERROR: Actor# [1:7491645629971762661:2689] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:15.480142Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645612791892381:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:15.480206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/"Create temporary directory "/Root/~backup_20250410T113115" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view"Write view into "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view/permissions.pb"Remove temporary directory "/Root/~backup_20250410T113115" in database2025-04-10T11:31:16.098439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view"Restore view "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpHtDrXk/view/permissions.pb"2025-04-10T11:31:16.421929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T11:31:18.550800Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645647685844771:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:18.584123Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmp843wfF/pdisk_1.dat 2025-04-10T11:31:18.907653Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:18.958102Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:18.958218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:18.971753Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25091, node 4 2025-04-10T11:31:19.222726Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:19.222749Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:19.222755Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:19.222888Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:19.667377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:22.763807Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645664865715039:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:22.763879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645664865715050:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:22.763936Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:22.767932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:31:22.816732Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491645664865715053:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:31:22.884323Z node 4 :TX_PROXY ERROR: Actor# [4:7491645664865715131:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:22.920988Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T11:31:23.482262Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491645647685844771:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:23.482342Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path exi ... rActor] ActorId: [16:7491645828130966683:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:00.769695Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/" to "/Root"2025-04-10T11:32:00.877468Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found 2025-04-10T11:32:00.877515Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table" to "/Root/table"2025-04-10T11:32:00.970529Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-10T11:32:01.142587Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T11:32:01.291022Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmphN4CGx/table/permissions.pb"2025-04-10T11:32:01.517150Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T11:32:03.519492Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7491645842333588028:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:03.519562Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpxPR2uT/pdisk_1.dat 2025-04-10T11:32:03.777294Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:03.833286Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:03.833429Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:03.845221Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19482, node 19 2025-04-10T11:32:03.937409Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:03.937436Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:03.937453Z node 19 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:32:03.937629Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:04.127783Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:08.375591Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7491645863808425604:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:08.375710Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:08.419046Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:32:08.522946Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7491645842333588028:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:08.523263Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/"Create temporary directory "/Root/~backup_20250410T113208" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T113208/table" }Backup table "/Root/~backup_20250410T113208/table" to "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table"Describe table "/Root/~backup_20250410T113208/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/permissions.pb"Read table "/Root/~backup_20250410T113208/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/data_00.csv"Drop table "/Root/~backup_20250410T113208/table"Remove temporary directory "/Root/~backup_20250410T113208" in database2025-04-10T11:32:09.307796Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037892 not found 2025-04-10T11:32:09.307848Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037893 not found 2025-04-10T11:32:09.312233Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037891 not found 2025-04-10T11:32:09.334500Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:32:09.385304Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7491645868103393963:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:09.385445Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/" to "/Root"2025-04-10T11:32:09.583935Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037889 not found 2025-04-10T11:32:09.584025Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037890 not found 2025-04-10T11:32:09.584053Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table" to "/Root/table"2025-04-10T11:32:09.639717Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-04-10T11:32:09.830104Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-10T11:32:09.931509Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-10T11:32:10.034990Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-10T11:32:10.306653Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710761:0, at schemeshard: 72057594046644480 2025-04-10T11:32:10.422771Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037898 not found 2025-04-10T11:32:10.422823Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037897 not found Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d26/r3tmp/tmpsVWsUh/table/permissions.pb"2025-04-10T11:32:10.614470Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-04-10T11:32:01.187049Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:01.301902Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:01.327871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:01.328213Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:01.337330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:01.337548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:01.337822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:01.337939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:01.338055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:01.338206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:01.338328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:01.338448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:01.338567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:01.338663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.338800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:01.338903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:01.368991Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:01.369426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:01.369498Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:01.369750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.369924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:01.370023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:01.370077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:01.370224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:01.370305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:01.370351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:01.370382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:01.370571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.370641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:01.370689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:01.370718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:01.370840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:01.370911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:01.370971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:01.370999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:01.371064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:01.371111Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:01.371138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:01.371189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:01.371234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:01.371263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:01.371689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-10T11:32:01.371797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-10T11:32:01.371891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-10T11:32:01.372005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-10T11:32:01.372207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:01.372298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:01.372338Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:01.372546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:01.372597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.372628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.372791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:01.372843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:01.372876Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:01.373090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:01.373139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:01.373168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:01.373326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:01.373375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:01.373425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... =9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:32:01.643257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:32:01.643317Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:32:01.643371Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:32:01.643446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:01.643509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-04-10T11:32:01.925344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=88923003853600;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-04-10T11:32:01.931638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-04-10T11:32:01.931761Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-04-10T11:32:01.931832Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923003853600;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881024576;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-10T11:32:01.932157Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-04-10T11:32:01.932336Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-10T11:32:01.932389Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-10T11:32:01.932686Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-10T11:32:01.932800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-10T11:32:01.937489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-04-10T11:32:01.942243Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:32:01.943689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-04-10T11:32:01.943922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:212;event=finished_tx;tx_id=10; 2025-04-10T11:32:01.970623Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-10T11:32:01.970816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6330728;columns=10; CATCH TEvWritePortionResult, status OK 2025-04-10T11:32:10.870769Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=columnshard__write.cpp:96;event=shard_overload;reason=writes_size_in_fly;sum=132945288;limit=128000000; 2025-04-10T11:32:10.873134Z node 1 :TX_COLUMNSHARD_WRITE WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=overload data error;tx_id=0; CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH 
TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK CATCH TEvWritePortionResult, status OK 2025-04-10T11:32:12.018818Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:32:12.049152Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-10T11:32:12.066508Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:32:12.082860Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-10T11:32:12.086392Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-10T11:32:12.099437Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-04-10T11:32:12.102840Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:32:12.117773Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-04-10T11:32:12.130447Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:32:12.143940Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-04-10T11:32:12.146799Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:32:12.159037Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 1 2025-04-10T11:32:12.162072Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:32:12.174992Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 1 2025-04-10T11:32:12.187883Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:32:12.200148Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 1 2025-04-10T11:32:12.203237Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:32:12.218893Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 1 2025-04-10T11:32:12.221750Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:32:12.233912Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 1 2025-04-10T11:32:12.248311Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 2025-04-10T11:32:12.261881Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 1 
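The WriteRead trace above captures the shard's write backpressure: the serialized batches are 6,330,728 bytes each, the running writes_size_in_fly total reaches 132,945,288 bytes (exactly 21 such batches), crosses the 128,000,000-byte limit, and the incoming TEvWrite is rejected with STATUS_OVERLOADED. Below is a minimal sketch of that admission check, assuming a plain byte counter; TInFlyWriteTracker and its methods are hypothetical names, not the YDB classes.

// Minimal sketch (not the YDB implementation) of the backpressure check the
// log reports: a write is rejected when the bytes of writes already in
// flight plus the new payload exceed a fixed limit.
#include <cstdint>
#include <iostream>

class TInFlyWriteTracker {                 // hypothetical name
public:
    explicit TInFlyWriteTracker(uint64_t limitBytes) : Limit(limitBytes) {}

    // Returns true if the write may start; otherwise mirrors the
    // "shard_overload;reason=writes_size_in_fly;sum=...;limit=..." record.
    bool TryStartWrite(uint64_t payloadBytes) {
        if (InFly + payloadBytes > Limit) {
            std::cerr << "shard_overload;reason=writes_size_in_fly;sum="
                      << InFly + payloadBytes << ";limit=" << Limit << "\n";
            return false;                  // caller replies STATUS_OVERLOADED
        }
        InFly += payloadBytes;
        return true;
    }

    void FinishWrite(uint64_t payloadBytes) { InFly -= payloadBytes; }

private:
    uint64_t Limit;
    uint64_t InFly = 0;
};

int main() {
    TInFlyWriteTracker tracker(128000000);  // limit from the log
    for (int i = 0; i < 20; ++i)
        tracker.TryStartWrite(6330728);     // 20 batches in fly = 126,614,560
    tracker.TryStartWrite(6330728);         // sum=132945288 > limit: rejected
}

Once TEvWritePortionResult acknowledgements drain the counter, later writes are admitted again, which is consistent with the Save Batch records that bracket this point.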
2025-04-10T11:32:12.265128Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:32:12.279907Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-10T11:32:12.282869Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:32:12.296560Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 1 2025-04-10T11:32:12.312003Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:32:12.328050Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 1 2025-04-10T11:32:12.331331Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T11:32:12.344852Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 1 2025-04-10T11:32:12.348544Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:32:12.361069Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 1 2025-04-10T11:32:12.384040Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:32:12.396612Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 1 2025-04-10T11:32:12.399769Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:32:12.413337Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-04-10T11:32:12.416722Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:32:12.429686Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-04-10T11:32:12.445920Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:32:12.458321Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-04-10T11:32:12.461694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:32:12.474267Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-04-10T11:32:12.888825Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-04-10T11:32:12.901224Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest >> Normalizers::PortionsNormalizer [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] Test command err: 2025-04-10T11:32:12.094213Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:12.200553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:12.219928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:12.220182Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:12.229893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-10T11:32:12.230194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-04-10T11:32:12.230331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.230522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:12.230705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:12.230846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:12.230969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:12.231103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:12.231224Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:12.231337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:12.231458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:12.231569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:12.231685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:12.231793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:12.264848Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:12.265295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-10T11:32:12.265375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.265748Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-10T11:32:12.265900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.265997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.266046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.266581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-10T11:32:12.266681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:32:12.266765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-10T11:32:12.266854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-10T11:32:12.266959Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:12.267022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-10T11:32:12.267068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-10T11:32:12.267216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:12.267311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:12.267364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:12.267406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-10T11:32:12.267520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:12.267586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:12.267646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:12.267684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:12.267875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:12.267943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:12.267988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:12.268021Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:12.268165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:12.268236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:12.268285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:12.268347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:12.268457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:12.268499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:12.268541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:12.268626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:12.268670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:12.268705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:12.269145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-04-10T11:32:12.269225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-04-10T11:32:12.269308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-10T11:32:12.269405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-10T11:32:12.269589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:12.269641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... 37184;self_id=[1:396:2410];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:32:15.686296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:32:15.686337Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
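The periodic-wakeup records here and just below show the maintenance loop in its idle state: every wakeup calls EnqueueBackgroundActivities, and each activity (indexation, cleanup, TTL) bails out with skip_reason=no_changes because the freshly bootstrapped shard has no portions to work on. A rough sketch of that pattern follows; the names are illustrative only, not the columnshard_impl.cpp code.

// Hedged sketch of the maintenance pass described by the surrounding records:
// a periodic wakeup enqueues background activities and each one is skipped
// when it has no pending work.
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

struct TActivity {                  // hypothetical
    std::string Name;
    size_t PendingUnits;            // e.g. insert_overload_size, portions_count
};

void EnqueueBackgroundActivities(const std::vector<TActivity>& activities) {
    for (const auto& a : activities) {
        if (a.PendingUnits == 0) {
            std::cout << "background=" << a.Name << ";skip_reason=no_changes\n";
            continue;               // the case logged on every wakeup here
        }
        std::cout << "background=" << a.Name << ";scheduled=" << a.PendingUnits << "\n";
    }
}

int main() {
    // Values taken from the records that follow: everything is empty.
    EnqueueBackgroundActivities({{"indexation", 0}, {"cleanup", 0}, {"ttl", 0}});
}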
2025-04-10T11:32:15.686368Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:32:15.686431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:32:15.686520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:32:15.686603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-04-10T11:32:15.686687Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:32:15.686787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:15.686851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:32:15.687054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-10T11:32:15.687147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:32:15.865245Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-10T11:32:15.865460Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-10T11:32:15.865715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-10T11:32:15.865816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-10T11:32:15.866755Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-04-10T11:32:15.866949Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:131;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-10T11:32:15.867670Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:396:2410];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:166;event=TTxScan started;actor_id=[1:469:2475];trace_detailed=; 2025-04-10T11:32:15.868491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-10T11:32:15.868770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-10T11:32:15.869198Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:15.869380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:15.871701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:15.871798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:469:2475] finished for tablet 9437184 2025-04-10T11:32:15.872419Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:467:2474];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.004}],"full":{"a":1744284735867577,"name":"_full_task","f":1744284735867577,"d_finished":0,"c":0,"l":1744284735871908,"d":4331},"events":[{"name":"bootstrap","f":1744284735867830,"d_finished":1139,"c":1,"l":1744284735868969,"d":1139},{"a":1744284735869165,"name":"ack","f":1744284735869165,"d_finished":0,"c":0,"l":1744284735871908,"d":2743},{"a":1744284735869145,"name":"processing","f":1744284735869145,"d_finished":0,"c":0,"l":1744284735871908,"d":2763},{"name":"ProduceResults","f":1744284735868951,"d_finished":2543,"c":2,"l":1744284735871767,"d":2543},{"a":1744284735871778,"name":"Finish","f":1744284735871778,"d_finished":0,"c":0,"l":1744284735871908,"d":130}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:15.872536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:467:2474];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:15.873107Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:467:2474];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.004},{"events":["l_ack","l_processing","l_Finish"],"t":0.005}],"full":{"a":1744284735867577,"name":"_full_task","f":1744284735867577,"d_finished":0,"c":0,"l":1744284735872615,"d":5038},"events":[{"name":"bootstrap","f":1744284735867830,"d_finished":1139,"c":1,"l":1744284735868969,"d":1139},{"a":1744284735869165,"name":"ack","f":1744284735869165,"d_finished":0,"c":0,"l":1744284735872615,"d":3450},{"a":1744284735869145,"name":"processing","f":1744284735869145,"d_finished":0,"c":0,"l":1744284735872615,"d":3470},{"name":"ProduceResults","f":1744284735868951,"d_finished":2543,"c":2,"l":1744284735871767,"d":2543},{"a":1744284735871778,"name":"Finish","f":1744284735871778,"d_finished":0,"c":0,"l":1744284735872615,"d":837}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:15.873229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:15.866901Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-10T11:32:15.873308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:15.873444Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:469:2475];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-04-10T11:32:10.210397Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:10.327832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:10.350973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:10.351294Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:10.359508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:10.359702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:10.359927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:10.360056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:10.360167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:10.360288Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:10.360391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:10.360510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:10.360623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:10.360745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.360849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:10.360944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:10.393470Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:10.393715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:10.393765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:10.393959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:10.394098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:10.394208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:10.394250Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:10.394327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:10.394400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:10.394443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:10.394471Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:10.394666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:10.394727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:10.394770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:10.394800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:10.394877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:10.394930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:10.394975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:10.395001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:10.395067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:10.395099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:10.395125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:10.395173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:10.395211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:10.395239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:10.395634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-10T11:32:10.395716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T11:32:10.395784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-10T11:32:10.395909Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-10T11:32:10.396092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:10.396171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:10.396220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:10.396413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:10.396462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.396495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.396650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:10.396696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:10.396729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:10.396908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:10.396964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:10.396995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:10.397135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:10.397196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:10.397259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
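Before the harness truncates the log at this point, the WriteRead shard has walked its entire normalizer chain: each normalizer is initialized, runs its repair scan (here always finding 0 chunks), reports normalizer_finished, and hands over via normalizer_switched until normalization_finished lets TTxUpdateSchema complete. A compact sketch of that init/finish/switch sequence, using illustrative names rather than the real abstract.cpp machinery:

// Minimal sketch (illustrative names, not YDB's API) of the bootstrap
// pattern above: TTxInitSchema registers a chain of normalizers and
// TTxUpdateSchema then runs them strictly in sequence.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TNormalizer {
    std::string ClassName;
    int SeqId;
    std::function<void()> Run;  // a repair scan, often "0 chunks found"
};

void RunNormalizerChain(const std::vector<TNormalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        std::cout << "event=normalizer_init;seq_id=" << chain[i].SeqId
                  << ";type=" << chain[i].ClassName << "\n";
        chain[i].Run();
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << chain[i].ClassName << ";id=" << chain[i].SeqId << "\n";
        if (i + 1 < chain.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1].ClassName << "\n";
        }
    }
    std::cout << "event=normalization_finished\n";
}

int main() {
    auto scan = [] { std::cout << "message=0 chunks found\n"; };
    // Seq ids as they appear in the log; the real chain has more entries.
    RunNormalizerChain({{"Granules", 1, scan},
                        {"Chunks", 2, scan},
                        {"TablesCleaner", 4, scan},
                        {"RestoreV2Chunks", 16, scan}});
}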
id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.847956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:15.848115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-10T11:32:15.848205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-10T11:32:15.848368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-10T11:32:15.848549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.848697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.848846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.849111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:15.849265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.849414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.849464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:425:2440] finished for tablet 9437184 2025-04-10T11:32:15.849969Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:424:2439];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1744284735832715,"name":"_full_task","f":1744284735832715,"d_finished":0,"c":0,"l":1744284735849520,"d":16805},"events":[{"name":"bootstrap","f":1744284735832943,"d_finished":3357,"c":1,"l":1744284735836300,"d":3357},{"a":1744284735849084,"name":"ack","f":1744284735847623,"d_finished":1256,"c":1,"l":1744284735848879,"d":1692},{"a":1744284735849070,"name":"processing","f":1744284735837640,"d_finished":6680,"c":10,"l":1744284735848882,"d":7130},{"name":"ProduceResults","f":1744284735834850,"d_finished":3620,"c":13,"l":1744284735849447,"d":3620},{"a":1744284735849451,"name":"Finish","f":1744284735849451,"d_finished":0,"c":0,"l":1744284735849520,"d":69},{"name":"task_result","f":1744284735837662,"d_finished":5266,"c":9,"l":1744284735847410,"d":5266}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.850050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:424:2439];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:15.850564Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:424:2439];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1744284735832715,"name":"_full_task","f":1744284735832715,"d_finished":0,"c":0,"l":1744284735850101,"d":17386},"events":[{"name":"bootstrap","f":1744284735832943,"d_finished":3357,"c":1,"l":1744284735836300,"d":3357},{"a":1744284735849084,"name":"ack","f":1744284735847623,"d_finished":1256,"c":1,"l":1744284735848879,"d":2273},{"a":1744284735849070,"name":"processing","f":1744284735837640,"d_finished":6680,"c":10,"l":1744284735848882,"d":7711},{"name":"ProduceResults","f":1744284735834850,"d_finished":3620,"c":13,"l":1744284735849447,"d":3620},{"a":1744284735849451,"name":"Finish","f":1744284735849451,"d_finished":0,"c":0,"l":1744284735850101,"d":650},{"name":"task_result","f":1744284735837662,"d_finished":5266,"c":9,"l":1744284735847410,"d":5266}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-10T11:32:15.850655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:15.832084Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-10T11:32:15.850741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:15.851102Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:425:2440];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-04-10T11:32:05.910721Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:06.018946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:06.044048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:06.044331Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:06.052859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-10T11:32:06.053133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:06.053333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:06.053508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:06.053632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:06.053777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:06.053908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:06.054032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:06.054187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:06.054311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:06.054429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.054552Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:06.054672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:06.086278Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:06.086497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-10T11:32:06.086555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-10T11:32:06.086945Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-10T11:32:06.087083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-10T11:32:06.087176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-10T11:32:06.087223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-10T11:32:06.087390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.087472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:06.087517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:06.087552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-10T11:32:06.087633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:06.087691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:06.087731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:06.087765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:06.087949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-04-10T11:32:06.088029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:06.088083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:06.088113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:06.088224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:06.088289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:06.088336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:06.088370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:06.088474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:06.088519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:06.088553Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:06.088610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:06.088678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:06.088713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:06.089125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-04-10T11:32:06.089228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:32:06.089307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-10T11:32:06.089396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-10T11:32:06.089599Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:06.089651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:06.089689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:06.089922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:06.089975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.090012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.090195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:06.090274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:06.090324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;eve ... 
;TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:16.314558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T11:32:16.314644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-10T11:32:16.314706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-10T11:32:16.314780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-10T11:32:16.314840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-10T11:32:16.314967Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.315011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-10T11:32:16.315054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:16.315323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:16.315524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.315583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:16.315740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-10T11:32:16.315827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-10T11:32:16.315989Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:495:2498];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-10T11:32:16.316161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.316311Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.316443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.317382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:16.317560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.317704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.317756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:497:2499] finished for tablet 9437184 2025-04-10T11:32:16.318381Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:495:2498];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.26},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.262}],"full":{"a":1744284736055118,"name":"_full_task","f":1744284736055118,"d_finished":0,"c":0,"l":1744284736317837,"d":262719},"events":[{"name":"bootstrap","f":1744284736055429,"d_finished":2280,"c":1,"l":1744284736057709,"d":2280},{"a":1744284736317346,"name":"ack","f":1744284736315283,"d_finished":1196,"c":1,"l":1744284736316479,"d":1687},{"a":1744284736317322,"name":"processing","f":1744284736060330,"d_finished":138242,"c":9,"l":1744284736316482,"d":138757},{"name":"ProduceResults","f":1744284736056766,"d_finished":3280,"c":12,"l":1744284736317739,"d":3280},{"a":1744284736317743,"name":"Finish","f":1744284736317743,"d_finished":0,"c":0,"l":1744284736317837,"d":94},{"name":"task_result","f":1744284736060353,"d_finished":136839,"c":8,"l":1744284736315118,"d":136839}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.318526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:495:2498];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:16.319066Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:495:2498];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.26},{"events":["l_ProduceResults","f_Finish"],"t":0.262},{"events":["l_ack","l_processing","l_Finish"],"t":0.263}],"full":{"a":1744284736055118,"name":"_full_task","f":1744284736055118,"d_finished":0,"c":0,"l":1744284736318587,"d":263469},"events":[{"name":"bootstrap","f":1744284736055429,"d_finished":2280,"c":1,"l":1744284736057709,"d":2280},{"a":1744284736317346,"name":"ack","f":1744284736315283,"d_finished":1196,"c":1,"l":1744284736316479,"d":2437},{"a":1744284736317322,"name":"processing","f":1744284736060330,"d_finished":138242,"c":9,"l":1744284736316482,"d":139507},{"name":"ProduceResults","f":1744284736056766,"d_finished":3280,"c":12,"l":1744284736317739,"d":3280},{"a":1744284736317743,"name":"Finish","f":1744284736317743,"d_finished":0,"c":0,"l":1744284736318587,"d":844},{"name":"task_result","f":1744284736060353,"d_finished":136839,"c":8,"l":1744284736315118,"d":136839}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-10T11:32:16.319195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:16.054405Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-04-10T11:32:16.319261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:16.319612Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-04-10T11:32:10.603261Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:10.698772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:10.721755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:10.722005Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:10.729186Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:10.729352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:10.729586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:10.729694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:10.729811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:10.729912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:10.730003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:10.730073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:10.730171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:10.730278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.730371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:10.730456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:10.751623Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:10.751808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:10.751877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:10.752017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:10.752150Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:10.752221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:10.752254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:10.752359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:10.752429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:10.752458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:10.752482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:10.752606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:10.752648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:10.752675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:10.752693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:10.752759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:10.752798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:10.752841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:10.752862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:10.752917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:10.752955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:10.752981Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-10T11:32:10.753021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:10.753064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:10.753100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:10.753378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-04-10T11:32:10.753433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-04-10T11:32:10.753505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-10T11:32:10.753575Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-10T11:32:10.753695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:10.753733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:10.753758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:10.753919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:10.753952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.753974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:10.754188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:10.754263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:10.754296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:10.754488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:10.754545Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:10.754588Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:10.754740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:10.754786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:10.754848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-10T11:32:16.631054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 27037, MsgBus: 30541 2025-04-10T11:30:10.955765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645358514951227:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:10.956160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015b3/r3tmp/tmpUqS5Jr/pdisk_1.dat 2025-04-10T11:30:11.806848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:11.807765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:30:11.807835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:11.827184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27037, node 1 2025-04-10T11:30:12.135685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:12.135710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:12.135717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:12.135854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30541 TClient is connected to server localhost:30541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:13.175396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:13.198760Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:15.547956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645379989788242:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.548089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.548554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645379989788254:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:15.552593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:30:15.567018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645379989788256:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:30:15.639522Z node 1 :TX_PROXY ERROR: Actor# [1:7491645379989788307:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:15.918252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645358514951227:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:15.918314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:16.308745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:30:16.719159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:16.719429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:16.719811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:16.719956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:16.720078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:16.720233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:16.720352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:16.720464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:16.720592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:16.720702Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:16.720818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:16.720927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491645384284755855:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:16.724434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:30:16.724503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:30:16.724785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:30:16.724935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:30:16.725059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:30:16.725190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:30:16.725291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:30:16.725398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:30:16.725515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:30:16.725631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:30:16.725727Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:30:16.725829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645384284755843:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:30:16.837603Z node 1 :T ... troller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.477621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.482072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.483229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.489022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.489053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.495006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.495184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.500736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.500911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.506476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.506507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.512547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.513888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.518518Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.519011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.524171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.524171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.530341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.530808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.537957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.538192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.545386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.545673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.552832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.553195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.559829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.560060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.566728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.567168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.573556Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.573837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.580611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.580763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.586569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.586770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.592434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.600680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:43.839411Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqfj5385yaf8qrwnevvysk", SessionId: ydb://session/3?node_id=1&id=NDZiMWQ4OGItZmZmMjRjMzctOWM2YjdhZS1hYWExNTMxOQ==, Slow query, duration: 37.818965s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:44.434524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:44.435304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:44.435700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:32:06.583605Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqh15g88q5c4dcj0nyvx12", SessionId: ydb://session/3?node_id=1&id=NDZiMWQ4OGItZmZmMjRjMzctOWM2YjdhZS1hYWExNTMxOQ==, Slow query, duration: 12.422233s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, 
text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> YdbSdkSessions::TestMultipleSessions >> TWebLoginService::AuditLogLoginSuccess |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> TTopicApiDescribes::DescribeConsumer >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |95.4%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess |95.4%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLogout |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TTopicApiDescribes::GetPartitionDescribe >> EvWrite::WriteWithSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 5858, MsgBus: 10820 2025-04-10T11:30:28.963275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645435743131157:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:28.963344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c86/r3tmp/tmpPl2jNh/pdisk_1.dat 2025-04-10T11:30:29.824049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:29.824141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:29.841690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:30:29.856750Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5858, node 1 2025-04-10T11:30:30.246845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.246875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.246881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.247003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10820 TClient is connected to server localhost:10820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.464957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:31.529454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.709553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.962815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.048922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:33.806208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645457217969202:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:33.806376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:33.975472Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645435743131157:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:33.975532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:34.712379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.765083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.849469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.912376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.004714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.060520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.146341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645465807904322:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.146480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.150606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645465807904327:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.158408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:35.175774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645465807904329:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:35.280530Z node 1 :TX_PROXY ERROR: Actor# [1:7491645465807904386:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:37.253346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:38.931949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfqeqhy5xsnatw7wgb0jb8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0N2ExYTktYTEwODk2MWUtNTUwMWYyZGYtYWMwYWQzYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:38.946641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfqeqjj69b36tzs2f742g4s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU1N2FlYTItN2VkMTcwNTUtM2ZjOTVjNTItZjc5ZTFkNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:38.987460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfqeqjj8tbpqwg8jm1qz6xa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1ZTU3MDQtMWUwYmZlZjUtYTYxZDgwNTUtNDk3MTU4NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:38.989286Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jrfqeqjf4qtkwz4qewck8vat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDU4M2YzMzAtN2U0M2IzOTQtYjVkMTEwYWQtYjkwYzFlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:38.989746Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jrfqeqjjacewsw9nhz50vxdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyNTk5MDMtNWZjYzkwNTctMTdjMmNlZjAtNjM5ZTY4NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.002522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jrfqeqjfd8yh578mjm8yap91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwZTUzZjQtNmFlNDVmNmMtYTI0NTkzNjYtOTRiMWFmMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.003223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfqeqjhaegyf2h7pc42fam2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNmZTYxZC1iZTE1YWM3YS02M2RlYWJiNC1jNDE4NmQ2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.003804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfqeqjfb2h01q6smjtfvenq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFlYmIzYzEtOWU5MDgxZDYtMjU4YWU4NzYtM2IwMTZlOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.015798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680.
Ctx: { TraceId: 01jrfqeqjqezmgjy841s6syenh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwODI1NjMtZDg4YjRlMTEtOTM3NzE0MjMtOTUxMTMxMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.041614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jrfqeqjf4qtkwz4qewck8vat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDU4M2YzMzAtN2U0M2IzOTQtYjVkMTEwYWQtYjkwYzFlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.041921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jrfqeqhy5xsnatw7wgb0jb8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0N2ExYTktYTEwODk2MWUtNTUwMWYyZGYtYWMwYWQzYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.048684Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jrfqeqjj69 ... ionId: ydb://session/3?node_id=2&id=YjYzMjU0ZWQtYzhiY2RmMGMtMmU5MmZhOC0xZDg5Y2Uz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.011541Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719484. Ctx: { TraceId: 01jrfqhway18znhgjtzd9neymz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.013623Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719483. Ctx: { TraceId: 01jrfqhwax1j6kbz1a00wjxy3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.017239Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719485. Ctx: { TraceId: 01jrfqhwbj4tp4335gxw1jp3t0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.028893Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719486. Ctx: { TraceId: 01jrfqhwbj4tp4335gxw1jp3t0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.030990Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719487. Ctx: { TraceId: 01jrfqhwc74gp0hptp4zgxxnbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.043816Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719488. Ctx: { TraceId: 01jrfqhwcq19rkrczy858kps27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRkMDIyOWItMzc4MWQzOWYtYmE5NDM0MzQtODk1OTMzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.048877Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719489. Ctx: { TraceId: 01jrfqhwcxdte5s7bm6g06p11d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjYzMjU0ZWQtYzhiY2RmMGMtMmU5MmZhOC0xZDg5Y2Uz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:32:22.062988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719490. Ctx: { TraceId: 01jrfqhwd58ckxbc3sd9bpx143, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.074055Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719491. Ctx: { TraceId: 01jrfqhwdb9vptgzw68k4xtr5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.076218Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719492. Ctx: { TraceId: 01jrfqhwd58ckxbc3sd9bpx143, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.083269Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719493. Ctx: { TraceId: 01jrfqhwdb9vptgzw68k4xtr5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.087389Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719495. Ctx: { TraceId: 01jrfqhwdr5peekdqpxncdrcem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.088799Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719494. Ctx: { TraceId: 01jrfqhwdr0hda92mj22cwnhch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiNjMzN2QtZjJkMDMxMDgtODA0M2RjMDMtNTQ5YmVjOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.094779Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719496. Ctx: { TraceId: 01jrfqhwdxd59sh1m3nj7qsxzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.099953Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719497. Ctx: { TraceId: 01jrfqhwdr5peekdqpxncdrcem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.101877Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719498. Ctx: { TraceId: 01jrfqhwdr0hda92mj22cwnhch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiNjMzN2QtZjJkMDMxMDgtODA0M2RjMDMtNTQ5YmVjOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.108930Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719499. Ctx: { TraceId: 01jrfqhwdxd59sh1m3nj7qsxzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.123400Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719500. 
Ctx: { TraceId: 01jrfqhwf3fpzssd670hk9n0cx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjYzMjU0ZWQtYzhiY2RmMGMtMmU5MmZhOC0xZDg5Y2Uz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.124021Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719501. Ctx: { TraceId: 01jrfqhwf7et7jbjaybep421a9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.130514Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719502. Ctx: { TraceId: 01jrfqhwf7et7jbjaybep421a9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.133868Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719503. Ctx: { TraceId: 01jrfqhwfe9pgcvzywcg8kmf4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.134233Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719504. Ctx: { TraceId: 01jrfqhwf3fpzssd670hk9n0cx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjYzMjU0ZWQtYzhiY2RmMGMtMmU5MmZhOC0xZDg5Y2Uz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.144467Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719505. Ctx: { TraceId: 01jrfqhwfvf2sh7q0gdpqq0azk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRkMDIyOWItMzc4MWQzOWYtYmE5NDM0MzQtODk1OTMzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.153962Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719506. Ctx: { TraceId: 01jrfqhwg55rc800hmz9hksjxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiNjMzN2QtZjJkMDMxMDgtODA0M2RjMDMtNTQ5YmVjOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.167927Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719507. Ctx: { TraceId: 01jrfqhwgh41t8rgqae5ncbesa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.181553Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719508. Ctx: { TraceId: 01jrfqhwgh41t8rgqae5ncbesa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.187481Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719509. Ctx: { TraceId: 01jrfqhwgh41t8rgqae5ncbesa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDkzODc2NTMtMzcxOTFmZjUtMWFkZDc0ZTctOWFhNmNlZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.200085Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719510. 
Ctx: { TraceId: 01jrfqhwh2e89d5hky72429eaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.206994Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719512. Ctx: { TraceId: 01jrfqhwh966je58f4940qb968, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.208825Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719511. Ctx: { TraceId: 01jrfqhwgh1cd9dw28dcx2n434, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjIwMDJlZDMtYjQ2YTJlYzktNjZhMTQxZjQtNGU4Y2FiZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:32:22.220230Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719513. Ctx: { TraceId: 01jrfqhwhnatdj9076r8fr6abe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRkMDIyOWItMzc4MWQzOWYtYmE5NDM0MzQtODk1OTMzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:32:22.222996Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719514. Ctx: { TraceId: 01jrfqhwh2e89d5hky72429eaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjY1OTgxZGYtZjlhNTc2ZGEtOWM4ZGQ0MmItNTkwYjYyZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.224166Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719515. Ctx: { TraceId: 01jrfqhwh966je58f4940qb968, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:32:22.229636Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719516. Ctx: { TraceId: 01jrfqhwh966je58f4940qb968, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYwMTMwZTktNjc1MzJlYWYtNjc2NjA4MGMtZWQ2MDc3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:22.233772Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719517. Ctx: { TraceId: 01jrfqhwhnatdj9076r8fr6abe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzRkMDIyOWItMzc4MWQzOWYtYmE5NDM0MzQtODk1OTMzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS |95.4%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TWebLoginService::AuditLogLogout [GOOD] >> TTabletPipeTest::TestPipeConnectToHint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-04-10T11:31:55.578979Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:55.692621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:55.720768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:55.721218Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:55.730263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:55.730555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:55.730802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:55.730926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:55.731024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:55.731196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:55.731334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:55.731450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:55.731563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:55.731686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.731810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:55.731938Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:55.765153Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:55.765760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:55.765837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:55.766044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.766307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:55.766441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:55.766511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:55.766623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:55.766695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:55.766745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:55.766776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:55.766975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.767043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:55.767085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:55.767116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:55.767207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:55.767256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:55.767313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:55.767341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:55.767414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:55.767451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:55.767478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:55.767529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:55.767571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:55.767607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:55.768014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:31:55.768130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T11:31:55.768213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-10T11:31:55.768318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-10T11:31:55.768766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:55.768835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:55.768873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:55.769098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:55.769152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.769183Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.769348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:55.769415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:55.769449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:55.769639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:55.769679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:55.769735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:55.769867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:55.769918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:55.769972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
d=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-10T11:32:27.237120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.237240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.237276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:27.237308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T11:32:27.238609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:27.238743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.238786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-10T11:32:27.238877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-04-10T11:32:27.238928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-04-10T11:32:27.239059Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-10T11:32:27.239149Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.239232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.239364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.239915Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-10T11:32:27.240026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.240117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.240155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:865:2882] finished for tablet 9437184 2025-04-10T11:32:27.240759Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:415;event=scan_finish;compute_actor_id=[1:856:2873];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.014},{"events":["f_processing","f_task_result"],"t":0.016},{"events":["l_task_result"],"t":0.715},{"events":["f_ack"],"t":21.172},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":21.186}],"full":{"a":1744284726053445,"name":"_full_task","f":1744284726053445,"d_finished":0,"c":0,"l":1744284747240222,"d":21186777},"events":[{"name":"bootstrap","f":1744284726054265,"d_finished":13767,"c":1,"l":1744284726068032,"d":13767},{"a":1744284747239891,"name":"ack","f":1744284747225773,"d_finished":7811,"c":9,"l":1744284747239402,"d":8142},{"a":1744284747239872,"name":"processing","f":1744284726069934,"d_finished":268093,"c":53,"l":1744284747239405,"d":268443},{"name":"ProduceResults","f":1744284726058594,"d_finished":16435,"c":64,"l":1744284747240137,"d":16435},{"a":1744284747240141,"name":"Finish","f":1744284747240141,"d_finished":0,"c":0,"l":1744284747240222,"d":81},{"name":"task_result","f":1744284726069952,"d_finished":259017,"c":44,"l":1744284726769097,"d":259017}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.240853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:856:2873];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-10T11:32:27.241388Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:370;event=scan_finished;compute_actor_id=[1:856:2873];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.014},{"events":["f_processing","f_task_result"],"t":0.016},{"events":["l_task_result"],"t":0.715},{"events":["f_ack"],"t":21.172},{"events":["l_ProduceResults","f_Finish"],"t":21.186},{"events":["l_ack","l_processing","l_Finish"],"t":21.187}],"full":{"a":1744284726053445,"name":"_full_task","f":1744284726053445,"d_finished":0,"c":0,"l":1744284747240901,"d":21187456},"events":[{"name":"bootstrap","f":1744284726054265,"d_finished":13767,"c":1,"l":1744284726068032,"d":13767},{"a":1744284747239891,"name":"ack","f":1744284747225773,"d_finished":7811,"c":9,"l":1744284747239402,"d":8821},{"a":1744284747239872,"name":"processing","f":1744284726069934,"d_finished":268093,"c":53,"l":1744284747239405,"d":269122},{"name":"ProduceResults","f":1744284726058594,"d_finished":16435,"c":64,"l":1744284747240137,"d":16435},{"a":1744284747240141,"name":"Finish","f":1744284747240141,"d_finished":0,"c":0,"l":1744284747240901,"d":760},{"name":"task_result","f":1744284726069952,"d_finished":259017,"c":44,"l":1744284726769097,"d":259017}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-10T11:32:27.241501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-10T11:32:06.051912Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-04-10T11:32:27.241550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-10T11:32:27.241822Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: 
[1:127:2058] recipient: [1:109:2141] 2025-04-10T11:32:24.536850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:24.536955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:24.536995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:24.537034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:24.537096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:24.537126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:24.537225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:24.537328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:24.537709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:24.624846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:32:24.624927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:24.644498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:24.645421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:24.645628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:24.658954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:24.659324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:24.659995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:24.660234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:24.663973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:24.665609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:24.665690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:24.665865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:24.665918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:24.665979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:24.666204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-04-10T11:32:24.675696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:241:2058] recipient: [1:15:2062] 2025-04-10T11:32:24.810769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:24.811054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.811283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:24.811570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:24.811645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.814577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:24.814959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:24.815488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.815564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:24.815607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:24.815640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:24.817939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.817999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:24.818052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:24.820359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.820421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.820493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:24.820566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:24.824382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:24.826658Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:24.826878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:24.828004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:24.828186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:24.828236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:24.828522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:24.828573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:24.828760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:24.828854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:24.831234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:24.831270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:24.831435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:24.831475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:24.831754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:24.831792Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:24.831879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:24.831907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:24.831948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:24.831970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:24.831997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:32:24.832025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:24.832049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2025-04-10T11:32:24.832072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:32:24.832119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:24.832146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:32:24.832168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:32:24.833629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:24.833714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:24.833741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:27.729437Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:32:27.729480Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:32:27.729538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:27.729658Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:32:27.736779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:32:27.737395Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:32:27.737974Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Bootstrap 2025-04-10T11:32:27.759367Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Become StateWork (SchemeCache [4:276:2267]) 2025-04-10T11:32:27.762724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:27.769433Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:27.769616Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:32:27.769665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:32:27.769788Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:32:27.769828Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:32:27.769892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:27.769956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:32:27.770000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:32:27.770041Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:32:27.770081Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-10T11:32:27.770129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:32:27.771794Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:32:27.775588Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:27.775719Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-10T11:32:27.775970Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:27.776018Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:27.776200Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:27.776286Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:32:27.777222Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:32:27.777330Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:32:27.777391Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:32:27.777438Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:32:27.777486Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:27.777594Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:32:27.778044Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:32:27.779904Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-10T11:32:27.780334Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-10T11:32:27.782370Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:32:27.782424Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at 
schemeshard: 72057594046678944 2025-04-10T11:32:28.009906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTQ3LCJpYXQiOjE3NDQyODQ3NDcsInN1YiI6InVzZXIxIn0.KF2IXwqTDlMY6HPWs5v-Ant9U9wLjlak0oNAsUoRr0jQAhjOh9upezPrC4DU0ZgZvIjSeoOHfvP5WEeSO7YW5Pnht8NmUOLq42vP-jiyPlCqJb8ld0t5kyAfeeEL6jYLjuve8uWPXOfAEcqcfnW06AUEprh2P_AJljW1y9cSVmAI24tFwYTaNq5MPMsd4nB1BD6YRKjTZO-fHj5qPNgp7t2HoQ_oMA02vz2QdXmRPlZr8bBN8yoTDj8xEY5DARZ3wGRfLD4x8KLafElqYrDYxB8f_cU3eX21URni61IjkNrx_zeKCCh_NqYO0E32urkttS-mgCi71lHtoip-Sd-EMQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTQ3LCJpYXQiOjE3NDQyODQ3NDcsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-10T11:32:28.010323Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:28.010385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:28.010607Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:28.010660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T11:32:28.011929Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-10T11:32:28.012724Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:32:28.012942Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 235us result status StatusSuccess 2025-04-10T11:32:28.013450Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy7QIKQGPUgOAAN/OUsky\nor2WNh7Nmvq0nsx6p9meUAhR2os/0rGc/l8wJGge/qPlrw3lW+RK0uJsDWBwtP0h\nhTc72kcEAHtDXVQJR+1NSTMVP/wOmBYRRZxZsCPrNQEqn6WGudKN2kYjpkLnkImW\nNa4snXVME30TR0q/BxiR8tmvOanx40JuLcN3aAKINyge+YNldhYtyD/8H9SJ2dun\no/g6nYWIw5q1CdHOifqDW6g/lyDsSEiWF9gNsBv1cviwv6xICvJSu4DIkkLx8A/o\ntkQsczHBzqpr6ROEhmtivfJ68tVslvutXHEJGDU94bSKloNet25SErKVynX3q1I2\nkQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744371147997 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:28.018447Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-10T11:32:28.018560Z node 4 :HTTP ERROR: Logout: No ydb_session_id cookie 2025-04-10T11:32:28.019100Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-10T11:32:28.025130Z node 4 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2025-04-10T11:32:28.025314Z node 4 :HTTP ERROR: Logout: Token is not in correct format 2025-04-10T11:32:28.025854Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-04-10T11:32:27.715037Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:32:27.769275Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-10T11:32:28.010114Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTQ3LCJpYXQiOjE3NDQyODQ3NDcsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2025-04-10T11:32:28.027332Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTQ3LCJpYXQiOjE3NDQyODQ3NDcsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-04-10T11:32:28.027332Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTQ3LCJpYXQiOjE3NDQyODQ3NDcsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> YdbSdkSessions::TestMultipleSessions [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-04-10T11:32:24.419110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645934334621036:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:24.419270Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f39/r3tmp/tmphxLLRN/pdisk_1.dat 2025-04-10T11:32:24.913915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:24.914047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:24.916309Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:24.922867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10164, node 1 2025-04-10T11:32:25.128700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:25.128738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:25.128747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:25.128901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:25.467670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:27.787413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645947219523828:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.787504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645947219523839:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.787554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.795992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:32:27.799124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645947219523872:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.799195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645947219523877:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.799728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:27.808207Z node 1 :TX_PROXY ERROR: Actor# [1:7491645947219523882:2659] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:32:27.846758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645947219523881:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:32:27.846941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645947219523842:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:32:27.923772Z node 1 :TX_PROXY ERROR: Actor# [1:7491645947219523986:2724] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:27.933002Z node 1 :TX_PROXY ERROR: Actor# [1:7491645947219524003:2733] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser |95.5%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TIcNodeCache::GetNodesInfoTest >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestPureProgram >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> 
TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> 
TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-04-10T11:32:29.912299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:29.912407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:29.912467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:29.912499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:29.912545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:29.912575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:29.912668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:29.912760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:29.913095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:30.003276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:32:30.003367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:30.020512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:30.020643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:30.020815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:30.030992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:30.031280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:30.031963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:30.032303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:30.035083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:30.036838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:30.036911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:30.037023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:30.037102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:30.037147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:30.037333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.044997Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:240:2058] recipient: [1:15:2062] 2025-04-10T11:32:30.190547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:30.190779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.190971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:30.191216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:30.191272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.193733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:30.193949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:30.194119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.194189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:30.194236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:30.194267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:30.196414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.196464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:30.196497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:30.198435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.198483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.198537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:30.198615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:30.202006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:30.203960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:30.204141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:30.205023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:30.205167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:30.205209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:30.205467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:30.205516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:30.205709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:30.205825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:30.207932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:30.207977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:30.208145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:30.208207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:30.208515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:30.208570Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:30.208658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:30.208693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:30.208729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:30.208757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:30.208791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:32:30.208833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:30.208864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:32:30.208889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:32:30.208960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:30.209000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:32:30.209030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:32:30.210872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:30.210979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:30.211015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3004Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:32.413130Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:32.413312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:32.413416Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:32.413520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:32.413753Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.424001Z node 4 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [4:126:2152] sender: [4:240:2058] recipient: [4:15:2062] 2025-04-10T11:32:32.442960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:32.443280Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.443564Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:32.443827Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:32.443908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.447792Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:32.448073Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:32.448428Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.448515Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:32.448722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:32.448784Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:32.451700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.451792Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:32.451877Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:32.454537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:32:32.454622Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.454703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:32.454788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:32.455006Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:32.457732Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:32.458003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:32.459294Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:32.459486Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:32.459554Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:32.459933Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:32.460020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:32.460268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:32.460379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:32.463805Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:32.463886Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:32.464155Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:32.464229Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:32.464706Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:32.464784Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:32.464973Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:32.465029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:32.465117Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:32.465173Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:32.465232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:32:32.465300Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:32.465365Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:32:32.465418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:32:32.465532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:32.465595Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:32:32.465654Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:32:32.466525Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:32.466678Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:32.466735Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:32:32.466800Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:32:32.466856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:32.466993Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:32:32.471345Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:32:32.472060Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:32.473612Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Bootstrap 2025-04-10T11:32:32.496928Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Become StateWork (SchemeCache [4:278:2269]) 2025-04-10T11:32:32.497222Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-10T11:32:32.497549Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9763, port: 9763 2025-04-10T11:32:32.497650Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-10T11:32:32.502736Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:9763. 
Invalid credentials 2025-04-10T11:32:32.503372Z node 4 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-04-10T11:32:32.504373Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:32:32.514430Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-10T11:32:32.448011Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:32:32.503197Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:9763. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-10T11:32:32.503197Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:9763. Invalid credentials, login_user=user1@ldap, sanitized_token={none} |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> THiveTest::TestUpdateChannelValues >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } >> THiveTest::TestFollowers >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> TVectorIndexTests::CreateTablePrefixInvalidKeyType >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD] >> BackupRestore::PrefixedVectorIndex >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> THiveTest::TestCreateTablet >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::UpsertImmediate >> DataShardWrite::UpsertPrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:32:35.856571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:35.856676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:35.856725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:35.856761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:35.856808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:35.856833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:35.856884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:35.856971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:35.857395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:35.940452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:32:35.940501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:35.955324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:35.955688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:35.955896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:35.964465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:35.964760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:35.965561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:35.966091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:35.968557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:35.970095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:35.970219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:35.970433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:35.970482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:35.970525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:35.970708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:32:35.978361Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:32:36.119195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:36.119499Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.119793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:36.120114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:36.120181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.122936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:36.123121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:36.123421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.123491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:36.123557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:36.123640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:36.126217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.126292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:36.126340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:36.128578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.128654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.128715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:36.128787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:36.132573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:36.135299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:36.135530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:36.136633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:36.136764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:36.136812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:36.137150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:36.137207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:36.137417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:36.137500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:36.139815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:36.139862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:36.140055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:36.140106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:36.140374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:36.140417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:36.140535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:36.140571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:36.140609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:36.140642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:36.140695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:32:36.140737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:36.140771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:32:36.140808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:32:36.140887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:36.140922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:32:36.140951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:32:36.143700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:36.143829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:36.143877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:32:36.143917Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:32:36.143960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:36.144126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:32:36.147632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:32:36.148209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:32:36.153025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:36.153553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-10T11:32:36.153843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-04-10T11:32:36.153892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-04-10T11:32:36.155399Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:32:36.177637Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:32:36.178732Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:32:36.184165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:36.184385Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-10T11:32:36.187311Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-10T11:32:36.187612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-10T11:32:36.187661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-10T11:32:36.188298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-10T11:32:36.188429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:32:36.188489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 101 2025-04-10T11:32:36.189194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:32:36.189447Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 290us result status StatusPathDoesNotExist 2025-04-10T11:32:36.189686Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions 
[GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TTopicApiDescribes::DescribeConsumer [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> THiveTest::TestNoMigrationToSelf >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false |95.5%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-04-10T11:32:24.750706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645932245737097:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:24.750759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:32:24.822073Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645930842188470:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:24.823101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:32:25.040246Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:32:25.060860Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001287/r3tmp/tmpGIGJfA/pdisk_1.dat 2025-04-10T11:32:25.420260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:25.430074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:25.430208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:25.431314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:25.431375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-10T11:32:25.442425Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:32:25.442572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:25.443193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9293, node 1 2025-04-10T11:32:25.568373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001287/r3tmp/yandexLM7x2u.tmp 2025-04-10T11:32:25.568398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001287/r3tmp/yandexLM7x2u.tmp 2025-04-10T11:32:25.568572Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001287/r3tmp/yandexLM7x2u.tmp 2025-04-10T11:32:25.568751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:25.621589Z INFO: TTestServer started on Port 24709 GrpcPort 9293 TClient is connected to server localhost:24709 PQClient connected to localhost:9293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:25.923010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:32:25.984214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:32:28.608082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645949425607375:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:28.613841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:32:28.615307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645949425607363:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:28.615433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:28.650657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645949425607401:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:32:28.723735Z node 1 :TX_PROXY ERROR: Actor# [1:7491645949425607482:2756] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:29.035794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:32:29.039694Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645949425607499:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:32:29.040050Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTkwNGU4MDktODA1NjkzMzYtZDBmZWIxMmItNjY0MmI0MGQ=, ActorId: [1:7491645949425607360:2336], ActorState: ExecuteState, TraceId: 01jrfqj2s56tvfpa1hpzsdwzwj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:32:29.039746Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645948022058063:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:32:29.040203Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTgzZGQ2MTktZjI3MjZhZDctNTJkNmYwMDMtNTc4Yzk3MDk=, ActorId: [2:7491645948022058037:2312], ActorState: ExecuteState, TraceId: 01jrfqj2ws05j61eav8mvdd5wd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:32:29.042675Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:32:29.042702Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:32:29.122408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:32:29.259348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:32:29.492761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfqj3h8fsyket3gsk1yjs1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc4ODJkZTQtOGNmZDc1N2QtY2JlZjcwZjYtNTg4NTg0YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7491645953720575247:3095] 2025-04-10T11:32:29.751417Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645932245737097:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:29.751529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:32:29.821343Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645930842188470:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:29.821447Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-10T11:32:35.913234Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-10T11:32:36.073741Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:32:36.073796Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes ... _time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744284756 nanos: 648000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744284756 nanos: 663000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744284756 nanos: 683000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744284756 nanos: 695000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744284756 nanos: 668000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1744284756 nanos: 687000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744284756 nanos: 646000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1744284756 nanos: 663000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2025-04-10T11:32:37.281918Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-10T11:32:37.282043Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-04-10T11:32:37.282746Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl 
[1:7491645988080316085:2638]: Request location 2025-04-10T11:32:37.284025Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491645988080316087:2639] connected; active server actors: 1 2025-04-10T11:32:37.284523Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491645988080316085:2638]: Got location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744284756200 tx_id: 281474976710681 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-10T11:32:37.287850Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-10T11:32:37.287922Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: 2025-04-10T11:32:37.284224Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-10T11:32:37.284240Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-10T11:32:37.284249Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-10T11:32:37.284259Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-10T11:32:37.284267Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-10T11:32:37.284276Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-10T11:32:37.284289Z node 2 
:PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-10T11:32:37.284297Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-10T11:32:37.284304Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-10T11:32:37.284311Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-10T11:32:37.284318Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-10T11:32:37.284325Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1744284756200 tx_id: 281474976710681 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-04-10T11:32:37.284332Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-10T11:32:37.284337Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-10T11:32:37.284344Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-10T11:32:37.286333Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491645988080316087:2639] disconnected; active server actors: 1 2025-04-10T11:32:37.286351Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491645988080316087:2639] disconnected no session 2025-04-10T11:32:37.292019Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-10T11:32:37.292083Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> 
THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> THiveTest::TestRestartsWithFollower [GOOD] >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> THiveTest::TestStartTabletTwiceInARow >> THealthCheckTest::Issues100GroupsListing >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> KqpService::ToDictCache-UseCache [GOOD] >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> THealthCheckTest::StorageLimit95 >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-04-10T11:32:27.985294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645946878348990:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:27.987637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:32:28.267548Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001270/r3tmp/tmpa7K8Yo/pdisk_1.dat 2025-04-10T11:32:28.272370Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T11:32:28.315934Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:28.579470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:28.585637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:28.585762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:28.589102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:28.589184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:28.597089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 
Cookie 2 2025-04-10T11:32:28.597260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:28.599294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30811, node 1 2025-04-10T11:32:28.714945Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/001270/r3tmp/yandexDD7OsN.tmp 2025-04-10T11:32:28.714982Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/001270/r3tmp/yandexDD7OsN.tmp 2025-04-10T11:32:28.715162Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/001270/r3tmp/yandexDD7OsN.tmp 2025-04-10T11:32:28.715400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:28.779331Z INFO: TTestServer started on Port 8979 GrpcPort 30811 TClient is connected to server localhost:8979 PQClient connected to localhost:30811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:29.164197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:32:29.247365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:32:31.645697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645961668274816:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:31.645837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645961668274841:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:31.646021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:31.653342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T11:32:31.682721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645961668274845:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T11:32:31.773552Z node 2 :TX_PROXY ERROR: Actor# [2:7491645961668274872:2180] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:32.161857Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645961668274879:2321], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:32:32.162482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U5MGRhMDAtMjg5MTg0YzctMTM3MzJmNjAtMTIzYmFjZA==, ActorId: [2:7491645961668274814:2312], ActorState: ExecuteState, TraceId: 01jrfqj5rrd390ayyeft4zqmpw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:32:32.164662Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645964058219349:2344], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:32:32.164873Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:32:32.166846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGU3NGU3ZmItNDkxMmU2NDAtMzZkZWE5MDktMTU3Yzc1NWQ=, ActorId: [1:7491645964058219308:2337], ActorState: ExecuteState, TraceId: 01jrfqj5wefackr742yqc918d3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:32:32.167286Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:32:32.167292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:32:32.239568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:32:32.432370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:32:32.784386Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqj6q04t35gvee7zr5kn34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM2NmNmNTItODNjNzE2YTMtMzNkMTQ2NGUtOWVhYTBhNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491645968353187111:3096] 2025-04-10T11:32:32.980247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645946878348990:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:32.980320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-10T11:32:39.134303Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-10T11:32:39.271009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jrfqjd3722cbjj2mrvfxcvbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjODYwMzUtMTg1NzMwMGQtM2RhNzFlMTQtODY4ODdkYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:32:39.283196Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:32:39.283274Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895] doesn't have tx writes info 2025-04-10T11:32:39.284444Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7491645998417958921:3539] connected; active server actors: 1 2025-04-10T11:32:39.287382Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# b ... FO: [PQ: 72075186224037893] has a tx writes info 2025-04-10T11:32:39.590457Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7491645996028014474:2480] 2025-04-10T11:32:39.590457Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7491645996028014473:2479] 2025-04-10T11:32:39.591393Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:32:39.591432Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] has a tx writes info 2025-04-10T11:32:39.591520Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7491645998417959988:2570] 2025-04-10T11:32:39.591956Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7491645998417959991:2572] 2025-04-10T11:32:39.595569Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:32:39.595590Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] has a tx writes info 2025-04-10T11:32:39.595588Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-10T11:32:39.595602Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-10T11:32:39.594684Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7491645998417959990:2571] 2025-04-10T11:32:39.595371Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7491645998417959994:2573] 2025-04-10T11:32:39.596093Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7491645996028014485:2485] 2025-04-10T11:32:39.600195Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:32:39.600224Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7491645998417959991:2572] 2025-04-10T11:32:39.600783Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.600812Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7491645998417959988:2570] 2025-04-10T11:32:39.598380Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7491645996028014483:2483] 2025-04-10T11:32:39.598755Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7491645996028014486:2486] 2025-04-10T11:32:39.600772Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.600803Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7491645996028014432:2473] 2025-04-10T11:32:39.601064Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.601093Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7491645996028014435:2474] 2025-04-10T11:32:39.602485Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.602636Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7491645998417959990:2571] 2025-04-10T11:32:39.602975Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2025-04-10T11:32:39.603012Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-04-10T11:32:39.603528Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7491645996028014484:2484] 2025-04-10T11:32:39.603630Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.603659Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7491645998417959994:2573] 2025-04-10T11:32:39.605920Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-04-10T11:32:39.607342Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:32:39.607385Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7491645996028014473:2479] 2025-04-10T11:32:39.607902Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.607941Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7491645996028014474:2480] 2025-04-10T11:32:39.608386Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.608429Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7491645996028014485:2485] 2025-04-10T11:32:39.608452Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-04-10T11:32:39.609969Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.609996Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7491645996028014483:2483] 2025-04-10T11:32:39.610448Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-10T11:32:39.610486Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7491645996028014486:2486] 2025-04-10T11:32:39.611306Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-04-10T11:32:39.611542Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-10T11:32:39.611573Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7491645996028014484:2484] 2025-04-10T11:32:39.612364Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-04-10T11:32:40.419120Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-10T11:32:40.419238Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-04-10T11:32:40.419283Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7491646002712927482:2595]: Bootstrap 2025-04-10T11:32:40.426312Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491646002712927482:2595]: Request location 2025-04-10T11:32:40.430214Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927484:2596] connected; active server actors: 1 2025-04-10T11:32:40.434903Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491646002712927482:2595]: Got location 2025-04-10T11:32:40.434653Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-10T11:32:40.435747Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927484:2596] disconnected; active server actors: 1 2025-04-10T11:32:40.435769Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927484:2596] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-10T11:32:40.442902Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-10T11:32:40.443024Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-04-10T11:32:40.443062Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7491646002712927485:2597]: Bootstrap 2025-04-10T11:32:40.443809Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491646002712927485:2597]: Request location 2025-04-10T11:32:40.447688Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7491646002712927485:2597]: Got location 2025-04-10T11:32:40.446713Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927488:2599] connected; active server actors: 1 2025-04-10T11:32:40.447262Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-10T11:32:40.450447Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927488:2599] disconnected; active server actors: 1 2025-04-10T11:32:40.450474Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7491646002712927488:2599] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1744284759 nanos: 582000000 } max_write_time_lag { } bytes_written 
{ } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-04-10T11:32:40.462579Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-10T11:32:40.462693Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-10T11:32:40.462731Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7491646002712927490:2600]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 7401, MsgBus: 27060 2025-04-10T11:31:25.766596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645677322662724:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:25.766651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001556/r3tmp/tmpHOT1EM/pdisk_1.dat 2025-04-10T11:31:26.403264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:26.404485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:26.404538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:26.411430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7401, node 1 2025-04-10T11:31:26.682691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:26.682719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:26.682729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:26.682840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27060 TClient is connected to server localhost:27060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:31:27.604296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.651023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:27.666942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:27.873375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:28.067277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:28.149737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:30.041234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645698797500766:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.041346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.367997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.408974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.449020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.489581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.532832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.612672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:30.693785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645698797501285:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.693919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.694409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645698797501290:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:30.700878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:30.719770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645698797501292:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:30.770824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645677322662724:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:30.770902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:30.802660Z node 1 :TX_PROXY ERROR: Actor# [1:7491645698797501350:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:32.050139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:41.394659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:31:41.394691Z node 1 :IMPORT WARN: Table profiles were not loaded took: 21.949469s took: 21.977251s took: 22.012178s took: 21.992567s took: 22.035671s took: 21.997842s took: 22.015575s took: 22.037121s took: 22.050235s took: 22.012154s Trying to start YDB, gRPC: 22731, MsgBus: 1213 2025-04-10T11:31:55.288861Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645807001830081:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:55.288936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001556/r3tmp/tmpN0odqd/pdisk_1.dat 2025-04-10T11:31:55.439201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:55.439297Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:55.439819Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:55.456031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22731, node 2 2025-04-10T11:31:55.496463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:55.496484Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:55.496491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:55.496633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1213 TClient is connected to server localhost:1213 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:56.048344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:59.016172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645824181699905:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access p ... _exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645807001830081:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:00.290593Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 1.055838s took: 1.058621s took: 1.059457s took: 1.067681s took: 1.067856s took: 1.218498s took: 1.221481s took: 1.221916s took: 0.929079s took: 0.928501s took: 0.930608s took: 0.930930s took: 1.021215s took: 1.021826s took: 1.024662s took: 1.024719s took: 1.107153s took: 1.107508s took: 1.125580s took: 1.125458s took: 1.033467s took: 1.035887s took: 1.036011s took: 1.036337s took: 1.065760s took: 1.067065s took: 1.067152s took: 1.069262s took: 1.111278s took: 1.111920s took: 1.113887s took: 1.117520s took: 1.068034s took: 1.070454s took: 1.070693s took: 1.072038s 2025-04-10T11:32:10.407763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:32:10.407806Z node 2 :IMPORT WARN: Table profiles were not loaded took: 1.051504s took: 1.052493s took: 1.052496s took: 1.052730s took: 1.120499s took: 1.122216s took: 1.122058s took: 1.124208s took: 1.090566s took: 1.094645s took: 1.094723s took: 1.097254s took: 1.046751s took: 1.056728s took: 1.064456s took: 1.067908s took: 1.180588s took: 1.179284s took: 1.181927s took: 1.182931s took: 1.245599s took: 1.247683s took: 1.249094s took: 1.251471s Trying to start YDB, gRPC: 23945, MsgBus: 2310 2025-04-10T11:32:17.813272Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491645902776404828:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:17.813337Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001556/r3tmp/tmpeZXULs/pdisk_1.dat 2025-04-10T11:32:17.993555Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:18.019983Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:18.020080Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:18.021672Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23945, node 3 2025-04-10T11:32:18.121941Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:18.121970Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:18.121979Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:18.122135Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2310 TClient is connected to server localhost:2310 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:18.763887Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:18.773520Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:32:22.007400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645924251242001:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.007478Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645924251242000:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.007529Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645924251241981:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.007578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645924251242005:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.007627Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491645924251241999:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.007720Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:22.016619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:32:22.022582Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242019:2318] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:32:22.022772Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242017:2316] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:32:22.030330Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242016:2315] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:32:22.038814Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645924251242014:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:32:22.038895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645924251242015:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:32:22.038951Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645924251242013:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:32:22.038977Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491645924251242012:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:32:22.105697Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242090:2363] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:22.117132Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242109:2372] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:22.119804Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242115:2377] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:22.134497Z node 3 :TX_PROXY ERROR: Actor# [3:7491645924251242125:2384] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:32:22.813445Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491645902776404828:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:22.813542Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 4.418588s took: 4.421198s took: 4.423048s took: 4.481780s took: 1.191459s took: 1.195472s took: 1.197346s took: 1.199167s took: 1.224022s took: 1.227143s took: 1.227775s took: 1.229967s took: 1.033612s took: 1.038831s took: 1.042223s took: 1.045303s took: 1.123519s took: 1.125663s took: 1.126649s took: 1.126715s took: 1.176186s took: 1.177245s took: 1.177964s took: 1.234559s took: 1.070436s took: 1.072133s took: 1.070438s took: 1.075723s took: 1.095956s took: 1.097079s took: 1.097616s took: 1.099116s took: 1.200913s took: 1.202981s took: 1.204733s took: 1.209897s 2025-04-10T11:32:32.978523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:32:32.978551Z node 3 :IMPORT WARN: Table profiles were not loaded took: 1.269295s took: 1.272318s took: 1.282883s took: 1.283611s took: 1.248685s took: 1.250543s took: 1.254913s took: 1.256988s took: 1.130515s took: 1.134480s took: 1.134548s took: 1.138317s took: 1.238227s took: 1.244442s took: 1.245109s took: 1.247419s took: 1.071171s took: 1.072009s took: 1.074163s took: 1.074813s took: 1.294815s took: 1.297571s took: 1.297916s took: 1.300981s took: 1.184844s took: 1.189535s took: 1.191845s took: 1.192625s >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:32:39.825403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:39.825510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:39.825559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:39.825590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:39.825635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:39.825664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:39.825721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:39.825794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:39.826131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:39.908671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:32:39.908734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:39.924882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:39.925198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:39.925381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:39.933410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:39.933672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:39.934425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:39.934842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:39.936960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:39.938521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:39.938600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:39.938776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:39.938822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:39.938861Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:39.939010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:32:39.946101Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:32:40.084397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:40.084656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.084892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:40.085145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:40.085199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.087900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:40.088095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:40.088288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.088342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:40.088381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:40.088431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:40.090748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.090808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:40.090846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:40.092915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.092962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.093012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:40.093094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:40.096618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 
Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:40.098949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:40.099124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:40.100186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:40.100349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:40.100398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:40.100669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:40.100720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:40.100935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:40.101024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:40.103515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:40.103569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:40.103780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:40.103827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:40.104045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:40.104089Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:40.104174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:40.104223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:40.104264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:40.104295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:40.104331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:32:40.104375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2025-04-10T11:32:40.104412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:32:40.104442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:32:40.104559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:40.104628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:32:40.104666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:32:40.107096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:40.107209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:32:40.107243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 105:0, at schemeshard: 72057594046678944 2025-04-10T11:32:43.952763Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:32:43.952801Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:32:43.952852Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:32:43.952889Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:32:43.952964Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-10T11:32:43.953104Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:32:43.953154Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-10T11:32:43.953203Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:32:43.953295Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:32:43.953337Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:32:43.953385Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-10T11:32:43.953428Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-04-10T11:32:43.953464Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-04-10T11:32:43.956476Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:43.956700Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, 
operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-04-10T11:32:43.956937Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:43.956990Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:32:43.957155Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:32:43.957284Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:43.957336Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-10T11:32:43.957434Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-10T11:32:43.958088Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:32:43.958234Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:32:43.958279Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:32:43.958330Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-10T11:32:43.958377Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:32:43.958877Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:32:43.958999Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-10T11:32:43.959031Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-10T11:32:43.959058Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-10T11:32:43.959087Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:32:43.959170Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-10T11:32:43.963674Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:32:43.964049Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until 
txId: 105 TestModificationResults wait txId: 106 2025-04-10T11:32:43.967001Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:43.969326Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:43.969478Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:32:43.969565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:32:43.969616Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-10T11:32:43.969655Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:32:43.969723Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:43.969796Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-10T11:32:43.969840Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-10T11:32:43.969879Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-10T11:32:43.969919Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-10T11:32:43.969961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-04-10T11:32:43.972594Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:43.972715Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-04-10T11:32:43.972963Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:43.973010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:43.973232Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:43.973283Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-10T11:32:43.973886Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:32:43.974005Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-10T11:32:43.974046Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-10T11:32:43.974107Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-10T11:32:43.974167Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:32:43.974276Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-10T11:32:43.976382Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-10T11:32:43.976998Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:32:43.977241Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 261us result status StatusSuccess 2025-04-10T11:32:43.977614Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:43.978192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:32:43.978315Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 >> THealthCheckTest::Issues100Groups100VCardListing >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> BackupRestore::PrefixedVectorIndex [GOOD] >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] Test command err: Trying to start YDB, gRPC: 17394, MsgBus: 61941 2025-04-10T11:29:28.470583Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645178316820620:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:28.779662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ff/r3tmp/tmp5Mzxx7/pdisk_1.dat 2025-04-10T11:29:29.111478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:29.116380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:29.116463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:29.119617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17394, node 1 2025-04-10T11:29:29.286686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:29.286707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:29.286713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:29.286842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61941 TClient is connected to server localhost:61941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:30.052322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:30.110385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:30.135178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:30.378849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:29:30.627605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:30.733584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:32.416831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645195496691394:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.416947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:32.668737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.709862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.749656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.802429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.886939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:29:32.955410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:29:33.030278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645199791659206:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:33.030402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:33.034774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645199791659211:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:33.041240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:29:33.053849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645199791659213:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:29:33.154389Z node 1 :TX_PROXY ERROR: Actor# [1:7491645199791659270:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:33.415060Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645178316820620:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:33.415137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:29:34.238735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.307694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.344204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T11:29:34.379820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:8:40: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 8036, MsgBus: 16495 2025-04-10T11:29:36.812537Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645211626268724:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:36.813477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015ff/r3tmp/tmpB0xWbO/pdisk_1.dat 2025-04-10T11:29:37.101954Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:37.110918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:37.111004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:37.114609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8036, node 2 2025-04-10T11:29:37.274611Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:37.274634Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:37.274640Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:37.274739Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16495 TClient is connected to server localhost:16495 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:37.795444Z node 2 :FLAT_TX_SC ... in>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:31:56.921212Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqh3v095kth2emtpt4h5r7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:00.790675Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645830101574695:5956], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:00.793031Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqh7ky7khxwr2gcgxkcf0h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:00.831612Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645830101574709:5962], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:00.834345Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqh7n21yaj473my6ehbse1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:07.680556Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645860166346310:6109], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:07.684316Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqheaq6tzwt7wahr1t3bqw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:11.150518Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645877346215799:6186], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:11.152801Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhhqm56ppp9sr758ghy2t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:11.189851Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645877346215816:6194], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:11.190435Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhhrsea8t95efqqbcp6c0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:11.244411Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645877346215829:6200], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:11.246856Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhhtb999b6gwxc7ew7at7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:19.302647Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645911705954758:6351], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:19.304372Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhsp6aw8r3f16rxggzt5x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:19.349836Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645911705954771:6357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:19.350342Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhsqr19vn1ywxe8eewh3s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:23.514857Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645928885824262:6435], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:23.515131Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhxt31ftxxaj4fmxkyvtj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:23.548341Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645928885824275:6441], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:23.550596Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqhxv2dnkr6d8q1f10ymkz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:31.603557Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645963245563213:6591], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:31.604126Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqj5ppdebceb6n3sdjszsy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:35.865834Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645980425432674:6670], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:35.866354Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqj9vr807t0m2z6mgddcfk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:35.914125Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645980425432687:6676], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:35.916875Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqj9xebsxpewb3v2wwxtc2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-10T11:32:35.960403Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645980425432701:6682], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-04-10T11:32:35.961733Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzg4MmEwOTktZDU2MzU0NDUtNDUyNDdiOGYtN2RhNDliNTU=, ActorId: [2:7491645237396075077:2497], ActorState: ExecuteState, TraceId: 01jrfqj9yrf3g3z7shw0mreag4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> TIcNodeCache::GetNodesInfoTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] Test command err: 2025-04-10T11:31:10.549867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645614799021398:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:10.550208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmp0t8PYc/pdisk_1.dat 2025-04-10T11:31:11.516624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:11.516706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:11.532680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:31:11.552375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:31:11.591241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2274, node 1 2025-04-10T11:31:11.730407Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:31:11.730438Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:31:11.953150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:11.953174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:11.953181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:11.953288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17849 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:12.736847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:15.275034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645636273858809:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.275189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:15.542262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645614799021398:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:15.542338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:15.895019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/"Create temporary directory "/Root/~backup_20250410T113116" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T113116/table" }Backup table "/Root/~backup_20250410T113116/table" to "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table"Describe table "/Root/~backup_20250410T113116/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/permissions.pb"Read table "/Root/~backup_20250410T113116/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/data_00.csv"Drop table "/Root/~backup_20250410T113116/table"Remove temporary directory "/Root/~backup_20250410T113116" in database2025-04-10T11:31:16.858715Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-10T11:31:16.876851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:31:16.908387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645640568826702:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.908458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.978196Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/" to "/Root"Process "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table"Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table" to "/Root/table"2025-04-10T11:31:17.030464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpa24KsP/table/permissions.pb"2025-04-10T11:31:17.140505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-10T11:31:19.179168Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491645652406658674:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:19.186693Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmprBz2ed/pdisk_1.dat 2025-04-10T11:31:19.454871Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:19.529312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:19.529399Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:19.546223Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63210, node 4 2025-04-10T11:31:19.706850Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:19.706874Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:19.706880Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:19.707000Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63411 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:20.010312Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:23.307130Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645669586528931:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:23.307239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:23.336569Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:23.522515Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491645669586529180:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:23.522622Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: R ... t4/001d38/r3tmp/tmpFrP3ZV/externalDataSource/create_external_data_source.sql"Check existence of the secret "secret"2025-04-10T11:32:29.643063Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 2025-04-10T11:32:30.455994Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710728:0, at schemeshard: 72057594046644480 2025-04-10T11:32:31.281502Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710735:0, at schemeshard: 72057594046644480 2025-04-10T11:32:31.989399Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710740:0, at schemeshard: 72057594046644480 2025-04-10T11:32:33.638021Z node 19 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=19&id=NGEyNDg0NjYtNTFiNmYzYjMtNjM5MmI4MGEtOTY2NTRjNTY=, ActorId: [19:7491645953949405564:2840], ActorState: ExecuteState, TraceId: 01jrfqj3snct9n8ynsz6tzjp37, Create QueryResponse for error on request, msg: 2025-04-10T11:32:33.640516Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jrfqj3snct9n8ynsz6tzjp37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NGEyNDg0NjYtNTFiNmYzYjMtNjM5MmI4MGEtOTY2NTRjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpFrP3ZV/externalDataSource } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup 2025-04-10T11:32:36.328817Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7491645984677160724:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:36.334064Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpLt64qC/pdisk_1.dat 2025-04-10T11:32:36.721787Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:36.784180Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:36.784350Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:36.791953Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23001, node 22 2025-04-10T11:32:36.926336Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:36.926372Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:36.926391Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:36.926622Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:37.442495Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:41.330296Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7491645984677160724:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:41.330412Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:32:42.407551Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7491646010446965649:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:42.407808Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:42.455607Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/"Create temporary directory "/Root/~backup_20250410T113242" in databaseProcess "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250410T113242/table" }Backup table "/Root/~backup_20250410T113242/table" to "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table"Describe table "/Root/~backup_20250410T113242/table"Write scheme into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/permissions.pb"Read table "/Root/~backup_20250410T113242/table"Write data into "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/data_00.csv"Drop table "/Root/~backup_20250410T113242/table"2025-04-10T11:32:43.388791Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037895 not found 2025-04-10T11:32:43.393540Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037893 not found 2025-04-10T11:32:43.393597Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037892 not found Remove temporary directory "/Root/~backup_20250410T113242" in database2025-04-10T11:32:43.415942Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037894 not found 2025-04-10T11:32:43.453443Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-10T11:32:43.506653Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7491646014741934239:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:32:43.506794Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
Restore "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/" to "/Root"
2025-04-10T11:32:43.651672Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037889 not found
2025-04-10T11:32:43.666384Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037890 not found
2025-04-10T11:32:43.666443Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037891 not found
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/" to "/Root"
Process "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table"
Read scheme from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/scheme.pb"
Restore table "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table" to "/Root/table"
2025-04-10T11:32:43.699277Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037888 not found
2025-04-10T11:32:43.727421Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
Created "/Root/table"
Read data from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/data_00.csv"
Restore index "byValue" on "/Root/table"
2025-04-10T11:32:43.885053Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
2025-04-10T11:32:44.063993Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
2025-04-10T11:32:44.157852Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480
2025-04-10T11:32:44.364586Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037900 not found
2025-04-10T11:32:44.364950Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:0, at schemeshard: 72057594046644480
2025-04-10T11:32:44.485714Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480
2025-04-10T11:32:44.604374Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037901 not found
2025-04-10T11:32:44.609789Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037902 not found
Restore ACL "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table" to "/Root/table"
Read ACL from "/home/runner/.ya/build/build_root/2gt4/001d38/r3tmp/tmpDG8Org/table/permissions.pb"
2025-04-10T11:32:44.752076Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480
Restore completed successfully
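The restore trace above follows the standard YDB backup layout: scheme.pb carries the table definition, data_*.csv the rows, and permissions.pb the ACL, while the secondary index "byValue" is rebuilt through build-index suboperations rather than copied. A minimal YQL sketch of a table whose restore would produce such a trace; only the table path and the index name come from the log, the columns are illustrative assumptions:

```sql
-- Hypothetical schema: `/Root/table` and the index name "byValue" are taken
-- from the log above; the column names and types are assumptions.
CREATE TABLE `/Root/table` (
    Key Uint64,
    Value Utf8,
    PRIMARY KEY (Key),
    INDEX byValue GLOBAL ON (Value)
);
```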
>> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD]
>> DataShardWrite::DeleteImmediate
>> THiveTest::TestManyFollowersOnOneNode [GOOD]
>> THiveTest::TestLockTabletExecutionTimeout
|95.6%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.6%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus
>> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD]
>> THiveTest::TestRestartTablets
>> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD]
>> THealthCheckTest::YellowGroupIssueOnYellowSpace
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest
>> TIcNodeCache::GetNodesInfoTest [GOOD]
Test command err:
2025-04-10T11:32:31.877659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645962916548520:2077];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:31.877742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:32:31.992336Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491645963668058882:2093];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:32.174054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-10T11:32:32.174533Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-10T11:32:32.183888Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00126f/r3tmp/tmpkZCpKq/pdisk_1.dat
2025-04-10T11:32:32.570745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:32:32.570914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:32:32.574576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:32:32.574620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:32:32.578344Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-10T11:32:32.578517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:32:32.581407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:32:32.630951Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23780, node 1
2025-04-10T11:32:32.686616Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:32:32.697948Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T11:32:32.799096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00126f/r3tmp/yandexABUjNe.tmp
2025-04-10T11:32:32.799143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00126f/r3tmp/yandexABUjNe.tmp
2025-04-10T11:32:32.807966Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00126f/r3tmp/yandexABUjNe.tmp
2025-04-10T11:32:32.809899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:32:32.930367Z INFO: TTestServer started on Port 18027 GrpcPort 23780
TClient is connected to server localhost:18027
PQClient connected to localhost:23780
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:32:33.363619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:32:33.510541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-04-10T11:32:36.059965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645985142895713:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.059963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491645985142895733:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.060076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.067298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-04-10T11:32:36.084927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645984391386108:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.085062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.087182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645984391386124:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.132558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645984391386157:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.132668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:32:36.148725Z node 1 :TX_PROXY ERROR: Actor# [1:7491645984391386127:2721] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 }
2025-04-10T11:32:36.170489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645984391386126:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-04-10T11:32:36.170826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491645985142895740:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-04-10T11:32:36.231228Z node 1 :TX_PROXY ERROR: Actor# [1:7491645984391386209:2780] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:32:36.265453Z node 2 :TX_PROXY ERROR: Actor# [2:7491645985142895769:2180] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:32:36.546082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
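The run of warnings above is a benign bootstrap race, not a test failure: both nodes get NOT_FOUND for the default workload-manager pool, both race to create `.metadata/workload_manager/pools/default` (hence the ESchemeOpCreateResourcePool suboperation and the "path exists but creating right now" / "Transaction ... completed, doublechecking" retries), and the retry settles on "path exist, request accepts it". For contrast, a sketch of creating a pool explicitly, assuming the CREATE RESOURCE POOL syntax of recent YDB releases; the pool name and settings are illustrative and not taken from this log:

```sql
-- Illustrative only: explicit resource pool creation (hedged; the default
-- pool in the log is auto-created by the workload service itself).
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);
```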
2025-04-10T11:32:36.568398Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491645985142895776:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T11:32:36.568717Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzgxOTNhYTktZjRkOTgxMmYtN2I0NjUyZDktMWRiYjM5Y2Y=, ActorId: [2:7491645985142895709:2312], ActorState: ExecuteState, TraceId: 01jrfqja2qbvfrrbnxykpdbm7d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-10T11:32:36.571234Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-04-10T11:32:36.572744Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645984391386228:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-10T11:32:36.574549Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzcyYmNlYmYtOWEzNWU4ZTctMjU3MjhhZjYtYzA0MDRiMzM=, ActorId: [1:7491645984391386085:2337], ActorState: ExecuteState, TraceId: 01jrfqja3ja2639gmhe5xpcq3a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-10T11:32:36.575004Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-04-10T11:32:36.678626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:32:36.870265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645962916548520:2077];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:36.870335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:32:36.883540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:32:36.943840Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491645963668058882:2093];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:36.943911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-04-10T11:32:37.168114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfqjb0j9ng1jrgc6yfwgctg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxM2Q1MzktN2U5ZGU5ODctOTAxZGM3MmItZWUzNWFkNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
=== CheckClustersList. Subcribe to ClusterTracker from [1:7491645988686353996:3133]
=== CheckClustersList. Ok
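The SCHEME_ERROR pair above is expected on a fresh cluster: the cluster tracker polls `/Root/PQ/Config/V2/Cluster` before the table exists, fails with "Cannot find table", and the test then creates the table and seeds it with the "=== Init DC" statement quoted in the log. Reformatted as standalone YQL; the UPSERT is verbatim from the log, while the SELECT is a reconstruction, since the literal polling query text is not printed:

```sql
-- Reconstructed read against the cluster registry; only the resulting
-- "Cannot find table" issue appears in the log, not this exact text.
SELECT name, balancer, local, enabled, weight
FROM `/Root/PQ/Config/V2/Cluster`;

-- Seed statement, verbatim from the "=== Init DC" line above.
UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
VALUES ("dc1", "localhost", true, true, 1000),
       ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
```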
>> THiveTest::TestHiveBalancerDifferentResources [GOOD]
>> THiveTest::TestFollowersCrossDC_Easy
|95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> THealthCheckTest::Issues100GroupsListing [GOOD]
>> THealthCheckTest::Issues100VCardListing
|95.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TLocksTest::BrokenLockErase
>> THiveTest::TestRestartTablets [GOOD]
>> THiveTest::TestServerlessComputeResourcesMode
>> THealthCheckTest::StorageLimit95 [GOOD]
>> THealthCheckTest::StorageLimit87
>> DataShardWrite::ReplaceImmediate [GOOD]
>> DataShardWrite::ReplaceImmediate_DefaultValue
>> THealthCheckTest::Basic
>> THiveTest::TestLockTabletExecutionTimeout [GOOD]
>> THiveTest::TestLockTabletExecutionRebootTimeout
>> DataShardWrite::UpsertNoLocksArbiter [GOOD]
>> DataShardWrite::UpsertLostPrepareArbiter
>> TFlatTest::PathSorting
>> THiveTest::TestFollowersCrossDC_Easy [GOOD]
>> THiveTest::TestFollowers_LocalNodeOnly
>> DataShardWrite::DeleteImmediate [GOOD]
>> DataShardWrite::CancelImmediate
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD]
>> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile
>> THealthCheckTest::Issues100Groups100VCardListing [GOOD]
>> THealthCheckTest::GreenStatusWhenInitPending
>> THiveTest::TestServerlessComputeResourcesMode [GOOD]
>> THiveTest::TestResetServerlessComputeResourcesMode
>> THealthCheckTest::Basic [GOOD]
>> THealthCheckTest::BasicNodeCheckRequest
>> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD]
>> THealthCheckTest::ServerlessBadTablets
>> THiveTest::TestResetServerlessComputeResourcesMode [GOOD]
>> THiveTest::TestSkipBadNode
>> TFlatTest::PathSorting [GOOD]
>> TFlatTest::PartBloomFilter
>> THiveTest::TestFollowers_LocalNodeOnly [GOOD]
>> THiveTest::TestFollowersCrossDC_Tight
>> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD]
>> DataShardWrite::UpdateImmediate
>> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
>> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD]
>> THealthCheckTest::RedGroupIssueOnRedSpace
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
Test command err:
2025-04-10T11:31:48.352741Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T11:31:48.522076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T11:31:48.551977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T11:31:48.552466Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T11:31:48.564128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T11:31:48.564457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T11:31:48.564733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T11:31:48.564889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T11:31:48.565009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T11:31:48.565221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T11:31:48.565354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T11:31:48.565472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T11:31:48.565609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T11:31:48.565735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T11:31:48.565849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T11:31:48.565951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T11:31:48.606704Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T11:31:48.607207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T11:31:48.607275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T11:31:48.607834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:31:48.608443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T11:31:48.608531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T11:31:48.608583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T11:31:48.608690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T11:31:48.608762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T11:31:48.608845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T11:31:48.608887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T11:31:48.609107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:31:48.609189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T11:31:48.609234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T11:31:48.609267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T11:31:48.609360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T11:31:48.609424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T11:31:48.609476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T11:31:48.609508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T11:31:48.609589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T11:31:48.609631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T11:31:48.609664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T11:31:48.609720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T11:31:48.609763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T11:31:48.609799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T11:31:48.610227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=80;
2025-04-10T11:31:48.610353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61;
2025-04-10T11:31:48.610962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=524;
2025-04-10T11:31:48.611089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53;
2025-04-10T11:31:48.611293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T11:31:48.611355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T11:31:48.611394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T11:31:48.611596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T11:31:48.611647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T11:31:48.611680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T11:31:48.611882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T11:31:48.611932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T11:31:48.611978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T11:31:48.612186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T11:31:48.612234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T11:31:48.612265Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T11:31:48.612396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-10T11:31:48.612445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-10T11:31:48.612490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=8ba90c1c-15ff11f0-a432dc23-71fddcea,;};
2025-04-10T11:32:55.695279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled;
2025-04-10T11:32:55.695321Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0;
2025-04-10T11:32:55.695371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T11:32:55.695411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T11:32:55.695446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:32:55.695511Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.406500s;
2025-04-10T11:32:55.695555Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T11:32:55.921114Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:277:2290];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1;
2025-04-10T11:32:55.962955Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184
2025-04-10T11:32:55.963317Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1
2025-04-10T11:32:55.983043Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1
2025-04-10T11:32:55.983343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=263;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=8ba90c1c-15ff11f0-a432dc23-71fddcea,;};
2025-04-10T11:32:55.984658Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:1:574112:0];
2025-04-10T11:32:55.984791Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:2:592928:0];
GC for channel 3 deletes blobs:
GC for channel 2 deletes blobs:
GC for channel 4 deletes blobs: [9437184:3:83:4:0:5870200:0]
Added portions: 151 152
2025-04-10T11:32:56.010964Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184
2025-04-10T11:32:56.011311Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[264] (CS::INDEXATION) apply at tablet 9437184
2025-04-10T11:32:56.015694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2
2025-04-10T11:32:56.015861Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-04-10T11:32:56.042883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
2025-04-10T11:32:56.042977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;fline=with_appended.cpp:65;portions=75,76,;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;
2025-04-10T11:32:56.043247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::8ba90c1c-15ff11f0-a432dc23-71fddcea;
2025-04-10T11:32:56.043309Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-10T11:32:56.043375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:32:56.043424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled;
2025-04-10T11:32:56.043476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0;
2025-04-10T11:32:56.043572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T11:32:56.043642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T11:32:56.043691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:32:56.043781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.395000s;
2025-04-10T11:32:56.043842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T11:32:56.044016Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:85:3:0:5870200:0]
2025-04-10T11:32:56.044072Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2
2025-04-10T11:32:56.045275Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=8ba90c1c-15ff11f0-a432dc23-71fddcea;mem=5963210;cpu=0;
2025-04-10T11:32:56.046311Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:32:56.047287Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0
2025-04-10T11:32:56.047365Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] execute at tablet 9437184
2025-04-10T11:32:56.047700Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0;
2025-04-10T11:32:56.047923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:212;event=finished_tx;tx_id=1043;
2025-04-10T11:32:56.063080Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] complete at tablet 9437184
2025-04-10T11:32:56.063202Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0;
2025-04-10T11:32:56.063343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200;
2025-04-10T11:32:56.063482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::8c26100e-15ff11f0-b6d04669-e1b29d59;
2025-04-10T11:32:56.063536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184;
2025-04-10T11:32:56.063632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;
2025-04-10T11:32:56.063705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;
2025-04-10T11:32:56.063856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:277:2290];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;type=CS::INDEXATION;priority=0;;
2025-04-10T11:32:56.064151Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:277:2290];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;type=CS::INDEXATION;priority=0;;
2025-04-10T11:32:56.064199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:277:2290];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;mem=5963210;cpu=0;
2025-04-10T11:32:56.064244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:277:2290];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;task_id=46;mem=5963210;cpu=0;
2025-04-10T11:32:56.064361Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;
Added portions: 153 154
2025-04-10T11:32:56.617458Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=8c26100e-15ff11f0-b6d04669-e1b29d59;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();;
2025-04-10T11:32:56.617754Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:277:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2;
Compactions happened: 14
Indexations happened: 31
Cleanups happened: 1
Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34
Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> THealthCheckTest::BasicNodeCheckRequest [GOOD]
>> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks
>> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD]
>> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks
>> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD]
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile
>> DataShardWrite::CancelImmediate [GOOD]
>> DataShardWrite::DeletePrepared+Volatile
>> THealthCheckTest::Issues100VCardListing [GOOD]
>> THealthCheckTest::Issues100GroupsMerging
>> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD]
>> YdbIndexTable::MultiShardTableTwoIndexes
>> THiveTest::TestStopTenant [GOOD]
>> TScaleRecommenderTest::BasicTest
>> THiveTest::TestFollowersCrossDC_Tight [GOOD]
>> THiveTest::TestFollowersCrossDC_MovingLeader
>> THealthCheckTest::StorageLimit87 [GOOD]
>> THealthCheckTest::StorageNoQuota
>> TFlatTest::PartBloomFilter [GOOD]
>> THealthCheckTest::StaticGroupIssue
>> THealthCheckTest::ServerlessBadTablets [GOOD]
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes
>> TScaleRecommenderTest::BasicTest [GOOD]
>> TStorageBalanceTest::TestScenario1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest
>> TFlatTest::PartBloomFilter [GOOD]
Test command err:
2025-04-10T11:32:52.570345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646052623731773:2064];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:52.570410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201f/r3tmp/tmpfbMGFp/pdisk_1.dat
2025-04-10T11:32:52.930941Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:32:52.961729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:32:52.961860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:32:52.963591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18813
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:32:53.231988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:32:53.254716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:32:53.262714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
waiting...
waiting...
TClient::Ls request: /dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284773294 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1744284773364 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED)
2025-04-10T11:32:55.929168Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646064988510070:2066];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:32:55.929679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201f/r3tmp/tmpRfIXSP/pdisk_1.dat
2025-04-10T11:32:56.085197Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:32:56.095579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:32:56.095657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:32:56.097176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:16476
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:32:56.290275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:32:56.302896Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:32:56.316558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:32:56.792644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480
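TFlatTest::PartBloomFilter ends with an ESchemeOpCreateTable followed by an ESchemeOpAlterTable, consistent with creating a table and then toggling its per-part key bloom filter. A hedged YQL sketch of the equivalent DDL, assuming YDB's KEY_BLOOM_FILTER table setting; the table path is a placeholder, and the test itself drives the scheme operation directly rather than through YQL:

```sql
-- Placeholder path; KEY_BLOOM_FILTER is assumed here to be the table
-- setting this test toggles via its ESchemeOpAlterTable operation.
ALTER TABLE `/dc-1/Dir/Table` SET (KEY_BLOOM_FILTER = ENABLED);
```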
>> DataShardWrite::UpdateImmediate [GOOD]
>> DataShardWrite::RejectOnChangeQueueOverflow
>> THealthCheckTest::GreenStatusWhenInitPending [GOOD]
>> THealthCheckTest::IgnoreOtherGenerations
>> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD]
>> DataShardWrite::UpsertLostPrepareArbiterRestart
>> TFlatTest::CrossRW
>> DataShardWrite::DeletePrepared+Volatile [GOOD]
>> DataShardWrite::DeletePrepared-Volatile
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD]
>> DataShardWrite::InsertImmediate
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD]
>> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes
>> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD]
>> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues
>> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD]
>> THealthCheckTest::TestTabletIsDead
>> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD]
>> THealthCheckTest::GreenStatusWhenCreatingGroup
>> DataShardWrite::RejectOnChangeQueueOverflow [GOOD]
>> DataShardWrite::UpsertBrokenLockArbiter
>> TFlatTest::CrossRW [GOOD]
>> TFlatTest::GetTabletCounters
>> THealthCheckTest::StorageNoQuota [GOOD]
>> THealthCheckTest::TestBootingTabletIsNotDead
>> THealthCheckTest::StaticGroupIssue [GOOD]
>> THealthCheckTest::StorageLimit80
>> THealthCheckTest::Issues100GroupsMerging [GOOD]
>> THealthCheckTest::Issues100VCardMerging
>> ObjectDistribution::TestManyIrrelevantNodes [GOOD]
>> Sequencer::Basic1 [GOOD]
>> StoragePool::TestDistributionRandomProbability
>> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD]
>> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower
>> DataShardWrite::DeletePrepared-Volatile [GOOD]
>> DataShardWrite::DelayedVolatileTxAndEvWrite
>> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD]
>> DataShardWrite::InsertImmediate [GOOD]
>> DataShardWrite::ImmediateAndPlannedCommittedOpsRace
>> TLocksTest::BrokenLockErase [GOOD]
>> TLocksTest::BrokenDupLock
>> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD]
>> TFlatTest::GetTabletCounters [GOOD]
>> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD]
>> THealthCheckTest::ShardsLimit800
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest
>> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD]
Test command err:
2025-04-10T11:32:40.555515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:32:40.555849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T11:32:40.556111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176b/r3tmp/tmpoUV0YU/pdisk_1.dat
2025-04-10T11:32:41.090321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T11:32:41.140235Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:32:41.187905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:32:41.188069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:32:41.199680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:32:41.298350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T11:32:41.353507Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-04-10T11:32:41.355513Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-04-10T11:32:41.356032Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-04-10T11:32:41.356338Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:32:41.408851Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T11:32:41.409716Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:32:41.409864Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-10T11:32:41.411667Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-10T11:32:41.411772Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-10T11:32:41.411838Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-10T11:32:41.412229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-10T11:32:41.412371Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-10T11:32:41.412467Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-04-10T11:32:41.423315Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-10T11:32:41.463550Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-10T11:32:41.463840Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-10T11:32:41.464018Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-04-10T11:32:41.464063Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-10T11:32:41.464105Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-10T11:32:41.464145Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-10T11:32:41.464410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-10T11:32:41.464477Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-10T11:32:41.464881Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-10T11:32:41.465011Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-10T11:32:41.465115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-10T11:32:41.465161Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-10T11:32:41.465237Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-10T11:32:41.465277Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-10T11:32:41.465312Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-10T11:32:41.465351Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-10T11:32:41.465403Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-10T11:32:41.465598Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:32:41.465643Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:32:41.465693Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-04-10T11:32:41.466096Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-04-10T11:32:41.466192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-10T11:32:41.466325Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-10T11:32:41.466575Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-10T11:32:41.466660Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-10T11:32:41.466767Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-10T11:32:41.466831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-10T11:32:41.466874Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-10T11:32:41.466912Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-10T11:32:41.466967Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:32:41.467342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-10T11:32:41.467435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-10T11:32:41.467473Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-10T11:32:41.467513Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:32:41.467569Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-10T11:32:41.467603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-10T11:32:41.467641Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-10T11:32:41.467700Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:32:41.467758Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-10T11:32:41.469441Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-10T11:32:41.469500Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-10T11:32:41.480301Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-10T11:32:41.480392Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-10T11:32:41.480428Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-10T11:32:41.480500Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-10T11:32:41.480595Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-10T11:32:41.634451Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-10T11:32:41.634525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-10T11:32:41.634572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-04-10T11:32:41.635036Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-10T11:32:41.635086Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-10T11:32:41.635229Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-10T11:32:41.635281Z node 1 :TX_DATASHARD TRACE: Execution status
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:32:41.635338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:32:41.635385Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:32:41.640495Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:32:41.640595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:41.643196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.643263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.643353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:4 ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:09.397249Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1003:2819], 1001} after executionsCount# 1 2025-04-10T11:33:09.397293Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:09.397385Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} finished in read 2025-04-10T11:33:09.397452Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:09.397477Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:33:09.397500Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:33:09.397525Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:33:09.397586Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:09.397613Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:33:09.397637Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-10T11:33:09.397678Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:33:09.397764Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:33:09.398677Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1009:2825], Recipient [6:717:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.398726Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.398763Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1008:2824], serverId# [6:1009:2825], sessionId# [0:0:0] 2025-04-10T11:33:09.398848Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:1007:2823], Recipient [6:717:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T11:33:09.399610Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1012:2828], Recipient [6:717:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.399653Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.399688Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1011:2827], serverId# [6:1012:2828], sessionId# [0:0:0] 2025-04-10T11:33:09.399854Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1010:2826], Recipient [6:717:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T11:33:09.399964Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-10T11:33:09.400000Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:33:09.400032Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-10T11:33:09.400088Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-04-10T11:33:09.400173Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-10T11:33:09.400203Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-04-10T11:33:09.400228Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:09.400266Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-10T11:33:09.400312Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-04-10T11:33:09.400345Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-10T11:33:09.400372Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:09.400394Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-04-10T11:33:09.400415Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-04-10T11:33:09.400500Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:09.400653Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1010:2826], 1002} after executionsCount# 1 2025-04-10T11:33:09.400694Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:09.400795Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} finished in read 2025-04-10T11:33:09.400844Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-10T11:33:09.400870Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-04-10T11:33:09.400897Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T11:33:09.400920Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-04-10T11:33:09.400959Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-10T11:33:09.400979Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T11:33:09.401001Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-04-10T11:33:09.401046Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-10T11:33:09.401122Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-10T11:33:09.401893Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1016:2832], Recipient [6:715:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.402017Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.402068Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1015:2831], serverId# [6:1016:2832], sessionId# [0:0:0] 2025-04-10T11:33:09.402220Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1014:2830], Recipient [6:715:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T11:33:09.402980Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1019:2835], Recipient [6:715:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.403020Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:09.403054Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1018:2834], serverId# [6:1019:2835], sessionId# [0:0:0] 2025-04-10T11:33:09.403180Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1017:2833], Recipient [6:715:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T11:33:09.403300Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-10T11:33:09.403352Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:33:09.403384Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-10T11:33:09.403425Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-04-10T11:33:09.403522Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-10T11:33:09.403548Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-04-10T11:33:09.403574Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:09.403598Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-10T11:33:09.403638Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-04-10T11:33:09.403670Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-10T11:33:09.403712Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:09.403738Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-04-10T11:33:09.403762Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-04-10T11:33:09.403849Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:09.403971Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1017:2833], 1003} after executionsCount# 1 2025-04-10T11:33:09.404013Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:09.404065Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} finished in read 2025-04-10T11:33:09.404108Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-10T11:33:09.404134Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-04-10T11:33:09.404157Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-04-10T11:33:09.404181Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-04-10T11:33:09.404228Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-10T11:33:09.404263Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-04-10T11:33:09.404288Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-04-10T11:33:09.404315Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-10T11:33:09.404400Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestGetStorageInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 22898, MsgBus: 18918 2025-04-10T11:30:28.967433Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645435780964454:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:28.968622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c7d/r3tmp/tmpWvB6s5/pdisk_1.dat 2025-04-10T11:30:29.723823Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T11:30:29.787782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:29.794193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:29.817662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22898, node 1 2025-04-10T11:30:30.243713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.243738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.243748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.243893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18918 TClient is connected to server localhost:18918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.520262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.570999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:30:31.593023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.945177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.238129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.364119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:33.812036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645457255802561:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:33.812138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:33.968905Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645435780964454:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:33.968983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:34.705604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.747524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.785016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.860624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.916352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.990511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.137082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645465845737681:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.137174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.137515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645465845737686:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.146374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:35.169838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645465845737688:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:30:35.255193Z node 1 :TX_PROXY ERROR: Actor# [1:7491645465845737746:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:37.247583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:39.143063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jrfqeqs0fk1fzj75b82h24pt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzY2IwMWYtZTAwNjE3MGQtNjU5YmQ1ZDUtM2ZiYTBiMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.147822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfqeqs1f92e8b26k7e4545b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA0YTAzYTAtZDJkNjgzNDUtNDI4MTY2NTgtMTdiZmI0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.150904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jrfqeqs19jnqtqwyrgvn1c61, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNlYTVmMTAtMTY3ZjE3NTUtZmE5Y2UwZWEtNmZkMDI0YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.174563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfqeqsgbvq1d2c50xc4e512, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc2OGRmOTgtNjI3M2FjZTUtOTI5YjZhYWItYWFiYzMzNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.187026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfqeqsf96vwvdsjys3x6eqy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA1Y2Q3ODktNzExMWUxNDYtODliNjlhYmItMmM1MzM5MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.187828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jrfqeqsgd1drp50fpab793tq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ5MzRiOGItYWJjZWI1My04ZmM1ZjE2Ny1mYjIzMThmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.192383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jrfqeqsg7jarejfameavhb7g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg2ZDc4ZTEtNjcwMjVjZDMtNzE1NGJjNjMtOTBlZTJkMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.193055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jrfqeqt940evpscj82z8wjnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk1OGVkZTYtMWE2MDEzYjQtODVlZGVmYjMtMTIyNDNjMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.202972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. 
Ctx: { TraceId: 01jrfqeqs0fk1fzj75b82h24pt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzY2IwMWYtZTAwNjE3MGQtNjU5YmQ1ZDUtM2ZiYTBiMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.203338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jrfqeqt96nh1tbx1eemt1q50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTMxM2MxNTEtZjRhYjk5MzUtNTFlNzdmYzgtYmEzN2FmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.203991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jrfqeqwj04cta8ctx9609d6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJjY2ZlMTYtNmIwMzI3NTMtZWYxOTc4MGEtNGJmZDQ4ZGU=, CurrentExecutionId: , CustomerSuppl ... sion/3?node_id=3&id=MzIxMzliNDAtODU5MGVlYWYtM2Q3MGNmNTctYjgwMTJlY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.056178Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714486. Ctx: { TraceId: 01jrfqk8av4jc86ttm9dfjs61j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzY5NzAwNzMtYTg1MTAzYzUtNTgyOWVkYzEtMTgzZjE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.056665Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714487. Ctx: { TraceId: 01jrfqk8avbw6nm5dgkybn3kgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.060942Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714489. Ctx: { TraceId: 01jrfqk8b258383v8nj8pbt90x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWFmOGU0NmEtZTc3NGY1MzQtMWY4ZWU4ZWYtYTAzYjU3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.061366Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714488. Ctx: { TraceId: 01jrfqk8b84j8drssp1t2yg7kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.066591Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714490. Ctx: { TraceId: 01jrfqk8av4jc86ttm9dfjs61j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzY5NzAwNzMtYTg1MTAzYzUtNTgyOWVkYzEtMTgzZjE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.073738Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714491. Ctx: { TraceId: 01jrfqk8b84j8drssp1t2yg7kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.077699Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714492. Ctx: { TraceId: 01jrfqk8b258383v8nj8pbt90x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWFmOGU0NmEtZTc3NGY1MzQtMWY4ZWU4ZWYtYTAzYjU3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.091932Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714493. 
Ctx: { TraceId: 01jrfqk8cf1vfaettp8cgw36jz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.098771Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714494. Ctx: { TraceId: 01jrfqk8cf1vfaettp8cgw36jz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.111501Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714495. Ctx: { TraceId: 01jrfqk8ctcpf02t72tadqkqr7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk1OWViOGUtNDU0NDVmYTMtZTAxYjFmODEtOGYwZWI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.119252Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714496. Ctx: { TraceId: 01jrfqk8ctcpf02t72tadqkqr7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk1OWViOGUtNDU0NDVmYTMtZTAxYjFmODEtOGYwZWI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.124087Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714497. Ctx: { TraceId: 01jrfqk8db52tv1q68k3dp2kna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.124086Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714498. Ctx: { TraceId: 01jrfqk8db6k49cgg7q69hs8t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.131328Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714500. Ctx: { TraceId: 01jrfqk8db52tv1q68k3dp2kna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.133431Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714499. Ctx: { TraceId: 01jrfqk8db6k49cgg7q69hs8t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.142600Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714501. Ctx: { TraceId: 01jrfqk8dz06xbvz5dgh8w5mgv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWFmOGU0NmEtZTc3NGY1MzQtMWY4ZWU4ZWYtYTAzYjU3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.153958Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714502. Ctx: { TraceId: 01jrfqk8ee71wxfgqkgbr4x2tn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.160257Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714503. 
Ctx: { TraceId: 01jrfqk8ee71wxfgqkgbr4x2tn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.165005Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714504. Ctx: { TraceId: 01jrfqk8ee71wxfgqkgbr4x2tn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.170836Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714506. Ctx: { TraceId: 01jrfqk8ek7jrk9h1jmmys8j3d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk1OWViOGUtNDU0NDVmYTMtZTAxYjFmODEtOGYwZWI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.172558Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714507. Ctx: { TraceId: 01jrfqk8ekah31xcc0w90vq8sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.172951Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714505. Ctx: { TraceId: 01jrfqk8eh3htvgyk56h6axvtr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzY5NzAwNzMtYTg1MTAzYzUtNTgyOWVkYzEtMTgzZjE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.180560Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714508. Ctx: { TraceId: 01jrfqk8ek7jrk9h1jmmys8j3d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk1OWViOGUtNDU0NDVmYTMtZTAxYjFmODEtOGYwZWI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.185713Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714509. Ctx: { TraceId: 01jrfqk8ekah31xcc0w90vq8sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.188480Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714510. Ctx: { TraceId: 01jrfqk8f6bg2snn8h0e6zd4ny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.199389Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714511. Ctx: { TraceId: 01jrfqk8f6bg2snn8h0e6zd4ny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.212945Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714512. Ctx: { TraceId: 01jrfqk8ga3edjcyhzsgjy32t2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWFmOGU0NmEtZTc3NGY1MzQtMWY4ZWU4ZWYtYTAzYjU3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.215793Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714513. 
Ctx: { TraceId: 01jrfqk8gb1xkpqsfqtf05xxsv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE5OTVmMTctNzIyZWZhOTUtNzdiNTZjMDYtYjYyMzU5MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.221592Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714514. Ctx: { TraceId: 01jrfqk8ge54qnw5c0pc5znawg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzY5NzAwNzMtYTg1MTAzYzUtNTgyOWVkYzEtMTgzZjE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:33:07.224550Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714515. Ctx: { TraceId: 01jrfqk8gef64k4m2s1482t9b6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk1OWViOGUtNDU0NDVmYTMtZTAxYjFmODEtOGYwZWI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.228801Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714516. Ctx: { TraceId: 01jrfqk8ge54qnw5c0pc5znawg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzY5NzAwNzMtYTg1MTAzYzUtNTgyOWVkYzEtMTgzZjE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.236127Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714517. Ctx: { TraceId: 01jrfqk8greyz1f6gsfwhy1k30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-10T11:33:07.245205Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714518. Ctx: { TraceId: 01jrfqk8h2cb45rtaj03gxrfm9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:07.251969Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714519. Ctx: { TraceId: 01jrfqk8h2cb45rtaj03gxrfm9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGU4ODNmYTAtYWRjMDcwNGEtZGRlNmM5MzUtNjEzYmEwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:33:07.282580Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714520. Ctx: { TraceId: 01jrfqk8greyz1f6gsfwhy1k30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDNkYjU3YjItNmViNmYxM2YtZmY5NDlkN2UtOWZlMjg3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-04-10T11:33:03.793523Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646100018908651:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:03.802870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201d/r3tmp/tmpqwt4sh/pdisk_1.dat 2025-04-10T11:33:04.169252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:04.246709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:04.246800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:04.249644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13660 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:04.483255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:04.510218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
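A large share of the noise above is KQP_EXECUTER ERROR lines that belong to queries which ultimately report "finished with status: SUCCESS". A small, hypothetical triage helper for pulling TxId/TraceId pairs out of such output, so repeated retries of one trace stand out (the regex mirrors the line shape quoted above; this is not a YDB tool):

import re
from collections import Counter

PAT = re.compile(r"KQP_EXECUTER ERROR: TxId: (\d+)\. Ctx: \{ TraceId: ([^,]+),")

def executions_per_trace(log_text):
    """Count KQP_EXECUTER occurrences per TraceId across the whole log."""
    return Counter(trace for _txid, trace in PAT.findall(log_text))

sample = ("2025-04-10T11:33:07.056178Z node 3 :KQP_EXECUTER ERROR: "
          "TxId: 281474976714486. Ctx: { TraceId: 01jrfqk8av4jc86ttm9dfjs61j, "
          "Database: , DatabaseId: /Root, ...}")
print(executions_per_trace(sample))  # Counter({'01jrfqk8av4jc86ttm9dfjs61j': 1})

On a full run the counter makes it easy to distinguish a TraceId executed twice (a benign retry, as with 01jrfqeqs0fk1fzj75b82h24pt above) from one executed many times.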
2025-04-10T11:33:07.097120Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646116643079831:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:07.097225Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201d/r3tmp/tmpoewHhP/pdisk_1.dat 2025-04-10T11:33:07.255975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:07.270254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:07.270348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:07.273249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2509 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:07.444011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:07.455435Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:07.472516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284787602 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache >> THealthCheckTest::OneIssueListing >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestExternalBootWhenLocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-04-10T11:32:40.631883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:40.632141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:40.632370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001746/r3tmp/tmpRr714J/pdisk_1.dat 2025-04-10T11:32:41.088263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:32:41.138327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:41.181244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:41.181793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:41.194299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:41.298354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:32:41.354615Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:32:41.355779Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:32:41.356276Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:32:41.356540Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:41.403218Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:32:41.404130Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:41.404263Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:32:41.407081Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:32:41.407188Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:32:41.407266Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:32:41.408540Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:32:41.408688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:32:41.408762Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:32:41.419609Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:32:41.459642Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:32:41.459837Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:32:41.459971Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:32:41.460014Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:32:41.460053Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:32:41.460093Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:41.460330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.461506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.463814Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:32:41.463995Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:32:41.464156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:41.464206Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:32:41.464272Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:32:41.464319Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:32:41.464361Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:32:41.464399Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:32:41.464454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:32:41.464623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.464689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.464743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:32:41.465221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:32:41.465293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:32:41.465434Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:32:41.465708Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:32:41.465778Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:32:41.465894Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:32:41.465976Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:32:41.466022Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:32:41.466067Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
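The microsecond timestamps in these traces make step latency easy to extract; for example, the ~0.45 s jump above between 11:32:40.632370Z and 11:32:41.088263Z covers test-cluster and pdisk setup. A hypothetical helper for flagging such gaps, assuming a single node with monotonic timestamps (cross-node traces would first need grouping by node id):

import re
from datetime import datetime

TS = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6})Z")

def slow_gaps(log_text, threshold_ms=100.0):
    """Report adjacent-timestamp gaps larger than threshold_ms."""
    ts = [datetime.strptime(m, "%Y-%m-%dT%H:%M:%S.%f")
          for m in TS.findall(log_text)]
    return [(a.time(), b.time(), (b - a).total_seconds() * 1e3)
            for a, b in zip(ts, ts[1:])
            if (b - a).total_seconds() * 1e3 > threshold_ms]

sample = "2025-04-10T11:32:40.632370Z ... 2025-04-10T11:32:41.088263Z ..."
for a, b, ms in slow_gaps(sample):
    print(f"{a} -> {b}: +{ms:.1f} ms")  # 11:32:40.632370 -> 11:32:41.088263: +455.9 ms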
2025-04-10T11:32:41.466121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:41.466520Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:32:41.466573Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:32:41.466612Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:32:41.466674Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:41.466749Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:32:41.466788Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:32:41.466822Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:32:41.466893Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:41.466930Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:32:41.468922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:32:41.468988Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:32:41.479846Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:32:41.479965Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:41.480008Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:41.480081Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:32:41.480149Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:32:41.638938Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.639021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.639067Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:32:41.639518Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:32:41.639559Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:32:41.639703Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:41.639764Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:32:41.639811Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:32:41.639854Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:32:41.652819Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:32:41.652921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:41.654275Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.654336Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.654425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:4 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:12.058982Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:934:2775], 1001} after executionsCount# 1 2025-04-10T11:33:12.059023Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:12.059077Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} finished in read 2025-04-10T11:33:12.059124Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:33:12.059147Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:33:12.059172Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:33:12.059219Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:33:12.059267Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-10T11:33:12.059288Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:33:12.059312Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-10T11:33:12.059344Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:33:12.059434Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:33:12.060015Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:939:2780], Recipient [7:720:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.060056Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.060112Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:938:2779], serverId# [7:939:2780], sessionId# [0:0:0] 2025-04-10T11:33:12.060228Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:937:2778], Recipient [7:720:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T11:33:12.061081Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:942:2783], Recipient [7:720:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.061123Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.061168Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:941:2782], serverId# [7:942:2783], sessionId# [0:0:0] 2025-04-10T11:33:12.061351Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:940:2781], Recipient [7:720:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T11:33:12.061453Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-10T11:33:12.061493Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:33:12.061523Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-10T11:33:12.061586Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-04-10T11:33:12.061650Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-10T11:33:12.061676Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-04-10T11:33:12.061703Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:12.061730Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-10T11:33:12.061771Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-04-10T11:33:12.061806Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-10T11:33:12.061870Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:12.061904Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-04-10T11:33:12.061928Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-04-10T11:33:12.062054Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:12.062276Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:940:2781], 1002} after executionsCount# 1 2025-04-10T11:33:12.062320Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:12.062374Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} finished in read 2025-04-10T11:33:12.062424Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-10T11:33:12.062451Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-04-10T11:33:12.062476Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T11:33:12.062503Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-04-10T11:33:12.062578Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-10T11:33:12.062607Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T11:33:12.062633Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-04-10T11:33:12.062665Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-10T11:33:12.062740Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-10T11:33:12.063444Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:945:2786], Recipient [7:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.063491Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.063531Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:944:2785], serverId# [7:945:2786], sessionId# [0:0:0] 2025-04-10T11:33:12.063612Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:943:2784], Recipient [7:714:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-10T11:33:12.064535Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:948:2789], Recipient [7:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.064578Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:12.064613Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:947:2788], serverId# [7:948:2789], sessionId# [0:0:0] 2025-04-10T11:33:12.064793Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:946:2787], Recipient [7:714:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-10T11:33:12.064909Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-10T11:33:12.064944Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:33:12.064999Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-10T11:33:12.065066Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-04-10T11:33:12.065144Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-10T11:33:12.065172Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-04-10T11:33:12.065196Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:12.065234Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-10T11:33:12.065282Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-04-10T11:33:12.065312Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-10T11:33:12.065338Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:12.065360Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-04-10T11:33:12.065384Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-04-10T11:33:12.065462Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-10T11:33:12.065661Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:946:2787], 1003} after executionsCount# 1 2025-04-10T11:33:12.065710Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:12.065762Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} finished in read 2025-04-10T11:33:12.065809Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-10T11:33:12.065852Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-04-10T11:33:12.065879Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-04-10T11:33:12.065918Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-04-10T11:33:12.065964Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-10T11:33:12.065988Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-04-10T11:33:12.066014Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-04-10T11:33:12.066040Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-10T11:33:12.066112Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> TSchemeShardLoginTest::UserLogin >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> THiveTest::TestExternalBootWhenLocked [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 
2025-04-10T11:32:34.982247Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:34.986072Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.986256Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:34.987114Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:68:2073] ControllerId# 72057594037932033 2025-04-10T11:32:34.987150Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:34.987272Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:34.987608Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:34.990219Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:34.990270Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:34.992407Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:74:2077] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.992636Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:75:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.992835Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:76:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.993062Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:77:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.993218Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:78:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.993357Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:79:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.993514Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:80:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.993543Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:34.993631Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:68:2073] 2025-04-10T11:32:34.993663Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:68:2073] 2025-04-10T11:32:34.993704Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:34.993742Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:34.994132Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:34.994409Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:34.997023Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.997226Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:32:34.997796Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:32:34.998915Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:32:34.998967Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:34.999882Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:91:2077] ControllerId# 72057594037932033 2025-04-10T11:32:34.999923Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:34.999993Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:35.000162Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:35.012455Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:35.012498Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:35.014233Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:99:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.014395Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:100:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.014563Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:101:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.014709Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:102:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.014853Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:103:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.015011Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:104:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.015141Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:105:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.015172Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:35.015243Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:91:2077] 2025-04-10T11:32:35.015272Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2077] 2025-04-10T11:32:35.015312Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:35.015360Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:35.016127Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:35.016215Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:35.019178Z node 2 :BS_NODE DEBUG: 
{NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:35.019312Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:35.020190Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:114:2074] ControllerId# 72057594037932033 2025-04-10T11:32:35.020221Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:35.020280Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:35.020459Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:35.022765Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:35.022813Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:35.024510Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:120:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.024679Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:121:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.024844Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:122:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.024987Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:123:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.025164Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:124:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.025328Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:125:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.025484Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:126:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:35.025511Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:35.025575Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:114:2074] 2025-04-10T11:32:35.025603Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:114:2074] 2025-04-10T11:32:35.025640Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:35.025675Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:35.026267Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:35.026468Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:35.026592Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:68:2073] 2025-04-10T11:32:35.026652Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:35.026687Z node 3 
:BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:35.026831Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:35.027087Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:114:2074] 2025-04-10T11:32:35.027141Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:35.027165Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:35.034790Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:35.034857Z node 3 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:35.040555Z node 3 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:35.040847Z node 3 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:35.052465Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:91:2077] 2025-04-10T11:32:35. ... st# true Marker# BPP21 2025-04-10T11:33:14.508201Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.089 sample PartId# [72057594037927937:2:8:0:0:200:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 3.788 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-04-10T11:33:14.508431Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:200:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:33:14.508633Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-10T11:33:14.508890Z node 59 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [59:95:2093], reason = ReasonStop Marker# TSYS29 2025-04-10T11:33:14.508975Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-10T11:33:14.509284Z node 59 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-10T11:33:14.509342Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-10T11:33:14.509540Z node 59 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-04-10T11:33:14.510522Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [59:440:2351] 2025-04-10T11:33:14.510599Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [59:440:2351] 2025-04-10T11:33:14.510806Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [59:96:2093] 2025-04-10T11:33:14.510869Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [59:96:2093] 2025-04-10T11:33:14.510947Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [59:95:2093] EventType# 268960257 2025-04-10T11:33:14.511197Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-10T11:33:14.511303Z node 59 :TABLET_EXECUTOR 
DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:14.511478Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:33:14.511582Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:14.511849Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-10T11:33:14.511940Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:14.512052Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:33:14.512147Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:14.512709Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [59:454:2358] 2025-04-10T11:33:14.512772Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [59:454:2358] 2025-04-10T11:33:14.512910Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:14.513004Z node 59 :TABLET_RESOLVER DEBUG: SelectForward node 59 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [59:374:2299] 2025-04-10T11:33:14.513162Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [59:454:2358] 2025-04-10T11:33:14.513255Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [59:454:2358] 2025-04-10T11:33:14.513412Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [59:454:2358] 2025-04-10T11:33:14.513495Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [59:454:2358] 2025-04-10T11:33:14.513600Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-10T11:33:14.513861Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:33:14.514022Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:33:14.514095Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:33:14.514131Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:33:14.514240Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:14.514369Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] 
CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:14.514485Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:14.514658Z node 59 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-10T11:33:14.515122Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [60:456:2093] 2025-04-10T11:33:14.515191Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [60:456:2093] 2025-04-10T11:33:14.515293Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:14.515387Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [59:323:2263] 2025-04-10T11:33:14.515476Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [60:456:2093] 2025-04-10T11:33:14.515559Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [60:456:2093] 2025-04-10T11:33:14.515629Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 59 [60:456:2093] 2025-04-10T11:33:14.515800Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [60:456:2093] 2025-04-10T11:33:14.515889Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-04-10T11:33:14.516199Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [60:456:2093] 2025-04-10T11:33:14.516562Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [60:456:2093] 2025-04-10T11:33:14.516634Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [60:456:2093] 2025-04-10T11:33:14.516693Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [60:456:2093] 2025-04-10T11:33:14.516806Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-04-10T11:33:14.516881Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [60:456:2093] 2025-04-10T11:33:14.516942Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [60:456:2093] 2025-04-10T11:33:14.517305Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [60:443:2088] EventType# 268697624 2025-04-10T11:33:14.517554Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-10T11:33:14.517650Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:14.517906Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-10T11:33:14.518010Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:14.529581Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] bootstrap ActorId# [59:459:2361] 
Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:33:14.529807Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:33:14.529903Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:33:14.530011Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-04-10T11:33:14.530094Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-04-10T11:33:14.530342Z node 59 :BS_PROXY DEBUG: Send to queueActorId# [59:59:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:33:14.535079Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:33:14.535271Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:33:14.535370Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:33:14.535598Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.047 sample PartId# [72057594037927937:2:9:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 5.818 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-04-10T11:33:14.535839Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:33:14.536008Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-04-10T11:32:40.555831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:40.556077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:40.556265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175a/r3tmp/tmp9Y9K22/pdisk_1.dat 2025-04-10T11:32:41.090456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:32:41.151980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:41.192208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:41.192353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:41.204045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:41.298350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:32:41.353420Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:32:41.354944Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:32:41.355638Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:32:41.355939Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:41.409069Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:32:41.409923Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:41.410045Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:32:41.412169Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:32:41.412288Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:32:41.412386Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:32:41.412810Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:32:41.412976Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:32:41.413128Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:32:41.424107Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:32:41.457628Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:32:41.459469Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:32:41.459674Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:32:41.459725Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:32:41.459762Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:32:41.459805Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:41.460054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.461024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.462489Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:32:41.462641Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:32:41.462788Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:41.462843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:32:41.462903Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:32:41.462949Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:32:41.462987Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:32:41.463076Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:32:41.463149Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:32:41.463323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.463380Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.463433Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:32:41.465010Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:32:41.465088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:32:41.465219Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:32:41.465621Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:32:41.465710Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:32:41.465824Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:32:41.465961Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:32:41.466011Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:32:41.466064Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
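The TRACE lines around this point record the datashard execution-unit pipeline: each operation is tried on a unit, reports a status (Executed, ExecutedNoMoreRestarts, DelayComplete, DelayCompleteNoMoreRestarts, or "not ready to execute"), and is then advanced and queued on the next unit in its chain (here CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan -> PlanQueue). A minimal self-contained C++ sketch of that loop follows; every name in it (TOperation, EExecutionStatus, Execute, RunExecutionPlan, Units) is a hypothetical illustration inferred from this log, not YDB's actual code.

    // Illustrative sketch only: names are invented to mirror the TRACE output.
    #include <cstdio>
    #include <string>
    #include <vector>

    enum class EExecutionStatus {
        Executed,                     // unit finished, advance immediately
        DelayComplete,                // completion deferred to the tx Complete phase
        DelayCompleteNoMoreRestarts,  // as above, and the unit must not restart
        Continue                      // not ready (e.g. WaitForPlan): park the op
    };

    struct TOperation {
        std::string TxId;   // e.g. "0:281474976715657"
        size_t Unit = 0;    // index into the unit chain
    };

    // The unit chain a schema-tx proposal walks in the log above.
    static const std::vector<std::string> Units = {
        "CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"};

    static EExecutionStatus Execute(const TOperation& op) {
        // Toy rule: WaitForPlan is not ready until an external plan step arrives.
        return Units[op.Unit] == "WaitForPlan" ? EExecutionStatus::Continue
                                               : EExecutionStatus::Executed;
    }

    // Drive one operation through the chain, printing trace-like lines.
    void RunExecutionPlan(TOperation op, unsigned long long tablet) {
        while (op.Unit < Units.size()) {
            std::printf("Trying to execute [%s] at %llu on unit %s\n",
                        op.TxId.c_str(), tablet, Units[op.Unit].c_str());
            if (Execute(op) == EExecutionStatus::Continue) {
                std::printf("Operation [%s] at %llu is not ready to execute on unit %s\n",
                            op.TxId.c_str(), tablet, Units[op.Unit].c_str());
                return;  // parked; resumed later by an event such as TEvPlanStep
            }
            std::printf("Advance execution plan for [%s] executing on unit %s\n",
                        op.TxId.c_str(), Units[op.Unit].c_str());
            ++op.Unit;
            if (op.Unit < Units.size()) {
                std::printf("Add [%s] to execution unit %s\n",
                            op.TxId.c_str(), Units[op.Unit].c_str());
            }
        }
    }

    int main() {
        RunExecutionPlan(TOperation{"0:281474976715657", 0}, 72075186224037888ULL);
    }

In the log, WaitForPlan is exactly the "not ready" case: the proposed transaction parks there until the TEvPlanStep event (the "TEvPlanStep step# 1000" entry below) re-activates it, after which the same unit reports Executed and the operation moves on to PlanQueue.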
2025-04-10T11:32:41.466121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:41.466546Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:32:41.466602Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:32:41.466643Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:32:41.466689Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:41.466759Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:32:41.466816Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:32:41.466857Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:32:41.466900Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:41.466931Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:32:41.468819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:32:41.468890Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:32:41.479939Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:32:41.480030Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:41.480093Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:41.480169Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:32:41.480263Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:32:41.639243Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.639307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:41.639349Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:32:41.639748Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:32:41.639782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:32:41.639886Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:41.639922Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:32:41.639958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:32:41.639984Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:32:41.645149Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:32:41.645250Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:41.646525Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.646583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:41.646645Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:4 ... p# 1505 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:33:14.472171Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:33:14.472198Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-04-10T11:33:14.472240Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1505 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:33:14.472340Z node 7 :TX_DATASHARD DEBUG: Complete volatile write [1505 : 281474976715660] from 72075186224037889 at tablet 72075186224037889 send result to client [7:916:2664] 2025-04-10T11:33:14.472819Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:14.473048Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:33:14.473634Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:14.474076Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:14.475342Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:33:14.475479Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:696:2585], Recipient [7:700:2587]: {TEvReadSet step# 1505 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-10T11:33:14.475520Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:33:14.475591Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-04-10T11:33:14.475766Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:33:14.476104Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:700:2587], Recipient [7:696:2585]: {TEvReadSet step# 1505 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-10T11:33:14.476139Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:33:14.476170Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 ... validating table 2025-04-10T11:33:14.830645Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqkfky2241zb816c04mzf5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDE3NTYzZGYtZTFlZGIzYjUtZjhhZDY2MDItNmZjZjkyZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:14.839639Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:969:2782], Recipient [7:696:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1505 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:33:14.839868Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:33:14.839984Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-10T11:33:14.840123Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:33:14.840188Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:33:14.840240Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:14.840279Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:33:14.840337Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-10T11:33:14.840387Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:33:14.840416Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:14.840448Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:33:14.840478Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:33:14.840607Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1505 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:33:14.840901Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1505/18446744073709551615 2025-04-10T11:33:14.840967Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:969:2782], 0} after executionsCount# 1 2025-04-10T11:33:14.841037Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:969:2782], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:14.841135Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:969:2782], 0} finished in read 2025-04-10T11:33:14.841211Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:33:14.841236Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:33:14.841272Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:33:14.841302Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:33:14.841349Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:33:14.841371Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:33:14.841402Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-10T11:33:14.841445Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:33:14.841582Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:33:14.842709Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:969:2782], Recipient [7:696:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:33:14.842804Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:33:14.843245Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:969:2782], Recipient [7:700:2587]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1505 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-10T11:33:14.843404Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-10T11:33:14.843462Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-04-10T11:33:14.843530Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:14.843559Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-04-10T11:33:14.843596Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:14.843621Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:33:14.843667Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-04-10T11:33:14.843696Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:14.843729Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:14.843757Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-04-10T11:33:14.843779Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-04-10T11:33:14.843861Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1505 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-10T11:33:14.844062Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1505/18446744073709551615 2025-04-10T11:33:14.844097Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:969:2782], 1} after executionsCount# 1 2025-04-10T11:33:14.844131Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:969:2782], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:33:14.844188Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:969:2782], 1} finished in read 2025-04-10T11:33:14.844233Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:14.844257Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:33:14.844280Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:33:14.844315Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:33:14.844370Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-10T11:33:14.844394Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:33:14.844419Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-10T11:33:14.844443Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:33:14.844515Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:33:14.845263Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:969:2782], Recipient [7:700:2587]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-10T11:33:14.845313Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:33:14.853250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:33:14.853374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:14.853416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:33:14.853452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:33:14.853507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:33:14.853544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:33:14.853605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:14.853691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:33:14.854043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:14.940497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:33:14.940575Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:14.957234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:14.957538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:33:14.957752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:33:14.969086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:33:14.969375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:33:14.970053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:14.970492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:33:14.972607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:14.974327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:14.974414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:14.974600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:33:14.974671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:14.974728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:33:14.974885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:33:14.982357Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] 
recipient: [1:15:2062] 2025-04-10T11:33:15.131873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:33:15.132157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.132409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:33:15.132692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:33:15.132755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.135467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:15.135660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:33:15.135897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.135958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:33:15.135999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:33:15.136055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:33:15.138391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.138457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:33:15.138496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:33:15.140611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.140666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.140719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:15.140783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:33:15.144426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:15.146610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
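An aside on reading the trace above, before the coordinator step that follows: the schemeshard suboperation for txId 1 walks a numbered state machine — it has already advanced 2 -> 3 (after TCreateParts ProgressState) and 3 -> 128 (after NSubDomainState::TConfigureParts); once the coordinator plans the transaction at a step, TPropose moves it 128 -> 240 (Done), as the lines below show. The sketch that follows is a minimal, hypothetical C++ helper for pulling those transitions out of a captured log — not a YDB tool, and it assumes nothing beyond the "Change state for txid ..." line format visible in this trace.

// Minimal sketch: extract "Change state for txid X:Y A -> B" transitions
// from a captured schemeshard TRACE log on stdin. Assumes only the line
// format shown in this test output; not part of YDB.
#include <iostream>
#include <regex>
#include <string>

int main() {
    // Matches lines like: "Change state for txid 1:0 2 -> 3"
    const std::regex re(R"(Change state for txid (\d+):(\d+) (\d+) -> (\d+))");
    std::string line;
    while (std::getline(std::cin, line)) {
        std::smatch m;
        if (std::regex_search(line, m, re)) {
            std::cout << "tx " << m[1] << " part " << m[2]
                      << ": state " << m[3] << " -> " << m[4] << '\n';
        }
    }
    return 0;
}

Piped the surrounding trace, it would print "tx 1 part 0: state 2 -> 3", then "3 -> 128", then "128 -> 240" — the AlterSubDomain pipeline this test run exercises.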
2025-04-10T11:33:15.146813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:33:15.147875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:15.148025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:15.148072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:15.148352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:33:15.148403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:15.148601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:15.148699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:15.151188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:15.151244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:15.151495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:15.151550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:15.151831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:15.151881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:15.151972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:15.152020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:15.152062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:15.152100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:15.152140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:33:15.152183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:15.152230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:33:15.152260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:33:15.152336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:33:15.152378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:33:15.152408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:33:15.154857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:15.154977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:15.155091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:17.504374Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:17.504473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:17.506884Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:17.506940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:17.507148Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:17.507199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:17.507580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:17.507637Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:17.507762Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:17.507803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:17.507846Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:17.507885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:17.507933Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:33:17.507980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:17.508020Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:33:17.508059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:33:17.508135Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:33:17.508177Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:33:17.508213Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:33:17.508645Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:17.508715Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:17.508751Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:33:17.508780Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:33:17.508810Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:17.508875Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:33:17.512049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:33:17.512571Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:33:17.513055Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Bootstrap 2025-04-10T11:33:17.531806Z node 4 :TX_PROXY DEBUG: actor# [4:270:2261] Become StateWork (SchemeCache [4:275:2266]) 2025-04-10T11:33:17.533930Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:33:17.537834Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-10T11:33:17.538000Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:33:17.538046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:33:17.538093Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:33:17.538128Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:33:17.538219Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:33:17.538284Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-10T11:33:17.538326Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:33:17.538361Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:33:17.538398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-10T11:33:17.538430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-10T11:33:17.539233Z 
node 4 :TX_PROXY DEBUG: actor# [4:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:17.544656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:17.544772Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-10T11:33:17.545154Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:17.545198Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:17.545354Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:17.545390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:33:17.546016Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:17.546179Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:33:17.546288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:33:17.546331Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:33:17.546374Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-10T11:33:17.546414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:17.546530Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-10T11:33:17.548393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-10T11:33:17.548864Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-10T11:33:17.551332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:33:17.551384Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-10T11:33:17.639991Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTk3LCJpYXQiOjE3NDQyODQ3OTcsInN1YiI6InVzZXIxIn0.iJivWnWMILvxx7FTgsOG3PT_ZvDaEZU3IPPkyjuVUOwlbuwTkERIAFny47GHPA8DI-nCWQ5j4k0XBTLxDvgZbhbXw9hAP76RQRHm7vOrvQ9ZCphNAscjrOn3yo9pNOucJDiz-b34dEg1oygn9MdYf2_0MO81NIGMDfGgKUQ32hjCaH-JGgniSgXv6Fdd4fgkyuuFNfmnuBExhKS_jYOf5UysQXEr4pmcARayIi-0t9GQV-KtzuH1q3Nuzuwx-O-1QAlBb-0eidoee6KMXWP6T9mCRbl96tXjmYi7iNwhxquOhOG87nr0VTZqdRQuOG5a0qMtjNo5MB95Q-nbh69h-g" SanitizedToken: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTk3LCJpYXQiOjE3NDQyODQ3OTcsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-10T11:33:17.640833Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:17.640896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:17.641124Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:17.641172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T11:33:17.641946Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2025-04-10T11:33:17.496033Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-10T11:33:17.537710Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-10T11:33:17.640236Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTk3LCJpYXQiOjE3NDQyODQ3OTcsInN1YiI6InVzZXIxIn0.**, login_user_level=admin AUDIT LOG checked line: 2025-04-10T11:33:17.640236Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ0MzI3OTk3LCJpYXQiOjE3NDQyODQ3OTcsInN1YiI6InVzZXIxIn0.**, login_user_level=admin >> TFlatTest::SelectRangeReverseItemsLimit >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> THealthCheckTest::TestTabletIsDead [GOOD] >> ExternalBlobsMultipleChannels::Simple >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> TFlatTest::ReadOnlyMode >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::NoStoragePools >> THealthCheckTest::ShardsLimit800 [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2025-04-10T11:32:47.017454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:47.018371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:47.018455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:47.020530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:47.020863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:47.021230Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmp6HipCk/pdisk_1.dat 2025-04-10T11:32:47.592999Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16004, node 1 TClient is connected to server localhost:18401 2025-04-10T11:32:48.349894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:48.349991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:48.350040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:48.350502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:56.308686Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:56.309079Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:56.309387Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:56.311046Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:56.311343Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:56.311567Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmplCpHoA/pdisk_1.dat 2025-04-10T11:32:56.707971Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19861, node 3 TClient is connected to server localhost:28793 2025-04-10T11:32:57.109735Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:57.109809Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:57.109869Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:57.110204Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmplCpHoA/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk 
{ pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmplCpHoA/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmplCpHoA/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-10T11:33:04.392548Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.393213Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.393408Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:04.393787Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.394029Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.394116Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmp8ONyXf/pdisk_1.dat 2025-04-10T11:33:04.732307Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25225, node 5 TClient is connected to server localhost:8160 2025-04-10T11:33:05.158904Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:05.158978Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:05.159017Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:05.159568Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmp8ONyXf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmp8ONyXf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmp8ONyXf/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" 
status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-10T11:33:13.920647Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:773:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:13.921003Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:13.921283Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:13.922725Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:770:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:13.923049Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:13.923250Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011ba/r3tmp/tmpvIsX8p/pdisk_1.dat 2025-04-10T11:33:14.304761Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11826, node 7 TClient is connected to server localhost:63416 2025-04-10T11:33:18.508339Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:18.508421Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:18.508470Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:18.509403Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:18.526009Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:18.526235Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:18.571810Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-10T11:33:18.572547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:18.802636Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-10T11:33:18.803417Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-7" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } 
database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 7 host: "::1" port: 12001 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2025-04-10T11:32:51.901260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646050396829217:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:51.901327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac4/r3tmp/tmpoBn6ZM/pdisk_1.dat 2025-04-10T11:32:52.269932Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:52.276082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:52.277066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:52.284521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23337, node 1 2025-04-10T11:32:52.368550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:52.368610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:52.368621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:52.368774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:52.695912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
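An aside on the self_check_result blocks in these health check tests: the issues are printed flat, but they form a tree — each issue_log entry's reason fields name its child issue ids, and level encodes depth (1 = DATABASE, 2 = COMPUTE/STORAGE, down to 6 = PDISK), which is how "Database has multiple issues" chains to LOAD_AVERAGE and PDISK leaves. Below is a minimal C++ sketch of rebuilding that hierarchy; the ids are truncated for brevity and the structure is assumed only from the fields visible in these logs, not from any YDB header.

// Sketch (assumption-laden, not a YDB API): rebuild the issue tree from
// (id, status, message, child-reason-ids) tuples as logged above.
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Issue {
    std::string status;                // GOOD / YELLOW / RED ...
    std::string message;
    std::vector<std::string> reasons;  // ids of child issues
};

void Print(const std::map<std::string, Issue>& all,
           const std::string& id, int depth) {
    const Issue& i = all.at(id);
    std::cout << std::string(depth * 2, ' ')
              << i.status << " " << id << ": " << i.message << '\n';
    for (const auto& child : i.reasons) Print(all, child, depth + 1);
}

int main() {
    // Hard-coded fragment mirroring the DEGRADED result logged above
    // (ids shortened from e.g. "YELLOW-70fb-1231c6b1").
    std::map<std::string, Issue> all = {
        {"YELLOW-70fb", {"YELLOW", "Database has multiple issues",
                         {"YELLOW-1ba8", "YELLOW-5321"}}},
        {"YELLOW-1ba8", {"YELLOW", "Compute is overloaded", {}}},
        {"YELLOW-5321", {"YELLOW", "Storage degraded", {}}},
    };
    Print(all, "YELLOW-70fb", 0);
    return 0;
}

Run as-is it prints the top of the DEGRADED result seen earlier: the YELLOW database issue with its compute and storage children indented beneath it.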
2025-04-10T11:32:54.976175Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646061215504936:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:54.976383Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac4/r3tmp/tmpgB9B4T/pdisk_1.dat 2025-04-10T11:32:55.087377Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15225, node 2 2025-04-10T11:32:55.137601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:55.137689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:55.144785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:55.169866Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:55.169893Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:55.169899Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:55.170004Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:32:55.298477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:55.302967Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:33:04.725150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.725654Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.726021Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:04.728375Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.728752Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.729013Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac4/r3tmp/tmpd0Hq0R/pdisk_1.dat 2025-04-10T11:33:05.075262Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20018, node 3 TClient is connected to server localhost:22794 2025-04-10T11:33:05.477381Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:05.477433Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:05.477457Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:05.477675Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:12.790718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:12.791245Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:12.791439Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:12.791789Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:12.792021Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:12.792091Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac4/r3tmp/tmpxfK6N5/pdisk_1.dat 2025-04-10T11:33:13.116430Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27373, node 5 TClient is connected to server localhost:28648 2025-04-10T11:33:13.600510Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:13.600580Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:13.600623Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:13.601286Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-10T11:33:18.957523Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:325:2215], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:18.957862Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:18.958007Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ac4/r3tmp/tmpXZYLZO/pdisk_1.dat 2025-04-10T11:33:19.398231Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23197, node 7 TClient is connected to server localhost:24485 2025-04-10T11:33:19.870773Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:19.870837Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:19.870873Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:19.871312Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2025-04-10T11:32:53.133156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:53.133815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:53.133896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:53.135721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:53.136019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:53.136362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001190/r3tmp/tmp7JGgR6/pdisk_1.dat 2025-04-10T11:32:53.549381Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1623, node 1 TClient is connected to server localhost:12151 2025-04-10T11:32:53.995855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:53.995925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:53.995971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:53.996247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:58.771188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:58.771491Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:58.771693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001190/r3tmp/tmpW9lGlC/pdisk_1.dat 2025-04-10T11:32:59.159859Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62215, node 3 TClient is connected to server localhost:20937 2025-04-10T11:32:59.585385Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:59.585447Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:59.585491Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:59.586047Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:03.928649Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:03.929172Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:03.929241Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001190/r3tmp/tmpWkz4qm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25758, node 5 TClient is connected to server localhost:9612 2025-04-10T11:33:09.477716Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:09.478209Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:09.478361Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001190/r3tmp/tmpdw7Hyf/pdisk_1.dat 2025-04-10T11:33:09.846241Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28790, node 6 TClient is connected to server localhost:21594 2025-04-10T11:33:10.366583Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:10.366666Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:10.366715Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:10.367756Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:19.353392Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:19.353668Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:19.353904Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:19.354956Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:19.355161Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:19.355231Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001190/r3tmp/tmpSM3CLL/pdisk_1.dat 2025-04-10T11:33:19.752834Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11253, node 8 TClient is connected to server localhost:27009 2025-04-10T11:33:20.253352Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:20.253427Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:20.253472Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:20.253901Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-04-10T11:32:45.409824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:45.410183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:45.410429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001734/r3tmp/tmp45m9C0/pdisk_1.dat 2025-04-10T11:32:45.803734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:32:45.860066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:45.901882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:45.902087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:45.914471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:32:46.005368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:32:46.054063Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:32:46.055471Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:32:46.056056Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:32:46.056407Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:46.110806Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:32:46.111639Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:46.111761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:32:46.113614Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:32:46.113724Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:32:46.113796Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:32:46.114225Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:32:46.114378Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:32:46.114463Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:32:46.126831Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:32:46.161681Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:32:46.161922Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:32:46.162057Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:32:46.162097Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:32:46.162130Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:32:46.162197Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:46.162447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:46.162502Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:46.162888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:32:46.163010Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:32:46.163098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:46.163143Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:32:46.163190Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:32:46.163225Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:32:46.163269Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:32:46.163315Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:32:46.163362Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:32:46.163491Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:46.163525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:46.163564Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:32:46.164014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:32:46.164056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:32:46.164181Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:32:46.164449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:32:46.164500Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:32:46.164591Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:32:46.164640Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:32:46.164676Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:32:46.164710Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
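
The TX_DATASHARD trace above walks a scheme transaction through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan): each unit returns a status such as Executed, DelayComplete, or Restart, units that return DelayComplete are finished later in TTxProgressTransaction::Complete ("Complete execution for ... on unit ..."), and Restart is the page-fault path the PreparedDistributedWritePageFault test exercises, where the operation releases its data and re-runs the unit. A minimal C++ sketch of that two-phase pattern follows; the names (EStatus, TUnit, RunPipeline) are hypothetical stand-ins, not YDB's real TExecutionUnit interface.

    #include <cstdio>
    #include <functional>
    #include <vector>

    // Statuses mirroring the ones visible in the trace; Restart (not modeled
    // further here) is returned when a unit hits a page fault and must re-run.
    enum class EStatus { Executed, DelayComplete, Restart };

    struct TUnit {
        const char* Name;
        std::function<EStatus()> Execute;  // per-unit work
    };

    // Advance one operation through the units in order. Units that return
    // DelayComplete are remembered and finished only after the whole plan has
    // run, matching the later "Complete execution for ... on unit ..." lines.
    void RunPipeline(const std::vector<TUnit>& units) {
        std::vector<const TUnit*> delayed;
        for (const auto& unit : units) {
            std::printf("Trying to execute on unit %s\n", unit.Name);
            if (unit.Execute() == EStatus::DelayComplete) {
                delayed.push_back(&unit);
            }
            std::printf("Advance execution plan past unit %s\n", unit.Name);
        }
        for (const TUnit* unit : delayed) {
            std::printf("Complete execution on unit %s\n", unit->Name);
        }
    }

    int main() {
        RunPipeline({
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
        });
        return 0;
    }

The split between the execute phase and the deferred complete phase is why the propose latency in the trace is reported only after FinishPropose's delayed completion runs.
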
2025-04-10T11:32:46.164761Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:46.165148Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:32:46.165189Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:32:46.165223Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:32:46.165257Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:46.165329Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:32:46.165374Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:32:46.165410Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:32:46.165442Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:46.165466Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:32:46.166986Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:32:46.167051Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:32:46.178939Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:32:46.179038Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:32:46.179074Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:32:46.179124Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:32:46.179193Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:32:46.333679Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:46.333748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:32:46.333792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:32:46.334361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:32:46.334407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:32:46.334515Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:32:46.334554Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:32:46.334586Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:32:46.334616Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:32:46.338295Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:32:46.338381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:32:46.339590Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:46.339640Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:32:46.339705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:32:4 ... 75186224037888 is Executed 2025-04-10T11:33:21.163405Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:33:21.163450Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:33:21.163680Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:33:21.163734Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:21.164135Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:810:2667], Recipient [7:810:2667]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:21.164177Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:21.164242Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:21.164288Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:33:21.164332Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:33:21.164383Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-04-10T11:33:21.164431Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-04-10T11:33:21.164480Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.164526Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-04-10T11:33:21.164575Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-04-10T11:33:21.164615Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-04-10T11:33:21.164991Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-10T11:33:21.165111Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-10T11:33:21.165178Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-10T11:33:21.165260Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-04-10T11:33:21.165306Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165332Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-10T11:33:21.165357Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:33:21.165382Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:33:21.165448Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-04-10T11:33:21.165495Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-04-10T11:33:21.165542Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-04-10T11:33:21.165589Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165612Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:33:21.165635Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-10T11:33:21.165657Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-04-10T11:33:21.165697Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165718Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-10T11:33:21.165740Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-10T11:33:21.165763Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-10T11:33:21.165790Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165809Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-10T11:33:21.165830Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-10T11:33:21.165851Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-10T11:33:21.165877Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165899Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-10T11:33:21.165919Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-10T11:33:21.165938Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-10T11:33:21.165963Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.165982Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-10T11:33:21.166003Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:33:21.166023Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:33:21.166064Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-10T11:33:21.166473Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-04-10T11:33:21.166561Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-04-10T11:33:21.166610Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-04-10T11:33:21.166639Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:33:21.166685Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:33:21.166740Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:21.166790Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:21.167311Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:21.167372Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:33:21.167420Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-10T11:33:21.167746Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-10T11:33:21.167824Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-10T11:33:21.167901Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-10T11:33:21.167991Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-04-10T11:33:21.168164Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-04-10T11:33:21.168233Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-04-10T11:33:21.168348Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:33:21.168430Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:21.168478Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:33:21.168526Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-04-10T11:33:21.168568Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-10T11:33:21.168801Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-04-10T11:33:21.168849Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-04-10T11:33:21.168894Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:33:21.168935Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:33:21.168976Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-10T11:33:21.169005Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:33:21.169066Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-04-10T11:33:21.169110Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:21.169148Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:33:21.169191Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:21.169239Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:21.169691Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-10T11:33:21.170754Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:21.170812Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-10T11:33:21.170886Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:803:2661] 2025-04-10T11:33:21.170948Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |95.6%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> ExternalBlobsMultipleChannels::SingleChannel >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::SpecificServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:32:24.775228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:24.775344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:24.775390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:24.775439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:24.775492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:24.775525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:24.775613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:24.775702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:24.776108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:24.877621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" 
AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:32:24.877721Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:32:24.902341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:24.902701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:24.902897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:24.912691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:24.913116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:24.913883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:24.918654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:24.922916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:24.925284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:24.925362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:24.925545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:24.925594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:24.925639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:24.925827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:32:24.943404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:32:25.070504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:25.070802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.071096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:25.071399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
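
In this test's schemeshard trace, sub-operation 1:0 advances through numbered states: 2 -> 3 when TCreateParts finishes, 3 -> 128 after NSubDomainState::TConfigureParts, and 128 -> 240 once TPropose handles the coordinator's TEvOperationPlan, with TDone closing the operation out. The sketch below simply replays those transitions; the numeric values and stage names are read off this trace, and the enum is an illustrative assumption rather than YDB's actual TTxState definition.

    #include <cstdio>
    #include <map>

    // Values taken from the "Change state for txid 1:0 ..." records above;
    // the name-to-value mapping is inferred from this trace only.
    enum class EState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

    static const char* Name(EState s) {
        switch (s) {
            case EState::CreateParts:    return "CreateParts";
            case EState::ConfigureParts: return "ConfigureParts";
            case EState::Propose:        return "Propose";
            case EState::Done:           return "Done";
        }
        return "?";
    }

    int main() {
        // Replay the progression logged for opId 1:0.
        const std::map<EState, EState> next = {
            {EState::CreateParts,    EState::ConfigureParts},
            {EState::ConfigureParts, EState::Propose},
            {EState::Propose,        EState::Done},
        };
        for (EState s = EState::CreateParts; s != EState::Done; s = next.at(s)) {
            std::printf("Change state for txid 1:0 %d -> %d  (%s -> %s)\n",
                        static_cast<int>(s), static_cast<int>(next.at(s)),
                        Name(s), Name(next.at(s)));
        }
        return 0;
    }

Run as-is this prints the same 2 -> 3, 3 -> 128, 128 -> 240 sequence that appears in the records that follow.
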
2025-04-10T11:32:25.071473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.074480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:25.074666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:25.074918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.074976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:25.075031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:25.075082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:25.080516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.080610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:25.080652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:25.084508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.084596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.084645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:25.084725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:25.096212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:25.101518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:25.101822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:25.103068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:25.103277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-10T11:32:25.103344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:25.103711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:25.103795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:25.104023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:25.104125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:25.107582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:25.107654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:25.107879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:25.107938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:25.108228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:25.108306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:25.108430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:25.108469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:25.108515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:25.108571Z no ... 
tusSuccess 2025-04-10T11:33:23.556405Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 
67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:23.564588Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:33:23.564905Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 348us result status StatusSuccess 2025-04-10T11:33:23.565794Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 
1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: 2025-04-10T11:32:51.703784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:51.704443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:51.704525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:51.706400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:51.706715Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:51.707080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119c/r3tmp/tmp83D2SN/pdisk_1.dat 2025-04-10T11:32:52.145544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20158, node 1 TClient is connected to server localhost:3547 2025-04-10T11:32:52.583579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:52.583654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:52.583694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:52.583975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:00.387093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:00.387464Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:00.387883Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:00.389536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:00.389828Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:00.390058Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119c/r3tmp/tmpeXIhc2/pdisk_1.dat 2025-04-10T11:33:00.757155Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27140, node 3 TClient is connected to server localhost:13797 2025-04-10T11:33:01.228895Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:01.228968Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:01.229039Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:01.229316Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-10T11:33:09.224042Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:09.224621Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:09.224796Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:09.225153Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:09.225338Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:09.225402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119c/r3tmp/tmpPFL2fZ/pdisk_1.dat 2025-04-10T11:33:09.580055Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27291, node 5 TClient is connected to server localhost:63118 2025-04-10T11:33:10.061262Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:10.061337Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:10.062053Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:10.062743Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-10T11:33:15.632067Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:15.632538Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:15.632714Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119c/r3tmp/tmpEuKFGx/pdisk_1.dat 2025-04-10T11:33:15.984854Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12074, node 7 TClient is connected to server localhost:23678 2025-04-10T11:33:16.483995Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:16.484061Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:16.484106Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:16.484810Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:21.847808Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:21.848251Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:21.848433Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00119c/r3tmp/tmps2amnw/pdisk_1.dat 2025-04-10T11:33:22.310812Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24668, node 9 TClient is connected to server localhost:11502 2025-04-10T11:33:22.905340Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:22.905435Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:22.905500Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:22.908008Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TFlatTest::Init ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 11.009461 seconds |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::NoBscResponse >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] Test command err: 2025-04-10T11:32:34.880779Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:34.884242Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.884481Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:32:34.885118Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:32:34.886369Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:32:34.886430Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:34.887339Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} 
EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-10T11:32:34.887378Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:34.887505Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:34.887821Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:34.887978Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-10T11:32:34.888011Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-10T11:32:34.888059Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:34.888120Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:34.888298Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:34.888452Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:34.909953Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:34.910030Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:34.912227Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.912456Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.912647Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.912824Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.912982Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.913132Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.913300Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.913326Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:34.913511Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:34.913567Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.913601Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:34.915008Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-10T11:32:34.915065Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-10T11:32:34.929628Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:34.929727Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.929789Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 
2025-04-10T11:32:34.936464Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:34.937118Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:34.937316Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:34.938876Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:34.939101Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:32:34.939380Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.939702Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.939748Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-10T11:32:34.939777Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-10T11:32:34.939831Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-10T11:32:34.939865Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-10T11:32:34.939895Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:34.939998Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-10T11:32:34.940017Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-10T11:32:34.940071Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-10T11:32:34.940115Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:34.940174Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:34.940217Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:51:2091] 2025-04-10T11:32:34.942686Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-10T11:32:34.942739Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-10T11:32:34.942823Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:34.942955Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 
2025-04-10T11:32:34.943023Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:32:34.943057Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-10T11:32:34.943145Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:32:34.943182Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-10T11:32:34.943216Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-10T11:32:34.945306Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:34.946916Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:34.947100Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:34.948523Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:34.948616Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:32:34.948648Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-10T11:32:34.948763Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:34.948867Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-10T11:32:34.948906Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-10T11:32:34.949081Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-10T11:32:34.949120Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-10T11:32:34.949191Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-10T11:32:34.949335Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-10T11:32:34.949367Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:34.949480Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-10T11:32:34.949610Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-10T11:32:34.949656Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-10T11:32:34.950037Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 
Tx{539, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{361, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-10T11:33:25.604850Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{539, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:25.619872Z node 24 :BS_PROXY_PUT INFO: [e7f5c6a1b70e5a8f] bootstrap ActorId# [24:3849:5134] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:182:0:0:248:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:33:25.620038Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] Id# [72057594037927937:2:182:0:0:248:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:33:25.620086Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] restore Id# [72057594037927937:2:182:0:0:248:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:33:25.620148Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:182:0:0:248:1] Marker# BPG33 2025-04-10T11:33:25.620192Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:182:0:0:248:1] Marker# BPG32 2025-04-10T11:33:25.620343Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:182:0:0:248:1] FDS# 248 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:33:25.622220Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] received {EvVPutResult Status# OK ID# [72057594037927937:2:182:0:0:248:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 197 } Cost# 81952 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 198 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:33:25.622341Z node 24 :BS_PROXY_PUT DEBUG: [e7f5c6a1b70e5a8f] Result# TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:33:25.622396Z node 24 :BS_PROXY_PUT INFO: [e7f5c6a1b70e5a8f] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:33:25.622529Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.79 sample PartId# [72057594037927937:2:182:0:0:248:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 2.675 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-04-10T11:33:25.622805Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:33:25.623032Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} commited cookie 1 for step 182 2025-04-10T11:33:25.623463Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-10T11:33:25.623555Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 
0} 2025-04-10T11:33:25.623781Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{362, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-10T11:33:25.623877Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:25.624284Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-04-10T11:33:25.624327Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-04-10T11:33:25.624399Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-04-10T11:33:25.725483Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:33:25.725564Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:25.725669Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004387168}: tablet 72075186224037927 wasn't changed 2025-04-10T11:33:25.725702Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004387168}: tablet 72075186224037927 skipped channel 0 2025-04-10T11:33:25.725778Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004387168}: tablet 72075186224037927 skipped channel 1 2025-04-10T11:33:25.725809Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004387168}: tablet 72075186224037927 skipped channel 2 2025-04-10T11:33:25.725876Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004387168}(72075186224037927)::Execute - TryToBoot was not successfull 2025-04-10T11:33:25.725964Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{363, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-10T11:33:25.726022Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:25.739133Z node 24 :BS_PROXY_PUT INFO: [f9fe49aaf76e0da8] bootstrap ActorId# [24:3851:5136] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:183:0:0:248:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:33:25.739473Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] Id# [72057594037927937:2:183:0:0:248:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:33:25.739523Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] restore Id# [72057594037927937:2:183:0:0:248:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:33:25.739592Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:183:0:0:248:1] Marker# BPG33 2025-04-10T11:33:25.739634Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] Sending missing VPut part# 0 to# 0 blob Id# 
[72057594037927937:2:183:0:0:248:1] Marker# BPG32 2025-04-10T11:33:25.739784Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:183:0:0:248:1] FDS# 248 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:33:25.741792Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] received {EvVPutResult Status# OK ID# [72057594037927937:2:183:0:0:248:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 198 } Cost# 81952 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 199 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:33:25.742052Z node 24 :BS_PROXY_PUT DEBUG: [f9fe49aaf76e0da8] Result# TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:33:25.742225Z node 24 :BS_PROXY_PUT INFO: [f9fe49aaf76e0da8] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:33:25.742363Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.041 sample PartId# [72057594037927937:2:183:0:0:248:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 3.148 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-04-10T11:33:25.745475Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:33:25.746033Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} commited cookie 1 for step 183 2025-04-10T11:33:25.746894Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-10T11:33:25.746955Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:25.747357Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{364, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-10T11:33:25.747416Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:25.747688Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-04-10T11:33:25.747738Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-04-10T11:33:25.747848Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-04-10T11:33:25.848961Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:33:25.849070Z node 24 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:25.849210Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003281056}: tablet 72075186224037898 wasn't changed 2025-04-10T11:33:25.849259Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003281056}: tablet 72075186224037898 skipped channel 0 2025-04-10T11:33:25.849342Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003281056}: tablet 72075186224037898 skipped channel 1 2025-04-10T11:33:25.849377Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003281056}: tablet 72075186224037898 skipped channel 2 2025-04-10T11:33:25.849445Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003281056}(72075186224037898)::Execute - TryToBoot was not successfull 2025-04-10T11:33:25.849514Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{365, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-10T11:33:25.849563Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-04-10T11:33:18.732011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646165979704882:2257];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:18.733025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201b/r3tmp/tmpTjc7tx/pdisk_1.dat 2025-04-10T11:33:19.097399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:19.127065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:19.127179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:19.128751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28269 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:33:19.413865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:19.444676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:22.019304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646179665637149:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:22.019359Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00201b/r3tmp/tmpgf6er3/pdisk_1.dat 2025-04-10T11:33:22.172630Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:22.207287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:22.207370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:22.211203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14560 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:22.387805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:22.409216Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:22.457996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
|95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TFlatTest::MiniKQLRanges >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> TTabletResolver::NodeProblem >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> KqpQueryService::DdlExecuteScript [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TTabletResolver::NodeProblem [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] Test command err: 2025-04-10T11:32:47.734607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:47.735441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:47.735549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:47.738071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:47.738533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:47.739038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011de/r3tmp/tmpyQRfEK/pdisk_1.dat 2025-04-10T11:32:48.213349Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19297, node 1 TClient is connected to server localhost:8125 2025-04-10T11:32:48.695720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:48.695792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:48.695830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:48.696108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:56.569183Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:56.569580Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:56.569893Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:56.571484Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:56.571787Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:56.572048Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011de/r3tmp/tmpGFML7F/pdisk_1.dat 2025-04-10T11:32:56.907868Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61847, node 3 TClient is connected to server localhost:6791 2025-04-10T11:32:57.370192Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:57.370269Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:57.370324Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:57.370654Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:05.979225Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:280:2215], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.979574Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:05.979787Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:05.981813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.981944Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:05.982387Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011de/r3tmp/tmpXKSx8t/pdisk_1.dat 2025-04-10T11:33:06.382855Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14808, node 5 TClient is connected to server localhost:63224 2025-04-10T11:33:06.889395Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:06.889470Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:06.889516Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:06.891184Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:15.994994Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:15.995725Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:15.996020Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:15.996214Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:15.996528Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:15.996674Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011de/r3tmp/tmp83pZfp/pdisk_1.dat 2025-04-10T11:33:16.383915Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13846, node 7 TClient is connected to server localhost:9772 2025-04-10T11:33:16.924647Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:16.924728Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:16.924772Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:16.925696Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:26.410378Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:26.411072Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:26.411346Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:26.412338Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:26.412778Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:26.413117Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011de/r3tmp/tmpWlDkjf/pdisk_1.dat 2025-04-10T11:33:26.806475Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15550, node 9 TClient is connected to server localhost:3732 2025-04-10T11:33:27.371721Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:27.371798Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:27.371841Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:27.372569Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-04-10T11:33:29.561950Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.562782Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:207:2136] CurrentLeaderTablet: [1:208:2137] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:29.562863Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-10T11:33:29.562935Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-04-10T11:33:29.563250Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.563614Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:213:2140] CurrentLeaderTablet: [1:214:2141] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:29.563680Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-10T11:33:29.563746Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-04-10T11:33:29.565723Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.565827Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 
tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-04-10T11:33:29.566137Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.567343Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-04-10T11:33:29.567804Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2025-04-10T11:33:29.567897Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:207:2136] by NodeId 2025-04-10T11:33:29.568011Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.568476Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:29.568548Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-10T11:33:29.568625Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-10T11:33:29.569043Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:213:2140] by NodeId 2025-04-10T11:33:29.569139Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.569495Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:229:2096] CurrentLeaderTablet: [2:230:2097] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 0, 9}} 2025-04-10T11:33:29.569588Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-10T11:33:29.569667Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-04-10T11:33:29.572100Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-04-10T11:33:29.572204Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.572273Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-04-10T11:33:29.572634Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.572707Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-04-10T11:33:29.573050Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 
2025-04-10T11:33:29.573130Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:223:2094] by NodeId 2025-04-10T11:33:29.573348Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.573891Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:29.573986Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-10T11:33:29.574050Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-04-10T11:33:29.574443Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.574527Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-04-10T11:33:29.574857Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-04-10T11:33:29.574941Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.575016Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-04-10T11:33:29.575381Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:229:2096] by NodeId 2025-04-10T11:33:29.575479Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:29.575814Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:247:2096] CurrentLeaderTablet: [3:248:2097] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:29.575874Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-10T11:33:29.575936Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:247:2096] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 28289, MsgBus: 11525 2025-04-10T11:31:11.863270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619612796306:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.863792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/00157a/r3tmp/tmpT24vJy/pdisk_1.dat 2025-04-10T11:31:12.602989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.656034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.656144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.691212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28289, node 1 2025-04-10T11:31:12.958816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:12.958847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:12.958853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:12.958961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11525 TClient is connected to server localhost:11525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.175871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.233148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.542815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.804924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:31:14.992752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:31:16.850337Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619612796306:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.850414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:31:16.943902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645641087634448:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.944014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.290545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.379033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.418515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.492432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.538206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.595919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:31:17.672143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645382602267:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.672239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.674318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645645382602272:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.678516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:31:17.694432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645645382602274:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:31:17.790321Z node 1 :TX_PROXY ERROR: Actor# [1:7491645645382602331:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:19.028123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:31:19.363216Z node 1 :TX_PROXY ERROR: Actor# [1:7491645653972537400:3777] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:19.363443Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T11:31:19.363583Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGVmYjg5Ni04NjQ0OGE3Ni0zZjVkYzYwOC1mNDU1ODg5OQ==, ActorId: [1:7491645653972537388:2527], ActorState: ExecuteState, TraceId: 01jrfqfz5698vz7vvfn0437drx, Create QueryResponse for error on request, msg: 2025-04-10T11:31:19.400096Z node 1 :TX_PROXY ERROR: Actor# [1:7491645653972537425:3788] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:31:19.503867Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-10T11:31:19.576228Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645653972537527:2552], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:19.577731Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGU3NTMxYjUtNjRkYjRjYzgtMWYxNDI5NDEtYzFmOWI2ZjU=, ActorId: [1:7491645653972537521:2551], ActorState: ExecuteState, TraceId: 01jrfqfzb940q19a74se8nh882, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:31:19.603214Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645653972537535:2556], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:19.604350Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRjMmE5YmItYTA2MDFhYTctM2EzMjVlMmQtODZjYzE2NGM=, ActorId: [1:7491645653972537533:2555], ActorState: ExecuteState, TraceId: 01jrfqfzd3fwba6kf4fv0jqkcy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:31:19.635095Z node 1 :TX_PROXY ERROR: Actor# [1:7491645653972537568:3889] txid# 281474976710679, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-10T11:31:19.672174Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491645653972537574:2564], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:31:19.673677Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2E0MDA2OGMtZDJmYzlkOWYtNTYzZjhjZDgtN2VjZDVjYzE=, ActorId: [1:7491645653972537572:2563], Ac ... e itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:17.880188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:17.941450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646159425937175:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:17.941619Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:17.941801Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646159425937180:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:17.946338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:17.962450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646159425937182:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:33:18.054694Z node 3 :TX_PROXY ERROR: Actor# [3:7491646163720904534:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:18.426042Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646142246065731:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:18.426176Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:33:19.210602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:33:19.337738Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-04-10T11:33:19.369060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9899, MsgBus: 5476 2025-04-10T11:33:20.469607Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646173011609733:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:20.469712Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00157a/r3tmp/tmpBudTWG/pdisk_1.dat 2025-04-10T11:33:20.601624Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:20.634502Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:20.634627Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:20.636509Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9899, node 4 2025-04-10T11:33:20.696693Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:20.696727Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:20.696736Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:20.696878Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5476 TClient is connected to server localhost:5476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:21.291188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:21.313825Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:21.414831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:21.631634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:21.779483Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:24.769295Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646190191480700:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:24.769429Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:24.838810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:24.880612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:24.934236Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:24.975417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.012830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.059377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.149971Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646194486448512:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.150115Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.150137Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646194486448517:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.156268Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:25.170522Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646194486448519:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:33:25.240450Z node 4 :TX_PROXY ERROR: Actor# [4:7491646194486448574:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:25.469116Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646173011609733:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:25.469220Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:33:26.561530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.565165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.567366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.927664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> DataShardSnapshots::MvccSnapshotAndSplit >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-04-10T11:33:21.219004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646178640526508:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:21.219079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002018/r3tmp/tmpHDuIjG/pdisk_1.dat 2025-04-10T11:33:21.855445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:21.855531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:21.859340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:21.862469Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24166 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:22.255414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:22.446680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:33:22.447039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:33:22.447229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:33:22.447276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T11:33:22.447393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:33:22.447641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:33:22.447711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:33:22.450601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T11:33:22.450800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-04-10T11:33:22.451040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:33:22.451055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:33:22.451244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 
281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:33:22.451333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:33:22.451351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646178640527180:2378], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-04-10T11:33:22.451374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646178640527180:2378], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-04-10T11:33:22.451424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:33:22.451443Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:33:22.451477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 waiting... 2025-04-10T11:33:22.468627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:22.470570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.470711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.470728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-10T11:33:22.470773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-10T11:33:22.470797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T11:33:22.471063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.471112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.471121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-10T11:33:22.471130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-10T11:33:22.471139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:33:22.471198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
281474976715658, ready parts: 0/1, is published: true 2025-04-10T11:33:22.471437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:22.471457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-04-10T11:33:22.471493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:22.472266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-04-10T11:33:22.472405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:33:22.473026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.473925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-04-10T11:33:22.475403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284802519, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:22.475588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744284802519 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:33:22.475653Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1744284802519, at schemeshard: 72057594046644480 2025-04-10T11:33:22.475836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-10T11:33:22.476015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:33:22.476116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:33:22.477776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:33:22.477796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:33:22.477933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:33:22.477995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:33:22.478007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646178640527180:2378], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-04-10T11:33:22.478018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646178640527180:2378], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-04-10T11:33:22.478053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:33:22.478086Z node 1 : ... s: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 1], 9 2025-04-10T11:33:22.748792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 4], 3 2025-04-10T11:33:22.748908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{36, redo 162b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T11:33:22.748932Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:22.749484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.749550Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-04-10T11:33:22.749581Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:22.749623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.749631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-10T11:33:22.749642Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-10T11:33:22.749654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-04-10T11:33:22.749777Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-10T11:33:22.749806Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:22.749905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.749936Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-04-10T11:33:22.749964Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:22.750041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.750050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-10T11:33:22.750088Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-10T11:33:22.750101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-10T11:33:22.750178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-04-10T11:33:22.750188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7491646182935494854:2303] 2025-04-10T11:33:22.750228Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-10T11:33:22.750287Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:33:22.750321Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:22.750346Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:33:22.750398Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:33:22.750447Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:33:22.750510Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7491646178640527145:2341] 2025-04-10T11:33:22.750520Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7491646178640527145:2341] 2025-04-10T11:33:22.750612Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7491646178640527139:2341] EventType# 269156352 2025-04-10T11:33:22.752176Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752183Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:17:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752207Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:17:1:24576:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752209Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752244Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# 
[72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752260Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:122:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752269Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752296Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:33:22.752341Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2025-04-10T11:33:22.752349Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-04-10T11:33:22.752459Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-04-10T11:33:22.752507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.752536Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-04-10T11:33:22.752547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-10T11:33:22.752803Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7491646182935494855:2303] 2025-04-10T11:33:22.752835Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7491646182935494855:2303] 2025-04-10T11:33:22.752849Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7491646182935494855:2303] 2025-04-10T11:33:25.271685Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646194962095358:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002018/r3tmp/tmpZDT3Ne/pdisk_1.dat 2025-04-10T11:33:25.312439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:33:25.362757Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:25.403575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:25.403670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:25.406197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30902 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:25.577943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:25.596831Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:25.603712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:29.006234Z node 2 :TX_PROXY ERROR: Actor# [2:7491646212141965492:2616] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000087 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-04-10T11:33:29.006308Z node 2 :TX_PROXY ERROR: Actor# [2:7491646212141965492:2616] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TLocksTest::Range_BrokenLock2 >> DataShardSnapshots::VolatileSnapshotSplit >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> ExternalBlobsMultipleChannels::Simple [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TLocksTest::BrokenDupLock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-04-10T11:33:24.308367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:24.308632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:24.308842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00137d/r3tmp/tmpDNjWKe/pdisk_1.dat 2025-04-10T11:33:24.872077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:24.926298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:24.970593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:24.971351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:24.986399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:25.084805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.503913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.790972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.791132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:828:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.791243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.797536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:33:25.952383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:831:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:33:26.018641Z node 1 :TX_PROXY ERROR: Actor# [1:890:2723] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:26.570905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqktmvemj622dn0szcwg60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI3MmI4ZS1kYjRkNTIyNS0zNzY1NmU5OS05NDExNTFlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.681211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqkvf2fct28pq9nc495txy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBjMmM2OTgtOGU1YmYzNjMtOTczOGIwMmEtZTNjMWUwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.739589Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqkvh08mddy2m68msc1s10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhmNzhjNzgtNDIxYzBlNDktYzIzNWY4YTUtZDhkYzRkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.786257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqkvjtcvf414e1rzt72qnz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg2Nzk0NjAtZGRiMGI3NWItODQyMTM5N2QtYTdiZjUxMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.831116Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqkvm7d2yx5f2yj9akg3kk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ0YzEzNDQtMTY2ZDUwYmQtMjhjZWFlNTEtNDVmMGQ5ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.878577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqkvnn4x26g4gfvawagnd2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg4MzYzYzgtZjIwMjJmNDctNTBjY2VhNTAtYjU5OWRiODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.937597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfqkvq3c3tte8a17d0b4hrj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjM2JlYjEtZmZmOTM1YTEtNjUyYjk1ZC1kMzNjMGIyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.991344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqkvs0bqszmx0f3pwgm53h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlNTQ4MGQtZDYzNWRmZTUtZDJiYzQzZDAtMWJlNjczZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.050973Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfqkvtp8nm5fbpjcd972p23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRlZDFhY2YtMjQ0YTAxZWQtYWRjYjNjMDctMTcxNTA5YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.110338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfqkvwhfyphdp3wq617hkps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ1OTViOTUtOTE3N2FiNWEtNTRkOGUzYmItNmJhYjVjMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.170870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfqkvydb9pn076kwm3yapaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhNDJkYzMtNzI2ZGM1MjItZTU0NzliZGMtYmZlMzY2M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.229805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfqkw0992txvy4f1dst4mas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjExMTFiOTAtNjNlM2IwY2MtY2U3ZDE4ZDItNzZiMGIyN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.289311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqkw24aq5j4nz8bpfn341t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRmNjZhOTQtNmVmZDMyM2MtOWE0NjAwYy01NmFiNTdjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.347695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfqkw3z0z5tg3d5w1q4nxyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYyMGE5MzctOTAzN2YwZjUtMjk5MTY5NDYtMWU0MmQxYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.397107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfqkw5vfy2hg9051wgax7h4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEyYjNkOGItNmUzOTZjN2YtNTJjMzgxN2YtNmNiOTAyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.494748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfqkw8h3rrd6p6f3wcdry7r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJlZjJjYzYtYWU4MTA5NjUtY2IxMGEwNTYtZTdlNmEwZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.550464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqkwaccee16mtz4fm9g2x5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIxZjUzMmQtZjllZDRiM2MtZTMxMDhlYTctYTg0MjcwNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.611112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfqkwc503mwn5sqahbakwws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBlMzQ4ZDUtZTAyNmZkOGYtMzJhNmMzN2ItMTYwZjY5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.672730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfqkwe13j0hcgmvcn5jzb5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRhZGFiZmYtYWU0MjQ1Y2EtYjJiZjY3ZmEtZWNjMTFlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.731506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfqkwfzfktvddtynvg7b28n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg4YWUwNjUtZGUyYTc5YWItODljY2Y4OGQtMTJmNTVkNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.788639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqkwhtbm99fhydezntq8nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM4NGQwYmQtZjVkYWFjMDMtOTU4MDcwZjgtZjRiMDRjZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.846034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfqkwkk8f9tgxasesz7pjng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0NjM4YmUtZGIwMWY3MGUtZTc3MGIwYjEtNGYyNWVhZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.908168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfqkwncdsxrhx33nb2f2z84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc1YTg5YmEtZjQyMzdkMGMtZDk2MDhlZS1lYzdjNjAxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.964590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfqkwqb3zptf1cetwzwtaza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM0MzVkMjgtYWE5OGRkNjEtYmYyZTZmYWItN2E1NzM5YjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.026855Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfqkws31232fhtjt43yd3w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM1ZWIzZTItMjMxOTU1MWItZWRmMWFmNjktZDVlYTIzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.08869 ... 10T11:33:30.489843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jrfqkz68cj64gfvx8qksd21s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFkNDUyNjYtYjFmNDNkZTgtYzhjZTMyZWMtZDE2ODZjZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.546846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jrfqkz809t8m2sbf3whgkg3t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk0YzgwMzgtNTJjYjY0Mi1jNTk0ZmM1NC0zMjUzNjg0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.604762Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jrfqkz9s76wwmdwysyqdycr6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDg4OTg4NTQtYjY4YWI0YzEtYjQwZjMwYTktMzZkMGE5M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.662048Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jrfqkzbkfrmtn1kksbbgng4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ4M2QxYmMtNmY3ZTgyNzQtN2M5YWY0NzUtZmY2YjQ3ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.719975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jrfqkzdc3r42dmxbjqq4k2h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ2Njk5YzEtNzUxNWZmODMtZTI5Yjc4YjAtOGVhNzM5NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.777833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jrfqkzf6ad67cc8751z98m0h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRkM2JkY2EtYmY1ODI0ODUtZGU2ZTMzM2MtYzQ5MzQ2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.836146Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jrfqkzh0fj4895vec53zvmhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIwODA5YzUtMTRlNjgwMzctYWE5MWU4NTAtNTYyMDVhMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.934451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jrfqkzjt42qprsg1c75y8xgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRjYTI5N2EtN2MzNjAzLWYyNTAxMjgzLTljMzcxZWI5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.990076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jrfqkznxafnpdvwcp1jkbbn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZiNzZjMzEtNjlkNDdjNmMtM2RkMTcxMDctMjc4MDQ1ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.046190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jrfqkzqma9771awdgzgwpzmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRkMmIyMTMtNTVmNDU2YTUtNWZjMDgxMzItMjJiYTAzMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.102173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jrfqkzsc90ntd4nk25sk807n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcyNDFhNi1mNGU1NTdmMy1kZTMyOTQwZi01ZDE4ZmI2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.159024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jrfqkzv4bk88wv907ftzaw5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2YzM4ZWMtZGQxNjU4MjUtZGVjNmU2OWQtYjI5MmI3ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.214938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jrfqkzwyfrarm3ddgmmw92gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM1ODNiZjQtMjE0NDkxNTYtZmUzNDVkNTMtN2MxZDkzMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.272058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jrfqkzyn6m53cb75kn8czn3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjExMGNjMzEtMmFmMmY5YTctYjk1OTRhNWYtYTlhNWE1YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.330985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jrfqm00fe5zwsp0261mdj9pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBiZmQwMjItMzM5ZWYyMzgtNmMyNWU2NWYtZTM1OTE4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.388544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jrfqm0297fg07jy90j9q6eq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc1NTQwNzctZDE2YTkwZWEtNGVkYTNhZDEtMmU5ZmM4MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.445825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jrfqm043b1x5rzy87kd3vx5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU4YzNmMmUtYTQwMmU0ZDctZmUzNjNjMWQtNjA0NGI1M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.502639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jrfqm05w4yjgtp6zac6z06a1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAxMjljMzUtNDAzMWZkMWUtNGRkMmMxMTMtMmU3YmYyZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.559328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jrfqm07n8mthnk1kvxevpv79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhlZGY4NDktMWQyNTI5NC1jNjVkODMzMS0xNTkxZDI4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.644084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jrfqm0a90rw4tcs0d4h4wkx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVmMTA5ZGMtYjc3MGYxOGUtNTU1NDg4MjUtYzZiMmQ1NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.698911Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfqm0c2cmnm4fvasrh2awct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y2ZjEzMTQtMTZhNDhkMDktZDUwY2E5NGItMmQ2MjQ5NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.754440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jrfqm0dsce50ezj8ny3ajpa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlkMDA2NTYtNmFlMDQxODktYmY1YzcxMjEtNzRmNzQyM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.811131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jrfqm0fh7srh4kvr61ne8m08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdhM2JjMS1mODc3YWRhOS1mN2RhZDRjNS0zZDcyNmUw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.868394Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jrfqm0haacmm0zq8yc5b1f4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ3NTViZjEtYTRmMGFmMTEtZTliMDE5MTItNjc4NTAwNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.927145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jrfqm0k38cgd3p43t2z83b59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEyZjFiLTFjZmRjZTExLWM0ZjRiMzVkLWM0NDkxNzA1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.984663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jrfqm0mx87gjbhbw0fejq6x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThkMzNjZDgtMThkMTRlOTItOTAxYjY1NzItODEzNjVjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.043139Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jrfqm0pqa2qcsgfrn8p4v52q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY2NDgwY2ItNTlhMjBlNDUtZDY1N2RmZmUtMjBhZmNjZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.100155Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jrfqm0rhcmdspb0tpvrr6kyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ1ZjQyOTUtZGVlMTBjMi1lODUyZGQ4ZS1iNzk0OTQ5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.157915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jrfqm0tab1a30t3krtd66n7w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVhNzdjMzYtNmEzZGNiZDctNjQ1ZmEwMWQtZTQ1Njk5N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.215362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jrfqm0w4a46r5rh08xw77xyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI2YzVlZDEtZjIzYzA1NzMtZGRlM2UyODgtODg3OThhMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.292503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jrfqm0xy83v9w3hbs3mqgb9w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2VhMjIyYmQtNzY1ODNmMzEtNjkzNWFjMTgtYTAwMzQ0M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.352927Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jrfqm10bd097yd2nrck97xtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdkYzFkMWYtNWY4OThiODYtNzUyZjViYzctM2I3N2FhNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.414316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jrfqm1276tjcfx56k5bwhawf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYyNWFmNGYtOGE5ZTM5NGYtNGYzMWY4OWUtY2QyMDViZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.476500Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jrfqm146cf4hh81gd8zbmbnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkwN2RmM2QtMWFmYzhiMzUtN2IwNzIxODYtNmI4ZjY5NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.535983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jrfqm163801edhrp66px3e3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhNTEyNTEtMjAwMDJmMTgtZTc4Y2VhYWEtOTRhZjViYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.999866Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jrfqm1djaeg4cnmeswt1wh3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM2OTEzMjMtOTdhNWI0YzgtN2JlNGYyMDAtZDM0NmYzNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TFlatTest::MergeEmptyAndWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-04-10T11:33:24.461236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:24.461518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:24.461739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00138f/r3tmp/tmpnlxHhW/pdisk_1.dat 2025-04-10T11:33:24.879225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:24.932202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:24.976774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:24.976942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:24.988374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:25.086531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:25.482378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.482535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.482620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:25.492241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:33:25.659303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:33:25.744844Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:26.570929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqktb627mrvskpdcvvgxrg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4ZDAyYTYtM2M2YmQyYmItN2QyZmU5YTctM2FkMTMzOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.686191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqkvf2c4cw9f695n548xww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVkMjQwZGUtNmFiYTI0YmItMzQxN2QyMTMtNjZiNmVlNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.756744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqkvh59afrpdry4z6w535v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZmYTQwMjAtN2VkN2I3MDYtYmYyNWQ3MTMtNTM5ZjMxNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.820024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqkvkb9yrqw7k3wambfxr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkwY2EyYjItYWFjMGU1OGEtYzk5OWVhZDktMzYwMzhkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.884396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqkvna4sjvgj3a9kk8jnb1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNmZWZiYmEtOGQxZjI2YWYtODhjOTYxZTMtZDdlZGQwMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:26.949478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqkvqb06pwp1y4my75pfq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA5OTVhYmYtOGUyOGQ3MTUtNWE4YjMwOWItZTYxY2M3Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.012917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfqkvsc9x70tr6n64j2bjee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY3YTA3MmMtNjRlNzMwY2ItNTI2NGY3NDgtNTEyNTU2MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.073484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqkvvb3zprhgmqwjw9rv7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNhOGFlMTItNzEzYWY4NzUtNTA0MDgyNWEtNzU3ZTEwMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.134134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfqkvx74xgc8qqjywyv6w1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg3NWM1NDQtM2FhYjFkZTYtMmQ3NzMwNjQtNzdjM2MwMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.193817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfqkvz48b1a88gpvyb0yeex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhmOGZmOTctZmIwY2QxODktZjFhYzhiMmUtZGY2MGU5YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.249528Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfqkw101spvg3yvj794eqj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcyMTYwZGYtZDk3YjBkNmQtYTk4MDEyZmQtN2M2NzRmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.303003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfqkw2q2n30n68hjrvd78vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjljMjk5MjYtMTNhYTRiNjYtYjQwY2E0YzktZGRjNGNhYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.363624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqkw4b5z6ery3fyk0gzzma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMWY4MTEtMWFiMTIwZWEtMjFlZTgxOTAtN2QyMGM0MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.426020Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfqkw6af1c7y44vgn2b5ycg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUyNGNlNzQtY2IwNTc4YjItZTZmMmViYzItOTkwNDZhYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.491130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfqkw892r66v572brrdyvhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3OGEyYzctNjJmYjU4YzItOTQxNzE0YWItNzcwMjBkNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.554135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfqkwaa4bt5v2441h9en56t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M2NjU2OTYtYTIwNWY5ZDUtYjA3MGQ0ZTAtNzk5MzY1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.616233Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqkwc84dt1p40z08bse2tq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQwYTZlMzgtNzBmNTEyNmItMTVkMDY5MTItZmQ2OTc1Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.681089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfqkwe72h3qq3bnnpqgga8z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODllNTIxMzUtMmQwYzVlNTctNzgwMzFmYjMtMzE0NTQ4YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.736869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfqkwg6b608jsb3ye0avqkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQxYzA3NGItNWY2MWQ2NGQtYTYzMTRhZjEtMjJlZTE2YjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.802293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfqkwhz85d7hecsj00pkc7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RhZmRkMzQtODcyMDRkMzctN2NiODg1YWItYTAzNGNkNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.867181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqkwm0bx7cdn5t946n2grf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRlNjA1MDEtNjg0ODc0ZWItNjAwYTY3YWUtMTUxZDgwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.933963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfqkwp24zsbmfwc5kdff29w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQwODUwZTctMmI5NzZjYWItNzA1YmJmYTYtZjRmZGZlMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:27.991001Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfqkwr49aaqmw44dnqeh20s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg0NTJkMWUtMTYyMjNjNzgtZTA4Y2YzZjYtODIwNTNiODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.056011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfqkwsx044xgfp3kz6z5vaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY5NTA1YzAtMmY3NDMyZmItNGJiMmNkYmItZWYxMDI4NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.122691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfqkwvy2nnj9tayy9588dzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGExYzllNzctYzFkZWRhNzctNzdjNjllNzktYTcyNWM2MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.187253Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jrfqkwy19d44mx2twb9n1m11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJkMzRmYmQtMmFlYzFlNDAtNDk1NDIzNGItZGVmNWQw ... -04-10T11:33:30.908356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jrfqkzk40rw8rms3rx8hxgwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ5NjM1YjYtZTUxNmJhY2ItOTYzMmY4MWUtMWY3MmJkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.961122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jrfqkzn2eyxg2eab8mvamgw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZGRmYi1jZThjNDlmZC1lYjJlOTNkYS03ZDZlMDJm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.013730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jrfqkzpq6je630j45tg8x4dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgzNTc1MzEtZmE2OTJjNDEtYWRhZTc0YzUtMjRlZjA4Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.063901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jrfqkzrbd5rxpgg3hehbmkdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJiZjNhMTUtZmU3YzFhZWEtYzBjYTVhNTQtNTY1YjBiZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:33:31.117844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jrfqkzsxfkymh2mhhen7bqk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM4ZjU2ZDMtNTI0N2JkYjItMmRkZWEwZjEtZTczZGJiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.181983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jrfqkzvmagaymcasd4zb6wcy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdkN2Y2MDAtNGU1OGM2ZTUtYmMyNDYwNTUtOGZkOTRkNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.260891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jrfqkzxpen0t3rb4t1eex3t9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJhZGY1MTUtZmU4NTBmZGUtZTJjZTI3LWYxNzlmODQ4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.317463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jrfqm00370mjyscdegyv4nvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk4NDcyMTYtNTdiOWQyNGEtMmIwZWVkYmMtODA5MWE0Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.381644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jrfqm01vbz17xykdnacr7hnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdhZDIzNC0xMThmOTMzMy1hZTZkYTE3ZS03YjdmYTIz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.447328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jrfqm03w4b3d3eqxphwk13vq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg5MDYxMjQtZTlhMDRkZGQtOWM2ZTBlNDQtOWZiZmI1YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.511870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jrfqm05y07yvec2crtsh7em5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE4NzYyMTktZTU5OGZhNC04MGE0YjAzMi01ZTljYWNkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.575764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jrfqm07y0qcqcab6pw3texyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzgzN2ZlNDQtZjBhY2Y2YTQtMmIxNzg4NWUtNGRkZTM1OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.642399Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jrfqm09z9bfrdzbszbsf33tb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJhZjg4ZmYtNWM5MmM0YjYtOTFiNzJiZjUtZGMwMTkwMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.705146Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jrfqm0c1ccsffvc5wn52q5fm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNhYjA1YzYtNmI3NmVhOC05NTVhZDAyZi1mNzgxODU1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.764231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jrfqm0dza47wx12s5scsvsfj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRmMjFkNy05ZGQ3MWZhYy1hOTNjODkxLTJhYzFlMjAx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.824732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jrfqm0fsed2h0k8escgga1vr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI4MmJjNjYtNThiODllZmYtNjYwYjQ5ZGYtODM3OWE2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.908311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jrfqm0hq13pmr7nsb45k0ezd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgzZTMwMmMtNTdjMzdiYmQtMzViZjUxNjUtMzkzNjAyODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:31.972999Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jrfqm0mb096yw8zcnyg2enfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EzYzIzNTItNjdlYzEzYzktZWI2Yjk0NDEtMzY2OTkzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.040979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jrfqm0pe6v999rrfmx0gd22e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQxODQ4Ny1iNjMzMTY0YS04OWYyZWExZS0zZTY4ZjUzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.107252Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jrfqm0rfdssyffp9egyth50e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUzYTdlYi0xNTNlZTRhNy03ZjM4MzgxMS03ODFkY2RhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.183055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfqm0tjb0pktgxe70e8rd11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyZDc3YzMtZjAxMTk1ZjItNzM5ZjhkODItMzYxYTE5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.265068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jrfqm0x07cctp1fanm8r2ppx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAzNjUxOTItODIwNWViNTYtZjAxMzcxNzItMzEzYWRiNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.348133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jrfqm0zj751sk9fs8mbgfbn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIyNDAyMTEtZjEyYmU1NGMtNzg3MDFlM2EtNjg1YTliZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.420608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jrfqm1250mq74akt6dsfw8nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVhNDQyN2EtZjk3ZDU4MTItMzRlNDZjNGUtODRhNTBmMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.494712Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jrfqm14c8dn5tt8yth9hcwbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJjZThiYWMtZGFjYjdkNzItYTY1N2EzZjYtNTgzMzE2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.560041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jrfqm16p656kjsqqzt6ctwtn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhiYzM4ZDAtNTVmYjA0YzUtNjUxZTU5OWYtNmYwZmQ5ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.653854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jrfqm19hejfez78g0kgv3qkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y2NGMyYTQtOGIxN2U3MzQtNTk5ZDVhNi00NmZlYmJmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.715137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jrfqm1bn7157x5p3fzp5tdj4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwN2ExNWItNThmMjViNjEtZTQ0NWM1N2MtMjUyYjQyYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.767674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jrfqm1dh26xwmb5nhjb6r074, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE5MDI4NDktMTI5ZDBhZDYtNWE1ZmYzNi05NjJmNDQzOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.827607Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jrfqm1f56fvnm6yep3963ta8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZjNjg1NmItYjQ1MmY0MjUtZDE5ZTE2MTItNjNmZTZkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.891761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jrfqm1h31yvs9fhtkkjwcs44, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWVlZTFjLTczNzg2ZjJhLWVlMDA4NGFlLWQzYjViOTFj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:32.960507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jrfqm1k3ehvgxpjdh2w73f32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM1MTRiMzctNmE2NWNmY2EtN2FjMmI3MmItZmY3ZjczMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.030222Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jrfqm1n8ads3ztne2arb6wem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZiOTAwZjYtZmNjOGQxNWYtOGFkN2VkODYtYTg2NmQ3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.088818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jrfqm1qec900b39nqgvhcjwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjMxMTZlOTMtY2IzYmEzNTktZTQ0NjgzYmEtZWYyYzg4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.146568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jrfqm1s65nwzxfpwrwsxd7v1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IwNjY3M2QtNjlmMzVhMzgtNTEwNjUyOTItMzgxNGY3OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.440696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jrfqm1zf6qz5kc22ft6yrrfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTU2MjlhOGYtNGM4NjQyMDQtZmU2MDU1ZGUtZjRlNjIyZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:33:31.143588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:33:31.143723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:31.143773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:33:31.143808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:33:31.143858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:33:31.143889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:33:31.143950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:31.144052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:33:31.144457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:31.232313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:33:31.232400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:31.249871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:31.250292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:33:31.250505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:33:31.272796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:33:31.273184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:33:31.274414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:31.275042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:33:31.277900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:31.279921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:31.280020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:31.280307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:33:31.280372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:31.280430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:33:31.280604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.290583Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:33:31.452681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:33:31.453028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.453309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:33:31.453604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:33:31.453668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.456531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:31.456740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:33:31.456991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.457076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:33:31.457116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:33:31.457194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:33:31.459910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.459978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-10T11:33:31.460051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:33:31.463135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.463194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.463290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:31.463383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:33:31.473691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:31.476506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:33:31.476741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:33:31.477956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:31.478115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:31.478199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:31.478518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:33:31.478571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:31.478805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:31.478920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:31.481813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:31.481872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:31.482082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:31.482132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:31.482428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:31.482483Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:31.482585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:31.482664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:31.482719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:31.482753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:31.482800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:33:31.482862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:31.482934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:33:31.482972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:33:31.483056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:33:31.483106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:33:31.483141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:33:31.485796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:31.485948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:31.485990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tablet# 72057594046678944 2025-04-10T11:33:34.761957Z node 5 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:33:34.762016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:34.762238Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:34.762319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:34.764318Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:34.764386Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:34.764584Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:34.764634Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:34.765072Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:34.765132Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:34.765255Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:34.765297Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:34.765350Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:34.765389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:34.765430Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:33:34.765483Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:34.765527Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:33:34.765560Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:33:34.765648Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:33:34.765692Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:33:34.765734Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:33:34.766297Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:34.766405Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:33:34.766460Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2025-04-10T11:33:34.766511Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:33:34.766558Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:34.766656Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:33:34.769704Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:33:34.770258Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:34.770829Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:33:34.771001Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 214us result status StatusSuccess 2025-04-10T11:33:34.771402Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:34.771481Z node 5 :TX_PROXY DEBUG: actor# [5:267:2258] Bootstrap 2025-04-10T11:33:34.791950Z node 5 :TX_PROXY DEBUG: actor# [5:267:2258] Become StateWork (SchemeCache [5:272:2263]) 2025-04-10T11:33:34.792605Z node 5 :TX_PROXY DEBUG: actor# [5:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:34.795777Z node 5 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-04-10T11:33:34.796683Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-10T11:33:34.796737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-10T11:33:34.826403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-04-10T11:33:34.826547Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:34.826594Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:34.826811Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:34.826865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-10T11:33:34.827402Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-04-10T11:33:34.827784Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:33:34.827966Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2025-04-10T11:33:34.828367Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwJ+jlMD7COWZ+ue8WJs9\nAy/TwH/5kPo1QVdJvkHQv/07zUCm96rYhpvuXAX275//peXWWXlO9H1EvYIPfmnn\nBdpy36IHYNqPqvSDX/FyHnUiBWJsH4rgTWQSiQ4CImhsccbJUsqsoaJEUjOibi99\naHyjaZO48T//io/6Pt0+JPvvULgapujlKT/ZT1c9ReagT9hoxoP0M1cZAMjsAXY0\nqtBQQ1xvTUUClDoNR1pDft4qxljlzLFnA93TxagtvdWKVcJZFVNUNv9Cj3WA7l1z\nyZhq0ScuSlEHD25ns6f9+D9emIB4pM1pyD2fDmVDUHnUErrChciP2j9Zvn06jfTI\nlwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1744371214823 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] |95.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2025-04-10T11:32:50.500022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646044905581038:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:50.500577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpkdilJT/pdisk_1.dat 2025-04-10T11:32:50.989601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:51.007240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:51.007352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:51.019637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8949 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:32:51.353156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:32:51.391543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:51.562527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:51.634997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:54.040205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646062549298915:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:54.044122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpNgTHm4/pdisk_1.dat 2025-04-10T11:32:54.165941Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:54.177879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:54.177970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:54.179917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5119 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:32:54.391015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:54.402902Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:54.414452Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:32:54.419384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:32:54.503176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:54.557939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:58.251154Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646078054133414:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:32:58.254199Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpMX0gdB/pdisk_1.dat 2025-04-10T11:32:58.367983Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:32:58.381772Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:32:58.382045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:32:58.383661Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16742 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:32:58.611383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:58.622377Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:58.633256Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:32:58.637271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:32:58.694386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:32:58.773769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:01.990382Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646093380956578:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:01.990499Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpyFoD73/pdisk_1.dat 2025-04-10T11:33:02.135986Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:02.148849Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:02.148940Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:02.152042Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29785 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Scheme ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:14.785932Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:14.787271Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18221 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:33:15.014602Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:15.036986Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:15.042018Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:15.146441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:15.280802Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:19.028202Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646170438145152:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:19.028315Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpnCQ7uM/pdisk_1.dat 2025-04-10T11:33:19.224055Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:19.244955Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:19.245101Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:19.247274Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23862 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:19.556885Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:33:19.579639Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:19.664200Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:19.728852Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:33:23.888237Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646187478869450:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:23.888952Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpjOtiTG/pdisk_1.dat 2025-04-10T11:33:24.028411Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:24.050931Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:24.051038Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:24.053100Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4397 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:24.344128Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:24.376264Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:24.474738Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:24.560530Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:29.230399Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646210368175146:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:29.230476Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002021/r3tmp/tmpQTZNIX/pdisk_1.dat 2025-04-10T11:33:29.407329Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:29.435900Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:29.436015Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:29.439556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29409 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:29.836018Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:29.862522Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:29.932953Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:30.002667Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-04-10T11:33:28.367307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646207235378855:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:28.367377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002011/r3tmp/tmpZJJjth/pdisk_1.dat 2025-04-10T11:33:28.728554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:28.759182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:28.759273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:28.761178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29931 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:29.000151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:29.014409Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:29.034811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:31.643663Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646219523849147:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:31.699234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002011/r3tmp/tmpXAHeJX/pdisk_1.dat 2025-04-10T11:33:31.763250Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:31.804043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:31.804162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:31.805857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24959 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:33:31.987278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:32.012647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:32.124648Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:33:32.125557Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:33:32.164094Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:33:32.170625Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284812123 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-10T11:33:32.211603Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.215666Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.215903Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.218174Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.218404Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.219070Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.219779Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.219906Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.220258Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.220678Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.220781Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.221140Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.221679Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.221775Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.222100Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.222597Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.222699Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-10T11:33:32.223027Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.223646Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.223739Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.224134Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.224715Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.224800Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.225183Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.225737Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.225826Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.226209Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.226653Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.226730Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.227003Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.227436Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.227514Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.227800Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.228171Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-10T11:33:32.228242Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:32.228598Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-10T11:33:32.229274Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:32.229315Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:32.229492Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:32.229865Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-10T11:33:32.230413Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:33:32.230435Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:33:32.313289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG ... 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-04-10T11:33:32.545838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-04-10T11:33:32.545843Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-04-10T11:33:32.545851Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:33:32.545860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:33:32.545893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2025-04-10T11:33:32.545919Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:33:32.545971Z node 2 :TX_DATASHARD DEBUG: Complete [1744284812585 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7491646219523849338:2141], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:33:32.546000Z node 2 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 2025-04-10T11:33:32.546004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-10T11:33:32.546034Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:33:32.546102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744284812585 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1000 } } 2025-04-10T11:33:32.546115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-04-10T11:33:32.546759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744284812585 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1000 } } 2025-04-10T11:33:32.546842Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1744284812585 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1000 } } 2025-04-10T11:33:32.547006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-10T11:33:32.547050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-04-10T11:33:32.547210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646223818817217 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-10T11:33:32.547228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-04-10T11:33:32.547354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646223818817217 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-10T11:33:32.547381Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-10T11:33:32.547455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7491646223818817217 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-04-10T11:33:32.547492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:32.547509Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:33:32.547524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-10T11:33:32.547545Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-04-10T11:33:32.547776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:33:32.548014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:33:32.548092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-10T11:33:32.548119Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:33:32.548473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:33:32.548611Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-04-10T11:33:32.548625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-10T11:33:32.548650Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-04-10T11:33:32.548664Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-10T11:33:32.548683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-04-10T11:33:32.548732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491646223818817466:2393] message: TxId: 281474976715687 2025-04-10T11:33:32.548754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-04-10T11:33:32.548774Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-04-10T11:33:32.548783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-04-10T11:33:32.548866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:33:32.549441Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-04-10T11:33:32.549485Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-10T11:33:32.550729Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:33:32.550810Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-10T11:33:32.552539Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:32.552954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646223818817217 RawX2: 4503608217307444 } TabletId: 72075186224037890 State: 4 2025-04-10T11:33:32.553109Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:32.553538Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:33:32.553539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:32.554813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:33:32.555332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:33:32.555605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:33:32.555629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:33:32.555685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:33:32.556678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:33:32.556699Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:33:32.556748Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:33:32.557994Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:33:32.558067Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7491646223818817336:2582], serverId# [2:7491646223818817337:2583], sessionId# [0:0:0] 2025-04-10T11:33:32.558410Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-10T11:33:32.558686Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:33:32.558766Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" >> TLocksTest::GoodLock >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-04-10T11:33:27.181867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:27.182037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:27.182203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001368/r3tmp/tmpn7pYY9/pdisk_1.dat 2025-04-10T11:33:27.556728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:27.602856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:27.643801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:27.643953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:27.655699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:27.745133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:28.106419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:28.106571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:28.106652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:28.112486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:33:28.291727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:33:28.379729Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:28.727514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqkwx858nfxha0fvn68zak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ4NmEyZmUtYmRmNDA1ZWEtZDE5YzM4ZTEtMWNhNGFmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.801277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqkxh58xqr4b2gd8sx88d4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMwMjdlYzAtNjdmYjE3YmMtYTIxNmY4MDgtZmM5YjJhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.867100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqkxk8507xpkkk5y6gn6gb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFiYTMxZTgtNGY2NTM2ZTAtNjJkNjBkMzEtZmQzNTI5NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:28.934699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqkxn90fgp5656xwcv42h3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE4ZDIzYWEtODRlYWZhYWQtMTBmNDZmZGEtZDI1ZDNlMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.002585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqkxqdb76n7h9zdsq0xksw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2MTVjOGItOWM1ZTEyZTUtYzczNjZhYzEtZTBhNWUzYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.074166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqkxsh4r5zz4fkn4v2dyhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YzNTliNDctNjJhNjEyODctYzA5YmViNTItMmMzMWVlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.146856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfqkxvz8yzadvj3w85ftjgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIyNWMxZjktMTY1OGZjYWMtYjIzM2VlYjAtNTBmMDUxNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.208207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqkxy1dz5x6swdfv7dt4tq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNhZGNhNWUtYzdhMDdiNzMtYzFmMDM1OGEtOWI0ZjhhMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.261806Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfqkxzx32ychxa1aw8wk9n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkxOWI5ZGMtN2I4NTE3OTEtOTcyYWE0MjQtZWUxMGEzY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.327221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfqky1mazy2eq8j593s726b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTMwOTY1MDItM2M1ZDhjOGYtNmViZWNmYTEtMTc5NTkxYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.393955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfqky3p9yy6vccmsvmv3wk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA2MjQ2MWQtOTgxYTYwNTgtNWJlOGVjOC0yNmJmYjU0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.459792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfqky5s6h8v6kgntrj9p8qm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc5NzU4YTktZmM2ZWM1ZDAtMmM3MjZlOWEtZDIxNWI1YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.519088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqky7t51pzmdddj9wxneje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3NWQ5NGItZGQ3NTJjOGYtM2YxZjI2ZmItM2ZmM2ExYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.575389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfqky9n5xjk484gp4s1j8ay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE0YjI1YzItOTFhNjU0NmYtNmQ4MDUyZTYtZTFkMDY1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.629002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfqkybcd2a406gzvgw62c6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk4NjY3M2EtYjFlYjUyNDItNGM5MGIzYjQtNmVhYjM4MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.681159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfqkyd3aztfj4890rq3gdzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUwYzYzNmMtYzFjMjdlNDgtNjM3NWVkMDUtMzBiZjIyM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.734935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqkyep1z7xbefcs50bzw2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ5NjkzZDQtM2MyMjEzY2ItZGRlN2U1YzYtMzc5NWFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.798439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfqkygc1mghq1a0ryy3vtkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkZjUzYzQtNjJlNTMwZWYtZGNjZDA3NjEtNjNiYzE1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.859097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfqkyjc6jn0vkheg40b1p0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjY2MDhiNGUtZWYxOTM2MTUtYjRlN2U3Y2YtODMwMGVmYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.916495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jrfqkym8b00a92sezdesswr5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFhNWUxZWMtNzcwMTE4ZTQtZDVlMDZmN2MtZWU5Y2M2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:29.978478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqkyp25n9wb9ksjenymka6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk0MjYyYTEtYjkxMmVjYzktZTQ3OTI0YzItZTBhZjYzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.042563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfqkyr0ejge7b2yh4dhv1e3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwNGIyMmEtNDM5YzZhNTYtMmMzZjc3ZjctNDk3N2E3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.107284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfqkyt1f47w4ynqv8rfged9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBlMTRkMGItODVkM2ExMmEtOThlY2MwOGMtOGY3MDI4ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.172194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfqkyw16jrczwa0ats48wms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ0M2M5NTUtZTAxZDRhZTUtZmY1NTQ4ZS1hMDczYzEzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.235782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jrfqkyy2a1wtj7betdxre3c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM2ZTExZTItOWM4NmZlNjQtOTMxNThkYTctYWU4Nzg3NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:30.298668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jrfqkz010rqwecb5kpwvav4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjMwNzA4Y2EtYzA4ZTJhN2MtYTIzNzQ5OWMtMjlmODli ... 120170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jrfqm1r374tqrt4ksk71vdxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU3OTc2MDktODMwN2Q5MDMtZjhkN2FjZjYtODdjZjliMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.189101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jrfqm1t7cpx8q4h1qkntn57g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZhYzQ1YzctYWUwYmQ3NzMtZTZlODBlZTItNmRkMGFjOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.259964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jrfqm1wc0qwnd0xbdww89xs2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJiNmRkMTItM2IwMmQzY2YtYjBjN2ZjMmQtZGEzMzgzNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.329001Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jrfqm1yk08876abad0n7ajq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFmNzJlOS1hZjM2ZGRmOS00OTI2ODU0YS02NjRlOTk3Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:33:33.397334Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jrfqm20r90wtbchveftnn800, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU0NmRkMjUtZmVkMTYxNzEtYjJlNTJjMS01OTkzZTY4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.469057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jrfqm22w6m83720xaa2v10dp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNhMWUxODItZjI3Y2QzYjQtZmI2NTk0YzktNGVhMmIyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.536982Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jrfqm2549dwfm449tbf0zk1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0MzcwN2YtNzIwNGY1OTEtNWEzNDkxNjctYmY4YzcyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.635679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jrfqm2849ksvt71v4mxwqgan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY4MTBjMDgtYjM3NDQ2MjEtYmVkYmZlODktNzA1NTBkZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.704762Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jrfqm2abcgt3652djk2trkec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUxMTY3MmEtNzI5NDZiYmEtODQwY2U4My05M2ExYjEzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.775212Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jrfqm2cg2b6042yrgwem7r5c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiOGVhNjktMjM1MzFmNGEtZTA4ZDRjNTItNGNjNzMzNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.844005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jrfqm2epfctmsd5h7qm49wn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk0NGM3NTMtYTA0NzgwNzEtN2Q3MzE0ZDMtNjQ2NDUxMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.915898Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jrfqm2gv9rsdqqsp0c0fc17q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FhZWU3Y2YtNWNkN2I4MS1iZGU1ZGY0Ny04MjA3YTE5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:33.987493Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jrfqm2k43ryv9aw5d2rar840, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBmNDc2MmYtZGMyZTc2NmUtYjM3NWI4MTctYWUxOGViYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.058059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jrfqm2nb2dd2vpv5e9pt5aft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyMDdlYjItNzg1ODJiNzctZGIwYjIxNmItNmQ5OTViY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.127030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jrfqm2qhaqmvgtw8j93h7hb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlNTJkYTItY2I3NTJlM2YtNmY5ZDlhYzItNzVmNjUzNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.195909Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jrfqm2sp3vrkkj4q09ymemp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI5Nzc1NWQtZGMwYjVhODMtZWE3M2NkZDAtNGRiYzg3ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.284782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jrfqm2vv5jdt5g5szcsqkm1b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE1ODBmZWItNjIzNTMyYjktMmNkYmI0ODEtMWI0ZDQ3MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.350574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jrfqm2ym7agjykaw6shp1psx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc2YWJkYmUtYThiYjBjZGItODk2NWRkM2ItMWNmMzExZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.418888Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jrfqm30n1tqsk06aj60s7zzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhMDFhN2EtNmNlMjFlYTYtY2RmZjY3NmMtZjY5ZDcwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.488603Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jrfqm32tck6eb1q38rj4g7zh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY0MTgyMzEtNmY4M2M2ZDItMjE2YmZmZDktMzdlNWUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.557734Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jrfqm34z90s9n1dj4nrswgq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVjMTM2ZmItYTZjNjA4ZTQtZDdiZTY4Mi0xNWY3OWYzMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.625964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jrfqm375eb2w57qkv5fs3e3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUxNzQxODMtZjk2Y2FkODctMWUzM2ViZTAtNTk3MmMwYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.694880Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jrfqm3997f771mx1df47dm0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjljZjk3YzUtZGQ2Yzc3NTAtOTBhMzUwZDctYTZmYWQxYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.764653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jrfqm3be9848wcg0ppzevdk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E0ZTk3OGMtOWM4ZmRjZmItMzAxNTAyYzUtM2FkYTkxMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.833856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jrfqm3dm6b35syxexd38dxe2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNDhlZWItMjhiOTMzMjktMWZiY2FhNGItOTMxYmNmNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.901521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jrfqm3fs91p4d6fnf6pgqr00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg1M2QxNTctMmVhZTY4NTktN2ExNjk2OTItMmE0NGFkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:34.970168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jrfqm3hw5w7q5t3ryc053n8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE5ODM1OGMtYTU4NjI0YzktMTZhNDFhZTEtNDNiZWI0YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.066051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jrfqm3mx4fs0xty9vjz47qqb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNmMzQwZmQtZTJkZGEzYmEtMjIxYzI0NDQtZTU4OTBhNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.132221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jrfqm3q18awsm5ms7ps1eva4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGMwODUzZjctYWJlM2Q0ZWUtOWM5MmNmZjAtYzExZDBiMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.197208Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jrfqm3s37te6h53br4n93a12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg5MDg4MmItMzhkYzg3OTktNmJiNjFlNDgtMjRkNjg3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.262455Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jrfqm3v484qg25msb5bhsnyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUxYWM1NTktYzhmYjUxOTYtYjg5MDljY2MtOWY4NTNiNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.327407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jrfqm3x57ckz3aw85w8dn024, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U0NjcyZmItNjkyYzlkZWQtNjBjNDgxNWItNzJhMjYwM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.392070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jrfqm3z60hsdnsyc40edvkn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQzNTg2NjktYzFlYTk1NmUtN2I4OGY4OTMtNTNhYTU1Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.457282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jrfqm417d5edhgrnz2df2e8a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM0YmZmMTktMjUzODU0MjgtNzA1N2VjMjItMTE1OWU4NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.524494Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jrfqm438ef4xxjv5d81stxkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk2MGMzZi1kZjA5ZTc3Mi0zZTJjMjdhNy0xZDI4YzQ3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.734756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jrfqm45r90cfts4rf87r8vws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0YjVmODQtYjdlZWU0ODItNDQzNWYwNDYtZTNjZTA5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-04-10T11:33:33.269101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:33.269436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:33.269688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001183/r3tmp/tmp56vNf0/pdisk_1.dat 2025-04-10T11:33:33.911701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:33.977452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:34.037595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:34.038262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:34.051549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:34.155127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:34.789287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:866:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:34.789374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:876:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:34.789802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:34.803612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:33:34.959813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:880:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:33:35.062688Z node 1 :TX_PROXY ERROR: Actor# [1:962:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:35.927417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqm3e2dp155erwddvez24x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:35.980730Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2793], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqm3e2dp155erwddvez24x. SessionId : ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-04-10T11:33:35.983781Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2793], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqm3e2dp155erwddvez24x. SessionId : ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-04-10T11:33:35.995427Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:994:2794], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3. TraceId : 01jrfqm3e2dp155erwddvez24x. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-10T11:33:36.005333Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3, ActorId: [1:864:2711], ActorState: ExecuteState, TraceId: 01jrfqm3e2dp155erwddvez24x, Create QueryResponse for error on request, msg: 2025-04-10T11:33:36.011699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqm3e2dp155erwddvez24x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiZDlmY2YtNzkwYzczMTMtMTQ4M2EwLTVhYjhkN2I3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] Test command err: 2025-04-10T11:33:05.980013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.980666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:05.980743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:05.982721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.983054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:05.983466Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab8/r3tmp/tmp1WCMw0/pdisk_1.dat 2025-04-10T11:33:06.399399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11778, node 1 TClient is connected to server localhost:26143 2025-04-10T11:33:06.834646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:06.834705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:06.834729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:06.834983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-1" reason: "YELLOW-e9e2-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-4847-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-ef3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-4847-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-04-10T11:33:14.233703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:14.234140Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:14.234495Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:14.236124Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:14.236458Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:14.236703Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab8/r3tmp/tmpR7IhmF/pdisk_1.dat 2025-04-10T11:33:14.577521Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13904, node 3 TClient is connected to server localhost:25455 2025-04-10T11:33:15.007917Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:15.008000Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:15.008042Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:15.008361Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:22.928933Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:22.929605Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:22.929799Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:22.930226Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:22.930510Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:22.930614Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab8/r3tmp/tmpWQ5qHg/pdisk_1.dat 2025-04-10T11:33:23.267973Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11711, node 5 TClient is connected to server localhost:1129 2025-04-10T11:33:23.612159Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:23.612207Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:23.612228Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:23.612569Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:28.883017Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:28.883464Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:28.883646Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab8/r3tmp/tmpGbZ5ah/pdisk_1.dat 2025-04-10T11:33:29.231270Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28914, node 7 TClient is connected to server localhost:3987 2025-04-10T11:33:29.696045Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:29.696123Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:29.696163Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:29.696849Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:34.393350Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:34.393847Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:34.393932Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000ab8/r3tmp/tmpTKpr3k/pdisk_1.dat 2025-04-10T11:33:34.819378Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1083, node 9 TClient is connected to server localhost:1627 2025-04-10T11:33:35.342881Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:35.342967Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:35.343022Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:35.343711Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink |95.8%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] Test command err: 2025-04-10T11:32:55.203921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:55.204706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:55.204798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:55.207041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:55.207403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:55.207774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpQVEqiF/pdisk_1.dat 2025-04-10T11:32:55.654000Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22732, node 1 TClient is connected to server localhost:5746 2025-04-10T11:32:56.086914Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:56.086991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:56.087029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:56.087322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:04.138510Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.138969Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.139298Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:04.140934Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:04.141298Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:04.141566Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpw3wSQO/pdisk_1.dat 2025-04-10T11:33:04.488739Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26279, node 3 TClient is connected to server localhost:6534 2025-04-10T11:33:04.923237Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:04.923316Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:04.923381Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:04.923674Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-70fb-1231c6b1" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-d6d1-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-d6d1-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-258e-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-258e-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-819b-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpw3wSQO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: 
"/home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpw3wSQO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpw3wSQO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-819b-1231c6b1-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-10T11:33:13.262872Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:13.263765Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:13.264059Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:13.264629Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:13.265035Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:13.265174Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmpEAjjFL/pdisk_1.dat 2025-04-10T11:33:13.666056Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26777, node 5 TClient is connected to server localhost:15493 2025-04-10T11:33:14.163802Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:14.163874Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:14.163919Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:14.164493Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:24.541431Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:24.542487Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:24.542849Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:24.543090Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:24.543590Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:24.543784Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmp1Zi4Tp/pdisk_1.dat 2025-04-10T11:33:24.973728Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5787, node 7 TClient is connected to server localhost:12501 2025-04-10T11:33:25.538237Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:25.538313Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:25.538359Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:25.538980Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:35.030574Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:35.031259Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:35.031525Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:35.032524Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:35.032869Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:35.033250Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001188/r3tmp/tmp30F1HU/pdisk_1.dat 2025-04-10T11:33:35.428988Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5443, node 9 TClient is connected to server localhost:11240 2025-04-10T11:33:36.021727Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:36.021808Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:36.021860Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:36.022685Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TLocksTest::CK_GoodLock >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> TLocksTest::GoodSameKeyLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-04-10T11:32:49.013759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:49.014398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:49.014477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:49.016214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:49.016470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:49.016772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a8/r3tmp/tmpEPPWTp/pdisk_1.dat 2025-04-10T11:32:49.415251Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12692, node 1 TClient is connected to server localhost:65309 2025-04-10T11:32:49.914057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:49.914123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:49.914177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:49.914442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:32:57.670057Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:57.670538Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:57.670868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:32:57.672577Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:32:57.672926Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:32:57.673199Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a8/r3tmp/tmpF447FO/pdisk_1.dat 2025-04-10T11:32:58.032180Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2102, node 3 TClient is connected to server localhost:2885 2025-04-10T11:32:58.366175Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:32:58.366237Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:32:58.366264Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:32:58.366527Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:05.941909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.942627Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:05.942811Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:05.943208Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:05.943475Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:05.943570Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a8/r3tmp/tmpqXsvDQ/pdisk_1.dat 2025-04-10T11:33:06.281314Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16162, node 5 TClient is connected to server localhost:13347 2025-04-10T11:33:06.583120Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:06.583174Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:06.583197Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:06.583580Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:14.382073Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:773:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:14.382306Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:14.382454Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:14.383560Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:770:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:14.383801Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:14.383972Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a8/r3tmp/tmpyZjmRx/pdisk_1.dat 2025-04-10T11:33:14.704612Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19980, node 7 TClient is connected to server localhost:7217 2025-04-10T11:33:18.301304Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:18.301372Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:18.301413Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:18.302267Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:18.327313Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:18.327477Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:18.359193Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-10T11:33:18.359962Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-7" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 7 host: "::1" port: 12001 } 2025-04-10T11:33:27.896049Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:853:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:27.896712Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:27.897166Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:27.897765Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:850:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:27.898264Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:27.898521Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0011a8/r3tmp/tmpxH8gM7/pdisk_1.dat 2025-04-10T11:33:28.311241Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6252, node 10 TClient is connected to server localhost:23979 2025-04-10 ... ed nodes count 1 2025-04-10T11:33:32.340289Z node 10 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-04-10T11:33:32.340395Z node 10 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 12 2025-04-10T11:33:32.340479Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 12) 2025-04-10T11:33:32.340558Z node 10 :HIVE TRACE: HIVE#72057594037968897 Node(12, (0,0,0,0)->(0,1048576,0,0)) 2025-04-10T11:33:32.340701Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-10T11:33:32.340816Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-10T11:33:32.340915Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T11:33:32.341066Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-04-10T11:33:32.341312Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 12 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.065536Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.065536Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.065536Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:33:32.361967Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(12)::Complete 2025-04-10T11:33:32.362106Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T11:33:32.362526Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [12:1580:2365] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } TabletType: PersQueue Version: 1 
TenantIdOwner: 72057594046644480 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-04-10T11:33:32.363387Z node 10 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 12 Cookie 72075186224037888 2025-04-10T11:33:32.468720Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-10T11:33:32.468892Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [12:1580:2365] 2025-04-10T11:33:32.469035Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 12) 2025-04-10T11:33:32.469135Z node 10 :HIVE TRACE: HIVE#72057594037968897 Node(12, (0,1048576,0,0)->(0,0,0,0)) 2025-04-10T11:33:32.469320Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-10T11:33:32.469466Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-10T11:33:32.469554Z node 10 :HIVE TRACE: HIVE#72057594037968897 Node(12, (0,0,0,0)->(0,1048576,0,0)) 2025-04-10T11:33:32.469690Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-10T11:33:32.469826Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-10T11:33:32.470010Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-10T11:33:32.470075Z node 10 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-10T11:33:32.470430Z node 10 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-04-10T11:33:32.470559Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-10T11:33:32.470630Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-10T11:33:32.470709Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-10T11:33:32.486996Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [10:1338:2701] {EvTabletCreationResult Status: OK TabletID: 72075186224037888}} 2025-04-10T11:33:32.487120Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-10T11:33:36.636504Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 12: Status: 2 2025-04-10T11:33:36.636681Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(12)::Execute 2025-04-10T11:33:36.636763Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-10T11:33:36.636942Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(12)::Complete 2025-04-10T11:33:36.638049Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-04-10T11:33:36.638276Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 12) 2025-04-10T11:33:36.638374Z node 10 :HIVE TRACE: HIVE#72057594037968897 Node(12, (0,1048576,0,0)->(0,0,0,0)) 2025-04-10T11:33:36.638575Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId 
(72057594046578946,0): {Memory: 1048576} -> {} 2025-04-10T11:33:36.638757Z node 10 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-10T11:33:36.638896Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 12 2025-04-10T11:33:36.639011Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-04-10T11:33:36.639091Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-10T11:33:36.639150Z node 10 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-10T11:33:36.639532Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxKillNode(12)::Execute 2025-04-10T11:33:36.639707Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:33:36.639776Z node 10 :HIVE TRACE: Node(12) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-04-10T11:33:36.639855Z node 10 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 12) 2025-04-10T11:33:36.639936Z node 10 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [10:1646:2713] 2025-04-10T11:33:36.640016Z node 10 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(12): waiting 3600.000000s 2025-04-10T11:33:36.640573Z node 10 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([12:1581:2365]) [10:1646:2713] 2025-04-10T11:33:36.645347Z node 10 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([10:2012:2731]) [10:2013:2736] 2025-04-10T11:33:36.655333Z node 10 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:2012:2731]) [10:2013:2736] 2025-04-10T11:33:36.660849Z node 10 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([13:1991:2365]) [10:2049:2740] 2025-04-10T11:33:36.675128Z node 10 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [13:1987:2365] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-10T11:33:36.675331Z node 10 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(13)::Execute 2025-04-10T11:33:36.675521Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:36.675585Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T11:33:36.675662Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-10T11:33:36.675717Z node 10 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-10T11:33:36.675773Z node 10 :HIVE DEBUG: 
HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-10T11:33:36.675908Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:36.676605Z node 10 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 13 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-10" reason: "YELLOW-e9e2-1231c6b1-11" reason: "YELLOW-e9e2-1231c6b1-12" reason: "YELLOW-e9e2-1231c6b1-13" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-12" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 12 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 10 host: "::1" port: 12001 } >> TFlatTest::LargeDatashardReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD] Test command err: Trying to start YDB, gRPC: 25726, MsgBus: 10432 2025-04-10T11:29:54.274085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645286402528508:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:29:54.274276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0015d4/r3tmp/tmpu89Fwf/pdisk_1.dat 2025-04-10T11:29:54.861778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:29:54.869963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:29:54.870040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:29:54.874229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25726, node 1 2025-04-10T11:29:55.050751Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:29:55.050777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:29:55.050784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:29:55.050908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10432 TClient is connected to server localhost:10432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:29:55.661412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:29:55.707164Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:29:57.768016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645299287430890:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.772110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:29:57.774324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645299287430882:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.774429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:29:57.785657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645299287430896:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:29:57.889686Z node 1 :TX_PROXY ERROR: Actor# [1:7491645299287430947:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:29:58.202697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:29:58.445527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:58.446036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:58.446322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:58.446446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:58.446534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:58.446660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:58.446754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:58.446863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:58.446994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:58.447091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:58.447211Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:58.447331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7491645303582398542:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:58.448674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:58.448719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:29:58.448896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:29:58.449023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:29:58.449132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:29:58.449227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:29:58.449316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:29:58.449424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:29:58.449536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:29:58.449648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:29:58.449763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:29:58.449860Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7491645303582398510:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:29:58.484616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645303582398562:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:29:58.484667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491645303582398562:2362];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abs ... 1:23.433878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.444892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.446622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.456151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.457447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.472762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.477501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.483279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.488050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.493991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.497264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.511443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.515094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 
2025-04-10T11:31:23.521762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.526030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.536021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.539779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.545336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.550557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.560427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.564727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.574681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.578731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.783430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710714; 2025-04-10T11:31:23.854724Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqexmvfgz4kcj389d8zqkj", SessionId: ydb://session/3?node_id=1&id=MTFkMTBkN2UtYjlmZDEwOTQtN2Q3YmZiMTMtMjMxYTlmZTc=, Slow query, duration: 38.834567s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-04-10T11:31:24.454804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.455268Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:31:24.455628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7491645629999970883:11374];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-04-10T11:31:24.458858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710716; 2025-04-10T11:33:29.109143Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jrfqhx57darwme86z7fh0zjx", SessionId: ydb://session/3?node_id=1&id=MTFkMTBkN2UtYjlmZDEwOTQtN2Q3YmZiMTMtMjMxYTlmZTc=, Slow query, duration: 66.284409s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$cs_ui =\n\n (select catalog_sales.cs_item_sk cs_item_sk\n\n ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund\n\n from catalog_sales as catalog_sales\n\n cross join catalog_returns as catalog_returns\n\n where cs_item_sk = cr_item_sk\n\n and cs_order_number = cr_order_number\n\n group by catalog_sales.cs_item_sk\n\n having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit));\n\n$cross_sales =\n\n (select item.i_product_name product_name\n\n ,item.i_item_sk item_sk\n\n ,store.s_store_name store_name\n\n ,store.s_zip store_zip\n\n ,ad1.ca_street_number b_street_number\n\n ,ad1.ca_street_name b_street_name\n\n ,ad1.ca_city b_city\n\n ,ad1.ca_zip b_zip\n\n ,ad2.ca_street_number c_street_number\n\n ,ad2.ca_street_name c_street_name\n\n ,ad2.ca_city c_city\n\n ,ad2.ca_zip c_zip\n\n ,d1.d_year as syear\n\n ,d2.d_year as fsyear\n\n ,d3.d_year s2year\n\n ,count(*) cnt\n\n ,sum(ss_wholesale_cost) s1\n\n ,sum(ss_list_price) s2\n\n ,sum(ss_coupon_amt) s3\n\n FROM store_sales as store_sales\n\n cross join store_returns as store_returns\n\n cross join $cs_ui cs_ui\n\n cross join date_dim d1\n\n cross join date_dim d2\n\n cross join date_dim d3\n\n cross join store as store\n\n cross join customer as customer\n\n cross join customer_demographics cd1\n\n cross join customer_demographics cd2\n\n cross join promotion as promotion\n\n cross join household_demographics hd1\n\n cross join household_demographics hd2\n\n cross join customer_address ad1\n\n cross join customer_address ad2\n\n cross join income_band ib1\n\n cross join income_band ib2\n\n cross join item as item\n\n WHERE ss_store_sk = s_store_sk AND\n\n ss_sold_date_sk = d1.d_date_sk AND\n\n ss_customer_sk = c_customer_sk AND\n\n ss_cdemo_sk= cd1.cd_demo_sk AND\n\n ss_hdemo_sk = hd1.hd_demo_sk AND\n\n ss_addr_sk = ad1.ca_address_sk and\n\n ss_item_sk = i_item_sk and\n\n ss_item_sk = sr_item_sk and\n\n ss_ticket_number = sr_ticket_number and\n\n ss_item_sk = cs_ui.cs_item_sk and\n\n c_current_cdemo_sk = cd2.cd_demo_sk AND\n\n c_current_hdemo_sk = hd2.hd_demo_sk AND\n\n c_current_addr_sk = ad2.ca_address_sk and\n\n c_first_sales_date_sk = d2.d_date_sk and\n\n c_first_shipto_date_sk = d3.d_date_sk and\n\n ss_promo_sk = p_promo_sk and\n\n hd1.hd_income_band_sk = ib1.ib_income_band_sk and\n\n hd2.hd_income_band_sk = ib2.ib_income_band_sk and\n\n cd1.cd_marital_status <> 
cd2.cd_marital_status and\n\n i_color in ('azure','gainsboro','misty','blush','hot','lemon') and\n\n i_current_price between 80 and 80 + 10 and\n\n i_current_price between 80 + 1 and 80 + 15\n\ngroup by item.i_product_name\n\n ,item.i_item_sk\n\n ,store.s_store_name\n\n ,store.s_zip\n\n ,ad1.ca_street_number\n\n ,ad1.ca_street_name\n\n ,ad1.ca_city\n\n ,ad1.ca_zip\n\n ,ad2.ca_street_number\n\n ,ad2.ca_street_name\n\n ,ad2.ca_city\n\n ,ad2.ca_zip\n\n ,d1.d_year\n\n ,d2.d_year\n\n ,d3.d_year\n\n);\n\n-- start query 1 in stream 0 using template query64.tpl and seed 1220860970\n\nselect cs1.product_name\n\n ,cs1.store_name\n\n ,cs1.store_zip\n\n ,cs1.b_street_number\n\n ,cs1.b_street_name\n\n ,cs1.b_city\n\n ,cs1.b_zip\n\n ,cs1.c_street_number\n\n ,cs1.c_street_name\n\n ,cs1.c_city\n\n ,cs1.c_zip\n\n ,cs1.syear\n\n ,cs1.cnt\n\n ,cs1.s1 as s11\n\n ,cs1.s2 as s21\n\n ,cs1.s3 as s31\n\n ,cs2.s1 as s12\n\n ,cs2.s2 as s22\n\n ,cs2.s3 as s32\n\n ,cs2.syear\n\n ,cs2.cnt\n\nfrom $cross_sales cs1 cross join $cross_sales cs2\n\nwhere cs1.item_sk=cs2.item_sk and\n\n cs1.syear = 1999 and\n\n cs2.syear = 1999 + 1 and\n\n cs2.cnt <= cs1.cnt and\n\n cs1.store_name = cs2.store_name and\n\n cs1.store_zip = cs2.store_zip\n\norder by cs1.product_name\n\n ,cs1.store_name\n\n ,cs2.cnt\n\n ,s11\n\n ,s21\n\n ,s22;\n\n\n\n-- end query 1 in stream 0 using template query64.tpl\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-04-10T11:33:25.842422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646192917072932:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:25.842529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002015/r3tmp/tmpg6quPO/pdisk_1.dat 2025-04-10T11:33:26.258432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:26.258550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:26.260613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:26.282010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:26.293698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:18923 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T11:33:26.479615Z node 1 :TX_PROXY DEBUG: actor# [1:7491646192917073188:2103] Handle TEvNavigate describe path dc-1 2025-04-10T11:33:26.479661Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040765:2259] HANDLE EvNavigateScheme dc-1 2025-04-10T11:33:26.481087Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040765:2259] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:26.516160Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040765:2259] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-10T11:33:26.534268Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040765:2259] Handle TEvDescribeSchemeResult Forward to# [1:7491646197212040764:2258] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:33:26.561545Z node 1 :TX_PROXY DEBUG: actor# [1:7491646192917073188:2103] Handle TEvProposeTransaction 2025-04-10T11:33:26.561580Z node 1 :TX_PROXY DEBUG: actor# [1:7491646192917073188:2103] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T11:33:26.561694Z node 1 :TX_PROXY DEBUG: actor# [1:7491646192917073188:2103] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491646197212040779:2266] 2025-04-10T11:33:26.670715Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-10T11:33:26.670776Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:26.670857Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:26.671433Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:26.671593Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T11:33:26.671661Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T11:33:26.671914Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T11:33:26.675004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:33:26.675235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.675959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:33:26.676305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:33:26.676357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.677144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:26.677278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 
2025-04-10T11:33:26.677442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.677467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:33:26.677483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:33:26.677500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-10T11:33:26.677833Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T11:33:26.677879Z node 1 :TX_PROXY DEBUG: Actor# [1:7491646197212040779:2266] txid# 281474976710657 SEND to# [1:7491646197212040778:2265] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-04-10T11:33:26.678369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.678398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:33:26.678412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:33:26.678993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.679022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:26.679049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:33:26.679083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:33:26.683453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:26.683800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:26.683810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:33:26.683826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:26.684070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:33:26.684206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:33:26.685873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284806733, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:26.686020Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744284806733 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:33:26.686076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:33:26.686462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 ... rationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:33:27.493935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710674:0, datashard: 72075186224037899, at schemeshard: 72057594046644480 2025-04-10T11:33:27.494000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710674:0 129 -> 240 2025-04-10T11:33:27.494602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:33:27.494692Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710674 datashard 72075186224037899 state PreOffline 2025-04-10T11:33:27.494744Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-04-10T11:33:27.494754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T11:33:27.494773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710674:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:33:27.495013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-10T11:33:27.495157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-04-10T11:33:27.495183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-10T11:33:27.495199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-04-10T11:33:27.495205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-10T11:33:27.495216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: true 2025-04-10T11:33:27.495252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7491646201507009105:2370] message: TxId: 281474976710674 2025-04-10T11:33:27.495274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-04-10T11:33:27.495286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710674:0 2025-04-10T11:33:27.495293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710674:0 2025-04-10T11:33:27.495388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-04-10T11:33:27.497884Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:33:27.497965Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch 
from PreOffline to Offline state 2025-04-10T11:33:27.499449Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:27.499934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646201507008693 RawX2: 4503603922340127 } TabletId: 72075186224037899 State: 4 2025-04-10T11:33:27.500018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:27.500362Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-04-10T11:33:27.500428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:27.500641Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-04-10T11:33:27.500670Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-04-10T11:33:27.500758Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-04-10T11:33:27.500835Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-04-10T11:33:27.500931Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-04-10T11:33:27.501772Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7491646192917073172:2100] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7491646197212040683:2207] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-04-10T11:33:27.501943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-04-10T11:33:27.502290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-04-10T11:33:27.502467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:33:27.502494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-04-10T11:33:27.502551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:33:27.502723Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-04-10T11:33:27.503047Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-04-10T11:33:27.503155Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-04-10T11:33:27.503169Z node 1 
:TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-04-10T11:33:27.503200Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-04-10T11:33:27.503250Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-04-10T11:33:27.503761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-04-10T11:33:27.503784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-04-10T11:33:27.503828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:33:27.504033Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-04-10T11:33:27.504051Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-10T11:33:27.504212Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-04-10T11:33:27.505554Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-04-10T11:33:29.354332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646213064618675:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:29.354407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002015/r3tmp/tmpToumRu/pdisk_1.dat 2025-04-10T11:33:29.559676Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:29.564124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:29.564226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:29.566709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20791 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:29.746177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
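Aside on the failure that follows: the MINIKQL_ENGINE / TX_DATASHARD ERROR records below report `reply size limit exceeded. (71580986 > 50331648)` — a single read tried to return ~68.3 MiB against the datashard's 48 MiB (50331648-byte) reply cap, so the proxy answers RESULT_UNAVAILABLE / ExecResultUnavailable. A minimal sketch of the arithmetic and the usual client-side remedy (paginate the read); the helper is illustrative, assumes uniform row size, and is not YDB harness code:

    # Sketch, not YDB code: why the read below fails, and roughly how many
    # pages a paginated read would need to stay under the reply cap.
    REPLY_LIMIT = 48 * 1024 * 1024          # 50331648 bytes, the cap in the log
    attempted = 71_580_986                  # reply size the datashard reported

    assert attempted > REPLY_LIMIT          # hence REPLY_SIZE_EXCEEDED

    def pages_needed(total_bytes, row_bytes, limit=REPLY_LIMIT):
        # Rows that fit strictly under the cap, assuming uniform row size.
        rows_per_page = max(1, (limit - 1) // row_bytes)
        total_rows = -(-total_bytes // row_bytes)   # ceiling division
        return -(-total_rows // rows_per_page)

    print(pages_needed(attempted, 1024))    # -> 2 pages for ~1 KiB rows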
2025-04-10T11:33:29.770077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:34.357682Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646213064618675:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:34.357756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:33:40.102709Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-10T11:33:40.116215Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-10T11:33:40.118942Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-10T11:33:40.159310Z node 2 :TX_PROXY ERROR: Actor# [2:7491646256014298056:5915] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> KqpStats::RequestUnitForBadRequestExecute >> KqpTypes::UnsafeTimestampCastV0 >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpExplain::LimitOffset >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> TFlatTest::WriteMergeAndRead >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpExplain::PrecomputeRange >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsNoLimit >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> 
KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::FullOuterJoin >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> THealthCheckTest::LayoutCorrect [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TFlatTest::WriteSplitAndRead [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: 2025-04-10T11:33:19.261485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:19.262205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:19.262296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:19.264080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:19.264371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:19.264700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aad/r3tmp/tmplhMhmq/pdisk_1.dat 2025-04-10T11:33:19.669367Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8213, node 1 TClient is connected to server localhost:63114 2025-04-10T11:33:20.060226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:20.060312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:20.060363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:20.060659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:24.409746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:24.410008Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:24.410194Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aad/r3tmp/tmpbgAM8f/pdisk_1.dat 2025-04-10T11:33:24.761083Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62166, node 3 TClient is connected to server localhost:21464 2025-04-10T11:33:25.161410Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:25.161488Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:25.161526Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:25.162275Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:30.056225Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:454:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:30.056659Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:30.056734Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aad/r3tmp/tmpNy7oJn/pdisk_1.dat 2025-04-10T11:33:30.380661Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8805, node 5 TClient is connected to server localhost:7363 2025-04-10T11:33:30.875873Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:30.875945Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:30.875986Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:30.876825Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:30.949459Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:30.949587Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:30.967603Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:42.993952Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:42.994574Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:42.994865Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aad/r3tmp/tmpsN0cYG/pdisk_1.dat 2025-04-10T11:33:43.411726Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5586, node 7 TClient is connected to server localhost:29660 2025-04-10T11:33:43.960040Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:43.960118Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:43.960173Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:43.961032Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:49.836413Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:49.836937Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:49.837103Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000aad/r3tmp/tmpuz6gW9/pdisk_1.dat 2025-04-10T11:33:50.226629Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6192, node 9 TClient is connected to server localhost:20200 2025-04-10T11:33:50.560147Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:50.560211Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:50.560244Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:50.560738Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-04-10T11:33:45.405378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646278875311578:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:45.408937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002004/r3tmp/tmpUL2SAo/pdisk_1.dat 2025-04-10T11:33:45.845470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:45.845576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:45.847543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:45.880736Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9340 WaitRootIsUp 'dc-1'... 
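A note on triaging transcripts like this one: each record carries a timestamp, a node id, a component, and a severity, so the ERROR records (the METADATA_PROVIDER and MINIKQL_ENGINE ones above, for instance) can be pulled out mechanically. A small self-contained sketch; the regex encodes an assumption about the line shape seen here, not a documented format:

    # Sketch: group ERROR records by node id from a ya test transcript.
    # Assumes lines shaped like "2025-04-10T11:33:45.405378Z node 1 :COMP LEVEL: msg".
    import re, sys, collections

    REC = re.compile(r"(\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (\d+) :([A-Z_]+) (ERROR|WARN|NOTICE):")

    def errors_by_node(text):
        out = collections.defaultdict(list)
        for ts, node, comp, lvl in REC.findall(text):
            if lvl == "ERROR":
                out[int(node)].append((ts, comp))
        return dict(out)

    if __name__ == "__main__":
        for node, recs in sorted(errors_by_node(sys.stdin.read()).items()):
            print(f"node {node}: {len(recs)} ERROR record(s), first at {recs[0][0]}")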
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:46.172046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:46.202643Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:33:46.215600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:46.417609Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:33:46.428501Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:33:46.463736Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:33:46.472849Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-04-10T11:33:46.503455Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.505443Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:46.505515Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:46.507415Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-04-10T11:33:46.507829Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:33:46.507871Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-10T11:33:46.511988Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.514526Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 
72075186224037888 2025-04-10T11:33:46.514576Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:46.517720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.519206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:33:46.519268Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:33:46.522523Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-04-10T11:33:46.523189Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:33:46.523208Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-10T11:33:46.523407Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.525209Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:33:46.525274Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284826333 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-10T11:33:46.534279Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.536915Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.537196Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.539209Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.539444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.540241Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.541424Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.541622Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.542637Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.543690Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.543854Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.544396Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.545343Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.545476Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.546024Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.547232Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.547373Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.547915Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.548780Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.548918Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.549526Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.550721Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.550873Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.551497Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.552286Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.552455Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.552999Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.553760Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.553934Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.554701Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.555429Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.555559Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.556005Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.558326Z node 1 
:TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.558639Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.559482Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.560755Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.560976Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.561716Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-10T11:33:46.563050Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.563293Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:46.563907Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-10T11:33:46.564982Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-10T11:33:46.565165Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:33:46.565770Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 ... 4-10T11:33:50.057964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.058019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.059596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:50.059892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-10T11:33:50.060106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T11:33:50.060590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:33:50.060621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:33:50.060654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-10T11:33:50.062571Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-10T11:33:50.062619Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-10T11:33:50.062639Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-10T11:33:50.062670Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7491646296181820236:2386], serverId# [2:7491646296181820237:2387], sessionId# [0:0:0] 2025-04-10T11:33:50.062729Z node 2 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:50.062836Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:33:50.062871Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-10T11:33:50.062911Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-10T11:33:50.064634Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:33:50.064712Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-10T11:33:50.066698Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-10T11:33:50.066808Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-10T11:33:50.068562Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:33:50.068684Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-10T11:33:50.070479Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:50.070649Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:50.070825Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:33:50.070944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646296181820476 RawX2: 4503608217307457 } TabletId: 72075186224037892 State: 4 2025-04-10T11:33:50.071015Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:50.071273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646296181820481 RawX2: 4503608217307459 } TabletId: 72075186224037891 State: 4 2025-04-10T11:33:50.071339Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:50.071487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646296181820125 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-10T11:33:50.071506Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:50.071606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646296181820477 RawX2: 4503608217307458 } TabletId: 72075186224037890 State: 4 2025-04-10T11:33:50.071658Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:33:50.071872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.072192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.072270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.072374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:33:50.072650Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:33:50.072719Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-10T11:33:50.072750Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:33:50.072767Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-10T11:33:50.073561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T11:33:50.073823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:33:50.073824Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-10T11:33:50.073911Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-10T11:33:50.073957Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-10T11:33:50.073973Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:33:50.074044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:33:50.074276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:33:50.074452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:33:50.074652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:33:50.074808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:33:50.074901Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-10T11:33:50.075015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:33:50.075037Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-10T11:33:50.075200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:33:50.075215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at 
schemeshard: 72057594046644480 2025-04-10T11:33:50.075266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:33:50.075580Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-10T11:33:50.075610Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-10T11:33:50.075623Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-10T11:33:50.075640Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-10T11:33:50.076637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T11:33:50.076664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-10T11:33:50.076708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:33:50.076716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:33:50.076747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:33:50.076759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:33:50.076787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:33:50.076804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:33:50.076832Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:33:50.076895Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-10T11:33:50.076981Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-10T11:33:50.078777Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-10T11:33:50.078850Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:33:50.080562Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:33:50.080642Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> TLocksTest::NoLocksSet >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::MergeConnection >> KqpStats::StreamLookupStats+StreamLookupJoin >> THealthCheckTest::ShardsNoLimit [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> 
THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-04-10T11:33:18.750894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:18.751435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:18.751519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9a/r3tmp/tmpIZdFqQ/pdisk_1.dat 2025-04-10T11:33:19.224156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16474, node 1 TClient is connected to server localhost:28238 2025-04-10T11:33:19.708668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:19.708753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:19.708800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:19.709304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:27.429800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:27.430275Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:27.430575Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:27.432316Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:27.432632Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:27.432970Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9a/r3tmp/tmpcKRDko/pdisk_1.dat 2025-04-10T11:33:27.797425Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1859, node 3 TClient is connected to server localhost:3078 2025-04-10T11:33:28.217952Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:28.218031Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:28.218071Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:28.218361Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:35.991939Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:35.992555Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:35.992723Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:35.993108Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:35.993354Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:35.993435Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9a/r3tmp/tmpivXDFL/pdisk_1.dat 2025-04-10T11:33:36.328664Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2083, node 5 TClient is connected to server localhost:17872 2025-04-10T11:33:36.713508Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:36.713575Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:36.713604Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:36.714182Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:44.617673Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:44.618356Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:44.618625Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:44.618840Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:44.619189Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:44.619314Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9a/r3tmp/tmpGAV23h/pdisk_1.dat 2025-04-10T11:33:44.987964Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25822, node 7 TClient is connected to server localhost:16094 2025-04-10T11:33:45.364094Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:45.364165Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:45.364194Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:45.364414Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:33:54.625623Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:54.626199Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:54.626424Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:33:54.627364Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:54.627780Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:54.628052Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a9a/r3tmp/tmpxkg398/pdisk_1.dat 2025-04-10T11:33:54.990114Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21441, node 9 TClient is connected to server localhost:5932 2025-04-10T11:33:55.360392Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:55.360453Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:55.360483Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:55.361079Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink |95.8%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-04-10T11:31:49.258189Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:49.358300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:49.384195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:49.384489Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:49.394078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:49.394347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:49.394612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:49.394770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:49.394884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:49.395004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:49.395133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:49.395247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:49.395395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:49.395518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:49.395610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:49.395670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:49.425812Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:49.426309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:49.426369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:49.426556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:49.426713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:49.426796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:49.426838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:49.426937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:49.427007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:49.428255Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:49.428303Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:49.428494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:49.428569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:49.428606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:49.428634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:49.428719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:49.428872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:49.428927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:49.428960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:49.429029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:49.429114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:49.429152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:49.429209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:49.429263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:49.429293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:49.429671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-10T11:31:49.429746Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-10T11:31:49.429829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-10T11:31:49.429922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T11:31:49.430113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:49.430181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:49.430218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:49.430416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:49.430462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:49.430497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:49.430638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:49.430678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:49.430711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:49.430884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:49.430930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:49.430968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:49.431096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:49.431133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:49.431176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ne=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:117:2768:0]; 2025-04-10T11:33:55.526771Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0]; 2025-04-10T11:33:55.526828Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0]; 2025-04-10T11:33:55.526888Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0]; 2025-04-10T11:33:55.526950Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0]; 2025-04-10T11:33:55.527011Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0]; 2025-04-10T11:33:55.527070Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0]; 2025-04-10T11:33:55.527126Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0]; 2025-04-10T11:33:55.527185Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0]; 2025-04-10T11:33:55.527245Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0]; 2025-04-10T11:33:55.527305Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0]; 2025-04-10T11:33:55.527366Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0]; 2025-04-10T11:33:55.527425Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0]; 2025-04-10T11:33:55.527489Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0]; 2025-04-10T11:33:55.527548Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0]; 2025-04-10T11:33:55.527609Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0]; 2025-04-10T11:33:55.527669Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0]; 2025-04-10T11:33:55.527768Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0]; 2025-04-10T11:33:55.527841Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0]; 2025-04-10T11:33:55.527902Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0]; 2025-04-10T11:33:55.527962Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0]; 2025-04-10T11:33:55.528022Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0]; 2025-04-10T11:33:55.528079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0]; 2025-04-10T11:33:55.528137Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0]; 2025-04-10T11:33:55.528199Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0]; 2025-04-10T11:33:55.528258Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0]; 2025-04-10T11:33:55.528317Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0]; 2025-04-10T11:33:55.528378Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0]; 2025-04-10T11:33:55.528444Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0]; 2025-04-10T11:33:55.528500Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0]; 2025-04-10T11:33:55.528571Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0]; 2025-04-10T11:33:55.528639Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0]; 2025-04-10T11:33:55.528708Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0]; 2025-04-10T11:33:55.528777Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0]; 2025-04-10T11:33:55.528855Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0]; 2025-04-10T11:33:55.528921Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0]; 2025-04-10T11:33:55.528989Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0]; 2025-04-10T11:33:55.529072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0]; 2025-04-10T11:33:55.529134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0]; 2025-04-10T11:33:55.529203Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0]; 2025-04-10T11:33:55.529275Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0]; 2025-04-10T11:33:55.529340Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0]; 2025-04-10T11:33:55.529409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0]; 2025-04-10T11:33:55.529477Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0]; 2025-04-10T11:33:55.529562Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0]; 2025-04-10T11:33:55.529627Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0]; 2025-04-10T11:33:55.529687Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0]; 2025-04-10T11:33:55.529746Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0]; 2025-04-10T11:33:55.529812Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0]; 2025-04-10T11:33:55.529877Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0]; 2025-04-10T11:33:55.529939Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0]; 2025-04-10T11:33:55.530003Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0]; 2025-04-10T11:33:55.530069Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0]; 2025-04-10T11:33:55.530131Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0]; 2025-04-10T11:33:55.530977Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0]; 2025-04-10T11:33:55.531107Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0]; 2025-04-10T11:33:55.531188Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0]; 2025-04-10T11:33:55.531264Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0]; 2025-04-10T11:33:55.531338Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0]; 2025-04-10T11:33:55.531418Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0]; 2025-04-10T11:33:56.214896Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:33:56.216051Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-04-10T11:33:56.326619Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-10T11:33:56.335151Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:33:56.697309Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-10T11:33:56.697392Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;fline=with_appended.cpp:65;portions=47,;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6; 2025-04-10T11:33:56.698278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::ae622bf8-15ff11f0-8c4c1a88-dfc740a6; 2025-04-10T11:33:56.698375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;); 2025-04-10T11:33:56.698431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:33:56.698504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:33:56.698571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=8; 2025-04-10T11:33:56.698671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-10T11:33:56.698733Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:33:56.698792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:33:56.698844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:33:56.698940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.944000s; 2025-04-10T11:33:56.699003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:33:56.699266Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-10T11:33:56.701227Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=ae622bf8-15ff11f0-8c4c1a88-dfc740a6;mem=11009198;cpu=0; 2025-04-10T11:33:56.702640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] Test command err: 2025-04-10T11:32:39.501739Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:39.505878Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:39.506112Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:39.507134Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:45:2072] ControllerId# 72057594037932033 2025-04-10T11:32:39.507186Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:39.507363Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:39.507744Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:39.509864Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:39.509920Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:39.511941Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512148Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create 
Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512317Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512476Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512617Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512773Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512903Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:57:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.512933Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:39.513224Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:45:2072] 2025-04-10T11:32:39.513285Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:45:2072] 2025-04-10T11:32:39.513360Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:39.513439Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:39.513975Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:39.516417Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:39.516595Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:32:39.517272Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:32:39.518364Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:32:39.518424Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:39.519182Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:68:2076] ControllerId# 72057594037932033 2025-04-10T11:32:39.519228Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:39.519307Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:39.519530Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:39.531889Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:39.531935Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:39.533385Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:76:2081] targetNodeId# 1 Marker# DSP01 
2025-04-10T11:32:39.533529Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:77:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.533623Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:78:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.533730Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:79:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.533872Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:80:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.533958Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:81:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.534082Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:82:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:39.534103Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:39.534202Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:68:2076] 2025-04-10T11:32:39.534228Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:68:2076] 2025-04-10T11:32:39.534275Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:39.534316Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:39.535123Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:39.535263Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:45:2072] 2025-04-10T11:32:39.535307Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.535342Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:39.535395Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:39.535576Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:39.552426Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-04-10T11:32:39.552501Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.552539Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:39.554501Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:39.554777Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-04-10T11:32:39.554828Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.554879Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:39.561060Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:39.561957Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:39.570048Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} 
StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.570121Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:39.570297Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:39.570518Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:39.570712Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:39.570973Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:39.572709Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.573119Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:39.573716Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:39.573802Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:94:2093] 2025-04-10T11:32:39.573831Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:94:2093] 2025-04-10T11:32:39.573880Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:49:2064] 2025-04-10T11:32:39.573902Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:49:2064] 2025-04-10T11:32:39.573937Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-10T11:32:39.574011Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-10T11:32:39.574058Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-10T11:32:39.574082Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-10T11:32:39.574133Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:39.574279Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-1 ... 
.496425Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [23:1020:2318] 2025-04-10T11:33:57.496564Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result local node, try to connect [23:1080:2356] 2025-04-10T11:33:57.496598Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [23:1080:2356] 2025-04-10T11:33:57.496680Z node 23 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [23:1080:2356] 2025-04-10T11:33:57.496788Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [23:1080:2356] 2025-04-10T11:33:57.496826Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [23:1080:2356] 2025-04-10T11:33:57.497082Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [23:1084:2359] 2025-04-10T11:33:57.497119Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [23:1084:2359] 2025-04-10T11:33:57.497192Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StInit ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:57.497319Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:33:57.497596Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-04-10T11:33:57.497665Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-04-10T11:33:57.497711Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-04-10T11:33:57.497980Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2176] CurrentLeaderTablet: [23:686:2179] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:57.498067Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2176] CurrentLeaderTablet: [23:686:2179] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:57.499786Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [23:680:2176] CurrentLeaderTablet: [23:686:2179] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:57.499851Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-04-10T11:33:57.499901Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [23:680:2176] 2025-04-10T11:33:57.500049Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result local node, try to connect [23:1084:2359] 2025-04-10T11:33:57.500092Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [23:1084:2359] 2025-04-10T11:33:57.500191Z node 23 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [23:1084:2359] 2025-04-10T11:33:57.500324Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [23:1084:2359] 2025-04-10T11:33:57.500355Z node 23 
:PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [23:1084:2359] 2025-04-10T11:33:57.500608Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [23:1088:2362] 2025-04-10T11:33:57.500651Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [23:1088:2362] 2025-04-10T11:33:57.500715Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StInit ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:57.500822Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037895 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:33:57.501175Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 0} 2025-04-10T11:33:57.501218Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 1} 2025-04-10T11:33:57.501263Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 2} 2025-04-10T11:33:57.501463Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:846:2222] CurrentLeaderTablet: [23:848:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-04-10T11:33:57.501532Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:846:2222] CurrentLeaderTablet: [23:848:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-04-10T11:33:57.501643Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037895 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037895 Cookie: 0 CurrentLeader: [23:846:2222] CurrentLeaderTablet: [23:848:2223] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:57.501687Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037895 followers: 0 2025-04-10T11:33:57.501753Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [23:846:2222] 2025-04-10T11:33:57.501859Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result local node, try to connect [23:1088:2362] 2025-04-10T11:33:57.501900Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [23:1088:2362] 2025-04-10T11:33:57.502004Z node 23 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [23:1088:2362] 2025-04-10T11:33:57.502142Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [23:1088:2362] 2025-04-10T11:33:57.502196Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [23:1088:2362] 2025-04-10T11:33:57.502412Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [23:1092:2365] 2025-04-10T11:33:57.502445Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [23:1092:2365] 2025-04-10T11:33:57.502528Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StInit ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:57.502635Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037896 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:33:57.502888Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 
Cookie: 0} 2025-04-10T11:33:57.502932Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 1} 2025-04-10T11:33:57.502975Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 2} 2025-04-10T11:33:57.503201Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:763:2198] CurrentLeaderTablet: [23:769:2201] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:57.503250Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:763:2198] CurrentLeaderTablet: [23:769:2201] CurrentGeneration: 1 CurrentStep: 0} 2025-04-10T11:33:57.503347Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037896 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037896 Cookie: 0 CurrentLeader: [23:763:2198] CurrentLeaderTablet: [23:769:2201] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:57.503378Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037896 followers: 0 2025-04-10T11:33:57.503412Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [23:763:2198] 2025-04-10T11:33:57.503478Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result local node, try to connect [23:1092:2365] 2025-04-10T11:33:57.503509Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [23:1092:2365] 2025-04-10T11:33:57.503575Z node 23 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [23:1092:2365] 2025-04-10T11:33:57.503703Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [23:1092:2365] 2025-04-10T11:33:57.503752Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [23:1092:2365] 2025-04-10T11:33:57.504000Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] ::Bootstrap [23:1096:2368] 2025-04-10T11:33:57.504035Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] lookup [23:1096:2368] 2025-04-10T11:33:57.504088Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037897 entry.State: StInit ev: {EvForward TabletID: 72075186224037897 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:33:57.504177Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037897 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:33:57.504472Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 0} 2025-04-10T11:33:57.504529Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 1} 2025-04-10T11:33:57.504579Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 2} 2025-04-10T11:33:57.504775Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:934:2270] CurrentLeaderTablet: [23:936:2271] CurrentGeneration: 2 CurrentStep: 0} 2025-04-10T11:33:57.504832Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:934:2270] CurrentLeaderTablet: [23:936:2271] CurrentGeneration: 2 CurrentStep: 0} 
2025-04-10T11:33:57.504962Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037897 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037897 Cookie: 0 CurrentLeader: [23:934:2270] CurrentLeaderTablet: [23:936:2271] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-10T11:33:57.505011Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037897 followers: 0 2025-04-10T11:33:57.505066Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037897 followers: 0 countLeader 1 allowFollowers 0 winner: [23:934:2270] 2025-04-10T11:33:57.505162Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] forward result local node, try to connect [23:1096:2368] 2025-04-10T11:33:57.505198Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897]::SendEvent [23:1096:2368] 2025-04-10T11:33:57.505321Z node 23 :PIPE_SERVER DEBUG: [72075186224037897] Accept Connect Originator# [23:1096:2368] 2025-04-10T11:33:57.505488Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] connected with status OK role: Leader [23:1096:2368] 2025-04-10T11:33:57.505526Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] send queued [23:1096:2368] >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::Predicates >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> KqpParams::ImplicitParameterTypes >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 61773, MsgBus: 30090 2025-04-10T11:33:42.991274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646269486521801:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:42.991351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001779/r3tmp/tmpn6N80p/pdisk_1.dat 2025-04-10T11:33:43.451973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:43.455781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:43.456297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:43.466671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61773, node 1 2025-04-10T11:33:43.683249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:43.683283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:43.683299Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:43.683437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30090 TClient is connected to server localhost:30090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:44.489253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.512983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.685121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.862078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.937930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:46.373216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646286666392755:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:46.375746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.078229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.119791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.159108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.192576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.268334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.335601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.390856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646290961360574:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.390926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.391163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646290961360579:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.394705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:47.404695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646290961360581:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:33:47.469837Z node 1 :TX_PROXY ERROR: Actor# [1:7491646290961360634:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:48.004495Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646269486521801:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:48.004661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:33:48.590321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 13991, MsgBus: 7794 2025-04-10T11:33:49.751980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646298311291730:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:49.765454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001779/r3tmp/tmpZX3gk0/pdisk_1.dat 2025-04-10T11:33:49.896264Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:49.923262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:49.923361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:49.931507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13991, node 2 2025-04-10T11:33:50.018165Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:50.018194Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:50.018202Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:50.018338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7794 TClient is connected to server localhost:7794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:50.583944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:50.602503Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:33:50.609131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:50.690726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:50.878427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:50.961975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:53.175423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646315491162562:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.175529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.226420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.301367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.351041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.384973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.421337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.463257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.523135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646315491163079:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.523213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.523255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646315491163084:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.527643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:53.538420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646315491163086:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:33:53.629905Z node 2 :TX_PROXY ERROR: Actor# [2:7491646315491163141:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:54.577279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:33:54.664553Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491646319786130808:2507], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-04-10T11:33:54.665339Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzAxMTg2ZWYtNzNjZGE3M2UtZjY0YzU2OTktMmJlNjA0ZTQ=, ActorId: [2:7491646319786130727:2496], ActorState: ExecuteState, TraceId: 01jrfqmpte3ph5h0w4xgg5ws0m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-04-10T11:33:54.748298Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646298311291730:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:54.748407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23633, MsgBus: 31694 2025-04-10T11:33:55.595251Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646322833258458:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:55.595314Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001779/r3tmp/tmpsgMMae/pdisk_1.dat 2025-04-10T11:33:55.707096Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:55.735086Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:55.735284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:55.736759Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23633, node 3 2025-04-10T11:33:55.776537Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:55.776570Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:55.776578Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:55.776690Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31694 TClient is connected to server localhost:31694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:56.244925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:56.252107Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:33:58.806898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646335718161005:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:58.806996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:58.833952Z node 3 :TX_PROXY ERROR: Actor# [3:7491646335718161026:2305] txid# 281474976715658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 27941, MsgBus: 24285 2025-04-10T11:30:28.964444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645432237447560:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:28.964740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c95/r3tmp/tmpL5wrtB/pdisk_1.dat 2025-04-10T11:30:29.808385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:29.825878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:29.825980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:29.829610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27941, node 1 2025-04-10T11:30:30.247320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.247343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.247348Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.251134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24285 TClient is connected to server localhost:24285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.473480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:30:31.538610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:30:31.549735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.841644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.105222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.199844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:33.970360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645432237447560:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:33.970485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:34.070186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645458007252979:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:34.070307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:34.708369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.787733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.854347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.925774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.017421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.081148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.167104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645462302220799:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.167175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.167353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645462302220804:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.171372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:35.183524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645462302220806:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:30:35.250287Z node 1 :TX_PROXY ERROR: Actor# [1:7491645462302220859:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:37.263281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:39.614989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqeraj8b4a9bjj88c83fms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzliYzYzNmMtZjE0YzQ2YTEtYjE4MGIyZTItMzdlMGE2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.646682Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfqerbmbs7gev1ar1jah3bg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUwNzdkMDQtMmI0MGFmNjMtNjI4YTQ0NGQtYzQzNGQ2OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.654084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfqeraj8b4a9bjj88c83fms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzliYzYzNmMtZjE0YzQ2YTEtYjE4MGIyZTItMzdlMGE2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.662425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfqercr7a7ppf7egj0p0qx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdiYzExNS1jYWYxNzE4Ny1kNTliMzdlOS02MjM0ZjM4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.663040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfqercv34sn1t8mhmphv9z5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc3NThiYWUtMWI0MmNhNC1lNjU2ZmFhMS03MjdhMDU3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.664287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqercr2weaj8rmbg6y26s2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFhMWE1ZDctYjhkNDc4N2QtOWMyN2JkMzUtZGU1OGJiOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.667983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfqercr9rhxvd6grv46f8ep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY1MjY5OC1mNGI2MDU5ZC05ZTEzNDVhNy1hMWU3MTMwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.673888Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jrfqerbmbs7gev1ar1jah3bg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUwNzdkMDQtMmI0MGFmNjMtNjI4YTQ0NGQtYzQzNGQ2OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.693062Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. 
Ctx: { TraceId: 01jrfqerd13q4amcb095yp0fhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjYzVmNDktMjZlMzEwN2MtZWM0ODNlNTgtYzc5Yzg1YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.693759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqercv1zhtrqx4k6npdw5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBkZDJmODUtOTdmODkyZmUtZjJhYjE0ZWMtN2MxOTA1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.695711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfqercv34j418376882ktxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMxZWQ4ZS00YTMzYzA3YS00MTU3ZDQzZi1iODA4M2M5YQ==, CurrentExecutionId: , CustomerSuppl ... sion/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.465878Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719476. Ctx: { TraceId: 01jrfqmpm70zb8yw0xka58xbh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.467489Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719477. Ctx: { TraceId: 01jrfqmpmy23r7mr0x1f0rdnej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjNkZTBmNjAtMWVhYTE2MDktNzcwYjIzY2UtODkwMTQ2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.478817Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719478. Ctx: { TraceId: 01jrfqmpn38bnfs8nkyqg2gvym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWQwODI1YmMtNTVlNzc1YzQtYjAwZDEyZGEtYjc1NzI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.487901Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719479. Ctx: { TraceId: 01jrfqmpn38bnfs8nkyqg2gvym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWQwODI1YmMtNTVlNzc1YzQtYjAwZDEyZGEtYjc1NzI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.493266Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719480. Ctx: { TraceId: 01jrfqmpnn8ckpg0mk9zj751w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.498742Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719481. Ctx: { TraceId: 01jrfqmpnn8ckpg0mk9zj751w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.503029Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719482. Ctx: { TraceId: 01jrfqmpnn8ckpg0mk9zj751w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.508860Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719483. 
Ctx: { TraceId: 01jrfqmpp2bqk3ekwwwpx9abhj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGMwODkwZDktYzg0YWE3NmEtM2E1MGQ2Y2ItZjY0NTI4NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.528874Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719484. Ctx: { TraceId: 01jrfqmpp5bkdyerehche6w4dy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjNkZTBmNjAtMWVhYTE2MDktNzcwYjIzY2UtODkwMTQ2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.530112Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719485. Ctx: { TraceId: 01jrfqmpp2bqk3ekwwwpx9abhj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGMwODkwZDktYzg0YWE3NmEtM2E1MGQ2Y2ItZjY0NTI4NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.540048Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719486. Ctx: { TraceId: 01jrfqmpp25vxq20vjbeafzd0q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.544893Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719487. Ctx: { TraceId: 01jrfqmpp5bkdyerehche6w4dy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjNkZTBmNjAtMWVhYTE2MDktNzcwYjIzY2UtODkwMTQ2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.548541Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719488. Ctx: { TraceId: 01jrfqmppd9ezcxmktgnz8t1fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.558400Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719489. Ctx: { TraceId: 01jrfqmpp25vxq20vjbeafzd0q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.564412Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719490. Ctx: { TraceId: 01jrfqmppd9ezcxmktgnz8t1fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.582768Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719491. Ctx: { TraceId: 01jrfqmpqv9qecf6fc534kmvy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWQwODI1YmMtNTVlNzc1YzQtYjAwZDEyZGEtYjc1NzI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.587682Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719492. Ctx: { TraceId: 01jrfqmprj489pnzanr5txksz8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.589432Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719493. 
Ctx: { TraceId: 01jrfqmpqv9qecf6fc534kmvy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWQwODI1YmMtNTVlNzc1YzQtYjAwZDEyZGEtYjc1NzI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.604091Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719494. Ctx: { TraceId: 01jrfqmps6c6w51myny3cbafcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGMwODkwZDktYzg0YWE3NmEtM2E1MGQ2Y2ItZjY0NTI4NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.609046Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719495. Ctx: { TraceId: 01jrfqmps78b92e4v9rtc6d4km, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.615071Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719496. Ctx: { TraceId: 01jrfqmps78b92e4v9rtc6d4km, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.623159Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719497. Ctx: { TraceId: 01jrfqmpsq1pcazgtdj89bmaze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.634397Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719498. Ctx: { TraceId: 01jrfqmpt5bfrzgc5ma3knxggr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjNkZTBmNjAtMWVhYTE2MDktNzcwYjIzY2UtODkwMTQ2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.636895Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719499. Ctx: { TraceId: 01jrfqmpt60025mf2ss3ed4s4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.645446Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719500. Ctx: { TraceId: 01jrfqmpt97mn5wvxn3shcrrp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWQwODI1YmMtNTVlNzc1YzQtYjAwZDEyZGEtYjc1NzI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.648585Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719501. Ctx: { TraceId: 01jrfqmpt60025mf2ss3ed4s4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODcyYjgwODktYTAzMTA0MjMtYmNhZDNhOGItZmMwMjgyYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.662057Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719503. Ctx: { TraceId: 01jrfqmpts21p9gpbdj53jvxvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGMwODkwZDktYzg0YWE3NmEtM2E1MGQ2Y2ItZjY0NTI4NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.663126Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719502. 
Ctx: { TraceId: 01jrfqmpts0eh6tp9zyjsy3666, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.672205Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719504. Ctx: { TraceId: 01jrfqmpts0eh6tp9zyjsy3666, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQyYjJiM2YtOTExMWNhYjMtODhiNTdmNDAtMmMzYjI4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.675423Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719505. Ctx: { TraceId: 01jrfqmpts21p9gpbdj53jvxvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGMwODkwZDktYzg0YWE3NmEtM2E1MGQ2Y2ItZjY0NTI4NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.678919Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719507. Ctx: { TraceId: 01jrfqmpvg2z5nm7c2f7bfwx3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjNkZTBmNjAtMWVhYTE2MDktNzcwYjIzY2UtODkwMTQ2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:33:54.680504Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719506. Ctx: { TraceId: 01jrfqmpv724fb6eqx21ttjvq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:33:54.690374Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719508. Ctx: { TraceId: 01jrfqmpv724fb6eqx21ttjvq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg1YTQ1YmMtNDM2NWM5YzgtZWVkOGYwM2YtNWQ2ZWEzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:33:54.698299Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719509. Ctx: { TraceId: 01jrfqmpvnfwe4cmh78f2cs58p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3NTFjYzktMjhhODM1NWEtNWI1ZTFhOGMtMzQxYWUzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-10T11:33:54.708993Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719510. Ctx: { TraceId: 01jrfqmpvnfwe4cmh78f2cs58p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3NTFjYzktMjhhODM1NWEtNWI1ZTFhOGMtMzQxYWUzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> KqpStats::RequestUnitForExecute [GOOD] >> TLocksFatTest::PointSetNotBreak >> KqpStats::DataQueryWithEffects+UseSink >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> KqpStats::JoinNoStatsYql >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 7945, MsgBus: 18293 2025-04-10T11:33:42.977866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646266239162975:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:42.979048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001775/r3tmp/tmp3SNYdI/pdisk_1.dat 2025-04-10T11:33:43.439062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:43.467991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:43.468127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:43.480117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7945, node 1 2025-04-10T11:33:43.686818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:43.686865Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:43.686873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:43.687013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18293 TClient is connected to server localhost:18293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:44.463640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:44.491031Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:44.509480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.690288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.901461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:45.011307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:46.661614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646283419033785:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:46.661742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.078964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.124147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.194618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.230797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.277169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.315798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.375703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646287714001598:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.375767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.375900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646287714001603:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.378963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:47.389378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646287714001605:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:33:47.477976Z node 1 :TX_PROXY ERROR: Actor# [1:7491646287714001659:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:47.977013Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646266239162975:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:47.977097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:33:48.480778Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491646292008969256:2502], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-04-10T11:33:48.482368Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTA4ZjhhY2YtMjQzMmMzODItMzk4MjdlYWQtYzE0NjVmNjI=, ActorId: [1:7491646292008969248:2497], ActorState: ExecuteState, TraceId: 01jrfqmgsefe666c2r369gq6pt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 30800, MsgBus: 23419 2025-04-10T11:33:49.436846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646298392679069:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:49.437054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001775/r3tmp/tmpSig4hF/pdisk_1.dat 2025-04-10T11:33:49.548188Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30800, node 2 2025-04-10T11:33:49.622853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:49.622928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:49.634650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:49.654833Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:49.654856Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:49.654865Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:49.654986Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23419 TClient is connected to server localhost:23419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 ... 
.123525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.166805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.220503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:53.269629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646315572550539:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.269728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.269951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646315572550544:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:53.273965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:53.287005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646315572550546:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:33:53.353072Z node 2 :TX_PROXY ERROR: Actor# [2:7491646315572550599:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:54.249349Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491646319867518196:2501], status: GENERIC_ERROR, issues:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-04-10T11:33:54.249607Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjU4YTI2ZTUtNjljZGE3ZmItYjZkM2I1ZWItZmZhN2MxMTE=, ActorId: [2:7491646319867518188:2496], ActorState: ExecuteState, TraceId: 01jrfqmpe32zanrx6yx0wh0vqv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-04-10T11:33:54.438266Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646298392679069:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:54.438360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18398, MsgBus: 25079 2025-04-10T11:33:55.257950Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646322698341169:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:55.258117Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001775/r3tmp/tmpUx3mxR/pdisk_1.dat 2025-04-10T11:33:55.351890Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18398, node 3 2025-04-10T11:33:55.402867Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:55.402990Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:55.412826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:55.429867Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:55.429892Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:55.429902Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:55.430046Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25079 TClient is connected to server localhost:25079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:55.912577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:55.925549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:55.976987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:56.137585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:56.213604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:58.703279Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646335583244823:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:58.703381Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:58.752817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:58.809697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:58.880226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:58.912932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:58.941737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:58.972914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:59.024568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646339878212634:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:59.024642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:59.024680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646339878212639:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:59.028299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:59.039475Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646339878212641:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:33:59.115974Z node 3 :TX_PROXY ERROR: Actor# [3:7491646339878212695:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:00.266138Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646322698341169:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:00.266237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 324 Consumed units: 6 |95.8%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SplitInvalidPath >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> KqpStats::MultiTxStatsFullExpYql >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> KqpParams::ImplicitParameterTypes [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> KqpExplain::Predicates [GOOD] >> TFlatTest::SplitInvalidPath [GOOD] >> KqpExplain::MultiJoinCteLinks >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> TFlatTest::SplitThenMerge >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> KqpStats::SysViewCancelled >> TLocksFatTest::PointSetRemove ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 22414, MsgBus: 20466 2025-04-10T11:30:28.995256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645434596127459:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:28.995347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c90/r3tmp/tmpbmIEgA/pdisk_1.dat 2025-04-10T11:30:29.752478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:30:29.790539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:30:29.790621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:30:29.811855Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22414, node 1 2025-04-10T11:30:30.246565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:30:30.246587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:30:30.246594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:30:30.246697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20466 TClient is connected to server localhost:20466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:30:31.404989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.475175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.737109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:31.969307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:32.120098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:30:33.999176Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645434596127459:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:30:33.999372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:30:34.024015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645456070965488:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:34.028897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:34.705694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.749556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.834193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.919298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:30:34.988145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.072612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:30:35.151423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645464660900609:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.151503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.151709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645464660900614:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:30:35.156139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:30:35.171933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491645464660900616:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:30:35.236084Z node 1 :TX_PROXY ERROR: Actor# [1:7491645464660900668:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:30:37.246875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:30:39.047655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqeqqt0d3z6a9grrrt8vcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRiYzViZGQtY2Q0NWRjMmQtMWViNjdjZDgtNDgzZDgyZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.055106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfqeqqz8r6betbm9ce08w8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZTY5ZTgtY2I5ZjA4NTUtZTU2M2Q3YmQtOGJhYTgxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.056035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfqeqqw0zm39whcah0d93qr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgwMTkwNTUtMmIwMmRiOWEtMjI1OTBlODAtYzhhZWQwNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.095453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfqeqrg147zccfy55ncv206, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc2YzI4N2EtNWVlZjYwYTctMjYwYTc2MWMtYTQwYzk4MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.096406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqeqrv06pghzmdk4sfy3dj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzczYTM2ODMtY2Q2YjUxNzQtZDVlYTcyYi02ZDNlMmFhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.097056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfqeqs9apmrr386rbrmfddb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA3MzMxMzMtN2I5YTZlOC1iOWZiZWFlOC0xYmZiNjMyZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.105009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jrfqeqqt0d3z6a9grrrt8vcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRiYzViZGQtY2Q0NWRjMmQtMWViNjdjZDgtNDgzZDgyZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.105773Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jrfqeqrgf2560trxq45mhaqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMwZWZlOTktNzdlZTRiMGMtNDgwZTUyNDktN2I1ODVkMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.111562Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. 
Ctx: { TraceId: 01jrfqeqrv14a5kftzvqsgx6h3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE2YzNmMDItNmMwMGY0NzYtMjQzNWQ2NGYtYjNkNzUwMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.122680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqeqqz8r6betbm9ce08w8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZTY5ZTgtY2I5ZjA4NTUtZTU2M2Q3YmQtOGJhYTgxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.136956Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jrfqeqrv89y5mna2jkcevp14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU0YTE1MzUtNzg3MjEyZi03MTMwYTEyNC02NmNkMjM0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:30:39.141176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfqeqta ... sion/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.292862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727105. Ctx: { TraceId: 01jrfqmx9q9xy48x71w8ebf3gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FiMDQxNWMtMzUwOTQzZGUtOWI5ZmFkY2ItYTUxNWIwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.300050Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727107. Ctx: { TraceId: 01jrfqmx9c70jhnzp4cmwdy9p2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.301820Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727106. Ctx: { TraceId: 01jrfqmx9ecezxr7tc8fh43rxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.307323Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727108. Ctx: { TraceId: 01jrfqmx9c70jhnzp4cmwdy9p2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.307935Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727109. Ctx: { TraceId: 01jrfqmx9q9xy48x71w8ebf3gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FiMDQxNWMtMzUwOTQzZGUtOWI5ZmFkY2ItYTUxNWIwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.315172Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727111. Ctx: { TraceId: 01jrfqmx9ecezxr7tc8fh43rxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.317693Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727110. Ctx: { TraceId: 01jrfqmxae0k0hm6s3txeyh9kc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:34:01.323011Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727112. Ctx: { TraceId: 01jrfqmxb59s93n1n4p9jghhh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZlYWVlNDYtZWMzZmUzZmMtNWNiMDBmMGYtOWNhODgyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.325988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727113. Ctx: { TraceId: 01jrfqmxae0k0hm6s3txeyh9kc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.330843Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727114. Ctx: { TraceId: 01jrfqmx9ecezxr7tc8fh43rxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.333052Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727115. Ctx: { TraceId: 01jrfqmxae0k0hm6s3txeyh9kc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.340446Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727116. Ctx: { TraceId: 01jrfqmxae0k0hm6s3txeyh9kc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.351083Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727117. Ctx: { TraceId: 01jrfqmxbv7xy9wyvtajc1gaxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.353072Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727118. Ctx: { TraceId: 01jrfqmxc4esjeynzc41h0zqvy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FiMDQxNWMtMzUwOTQzZGUtOWI5ZmFkY2ItYTUxNWIwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.357811Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727119. Ctx: { TraceId: 01jrfqmxc36y8jc3zrzwmka70x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.360242Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727120. Ctx: { TraceId: 01jrfqmxbv7xy9wyvtajc1gaxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.365755Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727121. Ctx: { TraceId: 01jrfqmxc36y8jc3zrzwmka70x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.369860Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727122. 
Ctx: { TraceId: 01jrfqmxbv7xy9wyvtajc1gaxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.372560Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727123. Ctx: { TraceId: 01jrfqmxc36y8jc3zrzwmka70x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWIwM2Q5OWItMzc5Y2JjNTctYmI4OGNhNS1lYzQxYTJhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.379904Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727125. Ctx: { TraceId: 01jrfqmxcm0b646nx25wt6fak3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.381022Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727124. Ctx: { TraceId: 01jrfqmxchfvpmbr0vh8ay695t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZlYWVlNDYtZWMzZmUzZmMtNWNiMDBmMGYtOWNhODgyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.389572Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727126. Ctx: { TraceId: 01jrfqmxcm0b646nx25wt6fak3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.389618Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727127. Ctx: { TraceId: 01jrfqmxchfvpmbr0vh8ay695t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZlYWVlNDYtZWMzZmUzZmMtNWNiMDBmMGYtOWNhODgyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.393635Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727128. Ctx: { TraceId: 01jrfqmxcm0b646nx25wt6fak3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGUxODlhM2EtZmRmMDNlZjgtNGUwNTkzYTktNGFjNGE5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.395822Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727129. Ctx: { TraceId: 01jrfqmxchfvpmbr0vh8ay695t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZlYWVlNDYtZWMzZmUzZmMtNWNiMDBmMGYtOWNhODgyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.397677Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727130. Ctx: { TraceId: 01jrfqmxd5amqgfg1afgvmn000, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.401049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727131. Ctx: { TraceId: 01jrfqmxchfvpmbr0vh8ay695t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZlYWVlNDYtZWMzZmUzZmMtNWNiMDBmMGYtOWNhODgyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.406839Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727132. 
Ctx: { TraceId: 01jrfqmxd5amqgfg1afgvmn000, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-10T11:34:01.412416Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727133. Ctx: { TraceId: 01jrfqmxdrfh1jyn3dc0w3sf5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FiMDQxNWMtMzUwOTQzZGUtOWI5ZmFkY2ItYTUxNWIwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.417698Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727134. Ctx: { TraceId: 01jrfqmxdw391jwrnc7qq9zjfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.421147Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727135. Ctx: { TraceId: 01jrfqmxdrfh1jyn3dc0w3sf5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2FiMDQxNWMtMzUwOTQzZGUtOWI5ZmFkY2ItYTUxNWIwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.424801Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727136. Ctx: { TraceId: 01jrfqmxdw391jwrnc7qq9zjfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.431868Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727137. Ctx: { TraceId: 01jrfqmxdw391jwrnc7qq9zjfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:01.438186Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727138. Ctx: { TraceId: 01jrfqmxdw391jwrnc7qq9zjfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjVlYTAxYzAtOTcyZjBmN2QtZGJjNTI1NzgtZjlkOGUzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-10T11:34:01.439392Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976727139. Ctx: { TraceId: 01jrfqmxd5amqgfg1afgvmn000, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJjNmJjZWEtNGY4NTBlYWItMjljODIzY2QtMmEyM2JhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpExplain::IdxFullscan [GOOD] |95.8%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> TFlatTest::SplitThenMerge [GOOD] |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpExplain::Explain >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> KqpStats::DataQueryMulti ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 16677, MsgBus: 23759 2025-04-10T11:33:42.997737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646268421380294:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:42.998238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001774/r3tmp/tmpfy2KoI/pdisk_1.dat 2025-04-10T11:33:43.491211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:43.491324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:43.494065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:43.494460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16677, node 1 2025-04-10T11:33:43.688774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:43.688800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:43.688807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:43.688908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23759 TClient is connected to server localhost:23759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:44.528482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.557525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:44.582358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.773078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.958066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:45.066931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:46.624661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646285601251107:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:46.624806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.080711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.155351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.186588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.227484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.265038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.334749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.417852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646289896218930:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.417974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.418032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646289896218935:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:47.422416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:47.436999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646289896218937:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:33:47.520448Z node 1 :TX_PROXY ERROR: Actor# [1:7491646289896218992:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:47.988200Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646268421380294:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:47.988300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 2776, MsgBus: 13123 2025-04-10T11:33:49.723687Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646298869942400:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:49.723756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/2gt4/001774/r3tmp/tmpaVK3V9/pdisk_1.dat 2025-04-10T11:33:49.878085Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:49.896784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:49.896884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:49.899723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2776, node 2 2025-04-10T11:33:49.982660Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:49.982699Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:49.982707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:49.982840Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13123 TClient is connected to server localhost:1312 ... ted 2025-04-10T11:34:02.980923Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:02.984634Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20984, node 4 2025-04-10T11:34:03.029570Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:03.029598Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:03.029607Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:03.029748Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9162 TClient is connected to server localhost:9162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:03.671578Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.679461Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:03.695011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:34:03.778391Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:34:04.027657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.155608Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.872529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646370745469140:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.872686Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.929934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.966842Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.012612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.056106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.138567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.226378Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.330607Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646375040436959:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.330722Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.330964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646375040436964:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.336525Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:07.351944Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646375040436966:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:07.434028Z node 4 :TX_PROXY ERROR: Actor# [4:7491646375040437021:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:07.818271Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646353565598189:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:07.818364Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:08.536022Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:08.798092Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:34:08.852195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["id"],"Node Type":"TableLookup","Path":"\/Root\/test_table_idx","Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node 
Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-04-10T11:32:01.719194Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:01.811899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:01.836518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:01.836794Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:01.845300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:01.845483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:01.845724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:01.845850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:01.845951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:01.846070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:01.846202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:01.846307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:01.846440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:01.846540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.846655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:01.846749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-10T11:32:01.876168Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:01.876428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:01.876478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:01.876679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.876845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:01.876953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:01.876997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:01.877097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:01.877165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:01.877205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:01.877231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:01.877411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:01.877482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:01.877523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:01.877552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:01.877635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:01.877686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:01.877739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:01.877770Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:01.877845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:01.877884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:01.877925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:01.877985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:01.878023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:01.878049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:01.878449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=71; 2025-04-10T11:32:01.878526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-04-10T11:32:01.878615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-10T11:32:01.878705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-10T11:32:01.878891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:01.878944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:01.878979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:01.879178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:01.879227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.879260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:01.879432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-10T11:32:01.879480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:01.879511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:01.879687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:01.879729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:01.879757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:01.879878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:01.879920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:01.879963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 0.845143Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-10T11:34:10.845216Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-10T11:34:10.845281Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-10T11:34:10.845360Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-10T11:34:10.845439Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-10T11:34:10.845504Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-10T11:34:10.845571Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-10T11:34:10.845636Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-10T11:34:10.845700Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-10T11:34:10.845776Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-10T11:34:10.845850Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-10T11:34:10.845915Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-10T11:34:10.845976Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-10T11:34:10.846041Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-10T11:34:10.846103Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-10T11:34:10.846180Z node 1 :S3_WRAPPER 
DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-10T11:34:10.846241Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-10T11:34:10.846335Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-10T11:34:10.846413Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-10T11:34:10.846479Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-10T11:34:10.846544Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-10T11:34:10.846603Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-10T11:34:10.846669Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-10T11:34:10.846734Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-10T11:34:10.846799Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-10T11:34:10.846874Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-10T11:34:10.846950Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-10T11:34:10.847021Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-10T11:34:10.847082Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-10T11:34:10.847146Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-10T11:34:10.847219Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-10T11:34:10.847289Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-10T11:34:10.847356Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-10T11:34:10.847438Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-10T11:34:10.847519Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-10T11:34:10.847585Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-10T11:34:10.847651Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-10T11:34:10.847714Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-10T11:34:10.847781Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-10T11:34:10.847844Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-10T11:34:10.847903Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-10T11:34:10.847978Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-10T11:34:10.848051Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-10T11:34:10.848116Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-10T11:34:10.848178Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-10T11:34:10.848237Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-10T11:34:10.848297Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-10T11:34:10.848360Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-10T11:34:10.848421Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-10T11:34:10.848497Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-10T11:34:10.848572Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-10T11:34:10.848634Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-10T11:34:10.848692Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-10T11:34:10.848751Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-10T11:34:10.848820Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-10T11:34:10.848882Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-10T11:34:10.848941Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-10T11:34:10.849037Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-10T11:34:10.849106Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-10T11:34:10.849167Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-10T11:34:10.849232Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-10T11:34:10.849292Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-10T11:34:10.849357Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-10T11:34:10.849418Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-10T11:34:10.849482Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-10T11:34:10.849555Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-10T11:34:10.849628Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-10T11:34:10.849694Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-10T11:34:10.849753Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-10T11:34:10.849812Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-10T11:34:10.849883Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-10T11:34:10.849949Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-10T11:34:10.850014Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-10T11:34:10.850085Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-10T11:34:10.850179Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-10T11:34:10.850251Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-10T11:34:10.850312Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-10T11:34:10.850370Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-10T11:34:10.850434Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-10T11:34:10.850495Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-10T11:34:10.850561Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-10T11:34:10.850635Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-10T11:34:10.850709Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-10T11:34:10.850772Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-10T11:34:11.378714Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:34:11.379537Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-10T11:34:11.478871Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-10T11:34:11.487496Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-04-10T11:34:03.812296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646358839951906:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:03.812350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001ffa/r3tmp/tmpgveOBE/pdisk_1.dat 2025-04-10T11:34:04.214665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:04.237873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:04.238000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:04.241313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20067 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:04.562221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:04.591643Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-04-10T11:34:04.603561Z node 1 :TX_PROXY ERROR: Actor# [1:7491646363134919789:2298] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:07.673672Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646374137385361:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:07.673758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ffa/r3tmp/tmponMAVf/pdisk_1.dat 2025-04-10T11:34:07.865063Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:07.889180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:07.889308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:07.891152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2691 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:08.083080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:08.109770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:08.284846Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:34:08.300589Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:34:08.330838Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:34:08.339505Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284848222 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-10T11:34:08.365886Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.367383Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.367549Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.368368Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.368486Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.368884Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.369473Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.369594Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.369873Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.370434Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.370551Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.370977Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.371656Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.371765Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.372164Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.372774Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.372861Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.409574Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.410702Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.410901Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.411448Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.412151Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.412294Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.412764Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.413454Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.413580Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.413975Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.415208Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.415336Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.415787Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.416441Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.416912Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.417346Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.417995Z node 2 
:TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-10T11:34:08.418099Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:08.418503Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-10T11:34:08.419222Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:34:08.419258Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:34:08.419471Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:08.419883Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-10T11:34:08.420425Z node 2 :TX_DATASHARD DEBUG: ... 5-04-10T11:34:09.011366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976710693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7491646378432353287 RawX2: 4503608217307387 } Origin: 72075186224037889 State: 5 TxId: 281474976710693 Step: 0 Generation: 1 2025-04-10T11:34:09.011415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T11:34:09.011441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976710693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-10T11:34:09.011628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646378432353914 RawX2: 4503608217307479 } Origin: 72075186224037894 State: 5 TxId: 281474976710693 Step: 0 Generation: 1 2025-04-10T11:34:09.011648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710693, tablet: 72075186224037894, partId: 0 2025-04-10T11:34:09.011756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646378432353914 RawX2: 4503608217307479 } Origin: 72075186224037894 State: 5 TxId: 281474976710693 Step: 0 Generation: 1 2025-04-10T11:34:09.011772Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-10T11:34:09.011817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976710693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7491646378432353914 RawX2: 4503608217307479 } Origin: 72075186224037894 State: 5 TxId: 281474976710693 Step: 0 Generation: 1 2025-04-10T11:34:09.011838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-10T11:34:09.011855Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.011874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 
281474976710693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-10T11:34:09.011887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-04-10T11:34:09.011905Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710693:0 129 -> 240 2025-04-10T11:34:09.012149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.012228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.012345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.012385Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:34:09.012769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:34:09.012917Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710693:0 progress is 1/1 2025-04-10T11:34:09.012939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2025-04-10T11:34:09.012956Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710693:0 progress is 1/1 2025-04-10T11:34:09.012968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2025-04-10T11:34:09.012989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710693, ready parts: 1/1, is published: true 2025-04-10T11:34:09.013048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491646378432354141:2428] message: TxId: 281474976710693 2025-04-10T11:34:09.013077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2025-04-10T11:34:09.013096Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710693:0 2025-04-10T11:34:09.013107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710693:0 2025-04-10T11:34:09.013204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:34:09.014098Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037889 state PreOffline 2025-04-10T11:34:09.014140Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-10T11:34:09.014268Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037894 state PreOffline 2025-04-10T11:34:09.014293Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-10T11:34:09.016589Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 
siblings to be activated: wait to activation from: 2025-04-10T11:34:09.016681Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-10T11:34:09.018215Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:34:09.018304Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-04-10T11:34:09.020006Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:09.020537Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:09.021047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646378432353287 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-10T11:34:09.021119Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:09.021474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:09.022010Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-10T11:34:09.022217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646378432353914 RawX2: 4503608217307479 } TabletId: 72075186224037894 State: 4 2025-04-10T11:34:09.022270Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:09.022647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:09.023122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:34:09.023333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:34:09.023525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-10T11:34:09.023693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:34:09.023881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:34:09.023903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:34:09.023956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:34:09.024709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 
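The datashard records immediately above and below show a fixed decommission ordering: a shard finishes its last schema operation in PreOffline, switches itself to Offline, reports Offline to the schemeshard, and only after the schemeshard frees the shard does the tablet stop and get reported dead (the later "TabletId ... not found" from Hive). A toy C++ state machine sketching that ordering under stated assumptions — all names here are hypothetical, not YDB's real datashard/schemeshard actors:

#include <cassert>
#include <iostream>

// Toy model of the shard decommission ordering visible in the trace:
// PreOffline -> Offline -> (schemeshard frees the shard) -> tablet dead.
enum class EShardState { Active, PreOffline, Offline, Dead };

struct TToyShard {
    EShardState State = EShardState::Active;

    // Last schema operation completed: enter PreOffline, then Offline,
    // and report the new state upward.
    void FinishLastTx() {
        State = EShardState::PreOffline;
        std::cout << "Initiating switch from PreOffline to Offline state\n";
        State = EShardState::Offline;
        std::cout << "Reporting state Offline to schemeshard\n";
    }

    // The schemeshard has freed the shard; only now may the tablet die.
    void OnFreedBySchemeshard() {
        assert(State == EShardState::Offline);  // never torn down while active
        State = EShardState::Dead;
        std::cout << "OnTabletDead\n";
    }
};

int main() {
    TToyShard shard;
    shard.FinishLastTx();
    shard.OnFreedBySchemeshard();
}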
2025-04-10T11:34:09.024724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:34:09.024758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-10T11:34:09.024772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-10T11:34:09.024804Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:34:09.024922Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-04-10T11:34:09.024946Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-04-10T11:34:09.025024Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:7491646378432354027:2801], serverId# [2:7491646378432354028:2802], sessionId# [0:0:0] 2025-04-10T11:34:09.025041Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-10T11:34:09.025059Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7491646378432353398:2393], serverId# [2:7491646378432353399:2394], sessionId# [0:0:0] 2025-04-10T11:34:09.025891Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-04-10T11:34:09.025964Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-04-10T11:34:09.027517Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-10T11:34:09.027577Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-10T11:34:09.028362Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-10T11:34:09.028386Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 7861, MsgBus: 6963 2025-04-10T11:33:46.095173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646284558481606:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:46.095464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176a/r3tmp/tmpOgeb3w/pdisk_1.dat 2025-04-10T11:33:46.504863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:46.567299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:46.567418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:46.568919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7861, node 1 2025-04-10T11:33:46.640972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:46.641017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:46.641024Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:46.641212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6963 TClient is connected to server localhost:6963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:47.343385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.367036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.385632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:33:47.520415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.686549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.766479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:49.661673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646297443385270:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:49.661821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:49.992454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.028707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.059936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.097963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.179359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.223753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:33:50.273427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646301738353085:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:50.273508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:50.273509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646301738353090:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:50.278617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:33:50.291403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646301738353092:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:33:50.394969Z node 1 :TX_PROXY ERROR: Actor# [1:7491646301738353148:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:51.098564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646284558481606:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:51.098716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 19455, MsgBus: 31354 2025-04-10T11:33:52.395445Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646308694489719:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:52.411085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176a/r3tmp/tmpcji8vH/pdisk_1.dat 2025-04-10T11:33:52.529971Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:52.558578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:52.558661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:52.560496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19455, node 2 2025-04-10T11:33:52.637280Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:52.637303Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:52.637310Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:52.637427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31354 TClient is connected to server localhost:31354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 AC ... .533464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24514, MsgBus: 27804 2025-04-10T11:34:06.514180Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646368608716041:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:06.514283Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176a/r3tmp/tmpLn3AmM/pdisk_1.dat 2025-04-10T11:34:06.657873Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:06.691006Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:06.691118Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:06.692624Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24514, node 4 2025-04-10T11:34:06.752724Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:06.752754Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:06.752762Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:06.752925Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27804 TClient is connected to server localhost:27804 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:34:07.408007Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.440755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.529310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.730347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.830372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.766998Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646385788587020:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:10.767075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:10.822363Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:10.862389Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:10.903102Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:10.938640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:10.972427Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:11.008497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:11.090797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646390083554834:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:11.090873Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646390083554839:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:11.090888Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:11.095334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:11.106763Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646390083554841:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:11.186677Z node 4 :TX_PROXY ERROR: Actor# [4:7491646390083554896:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:11.514564Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646368608716041:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:11.514662Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"No estimate","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Data","Key","Text"],"E-Size":"No estimate","E-Cost":"No 
estimate","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> TLocksTest::Range_BrokenLock3 [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-04-10T11:32:02.686728Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:02.793680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:02.811929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:02.812163Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:02.819121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:02.819282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:02.819483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:02.819565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:02.819627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:02.819705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T11:32:02.819796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:02.819883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:02.819997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:02.820082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.820178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:02.820279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:02.848904Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:02.849291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:02.849352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:02.849664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.849830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:02.849919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:02.849959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:02.850040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:02.850105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:02.850176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:02.850209Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:02.850460Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:02.850548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:02.850611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:02.850644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:02.850728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:02.850774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:02.850824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:02.850868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:02.850935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:02.850972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:02.851014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:02.851069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:02.851105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:02.851130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:02.851510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=66; 2025-04-10T11:32:02.851591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:32:02.851679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-10T11:32:02.851786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T11:32:02.851955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:02.852007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:02.852041Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:02.852245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:02.852292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.852326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:02.852477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:02.852523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:02.852557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:02.852753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:02.852796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:02.852825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:02.852974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:02.853020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:02.853086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
4.845296Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-10T11:34:14.845361Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-10T11:34:14.845421Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-10T11:34:14.845489Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-10T11:34:14.845570Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-10T11:34:14.845643Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-10T11:34:14.845704Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-10T11:34:14.845762Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-10T11:34:14.845825Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-10T11:34:14.845912Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-10T11:34:14.845992Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-10T11:34:14.846068Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-10T11:34:14.846249Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-10T11:34:14.846327Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-10T11:34:14.846420Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-10T11:34:14.846497Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-10T11:34:14.846564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-10T11:34:14.846674Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-10T11:34:14.846750Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-10T11:34:14.846825Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-10T11:34:14.846899Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-10T11:34:14.846968Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-10T11:34:14.847033Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-10T11:34:14.847098Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-10T11:34:14.847160Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-10T11:34:14.847238Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-10T11:34:14.847317Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-10T11:34:14.847398Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-10T11:34:14.847473Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-10T11:34:14.847539Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-10T11:34:14.847604Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-10T11:34:14.847672Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-10T11:34:14.847755Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-10T11:34:14.847837Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-10T11:34:14.847922Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-10T11:34:14.848016Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-10T11:34:14.848106Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-10T11:34:14.848200Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-10T11:34:14.848285Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-10T11:34:14.848373Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-10T11:34:14.848462Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-10T11:34:14.848549Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-10T11:34:14.848640Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-10T11:34:14.848740Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-10T11:34:14.848823Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-10T11:34:14.848913Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-10T11:34:14.849014Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-10T11:34:14.849106Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-10T11:34:14.849193Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-10T11:34:14.849263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-10T11:34:14.849337Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-10T11:34:14.849419Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-10T11:34:14.849494Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-10T11:34:14.849570Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-10T11:34:14.849635Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-10T11:34:14.849698Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-10T11:34:14.849765Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-10T11:34:14.849830Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-10T11:34:14.849899Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-10T11:34:14.849966Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-10T11:34:14.850025Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-10T11:34:14.850077Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-10T11:34:14.850124Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-10T11:34:14.850190Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-10T11:34:14.850247Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-10T11:34:14.850291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-10T11:34:14.850335Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-10T11:34:14.850385Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-10T11:34:14.850436Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-10T11:34:14.850481Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-10T11:34:14.850525Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-10T11:34:14.850570Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-10T11:34:14.850617Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-10T11:34:14.850663Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-10T11:34:14.850711Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-10T11:34:14.850763Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-10T11:34:14.850818Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-10T11:34:14.850867Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-10T11:34:14.850911Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-10T11:34:14.850955Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-10T11:34:14.851005Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-10T11:34:14.851049Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-10T11:34:14.851093Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-10T11:34:14.851145Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-10T11:34:15.382419Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:34:15.383407Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-10T11:34:15.478762Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-10T11:34:15.486573Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TLocksFatTest::PointSetRemove [GOOD] >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-04-10T11:33:33.360774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646229336065877:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:33.363013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmpUoh3K2/pdisk_1.dat 2025-04-10T11:33:33.736271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:33.788342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:33.788448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:33.789954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23938 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:34.071306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:34.085243Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:34.097557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:33:34.103084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:34.261744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:33:34.340175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:36.978337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646241446426355:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:36.978414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmpnvRpNb/pdisk_1.dat 2025-04-10T11:33:37.083664Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:37.114268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:37.114409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:37.115890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15100 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:37.352233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:33:37.373858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:37.433053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:37.485220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.558729Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646260716134978:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:40.558811Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmphAF6OP/pdisk_1.dat 2025-04-10T11:33:40.655112Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:40.700708Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:40.700813Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:40.702578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29361 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:40.881934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.887424Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.901140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.981954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.056098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.294780Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646277431724568:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:44.294833Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmp6wTUQH/pdisk_1.dat 2025-04-10T11:33:44.469379Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:44.506115Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:44.506219Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:44.508337Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12063 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 20 ... shed: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:57.525048Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.535481Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.549806Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:33:57.554233Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.628681Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:33:57.680909Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:01.742447Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646349170409104:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:01.742641Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmpNpRgru/pdisk_1.dat 2025-04-10T11:34:01.907225Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:01.948965Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:01.949096Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:01.951814Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1853 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:02.212140Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:02.228225Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:02.233782Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.312141Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.396661Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:06.538426Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646370857063911:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:06.538505Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmpsA0rkQ/pdisk_1.dat 2025-04-10T11:34:06.706985Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:06.707114Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:06.707854Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:06.728161Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16381 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:07.039867Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:07.075367Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.158208Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.232164Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:11.573453Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646392748243819:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:11.573575Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200e/r3tmp/tmptA5BXm/pdisk_1.dat 2025-04-10T11:34:11.704660Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:11.735336Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:11.735426Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:11.736953Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10487 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:34:12.063627Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.070355Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.083519Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:12.088960Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.185614Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.246898Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
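The verdict markers interleaved with the raw output above — ">> Suite::Test [GOOD]" for completed tests, a bare ">> Suite::Test" for ones still queued — are the only structure this stream guarantees. Below is a minimal triage sketch for pulling them out of a pasted chunk like this one; the marker format is an assumption read off the lines above, not a documented contract.

# Hypothetical helper, not part of the YDB tree: count verdicts and list
# non-GOOD tests in a raw ya-make log chunk. Assumes the ">> Name [STATUS]"
# convention seen in this log; bare ">> Name" entries (no verdict yet)
# simply never match the pattern.
import re
from collections import Counter

VERDICT_RE = re.compile(r">> ([\w:+\-]+) \[([A-Z]+)\]")

def summarize_verdicts(log_text: str) -> Counter:
    """Count verdicts (GOOD, FAIL, TIMEOUT, ...) seen in the chunk."""
    return Counter(status for _, status in VERDICT_RE.findall(log_text))

def non_good_tests(log_text: str) -> list[str]:
    """Names of tests whose verdict is anything other than GOOD."""
    return [name for name, status in VERDICT_RE.findall(log_text)
            if status != "GOOD"]

Run over this section, summarize_verdicts would report only GOOD verdicts; the value of such a helper is in spotting the rare FAIL or TIMEOUT marker buried in a multi-megabyte log.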
>> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> KqpLimits::QueryReplySize >> KqpLimits::WaitCAsStateOnAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-04-10T11:31:48.352156Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.524960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.561104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.561383Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.571798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.572009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.572236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.572365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.572472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.572592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.572710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.572834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.572967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.573147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-10T11:31:48.573253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.573349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.607596Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.607940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.608004Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.608181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.608513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.608558Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.608664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.608870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.608901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.609101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.609206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.609239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.609349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609441Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.609489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.609544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.609624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.609664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.609694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.609752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.609795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.609822Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.610214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=76; 2025-04-10T11:31:48.610293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-10T11:31:48.610982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=622; 2025-04-10T11:31:48.611099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:31:48.611285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.611339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.611377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.611603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.611657Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.611895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.611932Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.612241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.612272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.612414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.612456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.612502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
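The normalizer trace above is easier to audit mechanically than by eye: each normalizer logs a matching event=normalizer_init / event=normalizer_finished pair carrying a seq_id and a CLASS_NAME. Below is a minimal post-processing sketch, assuming only the key=value layout visible in this output; the script name normalizer_trace.py and its regexes are illustrative and are not part of ya or the YDB test suite.

import re
import sys
from datetime import datetime

TS = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}"
SPLIT = re.compile(rf"(?={TS}Z )")   # zero-width split at each log-entry start
STAMP = re.compile(rf"^(?P<ts>{TS})Z ")
INIT = re.compile(r"event=normalizer_init;.*?type=(?P<name>\w+);")
DONE = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(?P<name>\w+);id=(?P<id>\d+);")

def entries(path):
    # Entries are run together and wrapped mid-entry in this log, so join
    # the whole file into one string before splitting on timestamps.
    with open(path, encoding="utf-8", errors="replace") as f:
        text = f.read().replace("\n", " ")
    for chunk in SPLIT.split(text):
        m = STAMP.match(chunk)
        if m:
            yield datetime.fromisoformat(m.group("ts")), chunk

def main(path):
    started = {}  # normalizer name -> timestamp of its normalizer_init entry
    for ts, text in entries(path):
        m = INIT.search(text)
        if m:
            started[m.group("name")] = ts
        m = DONE.search(text)
        if m:
            t0 = started.pop(m.group("name"), None)
            ms = (ts - t0).total_seconds() * 1e3 if t0 else float("nan")
            print(f"{m.group('id'):>3}  {m.group('name'):<26} {ms:8.3f} ms")

if __name__ == "__main__":
    main(sys.argv[1])

Run as python3 normalizer_trace.py ya_log.txt; for the trace above it should print the eleven normalizers, Granules (seq_id=1) through RestoreV2Chunks (seq_id=16), in execution order with their durations.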
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););;
2025-04-10T11:34:17.728780Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5509:7501];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-10T11:34:17.730432Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5509:7501];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpLimits::LargeParametersAndMkqlFailure
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD]
Test command err:
2025-04-10T11:34:01.694818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646350549173528:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:01.694958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ffe/r3tmp/tmp70DFHa/pdisk_1.dat 2025-04-10T11:34:02.048892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:02.049054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:02.051424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:02.076982Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64060 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:34:02.350993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:02.378499Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:02.390295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.537117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.608895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.694964Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646350549173528:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:06.695037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:09.662098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646383650858530:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:09.663082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ffe/r3tmp/tmpq5KP2H/pdisk_1.dat 2025-04-10T11:34:09.778632Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:09.811663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:09.811768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:09.812938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13935 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:34:10.016037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:10.041247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.121085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.177410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.036417Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646405332427224:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:14.036496Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ffe/r3tmp/tmpmjy76G/pdisk_1.dat 2025-04-10T11:34:14.149899Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:14.174247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:14.174344Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:14.175950Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15720 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:14.363428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.371844Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:14.388203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.444806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.523371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD]
Test command err:
2025-04-10T11:31:51.656663Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:51.768750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:51.798767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:51.799048Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:51.807877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:51.808121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:51.808415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:51.808575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:51.808690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:51.808816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:51.808958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:51.809119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:51.809251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:51.809383Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.809512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:51.809613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:51.849558Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:51.849866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:51.849931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:51.850178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.850369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:51.850491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:51.850545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:51.850649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:51.850756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:51.850810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:51.850850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:51.851090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.851179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:51.851238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:51.851278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-04-10T11:31:51.851382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:51.851444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:51.851499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:51.851540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:51.851660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:51.851713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:51.851752Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:51.851816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:51.851869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:51.851929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:51.852384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-10T11:31:51.852512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=69; 2025-04-10T11:31:51.852614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-10T11:31:51.852742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=58; 2025-04-10T11:31:51.853000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:51.853090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:51.853136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:51.853375Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:51.853439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.853479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.853684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:51.853746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:51.853784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:51.853993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:51.854064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:51.854112Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:51.855116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:51.855182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:51.855251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
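The blob-range dumps in this output (one follows below; an identical one appears after the first normalizer trace) list one column_id:N;chunk_idx:M;blob_range:[NO_BLOB:offset:size] record per chunk and are more digestible as per-column aggregates. A companion sketch under the same assumptions; it only string-parses the log text and is not a YDB tool:

import re
import sys
from collections import defaultdict

# One record looks like: column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;
CHUNK = re.compile(
    r"column_id:(?P<col>\d+);chunk_idx:\d+;"
    r"blob_range:\[NO_BLOB:\d+:(?P<size>\d+)\]")

def main(path):
    count = defaultdict(int)
    total = defaultdict(int)
    with open(path, encoding="utf-8", errors="replace") as f:
        # The dump is hard-wrapped mid-token in this log, so strip the
        # line breaks before matching to avoid losing split records.
        text = f.read().replace("\n", "")
    for m in CHUNK.finditer(text):
        col = int(m.group("col"))
        count[col] += 1
        total[col] += int(m.group("size"))
    for col in sorted(count):
        print(f"column_id={col}: {count[col]} chunks, {total[col]} bytes")

if __name__ == "__main__":
    main(sys.argv[1])

For the visible portions of the dump this reports per-column chunk counts (the chunk_idx ranges run 0..73 for columns 8 and 9, 0..53 for column 7, 0..33 for column 5) and total blob bytes, which can be compared with the column_size values in the switched=(portion_id:...) summary that closes each dump.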
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););;
2025-04-10T11:34:18.099690Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5510:7502];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-10T11:34:18.101743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5510:7502];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD]
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart
>> TFlatTest::Ls
>> KqpStats::RequestUnitForSuccessExplicitPrepare
>> KqpStats::DataQueryMulti [GOOD]
>> DataShardSnapshots::VolatileSnapshotTimeout [GOOD]
>> DataShardSnapshots::VolatileSnapshotTimeoutRefresh
|95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink
>> TLocksTest::GoodNullLock [GOOD]
>> KqpParams::CheckQueryCacheForPreparedQuery
>> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD]
>> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD]
Test command err:
Trying to start YDB, gRPC: 29295, MsgBus: 15372 2025-04-10T11:34:02.589589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646354686345791:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:02.596273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001762/r3tmp/tmpqLPi80/pdisk_1.dat 2025-04-10T11:34:02.937368Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29295, node 1 2025-04-10T11:34:02.989674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:02.989837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:02.992840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:03.030542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:03.030563Z node 1 :NET_CLASSIFIER WARN: will try to
initialize from file: (empty maybe) 2025-04-10T11:34:03.030568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:03.030660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15372 TClient is connected to server localhost:15372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:03.567565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.598842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.739855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.906286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.995520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:05.914728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646367571249319:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:05.914846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.309705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.343842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.377897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.448535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.491089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.565521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.631220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646371866217140:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.631319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.631891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646371866217145:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.637941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:06.653905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646371866217147:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:06.737234Z node 1 :TX_PROXY ERROR: Actor# [1:7491646371866217203:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:07.591068Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646354686345791:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:07.591839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25109, MsgBus: 5956 2025-04-10T11:34:08.833162Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646379925434539:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:08.838824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001762/r3tmp/tmpquFf0m/pdisk_1.dat 2025-04-10T11:34:09.015219Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:09.046929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:09.047017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25109, node 2 2025-04-10T11:34:09.051389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:09.093296Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:09.093323Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:09.093331Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:09.093704Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5956 TClient is connected to server localhost:5956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:09.531670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:09.548134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:09.637890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.803588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:09.886211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.279881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646397105305473:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.279986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.346884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.385238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.418537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.449882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.483508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.525871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.614443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646397105305985:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.614566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.614960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646397105305991:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.618658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:12.629692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646397105305993:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:12.696001Z node 2 :TX_PROXY ERROR: Actor# [2:7491646397105306047:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:13.833480Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646379925434539:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:13.833769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24465, MsgBus: 4368 2025-04-10T11:34:14.629211Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646403376872661:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:14.629283Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001762/r3tmp/tmppP4Ih3/pdisk_1.dat 2025-04-10T11:34:14.757072Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24465, node 3 2025-04-10T11:34:14.784009Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:14.784227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:14.785833Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:14.830716Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:14.830742Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:14.830750Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:14.830882Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4368 TClient is connected to server localhost:4368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:15.361580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.381428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.458131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.640086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.738451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:18.407643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646420556743638:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:18.407752Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:18.463696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.496288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.527184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.557435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.599912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.639732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:18.683367Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646420556744148:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:18.683491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:18.683811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646420556744153:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:18.688295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:18.701564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646420556744155:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:18.780956Z node 3 :TX_PROXY ERROR: Actor# [3:7491646420556744209:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:19.631438Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646403376872661:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:19.631740Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::SplitEmptyToMany >> KqpQuery::QueryResultsTruncated >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> TLocksTest::CK_BrokenLock [GOOD] >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-04-10T11:33:37.325321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646247858593787:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:37.325422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmpgoVHXy/pdisk_1.dat 2025-04-10T11:33:37.706796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:37.711808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:37.711953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:37.717045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10177 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:38.033753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:33:38.067209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:38.201002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:38.256995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.826239Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646257745013929:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:40.826320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmp2FTfnd/pdisk_1.dat 2025-04-10T11:33:40.927720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:40.958003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:40.958079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:40.959033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17587 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:41.147872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.157245Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.175745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:41.247443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.293915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmpLVayEM/pdisk_1.dat 2025-04-10T11:33:44.970315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:44.989764Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:44.991877Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:44.991958Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:45.016464Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15541 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:45.229057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:45.238858Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:45.254916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:45.354779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:33:45.419001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:33:48.751156Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646293431023925:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:48.751291Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmpTInQ3G/pdisk_1.dat 2025-04-10T11:33:48.917450Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:48.934135Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:48.934257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:48.938692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12231 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:49.207991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:49.215377Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:02.273047Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63113 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:02.541496Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:02.568810Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:02.574902Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.653064Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:02.714004Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.960866Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646372595159083:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:06.960971Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmpM64LqH/pdisk_1.dat 2025-04-10T11:34:07.150352Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:07.170901Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:07.171002Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:07.172987Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4565 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:34:07.508592Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.521184Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.536910Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.621472Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.706087Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:11.882634Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646391693672834:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:11.882708Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmpuhw265/pdisk_1.dat 2025-04-10T11:34:12.079333Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:12.104502Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:12.104618Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:12.107691Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32322 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:12.372326Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:12.398107Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.479412Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.561153Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:16.733228Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646412312585857:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:16.734884Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00200b/r3tmp/tmp6bjbcz/pdisk_1.dat 2025-04-10T11:34:16.896223Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:16.932301Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:16.932413Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:16.934577Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10172 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:17.303218Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:17.325420Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.405247Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.482092Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
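The WARN/ERROR records repeated across nodes above all follow one bootstrap pattern: the KQP workload service fetches the `default` resource pool (NOT_FOUND), proposes ESchemeOpCreateResourcePool, loses the creation race to a concurrent creator (TX_PROXY reports "path exist, request accepts it"), and schedules a doublechecking retry until the pool becomes visible. A minimal sketch of that idempotent ensure-exists loop, for orientation only — the names FetchPool/CreatePool/EStatus are hypothetical stand-ins, not the YDB actor or SDK API:

// Illustrative sketch of the create-then-doublecheck pattern visible in the
// KQP_WORKLOAD_SERVICE log lines above. All names here are hypothetical
// stand-ins; this is not YDB's actual actor code.
#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, NotFound, AlreadyExists, Retry };

// Hypothetical operations supplied by the caller.
struct TPoolOps {
    std::function<EStatus()> FetchPool;   // e.g. look up .metadata/workload_manager/pools/default
    std::function<EStatus()> CreatePool;  // e.g. propose ESchemeOpCreateResourcePool
};

// Ensure the pool exists: fetch, create on NOT_FOUND, and treat a lost
// creation race ("path exist, request accepts it") as success pending a
// doublecheck on the next pass, retrying with a small backoff.
bool EnsureDefaultPool(const TPoolOps& ops, int maxAttempts = 5) {
    using namespace std::chrono_literals;
    auto backoff = 50ms;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (ops.FetchPool() == EStatus::Ok)
            return true;                       // pool is visible, done
        EStatus st = ops.CreatePool();
        if (st == EStatus::Ok || st == EStatus::AlreadyExists)
            continue;                          // verify via FetchPool on the next pass
        std::this_thread::sleep_for(backoff);  // scheduled retry, as in the log
        backoff *= 2;
    }
    return false;
}

The same shape explains why these lines recur for every freshly started node: the pool is created lazily on first use, so the first fetch is expected to fail and the warnings are bootstrap noise rather than test failures.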
>> KqpExplain::CompoundKeyRange [GOOD] >> KqpExplain::ExplainDataQuery >> TLocksTest::MultipleLocks [GOOD] >> TLocksTest::GoodSameShardLock [GOOD] >> TObjectStorageListingTest::MaxKeysAndSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28263, MsgBus: 14435 2025-04-10T11:34:05.070575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646364192838220:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:05.070700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175f/r3tmp/tmpFWGRMf/pdisk_1.dat 2025-04-10T11:34:05.473299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:05.508121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:05.508256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:05.509999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28263, node 1 2025-04-10T11:34:05.573137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:05.573164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:05.573172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:05.573304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14435 TClient is connected to server localhost:14435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:06.186956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.205207Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:06.210661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:06.336564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.516359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.597738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:08.385140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646377077741881:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:08.385327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:08.823745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:08.860816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:08.897122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:08.933409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:08.969370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.003518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.094728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646381372709691:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:09.094807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:09.095037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646381372709696:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:09.099076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:09.110316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646381372709700:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:09.173632Z node 1 :TX_PROXY ERROR: Actor# [1:7491646381372709754:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:10.070401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646364192838220:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:10.070480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:10.381500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284850357, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 20506, MsgBus: 24029 2025-04-10T11:34:11.245548Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646391930284781:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:11.245619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175f/r3tmp/tmpVFi4wP/pdisk_1.dat 2025-04-10T11:34:11.364106Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:11.396226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:11.396324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:11.399197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20506, node 2 2025-04-10T11:34:11.447843Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:11.447869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:11.447880Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:11.448005Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24029 TClient is connected to server localhost:24029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:11.927713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:11.944738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.010507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.183656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594 ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.370822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646404815188432:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:14.370932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:14.425042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.459470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.490061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.524276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.555054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.603135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:14.654182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646404815188945:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:14.654268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:14.654590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646404815188951:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:14.658253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:14.668555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646404815188953:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:14.752700Z node 2 :TX_PROXY ERROR: Actor# [2:7491646404815189006:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:15.793206Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284855817, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 26803, MsgBus: 63704 2025-04-10T11:34:16.776887Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646415063402579:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:16.776946Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175f/r3tmp/tmpYeoiX3/pdisk_1.dat 2025-04-10T11:34:16.915413Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:16.944112Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:16.944233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:16.945926Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26803, node 3 2025-04-10T11:34:17.013420Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:17.013446Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:17.013453Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:17.013582Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63704 TClient is connected to server localhost:63704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:17.444444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:34:17.465776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:34:17.532275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.750328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.832146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.181234Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646432243273551:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.181315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.228295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.310661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.382783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.423447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.462199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.510768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.562480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646432243274065:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.562595Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.562659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646432243274070:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.567902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:20.580214Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646432243274072:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:20.637246Z node 3 :TX_PROXY ERROR: Actor# [3:7491646432243274126:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:21.777339Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646415063402579:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:21.777422Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpParams::DefaultParameterValue [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::ManyPartitions >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-04-10T11:33:39.356262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646256718856817:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:39.356499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmp3Ypzet/pdisk_1.dat 2025-04-10T11:33:39.687297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:39.737265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:39.737386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:39.739320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7800 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:39.997342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:40.014126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.034424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.183809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:40.261905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:42.845176Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646267706074177:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:42.845311Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmpPiz7Nz/pdisk_1.dat 2025-04-10T11:33:42.973613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:43.006584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:43.006694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:43.012531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24982 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:33:43.239993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:43.271438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:43.353830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:43.446059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:46.702719Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646284928637054:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:46.702882Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmpMLRSEJ/pdisk_1.dat 2025-04-10T11:33:46.852748Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:46.880295Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:46.880373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:46.885686Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23297 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:47.120779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.130193Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.141997Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:47.148668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.227742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:47.306424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:51.013281Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646308126908221:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:51.013350Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmpQwLOS1/pdisk_1.dat 2025-04-10T11:33:51.177290Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:51.212565Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:51.212659Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:51.214239Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3810 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025 ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:04.254894Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:04.257624Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10828 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:34:04.515343Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.523290Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.538332Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.614371Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.730653Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:08.717327Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646378659300156:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:08.717566Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmp3SQ8Nn/pdisk_1.dat 2025-04-10T11:34:08.893396Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:08.913276Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:08.913502Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:08.915326Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2025 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:34:09.215243Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:09.239078Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:09.307843Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:09.386363Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:13.425885Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646401603399694:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:13.425985Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmpLGFJ5q/pdisk_1.dat 2025-04-10T11:34:13.576227Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:13.597607Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:13.597715Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:13.599578Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12344 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:13.930712Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:13.956961Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.034643Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:14.097722Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:18.254349Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646421186545421:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:18.254456Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002008/r3tmp/tmp9kMrLN/pdisk_1.dat 2025-04-10T11:34:18.396907Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:18.429225Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:18.429336Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:18.431235Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15771 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:18.817206Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:18.846028Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:18.925746Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:18.996666Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpStats::JoinStatsBasicScan [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-04-10T11:33:56.067854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646328977824243:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:56.067933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmp6Zwai9/pdisk_1.dat 2025-04-10T11:33:56.518600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:56.521174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:56.521277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:56.526829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15950 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:56.862806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:56.881143Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:56.895973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.058364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:57.129818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:59.493564Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646341005080492:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:59.493894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmp7WvnjV/pdisk_1.dat 2025-04-10T11:33:59.617764Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:59.631001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:59.631105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:59.632912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27015 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:59.888424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:59.906665Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:59.920566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:00.007113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:00.085723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:03.110927Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646356021649388:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:03.111015Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmpF9FDjU/pdisk_1.dat 2025-04-10T11:34:03.264034Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:03.284590Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:03.284676Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:03.286603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9696 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:03.461541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:03.483720Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:03.490812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.548520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.627388Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:07.216247Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646376749523971:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:07.217865Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmpOfyItQ/pdisk_1.dat 2025-04-10T11:34:07.357358Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:07.377443Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:07.377534Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:07.379126Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64769 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:07.615772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.628523Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.645743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.721064Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.797666Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:11.584782Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491646392371041092:2151];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmpFHXrsg/pdisk_1.dat 2025-04-10T11:34:11.644057Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:11.738507Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:11.766908Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:11.767017Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:11.771596Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23885 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:12.010367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:12.031283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.091457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.141307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:15.782822Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491646408578181634:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:15.782936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmpsoWX0f/pdisk_1.dat 2025-04-10T11:34:15.901612Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:15.934934Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:15.935012Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:15.936701Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24836 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:16.172673Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:16.178543Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:16.197304Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:16.263494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:34:16.336561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.299267Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491646430077842105:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:20.299330Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002001/r3tmp/tmpApTmyS/pdisk_1.dat 2025-04-10T11:34:20.502917Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:20.512200Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:20.512308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:20.514373Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4797 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:20.756846Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:20.780983Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:20.860556Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.954091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-04-10T11:33:40.497200Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646259614729024:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:40.497281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpch55br/pdisk_1.dat 2025-04-10T11:33:40.875348Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:40.922914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:40.923016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:40.924652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20157 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:41.173727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.195150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.212050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.374376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:41.433543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:44.056575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646274797312614:2086];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:44.056935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpu4EHCD/pdisk_1.dat 2025-04-10T11:33:44.223137Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:44.239346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:44.239438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:44.241614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3869 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:44.451645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.460578Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.470563Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:33:44.475288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.557748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:44.637171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:47.893576Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646287215812538:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:47.893637Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmp9AwK16/pdisk_1.dat 2025-04-10T11:33:48.102192Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:48.119817Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:48.119903Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:48.122686Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14122 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:33:48.371272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:48.376748Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:48.391350Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:33:48.399184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:48.485441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:48.564234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:51.989424Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646307599151059:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:51.989527Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpN9VtEw/pdisk_1.dat 2025-04-10T11:33:52.113822Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:52.146629Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:52.146753Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:52.151575Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3523 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... necting -> Connected TClient is connected to server localhost:20656 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:05.515527Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:05.530939Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:05.537197Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:05.610069Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:05.690547Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.803542Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646382130739906:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:09.803631Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpbzYafj/pdisk_1.dat 2025-04-10T11:34:09.966569Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:09.982005Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:09.982127Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:09.985185Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10115 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:10.237593Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:10.260266Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:10.274363Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.360780Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:10.511275Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:14.554696Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646406122042893:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:14.554768Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpNC5dLT/pdisk_1.dat 2025-04-10T11:34:14.710107Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:14.730945Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:14.731053Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:14.735362Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3025 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:15.044041Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:15.070056Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.154221Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.232522Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:19.316885Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646427430947869:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:19.317029Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/002006/r3tmp/tmpzGzDqy/pdisk_1.dat 2025-04-10T11:34:19.511308Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:19.536750Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:19.536869Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:19.539600Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64213 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:19.848560Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.857982Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.872586Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.010563Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.097435Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::StatsProfile >> KqpQuery::RewriteIfPresentToMap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::DefaultParameterValue [GOOD] Test command err: Trying to start YDB, gRPC: 23181, MsgBus: 8131 2025-04-10T11:34:00.245920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646345108731155:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:00.252534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001765/r3tmp/tmp7icXvD/pdisk_1.dat 2025-04-10T11:34:00.619742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:00.627160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:00.627268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:00.630625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23181, node 1 2025-04-10T11:34:00.706250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:00.706278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:00.706297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:00.706426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8131 TClient is connected to server localhost:8131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:01.331504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:01.349399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:01.363858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:01.504440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:01.676129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:01.757818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:03.463778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646357993634606:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:03.463927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:03.771101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:03.805307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:03.840430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:03.879313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:03.944467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:03.982848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:04.037628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646362288602412:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:04.037713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:04.037754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646362288602417:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:04.042445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:04.053518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646362288602419:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:04.151933Z node 1 :TX_PROXY ERROR: Actor# [1:7491646362288602475:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:05.234300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646345108731155:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:05.234395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25698, MsgBus: 8508 2025-04-10T11:34:06.320696Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646369706119909:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:06.320955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001765/r3tmp/tmpwBfJHd/pdisk_1.dat 2025-04-10T11:34:06.428735Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25698, node 2 2025-04-10T11:34:06.468984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:06.469106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:06.474690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:06.507517Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:06.507539Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:06.507548Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:06.507669Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8508 TClient is connected to server localhost:8508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:06.991145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.008387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.089390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.299970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:07.403926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTab ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:16.256626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:16.316166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.355857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.390634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.425653Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.461108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.497766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:16.582471Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646411914173643:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:16.582551Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:16.582758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646411914173648:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:16.586663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:16.598060Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646411914173650:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:16.677758Z node 3 :TX_PROXY ERROR: Actor# [3:7491646411914173705:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:17.450739Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646394734302189:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:17.450834Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6564, MsgBus: 28702 2025-04-10T11:34:19.063232Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646424719406601:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:19.064050Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001765/r3tmp/tmpwJDAR1/pdisk_1.dat 2025-04-10T11:34:19.155789Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:19.190698Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:19.190812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:19.192762Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6564, node 4 2025-04-10T11:34:19.241045Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:19.241067Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:19.241073Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:19.241171Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28702 TClient is connected to server localhost:28702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:19.785167Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.794894Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:19.804499Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.875882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.055348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.137129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:22.881457Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646437604310114:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:22.881557Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:22.958423Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:22.997891Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.040753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.079902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.117390Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.153228Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.203610Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646441899277924:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.203700Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.203964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646441899277929:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.209946Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:23.222250Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646441899277931:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:23.276362Z node 4 :TX_PROXY ERROR: Actor# [4:7491646441899277983:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:24.033008Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646424719406601:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:24.033090Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 29967, MsgBus: 29966 2025-04-10T11:34:02.941280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646351556891713:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:02.941384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001761/r3tmp/tmpGDCt0O/pdisk_1.dat 2025-04-10T11:34:03.395506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:03.401642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:03.401780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:03.404206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29967, node 1 2025-04-10T11:34:03.474398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:03.474438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:03.474445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:03.474580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29966 TClient is connected to server localhost:29966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:04.072103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.088281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:04.095602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.289556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.472928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:04.554586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:06.372296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646368736762652:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.372426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:06.836530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.874778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.907737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:06.983503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.026553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.103415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:07.206076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646373031730476:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.206199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.206505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646373031730481:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:07.211389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:07.224782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646373031730483:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:07.283554Z node 1 :TX_PROXY ERROR: Actor# [1:7491646373031730537:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:07.945611Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646351556891713:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:07.945656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2102, MsgBus: 27741 2025-04-10T11:34:10.139815Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646388522714595:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:10.139905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001761/r3tmp/tmpvlVafN/pdisk_1.dat 2025-04-10T11:34:10.265090Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:10.296964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:10.297068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:10.298866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2102, node 2 2025-04-10T11:34:10.357467Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:10.357499Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:10.357508Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:10.357645Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27741 TClient is connected to server localhost:27741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:10.814424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.821012Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:10.828979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:10.935346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:11.091241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting.. ... source pool default not found or you don't have access permissions } 2025-04-10T11:34:13.516718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:13.565496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.599784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.628453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.660025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.688768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.724285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:13.768693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646401407618769:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:13.768794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:13.768830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646401407618774:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:13.772036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:13.781350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646401407618776:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:13.874129Z node 2 :TX_PROXY ERROR: Actor# [2:7491646401407618830:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:15.140084Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646388522714595:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:15.140185Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19325, MsgBus: 61454 2025-04-10T11:34:16.501316Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646415605417073:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:16.501456Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001761/r3tmp/tmpwdj3zZ/pdisk_1.dat 2025-04-10T11:34:16.620042Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:16.647265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:16.647366Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19325, node 3 2025-04-10T11:34:16.653177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:16.690564Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:16.690593Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:16.690600Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:16.690720Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61454 TClient is connected to server localhost:61454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:17.200649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.221607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.278471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:17.448562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:17.560761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:19.964076Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646428490320736:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:19.964188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.011507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.058040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.096031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.139572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.180905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.222294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:20.279166Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646432785288546:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.279248Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.279323Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646432785288551:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:20.283245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:20.296777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646432785288553:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:20.393627Z node 3 :TX_PROXY ERROR: Actor# [3:7491646432785288609:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:21.517770Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646415605417073:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:21.517838Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:24.984753Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284862236, txId: 281474976715671] shutting down >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-04-10T11:34:18.721835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646421940857665:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:18.722018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ff7/r3tmp/tmp4vRSnw/pdisk_1.dat 2025-04-10T11:34:19.046091Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:19.090892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:19.090986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:19.092697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2329 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:34:19.301848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.320867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.330644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:19.339807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:19.523860Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:34:19.542647Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.009s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:34:19.571416Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:34:19.579001Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284859457 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) Copy TableOld to Table 2025-04-10T11:34:19.727380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:34:19.727921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:34:19.728487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-10T11:34:19.728521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-10T11:34:19.728555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:34:19.728582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-10T11:34:19.728616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-10T11:34:19.728759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-10T11:34:19.728882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:34:19.730858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:34:19.730910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-10T11:34:19.731733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-10T11:34:19.731892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-10T11:34:19.732577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:34:19.732607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2025-04-10T11:34:19.732776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-10T11:34:19.732871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:34:19.732893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646426235825479:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-10T11:34:19.732905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646426235825479:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-10T11:34:19.732980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-10T11:34:19.733032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-10T11:34:19.733384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T11:34:19.733499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-04-10T11:34:19.735653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-10T11:34:19.735763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-10T11:34:19.735778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-10T11:34:19.735794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-10T11:34:19.735812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:34:19.736087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-10T11:34:19.736132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 720575940466444 ... 
le TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-04-10T11:34:23.927608Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-10T11:34:23.927691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-10T11:34:23.927798Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-04-10T11:34:23.927815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-10T11:34:23.927841Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-04-10T11:34:23.927852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-10T11:34:23.927871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-04-10T11:34:23.927913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491646444891902837:2687] message: TxId: 281474976715784 2025-04-10T11:34:23.927939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-10T11:34:23.927956Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-04-10T11:34:23.927965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-04-10T11:34:23.928070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-10T11:34:23.928785Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:23.928879Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-04-10T11:34:23.928909Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:34:23.928962Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:34:23.928985Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-10T11:34:23.930226Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:34:23.930266Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-10T11:34:23.931616Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:23.931670Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:23.931781Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7491646444891902856:2689], serverId# [2:7491646444891902863:3448], sessionId# [0:0:0] 
2025-04-10T11:34:23.932109Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:34:23.932109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646440596933841 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-10T11:34:23.932189Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:23.932464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646440596934130 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-04-10T11:34:23.932504Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:23.932619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646440596934130 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 Check that tablet 72075186224037888 was deleted 2025-04-10T11:34:23.932654Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:23.932750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646440596934128 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-04-10T11:34:23.932766Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:34:23.932918Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-10T11:34:23.932948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:23.933040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:23.933064Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:34:23.933079Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:34:23.933083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:23.933113Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:34:23.933114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:34:23.934431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:34:23.934703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:34:23.935107Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 
2025-04-10T11:34:23.935160Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-10T11:34:23.935177Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:34:23.935665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:34:23.935757Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-10T11:34:23.935842Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:34:23.935853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-10T11:34:23.936007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:34:23.936178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:34:23.936348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-10T11:34:23.936470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:34:23.936488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-10T11:34:23.936533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:34:23.936550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:34:23.936567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:34:23.937091Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-10T11:34:23.937108Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-10T11:34:23.937124Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-10T11:34:23.937155Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-10T11:34:23.937666Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:34:23.937742Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 Check that tablet 72075186224037889 was deleted 2025-04-10T11:34:23.938769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:34:23.938793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:34:23.938932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:34:23.938944Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:34:23.938968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:34:23.939001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:34:23.939019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:34:23.939054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:34:23.939557Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-10T11:34:23.939605Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-10T11:34:23.940128Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-04-10T11:34:23.941453Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-04-10T11:34:23.942422Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::LsPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-04-10T11:31:55.323553Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:55.415237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:55.438256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:55.438543Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:55.446559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:55.446756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:55.446993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:55.447130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:55.447233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:55.447350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:55.447459Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:55.447572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:55.447714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:55.447823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.447935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:55.448039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:55.476286Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:55.476630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:55.476684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:55.476907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.477090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:55.477175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:55.477214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:55.477294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:55.477346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:55.477387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:55.477414Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:55.477590Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:55.477675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:55.477715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:55.477746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:55.477834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:55.477899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:55.477955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:55.477986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:55.478077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:55.478120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:55.478170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:55.478223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:55.478267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:55.478304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:55.478686Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T11:31:55.478786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T11:31:55.478878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-10T11:31:55.478981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:31:55.479161Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:55.479219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:55.479267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:55.479454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:55.479496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.479531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.479724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:55.479767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:55.479799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:55.479984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:55.480023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:55.480051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:55.480153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:55.480192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:55.480231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-10T11:34:25.738437Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5649:7641];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-10T11:34:25.739735Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5649:7641];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpParams::MissingParameter >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit >> TFlatTest::RejectByPerShardReadSize >> KqpTypes::QuerySpecialTypes >> KqpStats::JoinNoStatsScan >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-04-10T11:34:20.433657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646431649055132:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:20.433773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ff4/r3tmp/tmpck5Ano/pdisk_1.dat 2025-04-10T11:34:20.863926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:20.871688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:20.871788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:20.875218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18760 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:21.172809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: / 2025-04-10T11:34:21.195127Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284861235 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... 
(TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284861235 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284861256 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284861256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... 
(TRUNCATED) 2025-04-10T11:34:21.221846Z node 1 :TX_PROXY ERROR: Actor# [1:7491646435944023017:2324] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284861235 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284861256 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284861235 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284861256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284861277 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... 
(TRUNCATED) 2025-04-10T11:34:23.879420Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646444953011516:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:23.879471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ff4/r3tmp/tmpDeK79V/pdisk_1.dat 2025-04-10T11:34:24.003908Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:24.045134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:24.045237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:24.046766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6470 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:24.246069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
>> KqpQuery::QueryResultsTruncated [GOOD]
>> KqpQuery::QueryStats+UseSink
>> KqpStats::MultiTxStatsFullYql
>> TLocksTest::Range_BrokenLockMax
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD]
>> KqpLimits::ManyPartitions [GOOD]
>> KqpLimits::ManyPartitionsSorting
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink
>> TFlatTest::LargeProxyReply
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD]
Test command err:
2025-04-10T11:31:58.190471Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:58.305612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:58.334333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:58.334598Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:58.343594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:58.343849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:58.344114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:58.344265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:58.344386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:58.344553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:58.344700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:58.344833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:58.344988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:58.345149Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.345278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:58.345423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:58.376588Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:58.377109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:58.377155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:58.377287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.377389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:58.377451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:58.377479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:58.377573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:58.377614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:58.377642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:58.377663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:58.377815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.377870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:58.377897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:58.377919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:58.377979Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:58.378017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:58.378047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:58.378069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:58.378129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:58.378211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:58.378245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:58.378299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:58.378342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:58.378386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:58.378906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-10T11:31:58.379026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-04-10T11:31:58.379141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-10T11:31:58.379242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T11:31:58.379411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:58.379605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:58.379648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:58.379855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-10T11:31:58.379904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.379936Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.380093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:58.380145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:58.380181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:58.380377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:58.380421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:58.380455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:58.380601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:58.380645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:58.380687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-10T11:34:29.843889Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5650:7642];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-10T11:34:29.845479Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5650:7642];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpLimits::TooBigQuery+useSink
>> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD]
>> KqpStats::StatsProfile [GOOD]
>> KqpStats::SelfJoin
>> KqpExplain::ExplainDataQuery [GOOD]
>> KqpExplain::ExplainDataQueryWithParams
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD]
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink
>> KqpQuery::RewriteIfPresentToMap [GOOD]
>> KqpQuery::ReadOverloaded+StreamLookup
>> KqpExplain::SelfJoin3xSameLabels
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD]
Test command err:
2025-04-10T11:32:06.356742Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:06.471823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:06.499071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:06.499378Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:06.509669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:06.509902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:06.510205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:06.510385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:06.510524Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:06.510675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:06.510830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:06.510978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:06.511133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:06.511262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.511390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:06.511495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:06.545188Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:06.545600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:06.545660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:06.545850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.546063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:06.546173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:06.546222Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:06.546326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:06.546402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:06.546447Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:06.546482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:06.546984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.547059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:06.547096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:06.547128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:06.547232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:06.547323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:06.547373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:06.547430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:06.547509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:06.547551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:06.547587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:06.547649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:06.547704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:06.547735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:06.548142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-10T11:32:06.548253Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T11:32:06.548346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-10T11:32:06.548457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-04-10T11:32:06.548651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:06.548707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:06.548744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:06.549078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:06.549169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.549206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.549356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:06.549404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:06.549437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:06.549614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:06.549655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:06.549684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:06.549813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:06.549898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:06.549946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-10T11:34:31.946415Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5510:7502];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-04-10T11:34:31.948020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5510:7502];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpParams::Decimal-QueryService-UseSink [GOOD]
>> KqpParams::Decimal+QueryService-UseSink
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD]
>> DataShardSnapshots::RepeatableReadAfterSplitRace
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD]
>> DataShardSnapshots::DelayedWriteReadableAfterSplit
>> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD]
>> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries
>> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD]
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink
>> KqpParams::MissingParameter [GOOD]
>> KqpParams::ImplicitSameParameterTypesQueryCacheCheck
>> KqpTypes::QuerySpecialTypes [GOOD]
>> KqpTypes::SelectNull
>> KqpLimits::ReadsetCountLimit [GOOD]
>> KqpLimits::QueryExecTimeoutCancel
>> KqpLimits::ManyPartitionsSorting [GOOD]
>> KqpLimits::ManyPartitionsSortingLimit
>> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD]
>> KqpQuery::QueryStats+UseSink [GOOD]
>> KqpQuery::QueryStats-UseSink
>> KqpStats::MultiTxStatsFullYql [GOOD]
>> KqpStats::MultiTxStatsFullScan
>> TObjectStorageListingTest::TestFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient:
[1:111:2142] 2025-04-10T11:32:41.265788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:32:41.265902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:41.265950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:32:41.265998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:32:41.266054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:32:41.266083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:32:41.266189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:32:41.266271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:32:41.266640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:32:41.357309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:32:41.357378Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:32:41.376813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:32:41.377105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:32:41.377304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:32:41.386799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:32:41.387124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:32:41.387813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:41.388421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:32:41.391293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:41.392819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:41.392878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:41.393060Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:32:41.393104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:41.393143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:32:41.393294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:32:41.400291Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:32:41.522117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:32:41.522520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.522803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:32:41.523087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:32:41.523145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.527663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:41.527868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:32:41.528151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.528224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:32:41.528294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:32:41.528343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:32:41.531404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.531478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:32:41.531519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:32:41.534131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T11:32:41.534236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.534291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:41.534383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:32:41.538722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:32:41.542510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:32:41.542802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:32:41.544131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:32:41.544370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:32:41.544465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:41.544842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:32:41.544936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:32:41.545185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:32:41.545315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:32:41.552411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:32:41.552492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:32:41.552752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:32:41.552816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:32:41.553184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:32:41.553263Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:32:41.553402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:41.553455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:32:41.553511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:32:41.553572Z no ... nId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:35.580561Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:34:35.580821Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 294us result status StatusSuccess 2025-04-10T11:34:35.581603Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 
UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:35.593691Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2801] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:34:35.593787Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2801] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:34:35.593857Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:957:2801] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-10T11:34:35.593927Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:957:2801] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-10T11:34:35.594041Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2801] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744284875565126 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744284875565126 Step: 5000003 
TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:34:35.594593Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2801] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1744284875565126 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:34:35.609573Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2801] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-10T11:34:35.609883Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:957:2801] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-10T11:34:35.611273Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2801] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T11:34:35.611398Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:957:2801] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> KqpStats::OneShardLocalExec-UseSink >> KqpStats::SelfJoin [GOOD] >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::PureExpr >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 30424, MsgBus: 4225 2025-04-10T11:34:20.574806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646430074349226:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:20.575139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001754/r3tmp/tmptYMo0W/pdisk_1.dat 2025-04-10T11:34:20.959032Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30424, node 1 2025-04-10T11:34:21.013266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:21.013398Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:21.015202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:21.055685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:21.055718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:21.055727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:21.055849Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4225 TClient is connected to server localhost:4225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:21.575256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.607264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.743539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.902307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.974555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.773457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646442959252728:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.773561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:24.187521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.223458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.295564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.326698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.359591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.395791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.492080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646447254220540:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:24.492146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:24.492371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646447254220545:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:24.496865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:24.508776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646447254220547:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:24.582630Z node 1 :TX_PROXY ERROR: Actor# [1:7491646447254220602:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:25.572670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646430074349226:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:25.572767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2611, MsgBus: 25268 2025-04-10T11:34:26.632598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646456076964529:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:26.632665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001754/r3tmp/tmpkEdBaO/pdisk_1.dat 2025-04-10T11:34:26.755862Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:26.784836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:26.784925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:26.786204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2611, node 2 2025-04-10T11:34:26.859873Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:26.859908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:26.859916Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:26.860049Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25268 TClient is connected to server localhost:25268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:27.371908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.379474Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:27.392290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.467199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.657130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.762825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... ount":2,"Sum":6,"Max":4,"Min":2},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"ActiveMessageMs":{"Count":2,"Max":4,"Min":2},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"ActiveTimeUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000},"WaitTimeUs":{"Count":2,"Sum":3866,"Max":2272,"Min":1594,"History":[4,1594,5,3866]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"DurationUs":{"Count":2,"Sum":3000,"Max":2000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[4,1048576,5,2097152]},"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2,"ResultBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":2000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"BaseTimeMs":1744284878573,"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":2150,"Max":1967,"Min":183,"History":[4,183,5,2150]},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"WaitTimeUs":{"Count":2,"Sum":3965,"Max":2326,"Min":1639,"History":[4,1639,5,3965]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":16,"Max":10,"Min":6},"ActiveMessageMs":{"Count":2,"Max":10,"Min":6},"FirstMessageMs":{"Count":2,"Sum":16,"Max":10,"Min":6},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[7,36,11,48]}},"Name":"6","Push":{"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":16,"Max":10,"Min":6},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":15,"Max":10,"Min":5},"FirstMessageMs":{"Count":2,"Sum":15,"Max":10,"Min":5},"ActiveMessageMs":{"Count":2,"Max":10,"Min":5},"PauseMessageMs":{"Count":2,"Sum":7,"Max":4,"Min":3},"ActiveTimeUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"WaitTimeUs":{"Count":2,"Sum":14996,"Max":10102,"Min":4894,"History":[7,4894,11,14996]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":10,"Min":3}}}],"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[7,1048576,11,2097152]},"DurationUs":{"Count":2,"Sum":11000,"Max":7000,"Min":4000},"InputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"Tasks":2,"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":9000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"BaseTimeMs":1744284878573,"WaitInputTimeUs":{"Count":2,"Sum":14740,"Max":9966,"Min":4774,"History":[7,4774,11,14740]},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":856,"Max":571,"Min":285,"History":[7,285,11,856]},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":6,"Max":4,"Min":2},"ActiveMessageMs":{"Count":2,"Max":4,"Min":2},"FirstMessageMs":{"Count":2,"Sum":6,"Max":4,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[7,36,11,48]}},"Name":"2","Push":{"LastMessageMs":{"Count":2,"Sum":6,"Max":4,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":6,"Max":4,"Min":2},"FirstMessageMs":{"Count":2,"Sum":6,"Max":4,"Min":2},"ActiveMessageMs":{"Count":2,"Max":4,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[7,36,11,48]},"WaitTimeUs":{"Count":2,"Sum":6237,"Max":4282,"Min":1955,"History":[7,1955,11,6237]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"ActiveMessageMs":{"Count":1,"Max":10,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[12,27]},"ActiveTimeUs":{"Count":1,"Sum":4000,"Max":4000,"Min":4000}},"Name":"8","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"ActiveMessageMs":{"Count":1,"Max":10,"Min":6},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":4000,"Max":4000,"Min":4000},"WaitTimeUs":{"Count":1,"Sum":8941,"Max":8941,"Min":8941,"History":[12,8941]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[12,1048576]},"DurationUs":{"Count":1,"Sum":6000,"Max":6000,"Min":6000},"InputBytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":6000,"BaseTimeMs":1744284878573,"WaitInputTimeUs":{"Count":1,"Sum":4380,"Max":4380,"Min":4380,"History":[12,4380]},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":935,"Max":935,"Min":935,"History":[12,935]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"ActiveMessageMs":{"Count":1,"Max":10,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[12,48]},"ActiveTimeUs":{"Count":1,"Sum":4000,"Max":4000,"Min":4000}},"Name":"4","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"ActiveMessageMs":{"Count":1,"Max":10,"Min":6},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[12,48]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":4000,"Max":4000,"Min":4000},"WaitTimeUs":{"Count":1,"Sum":6561,"Max":6561,"Min":6561,"History":[12,6561]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":10,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"DurationUs":{"Count":1,"Sum":6000,"Max":6000,"Min":6000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[12,1048576]},"BaseTimeMs":1744284878573,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"ActiveMessageMs":{"Count":1,"Max":11,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[12,27]},"ActiveTimeUs":{"Count":1,"Sum":4000,"Max":4000,"Min":4000}},"Name":"RESULT","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"ActiveMessageMs":{"Count":1,"Max":11,"Min":6},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":5000,"Max":5000,"Min":5000},"WaitTimeUs":{"Count":1,"Sum":9777,"Max":9777,"Min":9777,"History":[12,9777]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":11,"Min":1}}}],"CpuTimeUs":{"Count":1,"Sum":670,"Max":670,"Min":670,"History":[12,670]},"StageDurationUs":6000,"WaitInputTimeUs":{"Count":1,"Sum":3910,"Max":3910,"Min":3910,"History":[12,3910]},"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"ActiveMessageMs":{"Count":1,"Max":11,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[12,27]},"ActiveTimeUs":{"Count":1,"Sum":5000,"Max":5000,"Min":5000}},"Name":"6","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"ActiveMessageMs":{"Count":1,"Max":11,"Min":6},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[12,27]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":5000,"Max":5000,"Min":5000},"WaitTimeUs":{"Count":1,"Sum":9577,"Max":9577,"Min":9577,"History":[12,9577]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":11,"Min":1}}}],"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":257170,"CpuTimeUs":253679},"ProcessCpuTimeUs":475,"TotalDurationUs":294864,"ResourcePoolId":"default","QueuedTimeUs":946},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Key"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.935,"A-Cpu":0.935,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.67,"A-Cpu":1.605,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ParameterTypes >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 64901, MsgBus: 19323 2025-04-10T11:34:11.859540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646390676799988:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:11.859726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175c/r3tmp/tmpaSXA6g/pdisk_1.dat 2025-04-10T11:34:12.203085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64901, node 1 2025-04-10T11:34:12.246310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:12.246404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:12.248982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:12.320840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:12.320869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:12.320876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:12.321025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19323 TClient is connected to server localhost:19323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:12.881101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.914307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:13.071033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:13.237109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:13.299592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:15.339792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646407856670968:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:15.339914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:15.702482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.733878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.760218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.792905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.829830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.867167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:15.914188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646407856671479:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:15.914278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:15.914414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646407856671484:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:15.918333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:15.929204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646407856671486:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:16.002990Z node 1 :TX_PROXY ERROR: Actor# [1:7491646407856671540:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:16.863502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646390676799988:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:16.863576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 
+∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 25866, MsgBus: 22571 2025-04 ... ngesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":23,"Operators":[{"Scan":"Parallel","ReadRange":["Key (20, 120]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":24,"Plans":[{"PlanNodeId":28,"Plans":[{"PlanNodeId":29,"Plans":[{"PlanNodeId":30,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":34,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":36,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node 
Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":37,"Plans":[{"PlanNodeId":41,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Plans":[{"PlanNodeId":45,"Plans":[{"PlanNodeId":46,"Plans":[{"PlanNodeId":47,"Plans":[{"PlanNodeId":48,"Plans":[{"PlanNodeId":49,"Operators":[{"Scan":"Parallel","ReadRange":["Key [10, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 28654, MsgBus: 21641 2025-04-10T11:34:33.071538Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646485349931077:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:33.071639Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00175c/r3tmp/tmpfLC4wb/pdisk_1.dat 2025-04-10T11:34:33.224377Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:33.244247Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:33.244355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:33.245761Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28654, node 4 2025-04-10T11:34:33.330997Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:33.331027Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:33.331038Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:33.331201Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21641 TClient is connected to server localhost:21641 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:33.956076Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.964704Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:33.980932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:34.077201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:34.297315Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:34.457010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.549349Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646502529802034:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.549481Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.614353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.696332Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.747238Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.795186Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.847897Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.930529Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.991965Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646502529802549:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.992057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.992309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646502529802554:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.996638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:38.010210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646502529802556:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:38.071977Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646485349931077:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:38.072088Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:38.113213Z node 4 :TX_PROXY ERROR: Actor# [4:7491646506824769910:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryClientTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 14839, MsgBus: 29504 2025-04-10T11:34:19.902487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646424438155342:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:19.902544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001756/r3tmp/tmpPraWhz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14839, node 1 2025-04-10T11:34:20.343054Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:34:20.349177Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:34:20.375971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:20.376167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:20.378168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:20.395147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:20.403256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:20.403277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:20.403284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:20.403489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29504 TClient is connected to server localhost:29504 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:20.966017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.007334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.165054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.339659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:21.422551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:22.972178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646437323059022:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:22.972281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.224390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.293741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.326766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.399028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.443779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.518239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.573359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646441618026840:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.573438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.573593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646441618026845:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.576728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:23.586601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646441618026847:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:23.665867Z node 1 :TX_PROXY ERROR: Actor# [1:7491646441618026901:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:24.477950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:24.721210Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTY3NGQ2NGMtMWVlZjgzMzQtNTFiZjUxN2YtZTMyNzNjMg==, ActorId: [1:7491646445912994819:2521], ActorState: ExecuteState, TraceId: 01jrfqnm3465ek48wny6k7qnpq, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-04-10T11:34:25.623267Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646454214200865:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:25.623325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001756/r3tmp/tmprSjWon/pdisk_1.dat 2025-04-10T11:34:25.720662Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 2 2025-04-10T11:34:25.760528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:25.760603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:25.767482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:25.797594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:25.797617Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:25.797625Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:25.797735Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:26.239811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:26.265892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.183107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646471394075364:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { ... 
":7707,\"Min\":7707,\"History\":[22,763,42,1504,62,1670,99,1753,108,7707]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":100000,\"Max\":100000,\"Min\":100000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":106,\"Max\":106,\"Min\":106},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":102,\"Max\":102,\"Min\":102},\"FirstMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"ActiveMessageMs\":{\"Count\":1,\"Max\":106,\"Min\":5},\"PauseMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":101000,\"Max\":101000,\"Min\":101000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":97709,\"Max\":97709,\"Min\":97709,\"History\":[22,18629,42,37871,62,58029,99,94502,108,97709]},\"WaitPeriods\":{\"Count\":1,\"Sum\":23,\"Max\":23,\"Min\":23},\"WaitMessageMs\":{\"Count\":1,\"Max\":102,\"Min\":3}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,108,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":105000,\"Max\":105000,\"Min\":105000},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7707,\"Max\":7707,\"Min\":7707},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":105000,\"BaseTimeMs\":1744284874874,\"OutputBytes\":{\"Count\":1,\"Sum\":7707,\"Max\":7707,\"Min\":7707},\"CpuTimeUs\":{\"Count\":1,\"Sum\":9681,\"Max\":9681,\"Min\":9681,\"History\":[1,301,22,2827,42,4581,62,5132,99,5371,108,9681]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":30,\"Max\":30,\"Min\":30},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":106,\"Max\":106,\"Min\":106},\"ActiveMessageMs\":{\"Count\":1,\"Max\":106,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,1018,42,1765,62,1931,99,2014,108,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":103000,\"Max\":103000,\"Min\":103000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":102,\"Max\":102,\"Min\":102},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":102,\"Max\":102,\"Min\":102},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveMessageMs\":{\"Count\":1,\"Max\":102,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,2595,42,5891,62,7753,99,7919,108,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":99000,\"Max\":99000,\"Min\":99000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":23922,\"Max\":23922,\"Min\":23922,\"History\":[22,4416,42,9078,62,14061,99,23155,108,23922]},\"WaitPeriods\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24},\"WaitMessageMs\":{\"Count\":1,\"Max\":102,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":145862,\"CpuTimeUs\":141528},\"ProcessCpuTimeUs\":353,\"TotalDurationUs\":388881,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":117022},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"36e4d3d4-9d5d94c0-3a0ee268-1bdf7a31\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"91766bdc-cca3df89-858413c0-1585dba2\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 388881 total_cpu_time_us: 258051 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1744284874\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"8c6d5796-ed63776b-78f2914a-5e000718\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 16820, MsgBus: 21172 2025-04-10T11:34:36.134513Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646501458841122:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:36.134583Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001756/r3tmp/tmpFp9hz5/pdisk_1.dat 2025-04-10T11:34:36.264615Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:36.310495Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:36.310611Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:36.312423Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-04-10T11:34:36.402769Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:36.402799Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:36.402812Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:36.402972Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:34:37.051901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.062966Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:37.085188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.397022Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646518638715705:2636], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.397116Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.397657Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646518638715717:2639], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.401769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:34:40.420352Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646518638715719:2640], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:34:40.483197Z node 4 :TX_PROXY ERROR: Actor# [4:7491646518638715793:5675] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::Now >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31809, MsgBus: 21904 2025-04-10T11:34:23.386561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646444873303460:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:23.386778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001750/r3tmp/tmpOIVh2Z/pdisk_1.dat 2025-04-10T11:34:23.753589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31809, node 1 2025-04-10T11:34:23.779842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:23.779971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:23.781510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:23.830725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:23.830758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:23.830764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:23.830893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21904 TClient is connected to server localhost:21904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:24.406922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:24.432150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:24.441964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:24.600092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:34:24.750249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:24.820408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:26.697375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646457758207121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:26.697555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.114164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.156431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.202205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.245456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.317827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.398347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.463561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646462053174941:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.463643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.463901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646462053174946:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.468433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:27.481307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646462053174948:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:27.544315Z node 1 :TX_PROXY ERROR: Actor# [1:7491646462053175002:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:28.386675Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646444873303460:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:28.386750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:28.440831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61410, MsgBus: 64397 2025-04-10T11:34:30.033126Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646473516843514:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:30.033181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001750/r3tmp/tmprBm19k/pdisk_1.dat 2025-04-10T11:34:30.198980Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:30.210930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:30.211017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:30.214057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61410, node 2 2025-04-10T11:34:30.342799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:30.342822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:30.342829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:30.342949Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64397 TClient is connected to server localhost:64397 WaitRootIsUp 'Root'... 
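A large share of the warnings in these blocks is the same startup chatter: every fresh test database logs "Resource pool default not found or you don't have access permissions" (NOT_FOUND) until the default pool is created, usually followed by a "path exist, request accepts it" TX_PROXY error from the create race, and the "Memory limit exception ... current limit is 1024 bytes" earlier is an intentionally tiny limit set by the test. A hedged triage sketch that counts these known patterns so the remaining noise stands out (pattern strings are copied verbatim from the log above; the file name is an assumption):

    from collections import Counter

    # Message fragments copied verbatim from the log above; anything
    # not matching one of these deserves a closer look.
    KNOWN = [
        "Resource pool default not found or you don't have access permissions",
        "path exist, request accepts it",
        "Memory limit exception at ExecuteState",
    ]

    counts = Counter()
    with open("ya_test.log") as f:   # assumed local copy of this log
        for line in f:
            for pat in KNOWN:
                if pat in line:
                    counts[pat] += 1
    print(counts.most_common())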
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:30.825219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.835827Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:30.843318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.914756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04 ... hard: 72057594046644480 2025-04-10T11:34:33.711536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.748626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.828909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.880698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646486401747697:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.880788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.880846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646486401747702:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.888069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:33.897692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646486401747704:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:33.961109Z node 2 :TX_PROXY ERROR: Actor# [2:7491646486401747757:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:35.038621Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646473516843514:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:35.038708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 11623 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3796 affected_shards: 2 } compilation { duration_us: 259179 cpu_time_us: 254062 } process_cpu_time_us: 452 total_duration_us: 274404 total_cpu_time_us: 258310 Trying to start YDB, gRPC: 65161, MsgBus: 2314 2025-04-10T11:34:36.230689Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646501484574330:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:36.230781Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001750/r3tmp/tmpGXb8CR/pdisk_1.dat 2025-04-10T11:34:36.365708Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:36.393256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:36.393360Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:36.395103Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65161, node 3 2025-04-10T11:34:36.454721Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:36.454748Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:36.454756Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:36.454890Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2314 TClient is connected to server localhost:2314 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:36.980774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.991034Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:36.997595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.067973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.263086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.354487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.027565Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646518664445287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.027664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.071478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.108690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.151621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.226062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.277309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.324651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.422738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646518664445806:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.422849Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.423115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646518664445811:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.427023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:40.439999Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646518664445813:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:40.515554Z node 3 :TX_PROXY ERROR: Actor# [3:7491646518664445869:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:41.238322Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646501484574330:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:41.238416Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 5271 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 4240 affected_shards: 1 } query_phases { duration_us: 7854 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2888 affected_shards: 2 } compilation { duration_us: 243083 cpu_time_us: 239258 } process_cpu_time_us: 855 total_duration_us: 260582 total_cpu_time_us: 247241 >> TObjectStorageListingTest::TestSkipShards [GOOD] >> KqpLimits::StreamWrite+Allowed >> TFlatTest::LargeProxyReplyRW [GOOD] >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> KqpStats::SysViewClientLost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-04-10T11:34:36.790314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646500293295466:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:36.790451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdf/r3tmp/tmpILVOnZ/pdisk_1.dat 2025-04-10T11:34:37.271651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:37.276831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:37.276926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:37.285315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12787, node 1 2025-04-10T11:34:37.423075Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:37.423126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:37.423139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:37.423289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3928 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:37.821327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.841071Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.857777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:37.873607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdf/r3tmp/tmpXxq3a3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10839, node 2 TClient is connected to server localhost:8194 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... 
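The two query_phases summaries above (from the KqpQuery::QueryStats-UseSink block) are internally consistent: total_cpu_time_us equals the per-phase cpu_time_us plus the compilation and process CPU times, while total_duration_us exceeds the sum of the stated durations, the remainder presumably being queueing and scheduling overhead. A quick check with the numbers copied from the log:

    # Second summary: two phases, then compilation and process CPU, in microseconds.
    phase_cpu = [4240, 2888]
    compilation_cpu = 239258
    process_cpu = 855
    assert sum(phase_cpu) + compilation_cpu + process_cpu == 247241  # total_cpu_time_us

    # The first summary adds up the same way: 3796 + 254062 + 452 == 258310.
    assert 3796 + 254062 + 452 == 258310
    # Durations do not: 5271 + 7854 + 243083 = 256208 < 260582 (total_duration_us);
    # the missing ~4.4 ms is wall time outside the reported phases.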
>> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD]
>> KqpExplain::PureExpr [GOOD]
>> KqpExplain::ReadTableRangesFullScan
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD]
>> DataShardSnapshots::VolatileSnapshotCleanupOnFinish
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD]
Test command err: 2025-04-10T11:34:32.218475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646483189698622:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:32.218532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe3/r3tmp/tmpWhjifw/pdisk_1.dat 2025-04-10T11:34:32.645023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:32.663346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:32.663426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:32.665696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32687 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:32.969318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:32.983371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:32.992791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:32.998019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-10T11:34:37.219947Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646483189698622:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:37.224132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:38.270562Z node 1 :TX_PROXY ERROR: Actor# [1:7491646504664538684:4144] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2025-04-10T11:34:38.270632Z node 1 :TX_PROXY ERROR: Actor# [1:7491646504664538684:4144] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-04-10T11:34:38.929363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646507746345713:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:38.929436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe3/r3tmp/tmpNWoes6/pdisk_1.dat 2025-04-10T11:34:39.079796Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:39.107616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:39.107719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:39.112029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11507 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:39.379859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:39.388764Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:39.424521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:43.930661Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646507746345713:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:43.930758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:44.915073Z node 2 :TX_PROXY ERROR: Actor# [2:7491646533516153075:4147] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-04-10T11:34:44.915156Z node 2 :TX_PROXY ERROR: Actor# [2:7491646533516153075:4147] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable
>> TFlatTest::RejectByPerShardReadSize [GOOD]
>> TFlatTest::RejectByPerRequestSize
>> KqpParams::ParameterTypes [GOOD]
>> KqpParams::InvalidJson
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD]
Test command err: Trying to start YDB, gRPC: 17363, MsgBus: 14781 2025-04-10T11:34:29.337199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646467616012866:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:29.337457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001747/r3tmp/tmp724nMV/pdisk_1.dat 2025-04-10T11:34:29.695880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:29.716467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:29.716577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:29.719826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17363, node 1 2025-04-10T11:34:29.809410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:29.809452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:29.809477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:29.809634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14781 TClient is connected to server localhost:14781 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:30.477645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.498200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:30.505755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.687896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.853987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:30.947345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.740656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646480500916382:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.740809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.088304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.127116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.161620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.236394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.272323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.322405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.395892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646484795884198:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.395967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.396206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646484795884204:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.401038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:33.412571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646484795884206:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:33.468792Z node 1 :TX_PROXY ERROR: Actor# [1:7491646484795884259:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:34.331711Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646467616012866:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:34.331793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15761, MsgBus: 29465 2025-04-10T11:34:35.382469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646493320814752:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:35.382554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001747/r3tmp/tmpwneWot/pdisk_1.dat 2025-04-10T11:34:35.500841Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15761, node 2 2025-04-10T11:34:35.550928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:35.551024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:35.610375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:35.622551Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:35.622574Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:35.622581Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:35.622713Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29465 TClient is connected to server localhost:29465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:36.062030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.072495Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:36.085665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.173770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.364100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.476673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.783750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646506205718408:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:38.783859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:38.830531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:38.866036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:38.939541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:38.973591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:39.008449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:39.082810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:39.180267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646510500686227:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:39.180405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:39.180917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646510500686232:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:39.186527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:39.201244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646510500686234:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:39.267635Z node 2 :TX_PROXY ERROR: Actor# [2:7491646510500686288:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:40.382541Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646493320814752:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:40.382643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20569, MsgBus: 9798 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001747/r3tmp/tmpt7PfKv/pdisk_1.dat 2025-04-10T11:34:41.288515Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:34:41.385278Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:41.403039Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:41.403121Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:41.404404Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20569, node 3 2025-04-10T11:34:41.462453Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:41.462480Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:41.462488Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:41.462622Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9798 TClient is connected to server localhost:9798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:41.913121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:41.921952Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:45.111394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646538079823603:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.111503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.133779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:34:45.219235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646538079823706:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.219365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.219992Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646538079823711:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.224703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:34:45.236269Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646538079823713:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:34:45.316848Z node 3 :TX_PROXY ERROR: Actor# [3:7491646538079823766:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpExplain::ExplainStream
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink
>> KqpParams::CheckCacheWithRecompilationQuery [GOOD]
>> KqpStats::OneShardLocalExec+UseSink [GOOD]
>> KqpParams::Decimal-QueryService+UseSink [GOOD]
>> KqpParams::Decimal+QueryService+UseSink
>> TLocksTest::Range_BrokenLockMax [GOOD]
>> TLocksTest::Range_CorrectDot
>> KqpQuery::OlapCreateAsSelect_Simple
>> KqpQuery::Now [GOOD]
>> KqpQuery::NoEvaluate
>> KqpLimits::StreamWrite+Allowed [GOOD]
>> KqpLimits::StreamWrite-Allowed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD]
Test command err: Trying to start YDB, gRPC: 31018, MsgBus: 30183 2025-04-10T11:34:22.136667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646441370580372:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:22.136748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001751/r3tmp/tmpxz50nT/pdisk_1.dat 2025-04-10T11:34:22.477835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:22.496874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:22.496980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:22.499136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31018, node 1 2025-04-10T11:34:22.577513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:22.577539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:22.577554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:22.577696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30183 TClient is connected to server localhost:30183 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:23.116322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.153482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.288886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.450060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.538522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:25.420541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646454255484027:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:25.420748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:25.777200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.813764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.892726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.929381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.965775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:26.038699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:26.092858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646458550451838:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:26.092940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:26.092978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646458550451843:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:26.096961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:26.115096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646458550451845:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:26.182784Z node 1 :TX_PROXY ERROR: Actor# [1:7491646458550451900:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:27.136885Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646441370580372:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:27.137000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26709, MsgBus: 17388 2025-04-10T11:34:28.439003Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646465540107866:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:28.439060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001751/r3tmp/tmp4VB6lI/pdisk_1.dat 2025-04-10T11:34:28.596769Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26709, node 2 2025-04-10T11:34:28.631146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:28.631202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:28.632312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:28.678985Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:28.679006Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:28.679014Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:28.679137Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17388 TClient is connected to server localhost:17388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:29.123689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.133564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:29.152423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.238666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.404609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.512154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... false data# peer# 2025-04-10T11:34:48.319373Z node 4 :GRPC_SERVER DEBUG: [0x51b000419280] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-10T11:34:48.319408Z node 4 :GRPC_SERVER DEBUG: [0x51b000413e80] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-10T11:34:48.319563Z node 4 :GRPC_SERVER DEBUG: [0x51b000414580] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-10T11:34:48.319589Z node 4 :GRPC_SERVER DEBUG: [0x51b000416880] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-10T11:34:48.319745Z node 4 :GRPC_SERVER DEBUG: [0x51b000368d80] received request Name# CreateDatabase ok# false data# peer# 2025-04-10T11:34:48.319767Z node 4 :GRPC_SERVER DEBUG: [0x51b000369480] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-10T11:34:48.319922Z node 4 :GRPC_SERVER DEBUG: [0x51b000369b80] received request Name# AlterDatabase ok# false data# peer# 2025-04-10T11:34:48.319954Z node 4 :GRPC_SERVER DEBUG: [0x51b00036a280] received request Name# ListDatabases ok# false data# peer# 2025-04-10T11:34:48.320123Z node 4 :GRPC_SERVER DEBUG: [0x51b00036be80] received request Name# RemoveDatabase ok# false data# peer# 2025-04-10T11:34:48.320137Z node 4 :GRPC_SERVER DEBUG: [0x51b00036b780] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-10T11:34:48.320301Z node 4 :GRPC_SERVER DEBUG: [0x51b00036a980] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-10T11:34:48.320331Z node 4 :GRPC_SERVER DEBUG: [0x51b000480680] received request Name# ListEndpoints ok# false data# peer# 2025-04-10T11:34:48.320417Z node 4 :GRPC_SERVER DEBUG: [0x51b00035ec80] received request Name# WhoAmI ok# false data# peer# 2025-04-10T11:34:48.320435Z node 4 :GRPC_SERVER DEBUG: [0x51b00036c580] received request Name# NodeRegistration ok# false data# peer# 2025-04-10T11:34:48.320603Z node 4 :GRPC_SERVER DEBUG: [0x51b000371280] received request Name# Scan ok# false data# peer# 2025-04-10T11:34:48.320616Z node 4 :GRPC_SERVER DEBUG: [0x51b00036fd80] received request Name# GetShardLocations ok# false data# peer# 2025-04-10T11:34:48.320739Z node 4 :GRPC_SERVER DEBUG: [0x51b000371980] received request Name# DescribeTable ok# false data# peer# 2025-04-10T11:34:48.320796Z node 4 :GRPC_SERVER DEBUG: [0x51b000373c80] received request Name# CreateSnapshot ok# false data# peer# 2025-04-10T11:34:48.320884Z node 4 :GRPC_SERVER DEBUG: [0x51b000372780] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-10T11:34:48.321043Z node 4 :GRPC_SERVER DEBUG: [0x51b000374380] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-10T11:34:48.321069Z node 4 :GRPC_SERVER DEBUG: [0x51b000376680] received request Name# List ok# false data# peer# 2025-04-10T11:34:48.321240Z node 4 :GRPC_SERVER DEBUG: [0x51b000375180] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-10T11:34:48.321250Z node 4 :GRPC_SERVER DEBUG: [0x51b000377b80] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-10T11:34:48.321358Z node 4 :GRPC_SERVER DEBUG: [0x51b00037c880] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-10T11:34:48.321419Z node 4 :GRPC_SERVER DEBUG: [0x51b000378280] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-10T11:34:48.321486Z node 4 :GRPC_SERVER DEBUG: [0x51b000379080] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-04-10T11:34:48.321626Z node 4 :GRPC_SERVER DEBUG: [0x51b00037c180] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-10T11:34:48.321638Z node 4 :GRPC_SERVER DEBUG: [0x51b00037dd80] received request Name# CreateStream ok# false data# peer# 2025-04-10T11:34:48.321766Z node 4 :GRPC_SERVER DEBUG: [0x51b00045ac80] received request Name# ListStreams ok# false data# peer# 2025-04-10T11:34:48.321803Z node 4 :GRPC_SERVER DEBUG: [0x51b00013da80] received request Name# DeleteStream ok# false data# peer# 2025-04-10T11:34:48.321889Z node 4 :GRPC_SERVER DEBUG: [0x51b00037d680] received request Name# DescribeStream ok# false data# peer# 2025-04-10T11:34:48.321976Z node 4 :GRPC_SERVER DEBUG: [0x51b0004ee380] received request Name# ListShards ok# false data# peer# 2025-04-10T11:34:48.322032Z node 4 :GRPC_SERVER DEBUG: [0x51b000425d80] received request Name# SetWriteQuota ok# false data# peer# 2025-04-10T11:34:48.322265Z node 4 :GRPC_SERVER DEBUG: [0x51b00017f480] received request Name# UpdateStream ok# false data# peer# 2025-04-10T11:34:48.322453Z node 4 :GRPC_SERVER DEBUG: [0x51b00045c180] received request Name# PutRecord ok# false data# peer# 2025-04-10T11:34:48.322667Z node 4 :GRPC_SERVER DEBUG: [0x51b0004eea80] received request Name# PutRecords ok# false data# peer# 2025-04-10T11:34:48.322679Z node 4 :GRPC_SERVER DEBUG: [0x51b0004a0c80] received request Name# GetRecords ok# false data# peer# 2025-04-10T11:34:48.322855Z node 4 :GRPC_SERVER DEBUG: [0x51b0004a5280] received request Name# GetShardIterator ok# false data# peer# 2025-04-10T11:34:48.322860Z node 4 :GRPC_SERVER DEBUG: [0x51b00049fe80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-10T11:34:48.323027Z node 4 :GRPC_SERVER DEBUG: [0x51b00049f080] received request Name# DescribeLimits ok# false data# peer# 2025-04-10T11:34:48.323062Z node 4 :GRPC_SERVER DEBUG: [0x51b0004a3680] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-10T11:34:48.323204Z node 4 :GRPC_SERVER DEBUG: [0x51b00049d480] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-10T11:34:48.323251Z node 4 :GRPC_SERVER DEBUG: [0x51b0004a2880] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-10T11:34:48.323376Z node 4 :GRPC_SERVER DEBUG: [0x51b00049e280] received request Name# UpdateShardCount ok# false data# peer# 2025-04-10T11:34:48.323419Z node 4 :GRPC_SERVER DEBUG: [0x51b0004a1a80] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-10T11:34:48.323574Z node 4 :GRPC_SERVER DEBUG: [0x51b000220480] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-10T11:34:48.323612Z node 4 :GRPC_SERVER DEBUG: [0x51b00011cd80] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-10T11:34:48.323763Z node 4 :GRPC_SERVER DEBUG: [0x51b0001faa80] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-10T11:34:48.323843Z node 4 :GRPC_SERVER DEBUG: [0x51b0001f7980] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-10T11:34:48.323948Z node 4 :GRPC_SERVER DEBUG: [0x51b000118080] received request Name# AddTagsToStream ok# false data# peer# 2025-04-10T11:34:48.324030Z node 4 :GRPC_SERVER DEBUG: [0x51b0001c7080] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-10T11:34:48.324172Z node 4 :GRPC_SERVER DEBUG: [0x51b0001eed80] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-10T11:34:48.324238Z node 4 :GRPC_SERVER DEBUG: [0x51b0001efb80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-10T11:34:48.324354Z node 4 :GRPC_SERVER DEBUG: [0x51b0002d2c80] received request Name# MergeShards ok# false data# peer# 2025-04-10T11:34:48.324424Z node 4 :GRPC_SERVER DEBUG: [0x51b00013f680] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-10T11:34:48.324528Z node 4 :GRPC_SERVER DEBUG: [0x51b0002d1e80] received request Name# SplitShard ok# false data# peer# 2025-04-10T11:34:48.324600Z node 4 :GRPC_SERVER DEBUG: [0x51b00013ef80] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-10T11:34:48.324736Z node 4 :GRPC_SERVER DEBUG: [0x51b000426b80] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-10T11:34:48.324791Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e9e80] received request Name# SelfCheck ok# false data# peer# 2025-04-10T11:34:48.324901Z node 4 :GRPC_SERVER DEBUG: [0x51b000423380] received request Name# NodeCheck ok# false data# peer# 2025-04-10T11:34:48.325007Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e8980] received request Name# CreateSession ok# false data# peer# 2025-04-10T11:34:48.325100Z node 4 :GRPC_SERVER DEBUG: [0x51b00042f780] received request Name# DeleteSession ok# false data# peer# 2025-04-10T11:34:48.325208Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e8280] received request Name# AttachSession ok# false data# peer# 2025-04-10T11:34:48.325287Z node 4 :GRPC_SERVER DEBUG: [0x51b00017ed80] received request Name# BeginTransaction ok# false data# peer# 2025-04-10T11:34:48.325391Z node 4 :GRPC_SERVER DEBUG: [0x51b000019680] received request Name# CommitTransaction ok# false data# peer# 2025-04-10T11:34:48.325457Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e7b80] received request Name# RollbackTransaction ok# false data# peer# 2025-04-10T11:34:48.325572Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e9780] received request Name# ExecuteQuery ok# false data# peer# 2025-04-10T11:34:48.325630Z node 4 :GRPC_SERVER DEBUG: [0x51b0003e9080] received request Name# ExecuteScript ok# false data# peer# 2025-04-10T11:34:48.325759Z node 4 :GRPC_SERVER DEBUG: [0x51b000424f80] received request Name# FetchScriptResults ok# false data# peer# 2025-04-10T11:34:48.325811Z node 4 :GRPC_SERVER DEBUG: [0x51b00045eb80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-10T11:34:48.325932Z node 4 :GRPC_SERVER DEBUG: [0x51b0001beb80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-10T11:34:48.325975Z node 4 :GRPC_SERVER DEBUG: [0x51b00045dd80] received request Name# RestartTablet ok# false data# peer# 2025-04-10T11:34:48.326130Z node 4 :GRPC_SERVER DEBUG: [0x51b0001df180] received request Name# CreateLogStore ok# false data# peer# 2025-04-10T11:34:48.327246Z node 4 :GRPC_SERVER DEBUG: [0x51b00017e680] received request Name# DescribeLogStore ok# false data# peer# 2025-04-10T11:34:48.327378Z node 4 :GRPC_SERVER DEBUG: [0x51b0001dea80] received request Name# DropLogStore ok# false data# peer# 2025-04-10T11:34:48.327425Z node 4 :GRPC_SERVER DEBUG: [0x51b000129f80] received request Name# AlterLogStore ok# false data# peer# 2025-04-10T11:34:48.327571Z node 4 :GRPC_SERVER DEBUG: [0x51b00004b480] received request Name# CreateLogTable ok# false data# peer# 2025-04-10T11:34:48.327606Z node 4 :GRPC_SERVER DEBUG: [0x51b00012c980] received request Name# DescribeLogTable ok# false data# peer# 2025-04-10T11:34:48.327778Z node 4 :GRPC_SERVER DEBUG: [0x51b0002bea80] received request Name# DropLogTable ok# false data# peer# 2025-04-10T11:34:48.327789Z node 4 :GRPC_SERVER DEBUG: [0x51b0004eb980] received request Name# AlterLogTable ok# false data# peer# 2025-04-10T11:34:48.327949Z node 4 :GRPC_SERVER DEBUG: [0x51b00005fd80] received request Name# Login ok# false data# peer# 2025-04-10T11:34:48.327970Z node 4 :GRPC_SERVER DEBUG: [0x51b0001ea780] received request Name# DescribeReplication ok# false data# peer# 2025-04-10T11:34:48.328116Z node 4 :GRPC_SERVER DEBUG: [0x51b00005f680] received request Name# DescribeView ok# false data# peer#
>> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD]
>> DataShardSnapshots::PostMergeNotCompactedTooEarly
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 13603, MsgBus: 4315 2025-04-10T11:34:30.127987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646472953095887:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:30.128064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001743/r3tmp/tmp7tIpW7/pdisk_1.dat 2025-04-10T11:34:30.649473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:30.657527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:30.657657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:30.660330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13603, node 1 2025-04-10T11:34:30.742363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:30.742391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:30.742404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:30.742533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4315 TClient is connected to server localhost:4315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:31.387846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.403591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:31.414108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.553034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.751271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.825676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.608899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646485837999353:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.609067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.901419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.935392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.967638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:34.007305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:34.102114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:34.179526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:34.249758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646490132967171:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:34.249890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:34.250299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646490132967176:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:34.255509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:34.267230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646490132967178:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:34.361627Z node 1 :TX_PROXY ERROR: Actor# [1:7491646490132967235:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:35.128331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646472953095887:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:35.157540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:35.551081Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284875522, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 2969, MsgBus: 25677 2025-04-10T11:34:36.454470Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646500840458318:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001743/r3tmp/tmp2mClNM/pdisk_1.dat 2025-04-10T11:34:36.475232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:36.573106Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:36.599872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:36.599965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:36.602750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2969, node 2 2025-04-10T11:34:36.722780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:36.722812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:36.722819Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:36.722951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25677 TClient is connected to server localhost:25677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:37.279880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.288393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:37.301540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.370570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:37.566653Z node 2 :FLAT_TX_SCHEME ... source pool default not found or you don't have access permissions } 2025-04-10T11:34:39.989176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.035671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.111661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.152091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.188303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.226064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.301883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:40.392207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646518020329611:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.392329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.392646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646518020329616:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:40.397636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:40.412578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646518020329618:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:40.499651Z node 2 :TX_PROXY ERROR: Actor# [2:7491646518020329673:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:41.384395Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646500840458318:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:41.384474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:41.755115Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284881759, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 29149, MsgBus: 16281 2025-04-10T11:34:42.805692Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646526770553778:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:42.805767Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001743/r3tmp/tmp5o8KLl/pdisk_1.dat 2025-04-10T11:34:42.990966Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:43.007858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:43.007976Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:43.009623Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29149, node 3 2025-04-10T11:34:43.102804Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:43.102831Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:43.102838Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:43.102977Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16281 TClient is connected to server localhost:16281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:43.643869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:43.660496Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:43.740145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:43.926678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.010285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:46.874338Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646543950424712:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:46.874488Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:46.927252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:46.966392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.007887Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.054269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.127981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.185648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.252585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646548245392525:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.252680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.252915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646548245392530:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.257191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:47.269561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646548245392532:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:47.335260Z node 3 :TX_PROXY ERROR: Actor# [3:7491646548245392585:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:47.806686Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646526770553778:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:47.806765Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::RejectByPerRequestSize [GOOD] >> KqpStats::SysViewCancelled [GOOD] >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryCancelWrite >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::ReadTableRanges >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-04-10T11:34:29.197719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646470203908802:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:29.202917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fea/r3tmp/tmpiIqjC0/pdisk_1.dat 2025-04-10T11:34:29.639070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:29.656012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:29.656111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:29.663341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4590 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:29.912450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.928785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:29.944312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:34:34.198276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646470203908802:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:34.198378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:37.791247Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002341 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-04-10T11:34:37.791336Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002341 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-04-10T11:34:37.791606Z node 1 :TX_PROXY ERROR: Actor# [1:7491646504563648691:2940] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-04-10T11:34:38.428675Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646506233776336:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:38.429127Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fea/r3tmp/tmp8ebZzS/pdisk_1.dat 2025-04-10T11:34:38.565103Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:38.594037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:38.594112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:38.596233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected TClient is connected to server localhost:12836 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:38.787499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.802124Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.809628Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:34:38.814331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:43.429013Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646506233776336:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:43.429106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:46.830736Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002293 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-04-10T11:34:46.830852Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002293 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-04-10T11:34:46.832609Z node 2 :TX_PROXY ERROR: Actor# [2:7491646540593516358:2938] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-04-10T11:34:47.571277Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646547682480512:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:47.571327Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fea/r3tmp/tmpcWEnfb/pdisk_1.dat 2025-04-10T11:34:47.753209Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:47.753290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:47.755342Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:47.771835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12370 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:48.016746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:48.022816Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:48.042597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.432692Z node 3 :TX_PROXY DEBUG: actor# [3:7491646547682480747:2100] Handle TEvProposeTransaction 2025-04-10T11:34:51.432731Z node 3 :TX_PROXY DEBUG: actor# [3:7491646547682480747:2100] TxId# 281474976710700 ProcessProposeTransaction 2025-04-10T11:34:51.432783Z node 3 :TX_PROXY DEBUG: actor# [3:7491646547682480747:2100] Cookie# 0 userReqId# "" txid# 281474976710700 SEND to# [3:7491646564862350795:2613] DataReq marker# P0 2025-04-10T11:34:51.432856Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] Cookie# 0 txid# 281474976710700 HANDLE TDataReq marker# P1 2025-04-10T11:34:51.433568Z node 3 :TX_PROXY DEBUG: Actor [3:7491646564862350795:2613] txid 281474976710700 disallow followers cause of operation 2 read target mode 0 2025-04-10T11:34:51.433584Z node 3 :TX_PROXY DEBUG: Actor [3:7491646564862350795:2613] txid 281474976710700 disallow followers cause of operation 2 read target mode 0 2025-04-10T11:34:51.433619Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 SEND to# [3:7491646547682480774:2115] TSchemeCache with 2 scheme entries. DataReq marker# P2 2025-04-10T11:34:51.433814Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-10T11:34:51.435450Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-10T11:34:51.435802Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:34:51.435814Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-10T11:34:51.436915Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976710700 at tablet 72075186224037888 2025-04-10T11:34:51.437214Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-10T11:34:51.437961Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976710700 at tablet 72075186224037889 2025-04-10T11:34:51.438487Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:34:51.438594Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000855 out readset size 0 marker# P6 2025-04-10T11:34:51.439395Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-10T11:34:51.439470Z node 3 :TX_PROXY DEBUG: Actor# [3:7491646564862350795:2613] txid# 281474976710700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000463 out readset size 0 marker# P6 2025-04-10T11:34:51.439509Z node 3 :TX_PROXY ERROR: Actor# [3:7491646564862350795:2613] txid# 281474976710700 FailProposedRequest: Transaction total read size 26001318 exceeded limit 10000 Status# ExecError 2025-04-10T11:34:51.439558Z node 3 :TX_PROXY 
ERROR: Actor# [3:7491646564862350795:2613] txid# 281474976710700 RESPONSE Status# ExecError marker# P13c 2025-04-10T11:34:51.439653Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976710700 2025-04-10T11:34:51.439710Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976710700 2025-04-10T11:34:51.440166Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976710700 2025-04-10T11:34:51.440204Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976710700 >> KqpStats::DeferredEffects-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD] Test command err: Trying to start YDB, gRPC: 28103, MsgBus: 62731 2025-04-10T11:33:56.643064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646329561443873:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:56.643141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001768/r3tmp/tmpijYpSr/pdisk_1.dat 2025-04-10T11:33:57.073102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:57.077758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:57.077882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:57.080343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28103, node 1 2025-04-10T11:33:57.158803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:57.158831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:57.158843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:57.158967Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62731 TClient is connected to server localhost:62731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:33:57.766033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.783325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:57.796352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:57.947157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:58.119008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:58.199895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:59.931202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646342446347536:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:59.931350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:00.271596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.313647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.348056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.390630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.434089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.508420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:00.573792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646346741315348:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:00.573864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:00.574143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646346741315353:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:00.578206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:00.589128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646346741315355:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:00.680433Z node 1 :TX_PROXY ERROR: Actor# [1:7491646346741315410:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:01.640531Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646329561443873:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:01.640610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"No estimate","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[7,5]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":2684,"Max":2684,"Min":2684,"History":[7,2684]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3}}}],"DurationUs":{"Count":1,"Sum":3000,"Max":3000,"Min":3000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[7,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":3000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1744284841958,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":892,"Max":892,"Min":892,"History":[6,892]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[7,32]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[7,32]},"WaitTimeUs":{"Co
unt":1,"Sum":2770,"Max":2770,"Min":2770,"History":[7,2770]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[8,5]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":1618,"Max":1618,"Min":1618,"History":[8,1618]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":7,"Min":5}}}],"MaxM ... ":{"Count":1,"Sum":1433,"Max":1433,"Min":1433,"History":[3,1433]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":338015,"CpuTimeUs":333851},"ProcessCpuTimeUs":626,"TotalDurationUs":365733,"ResourcePoolId":"default","QueuedTimeUs":766},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":1.059,"A-Cpu":1.059,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.549,"A-Cpu":1.608,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6256, MsgBus: 22817 2025-04-10T11:34:08.856225Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646381337110773:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:08.856294Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001768/r3tmp/tmpU1SS2O/pdisk_1.dat 2025-04-10T11:34:08.982293Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6256, node 3 2025-04-10T11:34:09.017380Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:09.017481Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:09.020338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-10T11:34:09.064429Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:09.064457Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:09.064467Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:09.064600Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22817 TClient is connected to server localhost:22817 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:34:09.583990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.603164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:09.670903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:34:09.833576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:09.919584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:12.495384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646398516981748:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.495502Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.552548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.593928Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.631693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.684206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.721220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.758057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:12.798334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646398516982257:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.798403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.798463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646398516982262:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:12.801570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:12.812915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646398516982264:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:12.883359Z node 3 :TX_PROXY ERROR: Actor# [3:7491646398516982316:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:13.769548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:13.856264Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646381337110773:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:13.856346Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:23.968970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:34:23.969020Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:49.945947Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284889930, txId: 281474976715672] shutting down 2025-04-10T11:34:50.734593Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491646561725741411:2768] TxId: 281474976715674. Ctx: { TraceId: 01jrfqpde36cahxfxhmj62dts7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWVhNTIzOTctZmRmMGFmOTEtYTk3NjgzYzItYTczZTZmZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
<main>: Error: Request canceled after 100ms } {
<main>: Error: Cancelling after 170ms during execution } ] 2025-04-10T11:34:50.746346Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491646561725741442:2797], TxId: 281474976715674, task: 9. Ctx: { TraceId : 01jrfqpde36cahxfxhmj62dts7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YWVhNTIzOTctZmRmMGFmOTEtYTk3NjgzYzItYTczZTZmZTY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7491646561725741411:2768], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-04-10T11:34:50.857336Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491646561725741440:2795], TxId: 281474976715674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWVhNTIzOTctZmRmMGFmOTEtYTk3NjgzYzItYTczZTZmZTY=. CustomerSuppliedId : . TraceId : 01jrfqpde36cahxfxhmj62dts7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491646561725741411:2768], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:50.857983Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWVhNTIzOTctZmRmMGFmOTEtYTk3NjgzYzItYTczZTZmZTY=, ActorId: [3:7491646557430774059:2768], ActorState: ExecuteState, TraceId: 01jrfqpde36cahxfxhmj62dts7, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 170ms during execution 2025-04-10T11:34:51.120372Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284891107, txId: 281474976715676] shutting down >> KqpParams::InvalidJson [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> KqpLimits::DataShardReplySizeExceeded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63956, MsgBus: 4198 2025-04-10T11:34:29.589236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646469148946541:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:29.610594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001744/r3tmp/tmpx4Xz8N/pdisk_1.dat 2025-04-10T11:34:30.044939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63956, node 1 2025-04-10T11:34:30.078682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:30.079077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:30.081600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:30.154721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:30.154770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:30.154782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:30.154912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4198 TClient is connected to server localhost:4198 WaitRootIsUp 'Root'... 
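[Editor's note] The repeated "Resource pool default not found or you don't have access permissions" warnings above are part of the normal first-run sequence, not test failures: the workload service fetches the `default` pool under `/Root/.metadata/workload_manager/pools/`, gets NOT_FOUND, schedules its creation, and the later "path exist, request accepts it" message is that creation racing a concurrent attempt. As a hedged sketch only — the pool name and limits below are illustrative assumptions, with syntax per YDB's workload-manager DDL:

    -- Hypothetical example: creating a resource pool explicitly rather than
    -- relying on the automatic creation of "default" seen in the log above.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed limit, illustrative only
        QUEUE_SIZE = 100              -- assumed queue bound, illustrative only
    );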
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:30.759872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.782368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:30.794805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.996031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.172009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.241930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:32.742179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646482033850125:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.742303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.027701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.065632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.108370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.141630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.210252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.290259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.380743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646486328817946:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.380860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.381259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646486328817951:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:33.385481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:33.398210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646486328817953:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:33.473713Z node 1 :TX_PROXY ERROR: Actor# [1:7491646486328818008:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:34.590272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646469148946541:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:34.590343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:37.765846Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284875396, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4799, MsgBus: 17441 2025-04-10T11:34:38.604635Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646508108202009:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:38.611115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001744/r3tmp/tmpvvYC05/pdisk_1.dat 2025-04-10T11:34:38.735392Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:38.763279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:38.763373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:38.765324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4799, node 2 2025-04-10T11:34:38.827697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:38.827721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:38.827728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:38.827864Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17441 TClient is connected to server localhost:17441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:39.333875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:39.361229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:39.444306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:39.704444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 7205759404664 ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:42.431926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:42.466459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:42.499133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:42.537895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:42.632290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646525288073473:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:42.632407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:42.632669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646525288073478:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:42.636492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:42.651356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646525288073480:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:42.721291Z node 2 :TX_PROXY ERROR: Actor# [2:7491646525288073536:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:43.599206Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646508108202009:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:43.599289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27603, MsgBus: 27650 2025-04-10T11:34:45.994076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646539976878467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:45.994166Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001744/r3tmp/tmpHR5ryl/pdisk_1.dat 2025-04-10T11:34:46.091448Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:46.118478Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:46.118571Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:46.120111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27603, node 3 2025-04-10T11:34:46.210298Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:46.210319Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:46.210326Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:46.210445Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27650 TClient is connected to server localhost:27650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:46.691706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:46.706321Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:46.728117Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
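[Editor's note] Warning code 1108 above flags YQL's legacy `IN` semantics: when the right-hand collection contains NULL, a non-matching probe returns false under the legacy rules but NULL under ANSI three-valued logic. The suggested pragma opts into the ANSI behavior. A minimal sketch with standalone expressions, not taken from the test's actual query:

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    -- With the pragma, a non-matching probe against a collection containing
    -- NULL yields NULL instead of false:
    SELECT 1 IN (2, 3, NULL);   -- NULL under ANSI semantics, false under legacy
    SELECT 2 IN (2, 3, NULL);   -- true under either mode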
2025-04-10T11:34:46.818752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:47.043068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:47.135117Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:50.011667Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646561451716687:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.011774Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.057132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.089957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.128184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.167207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.244320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.286410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.387558Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646561451717206:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.387645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.387737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646561451717211:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.391465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:50.407375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646561451717213:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:50.481274Z node 3 :TX_PROXY ERROR: Actor# [3:7491646561451717269:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:51.002765Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646539976878467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:51.002822Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 20991, MsgBus: 12247 2025-04-10T11:34:28.925818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646466848140030:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:28.926019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001748/r3tmp/tmp286C7x/pdisk_1.dat 2025-04-10T11:34:29.307906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:29.340966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:29.341084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20991, node 1 2025-04-10T11:34:29.342576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:29.386420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:29.386453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:29.386465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:29.386594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12247 TClient is connected to server localhost:12247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:29.921774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:29.943158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:29.953378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.118353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.272129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.354242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:32.279970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646484028010984:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.280107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.623513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.660651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.697311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.768567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.802946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.858385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.942091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646484028011498:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.942213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.942462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646484028011503:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:32.948454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:32.966783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646484028011505:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:33.039702Z node 1 :TX_PROXY ERROR: Actor# [1:7491646488322978859:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:33.925411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646466848140030:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:33.925506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:34.251096Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTI4YjI3MzMtMzg0MDcxMDYtZWFiOWYwMjktY2U4NmY2M2E=, ActorId: [1:7491646492617946447:2497], ActorState: ExecuteState, TraceId: 01jrfqnxb19388n91x7cm09m3j, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 20074, MsgBus: 26504 2025-04-10T11:34:35.206810Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646494399573673:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:35.206877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001748/r3tmp/tmpOpLaCC/pdisk_1.dat 2025-04-10T11:34:35.352916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:35.383229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:35.383325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:35.384967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20074, node 2 2025-04-10T11:34:35.450695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:35.450716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:35.450722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:35.450836Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26504 TClient is connected to server localhost:26504 WaitRootIsUp 'Root'... 
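[Editor's note] The "Missing value for parameter: $group" failure above (kqp_query_data.cpp:266) is KQP rejecting a query that declares a parameter the client never bound. A minimal sketch of the declaring side; the table path, column, and parameter type are assumptions for illustration, not taken from the test source:

    DECLARE $group AS Uint32;          -- hypothetical parameter type

    SELECT * FROM `/Root/Test`         -- hypothetical table path
    WHERE Group == $group;             -- $group must be supplied with the
                                       -- request, or KQP fails as logged above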
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:35.924797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.948305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:36.027816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at sch ... pe: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.712015Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646534710709668:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:44.712113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:44.758520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:44.801997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:44.840476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:44.887572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:44.938972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:44.986067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:45.073509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646539005677481:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.073597Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.073617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646539005677486:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:45.077740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:45.091358Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646539005677488:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:45.158102Z node 3 :TX_PROXY ERROR: Actor# [3:7491646539005677543:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12493, MsgBus: 29668 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001748/r3tmp/tmpzctJbs/pdisk_1.dat 2025-04-10T11:34:47.964793Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:34:47.997620Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:48.000883Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:48.000972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:48.005680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12493, node 4 2025-04-10T11:34:48.078444Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:48.078475Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:48.078481Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:48.078593Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29668 TClient is connected to server localhost:29668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:48.658834Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:48.666990Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:48.685574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:48.766917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:48.980399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:49.058867Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.755052Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646565241860612:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:51.755151Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:51.807837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:51.847170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:51.884581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:51.926658Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:51.999236Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.073308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.132444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646569536828427:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.132627Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.132905Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646569536828432:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.138302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:52.150898Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646569536828434:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:52.246379Z node 4 :TX_PROXY ERROR: Actor# [4:7491646569536828490:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:53.377557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:34:53.552490Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjBiMTIxYTEtYjg5OTk4ZjctY2U1YzE4ODgtZmJlMTk5M2Q=, ActorId: [4:7491646573831796082:2497], ActorState: ExecuteState, TraceId: 01jrfqpg8t04gt7950h2jegmz9, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6629, MsgBus: 28282 2025-04-10T11:34:25.942789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646452787596200:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:25.942836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174c/r3tmp/tmpFqE1Sx/pdisk_1.dat 2025-04-10T11:34:26.328193Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6629, node 1 2025-04-10T11:34:26.366581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:26.366681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:26.369222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:26.473777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:26.473811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:26.473819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:26.473956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28282 TClient is connected to server localhost:28282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:27.107498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
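[Editor's note] The `KqpParams::InvalidJson` case further up fails at kqp_session_actor.cpp:997 because a parameter typed as `Json` was bound to text that does not parse. A minimal sketch of the well-formed side; the parameter name and JSON path are illustrative assumptions:

    DECLARE $doc AS Json;

    -- Binding well-formed JSON to $doc succeeds; binding malformed text
    -- (e.g. '{broken') is what produces "Invalid Json value" as in the log.
    SELECT JSON_VALUE($doc, "$.name");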
2025-04-10T11:34:27.131721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:27.140237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.333858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.502911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:27.591894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.377639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646469967467186:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:29.377732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:29.793893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:29.829098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:29.904015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:29.937974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:29.973475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:30.048596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:30.141798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646474262435004:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:30.141878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:30.142074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646474262435009:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:30.147008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:30.168305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646474262435011:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:30.241919Z node 1 :TX_PROXY ERROR: Actor# [1:7491646474262435066:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:30.946292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646452787596200:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:30.946375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:31.321173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T11:34:32.569413Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491646482852370230:2534], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99
2025-04-10T11:34:32.569801Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZjMzhiNDctNWJjZDRkOS1hNzlmODFmNi1mZWRiMDk1NQ==, ActorId: [1:7491646478557402620:2489], ActorState: ExecuteState, TraceId: 01jrfqnvv04hjr2ejxwfhca1xj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-04-10T11:34:32.700449Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZjMzhiNDctNWJjZDRkOS1hNzlmODFmNi1mZWRiMDk1NQ==, ActorId: [1:7491646478557402620:2489], ActorState: ExecuteState, TraceId: 01jrfqnvwcd66myhhsv5qywb00, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865
2025-04-10T11:34:32.734479Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491646482852370250:2540], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031
2025-04-10T11:34:32.736350Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZjMzhiNDctNWJjZDRkOS1hNzlmODFmNi1mZWRiMDk1NQ==, ActorId: [1:7491646478557402620:2489], ActorState: ExecuteState, TraceId: 01jrfqnw0b0ngb76tgekbqhv9k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-04-10T11:34:32.762469Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491646482852370259:2544], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031
2025-04-10T11:34:32.763687Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZjMzhiNDctNWJjZDRkOS1hNzlmODFmNi1mZWRiMDk1NQ==, ActorId: [1:7491646478557402620:2489], ActorState: ExecuteState, TraceId: 01jrfqnw1c0zx36rvyth687whr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
Trying to start YDB, gRPC: 6253, MsgBus: 20761
2025-04-10T11:34:34.207491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeli ... Q=, ActorId: [3:7491646546473337516:2497], ActorState: ExecuteState, TraceId: 01jrfqpbfy1zverbrn80ebepcz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
Trying to start YDB, gRPC: 15369, MsgBus: 3939
2025-04-10T11:34:49.981559Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646555681030199:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:49.981643Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174c/r3tmp/tmp3Qk7ex/pdisk_1.dat
2025-04-10T11:34:50.094020Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:50.132265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:50.132371Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:50.134720Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15369, node 4
2025-04-10T11:34:50.183794Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:50.183823Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:50.183831Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:50.183966Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3939
TClient is connected to server localhost:3939
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:34:50.801208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:50.818852Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-10T11:34:50.839046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:50.923650Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:34:51.115895Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-10T11:34:51.210114Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:54.077572Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646577155868447:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:54.077669Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:54.122105Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.196337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.270025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.311767Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.351563Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.430934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.502078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646577155868973:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:54.502220Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:54.502961Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646577155868978:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:54.507518Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:34:54.519805Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646577155868980:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:34:54.580068Z node 4 :TX_PROXY ERROR: Actor# [4:7491646577155869033:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:34:54.981892Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646555681030199:2060];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:54.981969Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:34:55.738893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-10T11:34:56.859792Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491646585745804275:2548], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99
2025-04-10T11:34:56.861813Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2I1ZjM5NTctNTA4MzFhMDAtOGRmOTU5ZWItYmYxMDgxYWY=, ActorId: [4:7491646585745804273:2547], ActorState: ExecuteState, TraceId: 01jrfqpkj8fscxktfqj5en4vn1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-04-10T11:34:56.967360Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njk5YTg1OWItZTUzMjczYmQtNDYwNGNhYzUtMTYyMjBiYzQ=, ActorId: [4:7491646585745804279:2550], ActorState: ExecuteState, TraceId: 01jrfqpkk58g1kse17qwrqeajg, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865
2025-04-10T11:34:57.006091Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491646585745804296:2556], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031
2025-04-10T11:34:57.007726Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTBlM2MwMy01OWI3M2EyZi1jY2U0ODkzZi1jODgzMjFmNA==, ActorId: [4:7491646585745804294:2555], ActorState: ExecuteState, TraceId: 01jrfqpkph06w2dbvcdeag81vv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-04-10T11:34:57.034899Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491646590040771605:2562], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031
2025-04-10T11:34:57.035290Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Nzc4OTlmMC1jMzY1YmQzNS0xMzk4YzEwYS02MTY4OGMx, ActorId: [4:7491646590040771603:2561], ActorState: ExecuteState, TraceId: 01jrfqpkqp9f3phdxcrkkptnbs, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
>> KqpExplain::ReadTableRanges [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD]
Test command err:
Trying to start YDB, gRPC: 5402, MsgBus: 26163
2025-04-10T11:33:43.004310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646270483295455:2272];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:33:43.005621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001778/r3tmp/tmpRAko3R/pdisk_1.dat
2025-04-10T11:33:43.473668Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:33:43.476048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:33:43.476156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:33:43.478005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5402, node 1
2025-04-10T11:33:43.687935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:33:43.687984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:33:43.687994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:33:43.688148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26163
TClient is connected to server localhost:26163
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:33:44.461182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:33:44.511433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:33:46.461086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646283368198181:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:33:46.461172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646283368198161:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:33:46.461354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:33:46.472787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-04-10T11:33:46.486139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646283368198190:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-04-10T11:33:46.561556Z node 1 :TX_PROXY ERROR: Actor# [1:7491646283368198241:2607] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:33:47.357578Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576;
2025-04-10T11:33:47.357611Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted
2025-04-10T11:33:47.386845Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=3;memory=1048576;
2025-04-10T11:33:47.386875Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 3. [Mem] memory 1048576 NOT granted
2025-04-10T11:33:47.392196Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646287663165596:2368], TxId: 281474976710661, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqmd7e2vxafrdbxm1vfbr8. SessionId : ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-10T11:33:47.333120Z }, code: 2029 }.
2025-04-10T11:33:47.394809Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646287663165598:2370], TxId: 281474976710661, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqmd7e2vxafrdbxm1vfbr8. SessionId : ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 3: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-10T11:33:47.333120Z }, code: 2029 }.
2025-04-10T11:33:47.395488Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=4;memory=1048576;
2025-04-10T11:33:47.395508Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=5;memory=1048576;
2025-04-10T11:33:47.395517Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 5. [Mem] memory 1048576 NOT granted
2025-04-10T11:33:47.395516Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 4. [Mem] memory 1048576 NOT granted
2025-04-10T11:33:47.395788Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646287663165599:2371], TxId: 281474976710661, task: 4. Ctx: { TraceId : 01jrfqmd7e2vxafrdbxm1vfbr8. SessionId : ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 30B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 3, started at: 2025-04-10T11:33:47.333120Z }, code: 2029 }.
2025-04-10T11:33:47.396232Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646287663165601:2372], TxId: 281474976710661, task: 5. Ctx: { TraceId : 01jrfqmd7e2vxafrdbxm1vfbr8. SessionId : ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-10T11:33:47.333120Z }, code: 2029 }.
2025-04-10T11:33:47.405161Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646287663165597:2369], TxId: 281474976710661, task: 2. Ctx: { TraceId : 01jrfqmd7e2vxafrdbxm1vfbr8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491646287663165570:2354], status: OVERLOADED, reason: {
: Error: Terminate execution }
2025-04-10T11:33:47.411260Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWE5MWZhOTAtNmI5MDU0MjYtYTk4NDQ5ZWEtMjQ5MjQ2MmQ=, ActorId: [1:7491646283368198159:2354], ActorState: ExecuteState, TraceId: 01jrfqmd7e2vxafrdbxm1vfbr8, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-10T11:33:47.333120Z } , code: 2029
query_phases { duration_us: 102764 table_access { name: "/Root/LargeTable" partitions_count: 2 } cpu_time_us: 144358 affected_shards: 8 } compilation { duration_us: 719595 cpu_time_us: 711405 } process_cpu_time_us: 7681 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estim ... :2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:33:57.285191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T11:33:57.304853Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646332607150108:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T11:33:57.395577Z node 3 :TX_PROXY ERROR: Actor# [3:7491646332607150163:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:33:58.137551Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646315427278637:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:33:58.137659Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:34:08.326647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T11:34:08.326681Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:43.634463Z node 3 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jrfqmtg38mec6j35qtfzcfwc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY3ZTgxYjktYTYwNzE4ZGItNzJkZGQ2NGMtYTBjMGI5M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded.
2025-04-10T11:34:43.635567Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY3ZTgxYjktYTYwNzE4ZGItNzJkZGQ2NGMtYTBjMGI5M2I=, ActorId: [3:7491646336902117753:2497], ActorState: ExecuteState, TraceId: 01jrfqmtg38mec6j35qtfzcfwc, Create QueryResponse for error on request, msg:
2025-04-10T11:34:43.635767Z node 3 :KQP_SLOW_LOG WARN: TraceId: "01jrfqmtg38mec6j35qtfzcfwc", SessionId: ydb://session/3?node_id=3&id=NDY3ZTgxYjktYTYwNzE4ZGItNzJkZGQ2NGMtYTBjMGI5M2I=, Slow query, duration: 45.231849s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n        SELECT ToDict(\n        ListMap(\n            ListFromRange(0ul, 5000000ul),\n            ($x) -> { RETURN AsTuple($x, $x + 1); }\n        )\n        );\n    ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029
Trying to start YDB, gRPC: 31114, MsgBus: 13068
2025-04-10T11:34:44.583851Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646531788131852:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:44.584734Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001778/r3tmp/tmpp8rwSS/pdisk_1.dat
2025-04-10T11:34:44.759630Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:44.785995Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:44.786261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:44.788020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 31114, node 4
2025-04-10T11:34:44.887120Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:44.887160Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:44.887175Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:44.887387Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13068
TClient is connected to server localhost:13068
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:34:45.538706Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:45.553390Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:34:45.570386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:45.672098Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:45.923187Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:46.038871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:49.077784Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646553262970099:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:49.077871Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:49.158863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.203772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.252941Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.296115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.334857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.407782Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:34:49.464774Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646553262970614:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:49.464852Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:49.464901Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646553262970619:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:49.469482Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:34:49.484329Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646553262970621:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:34:49.585216Z node 4 :TX_PROXY ERROR: Actor# [4:7491646553262970677:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:34:49.586321Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646531788131852:2074];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:49.586501Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:34:50.671131Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T11:34:54.278366Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YzgwYzQxYTktMzNhYTI0ODItYThlZDY3MGEtMmE0NmRmNzk=, ActorId: [4:7491646557557938267:2498], ActorState: ExecuteState, TraceId: 01jrfqpgkva61qvxxqmv39g737, Create QueryResponse for error on request, msg:
>> TCmsTest::TestKeepAvailableModeScheduled
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 19170, MsgBus: 21220
2025-04-10T11:34:26.986517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646456063083363:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:26.992493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174b/r3tmp/tmpQ9IYTK/pdisk_1.dat
2025-04-10T11:34:27.388811Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19170, node 1
2025-04-10T11:34:27.445380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:27.446173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:27.457211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:34:27.521429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:27.521457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:27.521899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:27.522069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21220
TClient is connected to server localhost:21220
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:34:28.168221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:28.193576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T11:34:28.206325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:34:28.328183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T11:34:28.509492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:28.602606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:30.439290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646473242954204:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:30.439407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:30.841199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T11:34:30.920210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T11:34:30.957808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T11:34:31.005575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T11:34:31.086847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T11:34:31.147656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T11:34:31.242921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646477537922023:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:31.242998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:31.243362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646477537922028:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T11:34:31.247671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T11:34:31.266204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646477537922030:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T11:34:31.328921Z node 1 :TX_PROXY ERROR: Actor# [1:7491646477537922086:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T11:34:31.981668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646456063083363:2200];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:31.981740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 15095, MsgBus: 4965
2025-04-10T11:34:36.740047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:34:36.740307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-10T11:34:36.740560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174b/r3tmp/tmppe8g2m/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 15095, node 2
2025-04-10T11:34:37.205519Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:37.205581Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:37.205623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:37.206426Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:37.206642Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:34:37.247604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:37.247792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:37.259549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4965
TClient is connected to server localhost:4965
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:34:37.653410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:37.708237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:38.044411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:38.539961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose it ... }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys/Index/indexImplTable' retry limit exceeded. }.
2025-04-10T11:34:45.179784Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3286:4556], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01jrfqp6rd31vw3vjghry5jff8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGE1MmIyMGItOTA1NTMzNDEtYWQ4YjA2NTEtOTEwOWE1ZDg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3279:4105], status: OVERLOADED, reason: {
: Error: Terminate execution }
2025-04-10T11:34:45.180129Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3287:4557], TxId: 281474976715674, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGE1MmIyMGItOTA1NTMzNDEtYWQ4YjA2NTEtOTEwOWE1ZDg=. CustomerSuppliedId : . TraceId : 01jrfqp6rd31vw3vjghry5jff8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3279:4105], status: OVERLOADED, reason: {
: Error: Terminate execution }
2025-04-10T11:34:45.180462Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3288:4558], TxId: 281474976715674, task: 4. Ctx: { TraceId : 01jrfqp6rd31vw3vjghry5jff8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGE1MmIyMGItOTA1NTMzNDEtYWQ4YjA2NTEtOTEwOWE1ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:3279:4105], status: OVERLOADED, reason: {
: Error: Terminate execution }
2025-04-10T11:34:45.181231Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGE1MmIyMGItOTA1NTMzNDEtYWQ4YjA2NTEtOTEwOWE1ZDg=, ActorId: [2:2719:4105], ActorState: ExecuteState, TraceId: 01jrfqp6rd31vw3vjghry5jff8, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 25458, MsgBus: 24817
2025-04-10T11:34:49.652735Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T11:34:49.653175Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T11:34:49.653277Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00174b/r3tmp/tmptnYkKz/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 25458, node 3
2025-04-10T11:34:50.169063Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:50.170281Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:50.170348Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:50.170393Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:50.170752Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T11:34:50.208098Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:50.208262Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:50.223074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:24817
TClient is connected to server localhost:24817
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T11:34:50.621695Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:50.701773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:51.072391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:34:51.523253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.831892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:52.426450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1805:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.426736Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.450988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.693359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.959779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:53.241396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:53.489762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:53.858789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:54.173291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2393:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:54.173402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:54.173704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2398:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:54.180500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:54.341334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2400:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:54.388632Z node 3 :TX_PROXY ERROR: Actor# [3:2463:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:55.536112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:34:55.805429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.186131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:34:58.111263Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3277:4550], TxId: 281474976715674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZjUyNTIzODAtMzM3ZjllYWQtZmU0ZDRmMWMtNWQwYTA4Yjg=. TraceId : 01jrfqpk9pds8kp7f0ptmjjd7k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-04-10T11:34:58.111405Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3277:4550], TxId: 281474976715674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZjUyNTIzODAtMzM3ZjllYWQtZmU0ZDRmMWMtNWQwYTA4Yjg=. TraceId : 01jrfqpk9pds8kp7f0ptmjjd7k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-04-10T11:34:58.112453Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3278:4551], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01jrfqpk9pds8kp7f0ptmjjd7k. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZjUyNTIzODAtMzM3ZjllYWQtZmU0ZDRmMWMtNWQwYTA4Yjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:3271:4105], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:58.113322Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjUyNTIzODAtMzM3ZjllYWQtZmU0ZDRmMWMtNWQwYTA4Yjg=, ActorId: [3:2716:4105], ActorState: ExecuteState, TraceId: 01jrfqpk9pds8kp7f0ptmjjd7k, Create QueryResponse for error on request, msg: >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:34:54.173829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:34:54.173938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.173990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:34:54.174046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:34:54.174936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:34:54.174984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:34:54.175067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.175182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:34:54.177143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:34:54.259237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:34:54.259298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:54.275840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-10T11:34:54.276105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:54.276250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:54.284182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:54.284405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:54.284946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.285263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:54.288266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.295862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.295950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.297085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:54.297153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.297248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:54.297424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.304516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:54.446076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:54.448366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.450308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:54.451535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:54.451629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.462290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.462461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:54.462670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.462750Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:54.462825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:54.462897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:54.465905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.465978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:54.466017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:54.468007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.468064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.468104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.468162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.471961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:54.474800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:54.475015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:54.476002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.476168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:54.476228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.476531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:54.476586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.476770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:54.476874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:54.479471Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.479516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.479697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.479758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:54.480042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.480104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:54.480191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.480231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.480266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.480301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.480336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:54.480388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.480415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:54.480437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:54.480483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:54.480509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:54.480545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:54.482264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.482357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.482392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:34:59.857277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:34:59.857349Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-10T11:34:59.857393Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-10T11:34:59.858323Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:34:59.858454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:34:59.858500Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:34:59.858554Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-10T11:34:59.858598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:34:59.858695Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:34:59.861725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-10T11:34:59.861821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:34:59.861923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:399:2369], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:34:59.862018Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-10T11:34:59.862050Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-10T11:34:59.862210Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-10T11:34:59.862247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [7:493:2435], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-10T11:34:59.863074Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-10T11:34:59.863196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:34:59.863244Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:34:59.863394Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:34:59.863452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:34:59.863500Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:34:59.863538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:34:59.863579Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:34:59.863627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:34:59.863681Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:34:59.863720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:34:59.863802Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:34:59.864166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:34:59.864242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:34:59.865997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:34:59.866050Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:34:59.866567Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:34:59.866657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:34:59.866724Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:569:2509] TestWaitNotification: OK eventTxId 103 2025-04-10T11:34:59.867248Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:34:59.867428Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 226us result status StatusSuccess 2025-04-10T11:34:59.867807Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: 
true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:59.868317Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:34:59.868466Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusSuccess 2025-04-10T11:34:59.868774Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:59.869305Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-04-10T11:34:59.869475Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 184us result status StatusSuccess 2025-04-10T11:34:59.869811Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRanges [GOOD] Test command err: Trying to start YDB, gRPC: 11044, MsgBus: 25514 2025-04-10T11:34:34.114817Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646491302245563:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:34.119629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001741/r3tmp/tmpU2CwEa/pdisk_1.dat 2025-04-10T11:34:34.602263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:34.609608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:34.609746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:34.612070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11044, node 1 2025-04-10T11:34:34.694681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:34.694701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:34.694708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:34.694825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25514 TClient is connected to server localhost:25514 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:35.293283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.307811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:35.324219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.465215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.620360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.693937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.367044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646504187149212:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.367185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.749993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.784380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.819496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.852565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.892468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.933528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:37.989725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646504187149723:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.989827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.990314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646504187149728:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:37.994714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:38.010376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646504187149730:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:38.067562Z node 1 :TX_PROXY ERROR: Actor# [1:7491646508482117079:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:39.118252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646491302245563:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:39.125916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Subplan Name":"CTE Stage_11","Plans":[{"Tables":["KeyValue"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":6}],"E-Rows":"No estimate","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":9}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter"}],"Node Type":"HashShuffle","KeyColumns":["t1.Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Key","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":3}],"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":13}],"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Sort-InnerJoin (MapJoin)-Filter"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullS ... can"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1703, MsgBus: 23770 2025-04-10T11:34:53.250253Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646573996513972:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:53.250323Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001741/r3tmp/tmpZUyAHQ/pdisk_1.dat 2025-04-10T11:34:53.408660Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:53.422867Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:53.422957Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:53.424970Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1703, node 4 2025-04-10T11:34:53.498804Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:53.498826Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:53.498835Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:53.498986Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23770 TClient is connected to server localhost:23770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:34:54.015015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:34:54.037548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:54.111657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:54.299031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:54.386358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:56.781666Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646586881417622:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.781760Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.834016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.897162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.940451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.977842Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:57.015957Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:57.091447Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:57.138490Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646591176385435:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:57.138691Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:57.138985Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646591176385440:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:57.144299Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:57.158970Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646591176385443:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:57.216647Z node 4 :TX_PROXY ERROR: Actor# [4:7491646591176385495:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:58.209758Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:58.252419Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646573996513972:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:58.252493Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:58.633889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T11:34:58.683146Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::StateStorageNodesFromOneRing >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TCmsTest::RequestReplaceBrokenDevices >> 
TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:34:54.173382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:34:54.173512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.173556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:34:54.173596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:34:54.174501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:34:54.174568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:34:54.174719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.174793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:34:54.175833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:34:54.258809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:34:54.258896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:54.274205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:34:54.274483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:54.274639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:54.281622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:54.281868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:54.282692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.283212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:54.288577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.296741Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.296838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.297101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:54.297167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.297240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:54.297453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.306425Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:54.449739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:54.449975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.450238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:54.451258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:54.451350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.454878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.455012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:54.455228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.455398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:54.455478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:54.455525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:54.457839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.457899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:54.457937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:54.459844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
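The {"Plan": ...} blob earlier in this output is the EXPLAIN-style plan JSON the test asserts against. A minimal sketch for inspecting such a plan offline, assuming it has been saved to plan.json (the filename is illustrative; the key names "Plan", "Plans", "Operators", "Name", "ReadRanges", "Path" are copied verbatim from the output above):

import json

# Walk the plan tree and list every TableRangeScan with its path and key
# ranges. For the plan above this prints /Root/KeyValue with the ranges
# "Key (-inf, 100)" and "Key [2000, +inf)".
def iter_nodes(node):
    yield node
    for child in node.get("Plans", []):
        yield from iter_nodes(child)

with open("plan.json") as f:
    doc = json.load(f)

for node in iter_nodes(doc["Plan"]):
    for op in node.get("Operators", []):
        if op.get("Name") == "TableRangeScan":
            print(op["Path"], op.get("ReadRanges"))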
2025-04-10T11:34:54.459907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.459949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.460009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.465081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:54.467576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:54.468389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:54.469529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.469672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:54.469736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.471144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:54.471215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.471448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:54.471566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:54.477519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.477596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.477797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.477838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:54.478082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.478160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:54.478266Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.478301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.478371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:54.478468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:54.478550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:54.478621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:54.478665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:54.478726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:54.481251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.481422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.481464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:35:01.339302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:01.339372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:35:01.342086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:35:01.342263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:35:01.342824Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:01.342964Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:01.343038Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:35:01.343143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:01.343190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:01.343247Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-04-10T11:35:01.344668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:01.345479Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:01.346171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:01.346251Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:01.346389Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-04-10T11:35:01.346709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:01.346792Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:01.348558Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:01.348603Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:01.348754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:01.348921Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:01.348962Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-10T11:35:01.349036Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:35:01.349356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:01.349414Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-10T11:35:01.349471Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-04-10T11:35:01.350566Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:01.350659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:01.350693Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:01.350728Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:35:01.350762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:01.351359Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:01.351444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:01.351474Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:01.351505Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:35:01.351537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:01.351603Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:35:01.355635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:01.355710Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:01.355909Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:01.355963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:01.356021Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:01.356076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:01.356133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:01.356190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:01.356245Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:01.356314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:01.356411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:35:01.357012Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.357091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.357175Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:35:01.358393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.358482Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.358580Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:01.359242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:01.359659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:01.361955Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:35:01.362088Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:35:01.362409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:35:01.362465Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:35:01.362978Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:35:01.363101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:01.363150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [7:340:2331] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:01.363748Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:01.363969Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 272us result status StatusPathDoesNotExist 2025-04-10T11:35:01.364149Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:34:54.173423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:34:54.173589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.173634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:34:54.173667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:34:54.174532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:34:54.174575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:34:54.174653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:54.174728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:34:54.175837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:34:54.248562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:34:54.248624Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T11:34:54.267816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:34:54.268026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:54.268163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:54.278658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:54.279033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:54.282686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.283200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:54.288550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.295862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.295954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.297106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:54.297164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.297223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:54.297396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.304472Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:54.447721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:54.447970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.449925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:54.451174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:54.451254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.454909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.455042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:54.455274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.455336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:54.455380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:54.455451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:54.457545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.457599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:54.457636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:54.460927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.461017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.461076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.461138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.464826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:54.467223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:54.468369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:54.469532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:54.469717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:54.469785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.471092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:54.471156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:54.471395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:54.471474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:54.477546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:54.477607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:54.477788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:54.477830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:54.478110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:54.478192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:54.478321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.478356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:54.478443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:54.478524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:54.478557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:54.478587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:54.478654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:54.478696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:54.478734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:54.481401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.481543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:54.481585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1.285147Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-10T11:35:01.285291Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:35:01.285343Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:35:01.285390Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-10T11:35:01.285431Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:35:01.285480Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-10T11:35:01.285535Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-10T11:35:01.285579Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-10T11:35:01.285620Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-10T11:35:01.285811Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:35:01.287616Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:35:01.288253Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:35:01.288433Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-10T11:35:01.288643Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:01.288891Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:35:01.289908Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-10T11:35:01.290188Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:35:01.290364Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:01.291279Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409547 2025-04-10T11:35:01.294762Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-04-10T11:35:01.294944Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 2025-04-10T11:35:01.295177Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:35:01.295314Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:01.295832Z node 6 
:HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2025-04-10T11:35:01.297609Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-10T11:35:01.297814Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-10T11:35:01.298249Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-10T11:35:01.298577Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.298636Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.298762Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:35:01.299068Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.299119Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.299188Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:01.302215Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:35:01.302291Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:35:01.302611Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:35:01.302641Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-10T11:35:01.302705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:35:01.302726Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-10T11:35:01.303290Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:35:01.303341Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-10T11:35:01.303875Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:35:01.304005Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-10T11:35:01.304389Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-10T11:35:01.304439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
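The repeated "Change state for txid <opId> <from> -> <to>" records in these logs trace the schemeshard operation state machine (for the drop in txId 102 above: 128 -> 134 -> 135 -> 240). A minimal sketch that reconstructs those per-operation sequences from a saved log, assuming one test's output in ya_log.txt (filename illustrative; the message format is taken verbatim from the records above):

import re
from collections import defaultdict

# Collect (from, to) transitions per operation id, in log order.
PATTERN = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

transitions = defaultdict(list)
with open("ya_log.txt") as f:
    for line in f:
        for op_id, src, dst in PATTERN.findall(line):
            transitions[op_id].append((int(src), int(dst)))

for op_id, steps in transitions.items():
    # e.g. "102:0 128 -> 134 -> 135 -> 240"
    print(op_id, " -> ".join(str(s) for s, _ in steps) + f" -> {steps[-1][1]}")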
2025-04-10T11:35:01.304951Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-10T11:35:01.305085Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-10T11:35:01.305149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:792:2702] TestWaitNotification: OK eventTxId 105 2025-04-10T11:35:01.305776Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:01.305991Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 259us result status StatusPathDoesNotExist 2025-04-10T11:35:01.306180Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:35:01.306809Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:01.306992Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 201us result status StatusPathDoesNotExist 2025-04-10T11:35:01.307135Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:35:01.307693Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:01.307863Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 194us result status StatusSuccess 2025-04-10T11:35:01.308216Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:34:55.966111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:34:55.966256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:55.966303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:34:55.966347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:34:55.966389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:34:55.966422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:34:55.966504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:55.966584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:34:55.966944Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:34:56.061491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:34:56.061582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:56.083890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:34:56.084207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:56.084411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:56.104659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:56.104963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:56.105808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:56.106324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:56.109007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.110499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:56.110615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.110815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:56.110870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:56.110914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:56.111088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.120007Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:56.256219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:56.256435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.256594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:56.256783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:56.256840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.259561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
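Each operation in these test logs is planned through the test coordinator: TOperation DoPropose sends the transaction to 72057594046316545, "FAKE_COORDINATOR: Add transaction: N at step: S" fixes the plan step, and TTxOperationPlanStep then executes at that step. A minimal sketch mapping txIds to their assigned plan steps from a saved log (log.txt is illustrative; the marker string is verbatim from the output):

import re

PAT = re.compile(r"FAKE_COORDINATOR: Add transaction: (\d+) at step: (\d+)")

plan_steps = {}
with open("log.txt") as f:
    for line in f:
        for tx_id, step in PAT.findall(line):
            plan_steps[tx_id] = int(step)

# For the logs above this yields e.g. {'1': 5000001, '102': 5000003}.
print(plan_steps)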
2025-04-10T11:34:56.259709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:56.259937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.260008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:56.260064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:56.260113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:56.262929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.262996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:56.263033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:56.267856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.267917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.268001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.268073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.271637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:56.273855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:56.274062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:56.275493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:56.275644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:56.275716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.276046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:56.276110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.276308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:56.276394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:56.279641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:56.279687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:56.279913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.279962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:56.280223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.280269Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:56.280387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:56.280425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.280462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:56.280496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.280530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:56.280571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.280602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:56.280628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:56.280697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:56.280734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:56.280765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:56.291817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:56.291972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:56.292010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
shard: 72057594046678944, txId: 103, path id: 1 2025-04-10T11:35:01.680729Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-10T11:35:01.681105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:01.681157Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-10T11:35:01.681210Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-10T11:35:01.682089Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:35:01.682219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:35:01.682259Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:35:01.682292Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:35:01.682326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:01.683071Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:35:01.683157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:35:01.683182Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:35:01.683210Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-10T11:35:01.683237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:35:01.683296Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-10T11:35:01.687005Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-04-10T11:35:01.687069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-04-10T11:35:01.687100Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-04-10T11:35:01.687127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-04-10T11:35:01.687382Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:01.687430Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 
2025-04-10T11:35:01.687598Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:01.687645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:01.687720Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:01.687765Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:01.687818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:35:01.687879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:01.687930Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:35:01.687974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:35:01.688201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-10T11:35:01.689215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:35:01.690383Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-10T11:35:01.690690Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-10T11:35:01.690882Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:01.691218Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-04-10T11:35:01.692725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-10T11:35:01.692964Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:01.693700Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-10T11:35:01.695328Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:01.699770Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-04-10T11:35:01.700234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-10T11:35:01.700493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409546 2025-04-10T11:35:01.701127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 
4, at schemeshard: 72057594046678944 2025-04-10T11:35:01.701287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409548 2025-04-10T11:35:01.702845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:35:01.703782Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.703863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.704011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:35:01.704814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:01.704873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:01.704954Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:01.707935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-10T11:35:01.707994Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-10T11:35:01.708082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-10T11:35:01.708101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-10T11:35:01.708156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-10T11:35:01.708173Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-10T11:35:01.708550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-10T11:35:01.708592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-10T11:35:01.709887Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-10T11:35:01.709966Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-10T11:35:01.710287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-10T11:35:01.710342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-10T11:35:01.710922Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-10T11:35:01.711045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:35:01.711092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: 
satisfy waiter [7:588:2528]
TestWaitNotification: OK eventTxId 103
2025-04-10T11:35:01.711728Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-10T11:35:01.711945Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 264us result status StatusPathDoesNotExist
2025-04-10T11:35:01.712118Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TCmsTest::RequestRestartServicesOk
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false
>> TCmsTest::TestKeepAvailableModeScheduled [GOOD]
>> TCmsTest::TestKeepAvailableModeScheduledDisconnects
>> TCmsTest::TestForceRestartMode
>> KqpQuery::OltpCreateAsSelect_Simple [GOOD]
>> KqpQuery::OltpCreateAsSelect_Disable
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true
>> TCmsTest::StateStorageNodesFromOneRing [GOOD]
>> TCmsTest::StateStorageTwoBrokenRings
>> KqpQuery::OlapCreateAsSelect_Complex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD]
Test command err:
2025-04-10T11:34:24.901612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646446137572671:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:24.905369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fed/r3tmp/tmp1dJHQL/pdisk_1.dat
2025-04-10T11:34:25.261710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:25.261809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:25.270193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:34:25.293793Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11641, node 1
2025-04-10T11:34:25.592814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:25.592844Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:25.592858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:25.593056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28034 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:26.129360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:26.152655Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:26.167458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:29.899399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646446137572671:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:29.899492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:40.292435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:34:40.292474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:57.693185Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646587538708277:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:57.693313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fed/r3tmp/tmpoRPE56/pdisk_1.dat 2025-04-10T11:34:57.842439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:57.868044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:57.868138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:57.871288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16522, node 2 2025-04-10T11:34:57.933451Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:57.933484Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:57.933496Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:57.933698Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13832 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:58.245761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:58.253665Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:58.270006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
>> TDowntimeTest::AddDowntime [GOOD]
>> TDowntimeTest::HasUpcomingDowntime [GOOD]
>> TDowntimeTest::CleanupOldSegments [GOOD]
>> TCmsTest::ManagePermissions
>> KqpQuery::QueryCancelWriteImmediate [GOOD]
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink
>> TCmsTest::RequestReplaceBrokenDevices [GOOD]
>> TCmsTest::PermissionDuration
>> TCmsTenatsTest::TestTenantLimit
>> TCmsTenatsTest::TestNoneTenantPolicy [GOOD]
>> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-04-10T11:34:58.310843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:34:58.310947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:34:58.310988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:34:58.311044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:34:58.311096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:34:58.311127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:34:58.311183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:34:58.311254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:34:58.311625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:34:58.399199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-10T11:34:58.399258Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:58.414458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:34:58.414825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:58.415024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:58.424306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:58.424626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:58.425365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:58.425808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:58.428153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:58.429630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:58.429692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:58.429893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:58.429978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:58.430029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:58.430240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.438063Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:58.593738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:58.593980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.594187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:58.594437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:58.594491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.597600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:58.597732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:58.597934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.597989Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:58.598036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:58.598071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:58.601195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.601259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:58.601298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:58.603050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.603115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.603160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:58.603215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:58.607276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:58.609276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:58.609475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:58.610568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:58.610710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:58.610793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:58.611092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:58.611163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:58.611339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:58.611436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:58.613490Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:58.613537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:58.613717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:58.613765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:58.614032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:58.614078Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:58.614208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:58.614242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:58.614280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:58.614307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:58.614369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:58.614414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:58.614448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:58.614482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:58.614538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:58.614589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:58.614629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:58.616881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:58.617045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:58.617092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:123:2149] sender: [7:240:2058] recipient: [7:15:2062] 2025-04-10T11:35:03.833742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:03.833995Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.834259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:03.834494Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:03.834558Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.837382Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:03.837516Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:03.837721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.837793Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:03.837845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:03.837891Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:03.839945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.840022Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:03.840074Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:03.841828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.841897Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.841952Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:03.842012Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:03.842210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:03.843863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:03.844068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:03.845015Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:03.845189Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:03.845250Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:03.845544Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:03.845614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:03.845854Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:03.845943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:03.847955Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:03.848021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:03.848250Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:03.848306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:03.848696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:03.848762Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:03.848908Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:03.848954Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:03.849028Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:03.849071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:03.849125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:03.849178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:03.849228Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:03.849269Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-04-10T11:35:03.849367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:03.849421Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:03.849469Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:03.850096Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:03.850243Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:03.850294Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:03.850364Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:03.850411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:03.850528Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:03.853759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:03.854275Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:03.854907Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Bootstrap 2025-04-10T11:35:03.876001Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] Become StateWork (SchemeCache [7:275:2266]) 2025-04-10T11:35:03.879324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:03.879571Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-04-10T11:35:03.879633Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-04-10T11:35:03.879813Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-10T11:35:03.879876Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944
2025-04-10T11:35:03.881224Z node 7 :TX_PROXY DEBUG: actor# [7:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-10T11:35:03.884797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:35:03.885035Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1
2025-04-10T11:35:03.885544Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 101, wait until txId: 101
>> TCmsTest::RequestRestartServicesOk [GOOD]
>> TCmsTest::RequestRestartServicesReject
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD]
>> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD]
>> TCmsTest::TestLoadLog
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD]
Test command err:
Trying to start YDB, gRPC: 20131, MsgBus: 22291
2025-04-10T11:34:43.855893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646531142079331:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:43.855998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173a/r3tmp/tmp0WgVnI/pdisk_1.dat
2025-04-10T11:34:44.292803Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20131, node 1
2025-04-10T11:34:44.309346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:44.309457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:44.311081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T11:34:44.362464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T11:34:44.362500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T11:34:44.362513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T11:34:44.362663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22291
TClient is connected to server localhost:22291
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:45.011166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:45.040119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:45.184278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:45.338760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:45.418123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.330977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646548321950290:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.331075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.728300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.772402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.809514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.843194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.906582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.985096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:48.042869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646552616918100:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:48.042944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:48.043124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646552616918105:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:48.046596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:48.057488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646552616918107:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:48.122673Z node 1 :TX_PROXY ERROR: Actor# [1:7491646552616918160:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:48.858771Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646531142079331:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:48.917894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13345, MsgBus: 18754 2025-04-10T11:34:50.288022Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646560063834142:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:50.288074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173a/r3tmp/tmprvmz0v/pdisk_1.dat 2025-04-10T11:34:50.460058Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13345, node 2 2025-04-10T11:34:50.482323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:50.482430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:50.484265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:50.510764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:50.510785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:50.510792Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:50.510906Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18754 TClient is connected to server localhost:18754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:50.978523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:50.989236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.073087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.214702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.299166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:53.623527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... _COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:35:01.088097Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:35:01.088132Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:35:01.088155Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:35:01.088670Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:35:01.088710Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:35:01.088924Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:35:01.088951Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:35:01.089131Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:35:01.089162Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:35:01.089383Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:35:01.089415Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:35:01.089531Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:35:01.089554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:35:01.091370Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:35:01.091434Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:35:01.091777Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:35:01.091919Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:35:01.092041Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:35:01.092182Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:35:01.092305Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:35:01.092420Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:35:01.092541Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:35:01.092689Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:35:01.092825Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:35:01.092986Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7491646603884968389:2559];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:35:01.101260Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:35:01.101320Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:35:01.101412Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:35:01.101439Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:35:01.101628Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:35:01.101657Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:35:01.101744Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:35:01.102013Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:35:01.102088Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:35:01.102117Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:35:01.102201Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:35:01.102236Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:35:01.102827Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:35:01.102888Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:35:01.103107Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:35:01.103135Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:35:01.103339Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:35:01.103385Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:35:01.103602Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:35:01.103633Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:35:01.103793Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:35:01.103827Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:35:01.122663Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715666; 2025-04-10T11:35:01.126106Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715666; 2025-04-10T11:35:01.131894Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715666; 2025-04-10T11:35:01.138679Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715666; >> TCmsTenatsTest::TestTenantRatioLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 12332, MsgBus: 4971 2025-04-10T11:34:43.168165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646530934668985:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:43.168230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/00173b/r3tmp/tmpfQNvR3/pdisk_1.dat 2025-04-10T11:34:43.632514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:43.639716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:43.639839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:43.643306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12332, node 1 2025-04-10T11:34:43.742825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:43.742850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:43.742857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:43.743008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4971 TClient is connected to server localhost:4971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:44.349928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.363994Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:44.372894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.504186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.676334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:44.762511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:46.692465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646543819572659:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:46.692578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.180675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.220465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.251367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.304152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.338318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.370971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:47.421692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646548114540473:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.421755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.422026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646548114540478:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:47.426357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:47.441791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646548114540480:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:47.510875Z node 1 :TX_PROXY ERROR: Actor# [1:7491646548114540533:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:48.168035Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646530934668985:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:48.168146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:48.484929Z node 1 :GRPC_SERVER DEBUG: [0x51b000042180] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41890 2025-04-10T11:34:48.485023Z node 1 :GRPC_SERVER DEBUG: [0x51b000060480] created request Name# ExecuteDataQuery 2025-04-10T11:34:48.485184Z node 1 :GRPC_SERVER DEBUG: [0x51b000042180] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41890 database# /Root 2025-04-10T11:34:48.485426Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jrfqpbd5em1gr9crx1p7ava9, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:41890, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.996746s
<main>: Error: GRpc error: (4): Deadline Exceeded
<main>: Error: Grpc error response on endpoint localhost:12332 2025-04-10T11:34:51.486700Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491646552409508152:2497] TxId: 281474976710671. Ctx: { TraceId: 01jrfqpbd5em1gr9crx1p7ava9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Client lost } 2025-04-10T11:34:51.487335Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646552409508159:2506], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jrfqpbd5em1gr9crx1p7ava9. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491646552409508152:2497], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-04-10T11:34:51.487781Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646552409508160:2507], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=. TraceId : 01jrfqpbd5em1gr9crx1p7ava9. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491646552409508152:2497], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:51.488204Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=, ActorId: [1:7491646552409508122:2497], ActorState: ExecuteState, TraceId: 01jrfqpbd5em1gr9crx1p7ava9, Create QueryResponse for error on request, msg: 2025-04-10T11:34:51.489453Z node 1 :GRPC_SERVER DEBUG: [0x51b000060480] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MjQyOTQ3MjktYjY0ZTI5OWMtZGYxOWI4N2YtODQxMDMwYTQ=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:55300 2025-04-10T11:34:51.489497Z node 1 :GRPC_SERVER DEBUG: [0x51b00005fd80] created request Name# ExecuteDataQuery 2025-04-10T11:34:51.489608Z node 1 :GRPC_SERVER DEBUG: [0x51b000060480] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node ... s: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:55.959420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.019404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.055029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.089371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.160042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.199659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.275395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.366684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646585039334595:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.366793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.367188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646585039334600:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.370478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:56.384220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646585039334602:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:56.439780Z node 2 :TX_PROXY ERROR: Actor# [2:7491646585039334657:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:57.430942Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646567859463097:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:57.431059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9330, MsgBus: 8135 2025-04-10T11:34:58.375192Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646593179904644:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:58.375274Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173b/r3tmp/tmpVbmt7G/pdisk_1.dat 2025-04-10T11:34:58.540029Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:58.549246Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:58.549343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:58.551063Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9330, node 3 2025-04-10T11:34:58.601905Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:58.601947Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:58.601982Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:58.602186Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8135 TClient is connected to server localhost:8135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:59.091429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.099739Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:59.105906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.195534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.394528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.472604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:01.831019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646606064808289:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.831111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.882380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.916550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.953979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.988137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:02.023024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:02.094431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:02.141369Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646610359776097:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:02.141436Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:02.141630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646610359776102:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:02.146260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:02.156933Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646610359776104:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:35:02.242956Z node 3 :TX_PROXY ERROR: Actor# [3:7491646610359776157:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:03.375472Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646593179904644:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:03.375618Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> KqpParams::CheckCacheByAst [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-04-10T11:33:40.556862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:40.557145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:40.557351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009a1/r3tmp/tmp1H9uDX/pdisk_1.dat 2025-04-10T11:33:40.965455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:41.009411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:41.060406Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4MzE5MjYtMTdhNDU1Ni0yODJiYThhZS1lZTA1MzU2Yg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTU4MzE5MjYtMTdhNDU1Ni0yODJiYThhZS1lZTA1MzU2Yg== 2025-04-10T11:33:41.061296Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4MzE5MjYtMTdhNDU1Ni0yODJiYThhZS1lZTA1MzU2Yg==, ActorId: [1:619:2540], ActorState: unknown state, session actor bootstrapped 2025-04-10T11:33:41.061847Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4MzE5MjYtMTdhNDU1Ni0yODJiYThhZS1lZTA1MzU2Yg==, ActorId: [1:619:2540], ActorState: ReadyState, TraceId: 01jrfqm9j57mb84cs2ymryz3zv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-04-10T11:33:41.311059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:623:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:41.311323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:41.357361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:41.357580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:41.361641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:41.385292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:41.432943Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:41.434633Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:41.435308Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:689:2579] 2025-04-10T11:33:41.435725Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:41.450561Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:41.503919Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:41.504129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:41.511604Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:41.511790Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:41.511904Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:41.512597Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:41.512843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:41.512983Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2579] in generation 1 2025-04-10T11:33:41.513707Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:41.579333Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:41.579634Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:41.579837Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:705:2588] 2025-04-10T11:33:41.579903Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:41.579993Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:41.580079Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:41.580394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:689:2579], Recipient [1:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:41.580480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:41.580811Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 
72075186224037888 2025-04-10T11:33:41.580935Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:41.581077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:41.581144Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:41.581209Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:41.581258Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:41.581304Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:41.581343Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:41.581416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:41.615151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:708:2590], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:41.615281Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:41.615350Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:680:2574], serverId# [1:708:2590], sessionId# [0:0:0] 2025-04-10T11:33:41.615583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:708:2590] 2025-04-10T11:33:41.615646Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:41.615806Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:41.616183Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:41.616288Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:41.616438Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:41.616547Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:41.616631Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:41.616691Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:41.616740Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:41.617191Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:41.617250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:41.617299Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:41.617342Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:41.617414Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:41.617477Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:41.617552Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:41.617598Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:41.617656Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:41.619240Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:41.619322Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:41.619370Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:41.619481Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:33:41.619599Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:33:41.622722Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:709:2591], Recipient [1:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:33:41.622794Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:33:41.667476Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:724:2600], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:41.667561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:41.667634Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:722:2598], serverId# [1:724:2600], sessionId# [0:0:0] 2025-04-10T11:33:41.669140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:689:2579]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 Tabl ... node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-10T11:35:03.939160Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:35:03.939230Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-10T11:35:03.939302Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-10T11:35:03.939396Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:03.940159Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NDMxMWYxNzEtOTYyMmE5ZTAtOTZhYmJkYzUtOGZlOWM1ZA==, ActorId: [13:841:2686], ActorState: ExecuteState, TraceId: 01jrfqptcbeh3as2ps9xybv6jr, Create QueryResponse for error on request, msg: 2025-04-10T11:35:03.941167Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jrfqptcbeh3as2ps9xybv6jr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDMxMWYxNzEtOTYyMmE5ZTAtOTZhYmJkYzUtOGZlOWM1ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:03.941498Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:949:2686], Recipient [13:904:2732]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 949 RawX2: 55834577534 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-04-10T11:35:03.941528Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:35:03.941619Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:904:2732], Recipient [13:904:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:03.941641Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:03.941697Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:35:03.941868Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-10T11:35:03.941937Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-04-10T11:35:03.941977Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-10T11:35:03.942007Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-04-10T11:35:03.942030Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:03.942052Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:35:03.942098Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-10T11:35:03.942173Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-04-10T11:35:03.942206Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-10T11:35:03.942232Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:03.942254Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-10T11:35:03.942272Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-10T11:35:03.942335Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-10T11:35:03.942469Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 
PathId: 2 HasWrites: false 2025-04-10T11:35:03.942555Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:35:03.942618Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-10T11:35:03.942639Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-10T11:35:03.942658Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:35:03.942675Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-10T11:35:03.942724Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T11:35:03.942831Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-04-10T11:35:03.942887Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:35:03.942916Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:03.942939Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:35:03.942975Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-10T11:35:03.942993Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:03.943025Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-04-10T11:35:03.943094Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:35:03.943118Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-10T11:35:03.943150Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:03.944583Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:61:2108], Recipient [13:904:2732]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-04-10T11:35:04.142764Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqptged629fwtdxx48cjsw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ODNiMDBiY2ItOWE0YmM3MTMtZjgwYjljYmUtM2I1N2U3MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:35:04.145208Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:35:04.145519Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:35:04.145638Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-10T11:35:04.145732Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v500/18446744073709551615 2025-04-10T11:35:04.145856Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-10T11:35:04.146035Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:35:04.146130Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:35:04.146234Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:04.146298Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:35:04.146368Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-10T11:35:04.146440Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:35:04.146469Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:04.146497Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:35:04.146531Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:35:04.146697Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:35:04.147061Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:970:2776], 0} after executionsCount# 1 2025-04-10T11:35:04.147166Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:35:04.147310Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} finished in read 2025-04-10T11:35:04.147416Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:35:04.147452Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:35:04.147480Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:04.147508Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 
2025-04-10T11:35:04.147576Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-10T11:35:04.147615Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:04.147665Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-10T11:35:04.147756Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:35:04.147947Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:35:04.148770Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:35:04.148851Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::CollectInfo >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheByAst [GOOD] Test command err: Trying to start YDB, gRPC: 30557, MsgBus: 20615 2025-04-10T11:34:22.398497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:34:22.398645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:34:22.398771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001757/r3tmp/tmpksgcLI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30557, node 1 2025-04-10T11:34:22.945382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:22.945548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:22.945624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:22.946184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:22.951536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:34:22.999809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:23.000329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:23.014760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20615 TClient is connected to server localhost:20615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:23.357349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.393272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.682831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
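[Editor's note: the TX_DATASHARD traces near the top of this excerpt walk each operation through named execution units ("Trying to execute ... on unit CheckRead", "Add ... to execution unit ExecuteRead", and so on), with each unit reporting a status of Executed, or DelayComplete when its completion half runs later in the transaction's Complete phase. Below is a minimal sketch of that pipeline shape, under the simplifying assumption that it is an ordered list of units; the real scheduler is more involved, and every name here is invented for the note.]

    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // Per-unit status, mirroring the two outcomes seen in the trace.
    enum class EStatus { Executed, DelayComplete };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    int main() {
        // The unit sequence for a read, as named in the trace above.
        std::vector<TUnit> plan = {
            {"CheckRead",                []{ return EStatus::Executed; }},
            {"BuildAndWaitDependencies", []{ return EStatus::Executed; }},
            {"ExecuteRead",              []{ return EStatus::Executed; }},
            {"CompletedOperations",      []{ return EStatus::Executed; }},
        };
        for (const auto& unit : plan) {
            std::printf("Trying to execute on unit %s\n", unit.Name.c_str());
            EStatus st = unit.Execute();
            std::printf("Execution status is %s\n",
                        st == EStatus::Executed ? "Executed" : "DelayComplete");
        }
        std::printf("Execution plan has finished\n");
        return 0;
    }

The per-unit status is what lets a unit like FinishPropose defer work: in the trace it returns DelayComplete during Execute, and its "Complete execution ... on unit FinishPropose" line only appears once TTxProposeTransactionBase::Complete runs.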
2025-04-10T11:34:24.108675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:24.428112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:25.342028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1808:3404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:25.342389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:25.371831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.590173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:25.879290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:26.144253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:26.433282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:26.710978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:27.092370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2396:3857], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.092497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.092979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2401:3862], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:27.101988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:27.255548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2403:3864], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:27.329027Z node 1 :TX_PROXY ERROR: Actor# [1:2466:3908] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:28.863601Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2737:4087] TxId: 281474976715671. Ctx: { TraceId: 01jrfqnr7s8dcftf06bw3shpak, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWVlNWI3YjMtMjAyNDUzYTAtYWEzYjg2NDEtODZkOGJhNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-10T11:34:28.865155Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2745:4128], TxId: 281474976715671, task: 3. Ctx: { TraceId : 01jrfqnr7s8dcftf06bw3shpak. SessionId : ydb://session/3?node_id=1&id=OWVlNWI3YjMtMjAyNDUzYTAtYWEzYjg2NDEtODZkOGJhNmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2737:4087], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:28.865959Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2743:4126], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqnr7s8dcftf06bw3shpak. SessionId : ydb://session/3?node_id=1&id=OWVlNWI3YjMtMjAyNDUzYTAtYWEzYjg2NDEtODZkOGJhNmI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2737:4087], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:28.866244Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2744:4127], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqnr7s8dcftf06bw3shpak. SessionId : ydb://session/3?node_id=1&id=OWVlNWI3YjMtMjAyNDUzYTAtYWEzYjg2NDEtODZkOGJhNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2737:4087], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:28.866781Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWVlNWI3YjMtMjAyNDUzYTAtYWEzYjg2NDEtODZkOGJhNmI=, ActorId: [1:2701:4087], ActorState: ExecuteState, TraceId: 01jrfqnr7s8dcftf06bw3shpak, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 18133, MsgBus: 30123 2025-04-10T11:34:32.699657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:34:32.699883Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:34:32.700107Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001757/r3tmp/tmp0bHexD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18133, node 2 2025-04-10T11:34:33.142584Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:33.142655Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:33.142709Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:33.142974Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:33.143819Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:34:33.191928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:33.192074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:33.204727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30123 TClient is connected to server localhost:30123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644 ...
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.115811Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.159532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.198868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.228855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.262734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.301192Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.339418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:56.388926Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491646584949410358:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.389022Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.389294Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7491646584949410363:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:56.392497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:56.401444Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-10T11:34:56.401730Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7491646584949410365:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:56.499434Z node 5 :TX_PROXY ERROR: Actor# [5:7491646584949410419:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:57.610369Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491646567769538906:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:57.610458Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10858, MsgBus: 24620 2025-04-10T11:34:58.646968Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491646594346967125:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:58.647072Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001757/r3tmp/tmp1m9DhE/pdisk_1.dat 2025-04-10T11:34:58.798569Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:58.813714Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:58.813825Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:58.819383Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10858, node 6 2025-04-10T11:34:58.895839Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:58.895862Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:58.895872Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:58.896017Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24620 TClient is connected to server localhost:24620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
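[Editor's note: the WorkloadService/TX_PROXY lines above show a benign race on creating the default resource pool: the creator schedules a retry with "Transaction ... completed, doublechecking", and a concurrent create that fails with "path exist" is treated as acceptable ("request accepts it"). Below is a hedged sketch of that ensure-exists pattern; the functions are stubs invented for this note, not the real YDB API.]

    #include <cstdio>

    enum class ECreateResult { Ok, AlreadyExists, TransientError };

    // Stubs standing in for real calls: pretend a concurrent session
    // already created the pool.
    static ECreateResult CreateDefaultPool() { return ECreateResult::AlreadyExists; }
    static bool PathExists(const char*) { return true; }

    static bool EnsureDefaultPool() {
        switch (CreateDefaultPool()) {
            case ECreateResult::Ok:
            case ECreateResult::AlreadyExists:
                // "path exist, request accepts it": losing the race is fine,
                // because the desired end state (pool exists) holds either way.
                return true;
            case ECreateResult::TransientError:
                // "Scheduled retry ... doublechecking": re-read the path
                // before concluding the create actually failed.
                return PathExists("/Root/.metadata/workload_manager/pools/default");
        }
        return false;
    }

    int main() {
        std::printf("default pool ready: %s\n", EnsureDefaultPool() ? "yes" : "no");
        return 0;
    }

Treating "already exists" as success makes the operation idempotent, which is why the TX_PROXY ERROR lines here do not correspond to test failures.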
2025-04-10T11:34:59.539081Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.552504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.632922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.887791Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.949960Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:02.961128Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491646611526838087:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:02.961250Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:03.013319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.051571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.087736Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.135668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.169833Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.201373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:03.251240Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491646615821805893:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:03.251363Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:03.251820Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491646615821805898:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:03.257354Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:03.270715Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491646615821805900:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:35:03.341157Z node 6 :TX_PROXY ERROR: Actor# [6:7491646615821805953:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:03.647327Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491646594346967125:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:03.647431Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::StateStorageTwoRings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5168, MsgBus: 15420 2025-04-10T11:34:39.293864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646510525783872:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:39.294333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173e/r3tmp/tmp50Fpmw/pdisk_1.dat 2025-04-10T11:34:39.696491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:39.715253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:39.715366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:39.718715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5168, node 1 2025-04-10T11:34:39.819420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:39.819445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:39.819455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:39.819587Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15420 TClient is connected to server localhost:15420 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:40.434363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.459351Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:40.470951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.665678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.854115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:40.937164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:42.924943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646523410687430:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:42.925099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:43.242906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.284350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.320665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.353084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.404828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.442068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:43.497853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646527705655239:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:43.497936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:43.498115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646527705655244:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:43.501832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:43.518356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646527705655246:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:43.614112Z node 1 :TX_PROXY ERROR: Actor# [1:7491646527705655301:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:44.294267Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646510525783872:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:44.294359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17581, MsgBus: 62197 2025-04-10T11:34:46.675726Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646544439668686:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:46.675800Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:46.682197Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646542927876815:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:46.682269Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173e/r3tmp/tmpy6AQY5/pdisk_1.dat 2025-04-10T11:34:47.071169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:47.115868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:47.115952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:47.119154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:47.119242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:47.123270Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:34:47.123380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:47.127123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17581, node 2 2025-04-10T11:34:47.242738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:47.242771Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:47.242781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:47.242931Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62197 TClient is connected to server localhost:62197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:47.680508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDoma ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.909982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646561619540662:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.910159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.910597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646561619540667:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.915327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:50.949753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646561619540669:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:34:51.042446Z node 2 :TX_PROXY ERROR: Actor# [2:7491646565914508045:4278] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:51.675762Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646544439668686:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:51.675837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:51.684489Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646542927876815:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:51.684588Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16606, MsgBus: 23801 2025-04-10T11:34:56.520496Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646584560159436:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:56.522307Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:56.539031Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491646583899673899:2102];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:56.539410Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00173e/r3tmp/tmpr1fplh/pdisk_1.dat 2025-04-10T11:34:56.797947Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:56.826106Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:56.826425Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:56.830705Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-10T11:34:56.831508Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16606, node 4 2025-04-10T11:34:56.881540Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:56.881646Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:56.884225Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:56.901146Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:56.901174Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:56.901185Z node 4 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:56.901341Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23801 TClient is connected to server localhost:23801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:57.384799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:57.409676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:57.521267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:57.656105Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:57.741607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:00.617193Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646601740030893:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:00.617283Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:00.669583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:00.742464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:00.811229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:00.876086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:00.935466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.014092Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.101345Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646606034998905:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.101444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.101793Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646606034998910:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.106208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:01.134395Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646606034998912:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:35:01.203841Z node 4 :TX_PROXY ERROR: Actor# [4:7491646606034998991:4331] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:01.520927Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646584560159436:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:01.521040Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:01.538935Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491646583899673899:2102];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:01.539030Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::TestKeepAvailableMode >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::CreateTime >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 28711, MsgBus: 16239 2025-04-10T11:34:50.357898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646561284259245:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:50.358648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172d/r3tmp/tmpkcQtR2/pdisk_1.dat 2025-04-10T11:34:50.805394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:50.810655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:50.810760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:50.814286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28711, node 1 
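[Editor's note: earlier in this excerpt, GetMvccTxVersion reports CompleteEdge# v500/0, UnprotectedReadEdge# v400/18446744073709551615, and ImmediateWriteEdge# v500/18446744073709551615, after which the shard "changed HEAD read to non-repeatable v500/18446744073709551615". A loose sketch of picking the read version as the maximum edge follows, assuming edges order lexicographically by (step, txId); this is a simplification of the real edge logic, and TRowVersion here is a toy type.]

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    struct TRowVersion {
        uint64_t Step;
        uint64_t TxId;
        bool operator<(const TRowVersion& o) const {
            return Step != o.Step ? Step < o.Step : TxId < o.TxId;
        }
    };

    int main() {
        const uint64_t kMax = 18446744073709551615ull;  // uint64 max, as in the log
        // Edge values taken from the GetMvccTxVersion trace line.
        TRowVersion completeEdge{500, 0};
        TRowVersion unprotectedReadEdge{400, kMax};
        TRowVersion immediateWriteEdge{500, kMax};
        // HEAD read version = maximum over the edges, matching
        // "changed HEAD read to non-repeatable v500/18446744073709551615".
        TRowVersion head = std::max({completeEdge, unprotectedReadEdge,
                                     immediateWriteEdge});
        std::printf("v%llu/%llu\n", (unsigned long long)head.Step,
                    (unsigned long long)head.TxId);
        return 0;
    }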
2025-04-10T11:34:50.986567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:50.986586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:50.986606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:50.986716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16239 TClient is connected to server localhost:16239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:51.570373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.593099Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:53.669786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646574169161634:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.669958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.670666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646574169161669:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.675387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:34:53.686416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646574169161671:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:34:53.770491Z node 1 :TX_PROXY ERROR: Actor# [1:7491646574169161722:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:54.008028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:34:54.128486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:34:54.129095Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:34:54.158050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:34:54.158050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:34:54.158291Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-10T11:34:54.158290Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037896 2025-04-10T11:34:54.168559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:34:54.168559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:34:54.168778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:34:54.168792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:34:54.169115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:34:54.169117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:34:54.169365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:34:54.169366Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:34:54.169564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:34:54.169597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:34:54.169658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:34:54.169717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:34:54.169750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:34:54.169850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:34:54.169881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:34:54.169972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:34:54.170006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:34:54.170097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:34:54.170112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:34:54.170242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491646578464129156:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:34:54.170265Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:34:54.170374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;des ... 72075186224037893;self_id=[1:7491646578464129177:2349];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037893; 2025-04-10T11:34:57.370969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;self_id=[1:7491646578464129158:2344];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037896; 2025-04-10T11:34:57.374620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;self_id=[1:7491646578464129164:2347];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037894; Trying to start YDB, gRPC: 62448, MsgBus: 2036 2025-04-10T11:34:57.977852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646590810516379:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:57.977944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172d/r3tmp/tmp2rxW4e/pdisk_1.dat 2025-04-10T11:34:58.093296Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:58.117511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:58.117603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:58.118900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62448, node 2 2025-04-10T11:34:58.186949Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:58.186984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:58.186992Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:58.187131Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2036 TClient is connected to server localhost:2036 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:58.704723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:58.713118Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:34:58.725247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:34:58.763763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.454559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646607990386243:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.454660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.454838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646607990386248:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:01.458865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:35:01.473614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646607990386257:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:35:01.568466Z node 2 :TX_PROXY ERROR: Actor# [2:7491646607990386308:2347] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:01.595242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:01.889311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9651, MsgBus: 27467 2025-04-10T11:35:03.058219Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646615333440557:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:03.058329Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172d/r3tmp/tmpNvL3Mx/pdisk_1.dat 2025-04-10T11:35:03.172842Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:03.200110Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:03.200213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:03.201729Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9651, node 3 2025-04-10T11:35:03.249563Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:03.249587Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:03.249598Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:03.249757Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27467 TClient is connected to server localhost:27467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:35:03.685441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:06.998372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646628218343104:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:06.998507Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646628218343112:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:06.998569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:07.003197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:35:07.012287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646628218343118:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:35:07.076882Z node 3 :TX_PROXY ERROR: Actor# [3:7491646632513310467:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:07.107899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:35:07.264793Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7491646632513310606:2356], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-04-10T11:35:07.265020Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWU0OWJlYWMtMzkyOWI3NzYtNWMwMGJkOTUtNjcyZTk3ZWY=, ActorId: [3:7491646632513310604:2355], ActorState: ExecuteState, TraceId: 01jrfqpxpz57gvbwdz7r709z53, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:34:56.072337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:34:56.072425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:56.072461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:34:56.072495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:34:56.072545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:34:56.072592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:34:56.072644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:34:56.072720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:34:56.073113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:34:56.147096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:34:56.147157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:56.161195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:34:56.161486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:34:56.161696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:34:56.169169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:34:56.169415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:34:56.170082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:56.170514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:34:56.172531Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.173859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:56.173913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.174123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:34:56.174210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:56.174253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:34:56.174404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.180815Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:34:56.306850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:34:56.307101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.307297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:34:56.307599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:34:56.307663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.311984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:56.312111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:34:56.312362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.312417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:34:56.312465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:34:56.312498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:34:56.315209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.315268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:34:56.315345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:34:56.316903Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.316955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.317006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.317062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.325389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:34:56.327331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:34:56.327492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:34:56.328436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:34:56.328574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:34:56.328641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.328915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:34:56.328967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:34:56.329138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:34:56.329202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:34:56.331253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:34:56.331311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:34:56.331479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:34:56.331531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:34:56.331781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:34:56.331828Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:34:56.331916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:56.331975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.332027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:34:56.332057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.332091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:34:56.332132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:34:56.332177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:34:56.332208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:34:56.332266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:34:56.332301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:34:56.332339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:34:56.334434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:56.334556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:34:56.334639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-04-10T11:35:08.424713Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-04-10T11:35:08.425175Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-04-10T11:35:08.425232Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-04-10T11:35:08.425360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-04-10T11:35:08.425443Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-04-10T11:35:08.427879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-04-10T11:35:08.428048Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-10T11:35:08.428344Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-10T11:35:08.428392Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-10T11:35:08.428616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-04-10T11:35:08.428708Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-10T11:35:08.428765Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-04-10T11:35:08.428815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:729:2631], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-04-10T11:35:08.429424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-10T11:35:08.429489Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-04-10T11:35:08.429740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-10T11:35:08.430389Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-10T11:35:08.430506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-10T11:35:08.430544Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-10T11:35:08.430586Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-04-10T11:35:08.430625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-04-10T11:35:08.431706Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-10T11:35:08.431765Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-10T11:35:08.431788Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-10T11:35:08.431818Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-04-10T11:35:08.431844Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-04-10T11:35:08.431895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-04-10T11:35:08.433681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-04-10T11:35:08.433838Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-04-10T11:35:08.433907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-10T11:35:08.434420Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-10T11:35:08.434673Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-04-10T11:35:08.435336Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-10T11:35:08.435401Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-10T11:35:08.435548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-04-10T11:35:08.435621Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-04-10T11:35:08.435733Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-10T11:35:08.435819Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-04-10T11:35:08.437017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-10T11:35:08.437164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-10T11:35:08.440039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-10T11:35:08.440242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-10T11:35:08.440298Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-04-10T11:35:08.440380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-04-10T11:35:08.440736Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 678 RawX2: 30064773663 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-04-10T11:35:08.444037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
2025-04-10T11:35:08.444224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-04-10T11:35:08.476893Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-04-10T11:35:08.479734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-10T11:35:08.479937Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot |96.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::PriorityRange [GOOD] >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::LastRefreshTime |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TSchemeShardTTLTestsWithReboots::CreateTable >> TCmsTest::RequestRestartServicesNoUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-04-10T11:35:06.955602Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-10T11:35:06.955682Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-10T11:35:06.955703Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-10T11:35:06.955724Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-10T11:35:06.955744Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-10T11:35:06.955764Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-10T11:35:06.955784Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 
2025-04-10T11:35:06.955805Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-10T11:35:06.963024Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-10T11:35:06.963086Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-10T11:35:06.963107Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-10T11:35:06.963125Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-10T11:35:06.963157Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-10T11:35:06.963179Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-10T11:35:06.963198Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-10T11:35:06.963217Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-10T11:35:07.002968Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-10T11:35:07.003026Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-10T11:35:07.003047Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-10T11:35:07.003067Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-10T11:35:07.003093Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-10T11:35:07.003113Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-10T11:35:07.003132Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-10T11:35:07.003151Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TCmsTest::TestLogOperationsRollback [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> KqpExplain::FewEffects-UseSink [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TLocksTest::Range_CorrectDot [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TSchemeShardTTLTestsWithReboots::MoveTable |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> 
TMaintenanceApiTest::LastRefreshTime [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-04-10T11:33:34.746369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:34.746706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:34.747040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009f7/r3tmp/tmpH1NE1q/pdisk_1.dat 2025-04-10T11:33:35.278164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:35.331118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:35.385058Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:35.387357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:35.390858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:35.391018Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:35.404262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:35.488306Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:33:35.488361Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:33:35.489314Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:33:35.599928Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:33:35.600062Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:35.602557Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:33:35.602688Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:35.603132Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:35.603393Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:33:35.603497Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:33:35.608217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:35.609332Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:33:35.610180Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:33:35.610266Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:33:35.662892Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:35.664287Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:35.664845Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:33:35.665135Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:35.719571Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:35.720392Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:35.720540Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:35.722598Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:35.722703Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:35.722767Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:35.723246Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:35.723441Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:35.723558Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:33:35.724134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:35.767934Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:35.769220Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:35.769404Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:33:35.769461Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:35.769504Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:35.769543Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:35.769825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T11:33:35.769895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:35.771135Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:33:35.771253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:35.771321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:35.771364Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:35.771406Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:35.771446Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:35.771485Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:35.771521Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:35.771590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:35.771736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:35.771818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:35.771876Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:33:35.773367Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:33:35.773433Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:35.773572Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:35.773947Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:35.774014Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:35.774114Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:35.774295Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:35.774343Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:35.774386Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:35.774420Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:35.774733Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:35.774774Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:35.774809Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:35.774844Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:33:35.774924Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:35.774964Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:35.774998Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:35.775042Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:35.775080Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:35.776005Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:35.776058Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:35.776090Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:35.776153Z node 1 :TX_DATASHARD TRACE: Prop ... 2075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:35:11.059472Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:35:11.059534Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-10T11:35:11.059648Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:35:11.059724Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:35:11.059802Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:35:11.059823Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:35:11.059844Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-10T11:35:11.059888Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:35:11.059918Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:35:11.059979Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:757:2635], Recipient [13:666:2570]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:35:11.060009Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:35:11.060040Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 
dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-10T11:35:11.060073Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:35:11.060233Z node 13 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:983:2781], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:35:11.060483Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:35:11.060506Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:35:11.060528Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-10T11:35:11.060563Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-10T11:35:11.060641Z node 13 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:983:2781], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:35:11.060966Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:11.061216Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 42703 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } 2025-04-10T11:35:11.061765Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 800 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-04-10T11:35:11.064140Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:35:11.064294Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-10T11:35:11.064336Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:35:11.064402Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-04-10T11:35:11.069985Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 
72075186224037889 2025-04-10T11:35:11.070931Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:757:2635], Recipient [13:666:2570]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-10T11:35:11.071021Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:35:11.071085Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-10T11:35:11.337721Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T11:35:11.337813Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-04-10T11:35:11.338825Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqq1eybxagm3jwb54s74k6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTNmZDllYTktMTg5NDEzMGItZTA4ZWJjZjctMWE3NmE5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-04-10T11:35:11.341572Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1107:2901], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:35:11.341743Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:35:11.341831Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-04-10T11:35:11.341903Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-04-10T11:35:11.341998Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-10T11:35:11.342122Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:35:11.342203Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:35:11.342268Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:11.342311Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:35:11.342355Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-10T11:35:11.342409Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:35:11.342431Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:11.342450Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:35:11.342467Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 
72075186224037888 on unit ExecuteRead 2025-04-10T11:35:11.342586Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:35:11.342925Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:1107:2901], 0} after executionsCount# 1 2025-04-10T11:35:11.343004Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1107:2901], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:35:11.343108Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1107:2901], 0} finished in read 2025-04-10T11:35:11.343192Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:35:11.343212Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:35:11.343233Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:11.343253Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:35:11.343294Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-10T11:35:11.343312Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:11.343338Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-10T11:35:11.343401Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:35:11.343545Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:35:11.344563Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1107:2901], Recipient [13:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:35:11.344633Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:12.532526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:12.532644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2025-04-10T11:35:12.532693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:12.532739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:12.532786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:12.532835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:12.532896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:12.533012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:12.533409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:12.620814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:12.620901Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:12.638127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:12.638496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:12.638684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:12.647336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:12.647589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:12.648374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.648814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:12.651276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.652658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:12.652728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.652910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:12.652964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:12.653028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:12.653193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.660277Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:12.805700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:12.805945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.806214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:12.806448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:12.806500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.809355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.809508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:12.809800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.809857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:12.809888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:12.809922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:12.812497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.812563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:12.812599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:12.814756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.814807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.814848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.814898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.818551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:12.820845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:12.821045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:12.822208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.822338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:12.822384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.822659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:12.822711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.822877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:12.822973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:12.825158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:12.825211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:12.825396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.825437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:12.825660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.825700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:12.825806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:12.825854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.825897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:12.825931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.825970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:12.826028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.826064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:12.826099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:12.826198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:12.826243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:12.826291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:12.828466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:12.828579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:12.828620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:12.828672Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:12.828722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:12.828833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:12.832056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:12.832589Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:12.836328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:12.836675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.837064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-04-10T11:35:12.838115Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:35:12.856958Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:35:12.857731Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:12.860473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:12.860662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-10T11:35:12.862230Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> 
KqpExplain::FewEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16729, MsgBus: 13896 2025-04-10T11:34:48.566427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646552486074197:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:48.566644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00172f/r3tmp/tmpHhempd/pdisk_1.dat 2025-04-10T11:34:48.963727Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16729, node 1 2025-04-10T11:34:48.999107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:48.999214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:49.001101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:49.050668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:49.050715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:49.050724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:49.050876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13896 TClient is connected to server localhost:13896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:49.640156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:49.668582Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:49.677241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:49.854533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
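
The read-iterator trace near the top of this section shows per-request quota accounting: the `TEvRead` request arrives with `MaxRows: 1001` / `MaxBytes: 5242880`, the shard returns `rowCount# 2, bytes# 96`, and the log then reports `quota rows left# 999, quota bytes left# 5242784`. A minimal sketch of that arithmetic, with assumed names (this is illustrative, not YDB source):

```cpp
// Sketch of the quota bookkeeping visible in the read-iterator trace above.
// Starting limits and consumed amounts are taken from the log; the struct and
// method names are assumptions for illustration only.
#include <cstdint>
#include <iostream>

struct TReadQuota {
    uint64_t RowsLeft;
    uint64_t BytesLeft;

    // Charge one returned batch against both remaining limits.
    void Consume(uint64_t rows, uint64_t bytes) {
        RowsLeft  = rows  > RowsLeft  ? 0 : RowsLeft  - rows;
        BytesLeft = bytes > BytesLeft ? 0 : BytesLeft - bytes;
    }
    bool Exhausted() const { return RowsLeft == 0 || BytesLeft == 0; }
};

int main() {
    TReadQuota quota{1001, 5242880}; // MaxRows / MaxBytes from the TEvRead request
    quota.Consume(2, 96);            // rowCount# 2, bytes# 96 from the trace
    std::cout << "quota rows left# " << quota.RowsLeft
              << ", quota bytes left# " << quota.BytesLeft << "\n"; // 999 / 5242784
}
```
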
2025-04-10T11:34:50.025608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:50.111767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:51.947681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646565370977862:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:51.947832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.279454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.307891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.335828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.368485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.399461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.470688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:52.522538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646569665945674:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.522622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.524254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646569665945679:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:52.528461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:52.538222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646569665945681:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:52.617556Z node 1 :TX_PROXY ERROR: Actor# [1:7491646569665945736:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:53.566335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646552486074197:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:53.575688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 
+∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"Result ... from file: (empty maybe) 2025-04-10T11:35:06.592325Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:06.592465Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25573 TClient is connected to server localhost:25573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:07.088355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:07.103353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:07.178659Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:07.370456Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:07.443285Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:09.588454Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646640601400256:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:09.588547Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:09.621881Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.657809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.691137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.724960Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.757922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.792962Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:09.836117Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646640601400765:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:09.836206Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:09.836280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646640601400770:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:09.839499Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:09.849060Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646640601400772:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:35:09.935472Z node 4 :TX_PROXY ERROR: Actor# [4:7491646640601400826:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":26,"Plans":[{"Tables":["EightShard"],"PlanNodeId":25,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_5_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_5_0"}],"Node Type":"Effect"},{"PlanNodeId":23,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":20,"Plans":[{"Tables":["EightShard"],"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_5_0","Node Type":"Precompute_5","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"1","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} 2025-04-10T11:35:11.410960Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646627716496592:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:11.411058Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> KqpLimits::TooBigColumn-useSink >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:12.547894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:12.548019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:12.548068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:12.548117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:12.548172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:12.548231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:12.548307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:12.548429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:12.548879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:12.637807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:12.637894Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:12.654560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:12.654865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:12.655075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:12.664068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:12.664353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:12.665201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.665730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:12.668287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.669815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:12.669899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.670097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:12.670179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:12.670224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:12.670435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.678943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] 
recipient: [1:15:2062] 2025-04-10T11:35:12.845329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:12.845608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.845849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:12.846121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:12.846213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.848937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.849117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:12.849305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.849368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:12.849413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:12.849455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:12.851967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.852041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:12.852081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:12.854187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.854250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.854297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.854346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.865974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:12.868712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
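
The schemeshard trace above records the operation's lifecycle as numeric state transitions ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240"), each paired with the execution unit that logs next (TCreateParts, TConfigureParts, TPropose, TDone). A sketch of that progression; the numeric values are taken from the log, while the enum labels are inferred from the adjacent messages and are illustrative, not the real enum:

```cpp
// Sketch of the state progression logged above. Values 2/3/128/240 appear
// verbatim in the trace; the labels are guesses from the surrounding
// "ProgressState" messages.
#include <cstdio>

enum ETxState {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,   // "TConfigureParts operationId# 1:0 ProgressState"
    Propose        = 128, // "TPropose ProgressState"
    Done           = 240, // "TDone opId# 1:0 ProgressState"
};

int main() {
    ETxState path[] = {CreateParts, ConfigureParts, Propose, Done};
    for (int i = 0; i + 1 < 4; ++i)
        std::printf("Change state for txid 1:0 %d -> %d\n", path[i], path[i + 1]);
}
```
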
2025-04-10T11:35:12.868921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:12.870188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:12.870349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:12.870406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.870713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:12.870767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:12.870975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:12.871071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:12.873645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:12.873701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:12.873922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:12.873974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:12.874306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:12.874358Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:12.874491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:12.874533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.874580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:12.874636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.874679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:12.874746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:12.874788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:12.874826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:12.874911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:12.874961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:12.874995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:12.877797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:12.877945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:12.878016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-10T11:35:13.255824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:13.255964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:13.256032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:35:13.256592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:35:13.256779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:35:13.262043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:13.262108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:13.262511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:13.262564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:35:13.262966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.263026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:35:13.263684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:13.263799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 
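A recurring pattern in these traces is path-version publication: the schemeshard describes the path, sends it to the scheme board populator, and only treats the operation part as "published" once a TEvUpdateAck carrying an equal-or-newer version comes back (the "AckPublish ... version: 4" entry above). A minimal sketch of that bookkeeping, with invented type and method names — only the txId/pathId/version semantics mirror the log:

#include <cstdint>
#include <cstdio>
#include <map>

// Minimal model of publication tracking: for each in-flight tx, remember
// which path versions still await acknowledgment from the scheme board.
struct TPublicationTracker {
    // txId -> (localPathId -> version we are waiting to see acked)
    std::map<uint64_t, std::map<uint64_t, uint64_t>> Pending;

    void Publish(uint64_t txId, uint64_t pathId, uint64_t version) {
        Pending[txId][pathId] = version;
    }

    // Mirrors "Handle TEvUpdateAck": an ack for version >= expected clears it.
    void Ack(uint64_t txId, uint64_t pathId, uint64_t ackedVersion) {
        auto tx = Pending.find(txId);
        if (tx == Pending.end()) return;
        auto path = tx->second.find(pathId);
        if (path != tx->second.end() && ackedVersion >= path->second) {
            tx->second.erase(path);
        }
        if (tx->second.empty()) Pending.erase(tx);
    }

    // Mirrors "IsReadyToNotify ... is published: true/false".
    bool IsPublished(uint64_t txId) const {
        return Pending.find(txId) == Pending.end();
    }
};

int main() {
    TPublicationTracker t;
    t.Publish(/*txId=*/102, /*pathId=*/2, /*version=*/4);
    std::printf("published: %d\n", t.IsPublished(102)); // 0: ack still outstanding
    t.Ack(102, 2, 4);
    std::printf("published: %d\n", t.IsPublished(102)); // 1: version 4 acked
}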
2025-04-10T11:35:13.263843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:13.263892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:35:13.263958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:35:13.264045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:35:13.267610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:13.290428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2424 } } 2025-04-10T11:35:13.290487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:13.290629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2424 } } 2025-04-10T11:35:13.290710Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2424 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:13.291714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:13.291760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:13.291883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:13.291937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:13.292025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:13.292083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, 
txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:13.292114Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.292146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:13.292189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:35:13.295023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.295453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.295686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.295727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:13.295826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:13.295856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:13.295887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:13.295914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:13.295996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:13.296124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 102 2025-04-10T11:35:13.296212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:13.296258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:13.296291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:13.296382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:13.298133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:13.298198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2421] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:13.298639Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:13.298880Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 257us result status StatusSuccess 2025-04-10T11:35:13.299282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-04-10T11:34:30.143462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646473478955996:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:30.144778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmpxAFZN2/pdisk_1.dat 2025-04-10T11:34:30.580524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:30.600718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:30.600799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:30.602734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5494 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:30.908239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.923761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:30.932821Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:30.937445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:31.078284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:31.146670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:33.618907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646485034160000:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:33.618942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmp6kewdR/pdisk_1.dat 2025-04-10T11:34:33.755151Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:33.773850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:33.773968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:33.775650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14025 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:33.994117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:34.025892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:34.125825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:34.176342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:37.718073Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646505788973340:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmpkDSca2/pdisk_1.dat 2025-04-10T11:34:37.738568Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:37.810138Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:37.838476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:37.838595Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:37.841732Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19109 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:38.068110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.074026Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.093369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.157443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:38.233539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:41.564449Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646522241169394:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:41.564723Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmpTG01UH/pdisk_1.dat 2025-04-10T11:34:41.799147Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:41.892654Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:41.892768Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:41.894176Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64080 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 202 ... onnecting -> Connected TClient is connected to server localhost:25276 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:54.767535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:54.786776Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:34:54.794886Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:34:54.874535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:54.942857Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:34:59.070284Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646596406389017:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:59.070374Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmpu0Ur0T/pdisk_1.dat 2025-04-10T11:34:59.234969Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:59.266883Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:59.266998Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:59.269237Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9291 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:59.544058Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.552073Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.575703Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.670760Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:59.756765Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
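Each test node bootstrap repeats the same Hive liveness progression seen in the HIVE warnings: VolatileState moves Unknown -> Disconnected -> Connecting -> Connected as the node registers and its interconnect session comes up. Purely as an illustration of that progression (the enum and its ordering are inferred from the log, not taken from Hive's real types):

#include <cstdio>

// Node liveness states as they appear in the HIVE warnings above; the enum
// and traversal below are assumptions inferred from the log, not Hive code.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

int main() {
    // The exact sequence logged for every freshly started node in this run.
    EVolatileState path[] = {
        EVolatileState::Unknown,
        EVolatileState::Disconnected,
        EVolatileState::Connecting,
        EVolatileState::Connected,
    };
    for (int i = 0; i + 1 < 4; ++i)
        std::printf("VolatileState: %s -> %s\n", Name(path[i]), Name(path[i + 1]));
}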
2025-04-10T11:35:03.776515Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646615727396007:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:03.776595Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmpXPDhg5/pdisk_1.dat 2025-04-10T11:35:03.923757Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:03.957130Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:03.957235Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:03.959189Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9953 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:04.192610Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:04.202656Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:04.217629Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:04.293212Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:35:04.360117Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:35:08.820427Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646636174597316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:08.820515Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fe6/r3tmp/tmp7Ony0v/pdisk_1.dat 2025-04-10T11:35:08.954317Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:08.984722Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:08.984810Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:08.986051Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14417 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:09.241552Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:09.261311Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:09.333601Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:09.406534Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
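The TTLEnabledTable describe result earlier in this output carries TTLSettings { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } with a matching Delete tier. As a back-of-the-envelope model of what that setting means for a single row — a sketch only; the real eviction is driven by the schemeshard/datashard TTL machinery exercised by the TSchemeShardTTLTests below, and the row timestamp here is hypothetical:

#include <chrono>
#include <cstdio>

// Illustration of the ExpireAfterSeconds semantics from the describe result:
// a row becomes eligible for deletion once now >= modified_at + 3600s.
int main() {
    using namespace std::chrono;
    const seconds expireAfter{3600}; // TTLSettings.ExpireAfterSeconds from the log

    const auto modifiedAt = system_clock::now() - seconds{5000}; // hypothetical row age
    const auto deadline   = modifiedAt + expireAfter;
    const bool expired    = system_clock::now() >= deadline;

    std::printf("row expired: %s\n", expired ? "yes" : "no"); // prints "yes"
}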
>> TCmsTest::StateStorageLockedNodes [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-04-10T11:35:09.694626Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-04-10T11:35:09.695055Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-10T11:35:09.723766Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T11:35:09.723964Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-10T11:35:09.726397Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } 
Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-04-10T11:35:09.727374Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } 
Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP 
Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027512 } 2025-04-10T11:35:09.727687Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-10T11:35:09.727754Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T11:35:09.727875Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-10T11:35:09.727938Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-10T11:35:09.727973Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-10T11:35:09.728031Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-10T11:35:09.728067Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-10T11:35:09.728101Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-10T11:35:09.728132Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-10T11:35:09.728177Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... :CMS DEBUG: Running CleanupWalleTasks 2025-04-10T11:35:10.035059Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-10T11:35:10.035110Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-10T11:35:10.035135Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-10T11:35:10.035173Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-10T11:35:10.035201Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-10T11:35:10.035226Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-10T11:35:10.035249Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-10T11:35:10.035292Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-10T11:35:10.035660Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036108Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036207Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036313Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036369Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036443Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036517Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036572Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-04-10T11:35:10.036644Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T11:35:10.036884Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-04-10T11:35:10.036946Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-10T11:35:10.037192Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-10T11:35:10.037436Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-04-10T11:35:10.037483Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-04-10T11:35:10.050086Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-04-10T11:35:10.065848Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-10T11:35:10.065942Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-10T11:35:10.066000Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-04-10T11:35:10.066906Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T11:35:10.066988Z node 10 :CMS DEBUG: Checking action: Type: 
RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-04-10T11:35:10.067030Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-04-10T11:35:10.067145Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-04-10T11:35:10.067292Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-10T11:35:10.079539Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-04-10T11:35:10.079785Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-04-10T11:35:10.080320Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-10T11:35:10.092661Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-04-10T11:35:10.092898Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-04-10T11:35:10.156194Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-10T11:35:10.156259Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T11:35:10.156452Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-10T11:35:10.156670Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-10T11:35:10.156734Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-10T11:35:10.156776Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-10T11:35:10.156823Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-10T11:35:10.156855Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-10T11:35:10.156878Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-10T11:35:10.156899Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-10T11:35:10.156922Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-10T11:35:10.157155Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" 
Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.157655Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.157788Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.157950Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.158011Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.158061Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.158110Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.158178Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-04-10T11:35:10.158220Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T11:35:10.158432Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-10T11:35:10.158485Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-10T11:35:10.158659Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-10T11:35:10.158871Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-04-10T11:35:10.158914Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> 
TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:14.031741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:14.031824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:14.031850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:14.031874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:14.031911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:14.031941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:14.031982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:14.032048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:14.032367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:14.109680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:14.109738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:14.126400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:14.126713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:14.126983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:14.139048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:14.139358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:14.140133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:14.140562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:14.142835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:14.144049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:14.144115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:14.144293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:14.144341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:14.144377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:14.144522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.151033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:14.286471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:14.286741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.287021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:14.287294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:14.287363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.291579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:14.291738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:14.291947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.292016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:14.292057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:14.292095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:14.294863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.294944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-10T11:35:14.294986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:14.298124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.298199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.298241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:14.298291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:14.301082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:14.302926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:14.303164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:14.304100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:14.304205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:14.304252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:14.304539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:14.304582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:14.304758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:14.304837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:14.307195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:14.307248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:14.307479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:14.307528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:14.307857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.307916Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:14.308022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:14.308073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:14.308116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:14.308160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:14.308202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:14.308282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:14.308324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:14.308365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:14.308439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:14.308481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:14.308514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:14.311043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:14.311195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:14.311239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1079 } } 2025-04-10T11:35:14.663969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.664048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-10T11:35:14.664209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.664261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:14.664362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.664436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:14.664484Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:35:14.664546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:14.664596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-10T11:35:14.665386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.665428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:14.665514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.665560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:14.665633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:14.665680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:14.665709Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.665743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:14.665772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:35:14.683131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:14.683601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:14.684896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:14.686970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:14.687087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:35:14.687383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.687538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:35:14.687898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:35:14.687940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-10T11:35:14.688035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:35:14.688061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:35:14.688090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:35:14.688143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:35:14.688177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-10T11:35:14.688576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.689009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:14.689055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:14.689117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:35:14.689136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:35:14.689166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:35:14.689188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:35:14.689206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-10T11:35:14.689282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] 
message: TxId: 101 2025-04-10T11:35:14.689321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:35:14.689355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:14.689386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:14.689524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:14.689570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-10T11:35:14.689590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-10T11:35:14.689613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:35:14.689627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-10T11:35:14.689640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-10T11:35:14.689679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:35:14.692932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:14.692994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-04-10T11:35:14.693648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:14.693907Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 276us result status StatusSuccess 2025-04-10T11:35:14.694749Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:15.098098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:15.098236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.098275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:15.098309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:15.098373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:15.098429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:15.098497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.098582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:15.098905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:15.161666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:15.161753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:15.176785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:15.177002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:15.177184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:15.185336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:15.185655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:15.186535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.187058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:15.189625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.191244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.191322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.191519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:15.191575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.191618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:15.191801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.200040Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:15.317069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:15.317328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.317627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:15.317908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:15.317971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.320866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.321074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:15.321297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.321380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:15.321430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:15.321467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:15.323735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.323813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:15.323858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:15.326163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.326224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.326271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.326322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.330292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:15.332672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:15.332927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:15.334105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.334281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.334336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-10T11:35:15.334671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:15.334749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.334952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:15.335058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:15.337598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.337647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.337886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.337945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:15.338217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.338283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:15.338398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.338458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.338527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.338563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.338606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:15.338663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.338711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:15.338744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:15.338822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:15.338864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:15.338899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:15.341301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.341418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.341461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T11:35:15.589633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.589701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.589893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:15.590112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.590178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:35:15.590229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:35:15.590857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.590924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:35:15.592052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:15.592166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:15.592215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:15.592258Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:35:15.592303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:15.593306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:15.593395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:15.593429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:15.593459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:35:15.593502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:15.593586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:35:15.595992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1361 } } 2025-04-10T11:35:15.596041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:15.596168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1361 } } 2025-04-10T11:35:15.596285Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1361 } } 2025-04-10T11:35:15.597732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:15.597817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:15.597976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:15.598035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:15.598124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:15.598258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.598299Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.598352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:15.598403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:35:15.599371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:15.599443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:15.601146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at 
schemeshard: 72057594046678944 2025-04-10T11:35:15.601312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.601645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.601710Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:15.601826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:15.601885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:15.601928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:15.601962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:15.602001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:35:15.602078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 101 2025-04-10T11:35:15.602134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:15.602222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:15.602263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:15.602454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:15.604514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:15.604582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2315] TestWaitNotification: OK eventTxId 101 2025-04-10T11:35:15.605229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:15.605542Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 291us result status StatusSuccess 2025-04-10T11:35:15.606053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:15.515442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:15.515529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.515566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:15.515597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:15.515638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:15.515678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:15.515755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.515837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:15.516140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:15.596068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:15.596138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:15.610850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-10T11:35:15.611075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:15.611250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:15.619270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:15.619484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:15.620174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.620569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:15.622554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.623940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.624007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.624193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:15.624235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.624274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:15.624444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.629938Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:15.749325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:15.749577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.749814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:15.750047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:15.750100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.752433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.752586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:15.752740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:35:15.752814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:15.752848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:15.752881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:15.754859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.754931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:15.754968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:15.756757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.756810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.756858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.756900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.760642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:15.762723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:15.762934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:15.764004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.764154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.764209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.764545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:15.764604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.764782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:15.764862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-10T11:35:15.767284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.767332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.767530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.767599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:15.767856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.767900Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:15.768013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.768050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.768096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.768132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.768173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:15.768231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.768272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:15.768307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:15.768375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:15.768432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:15.768465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:15.770943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.771064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.771122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:15.771176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:15.771237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:15.771340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:15.774521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:15.775096Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:15.778706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:15.779053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.779187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-04-10T11:35:15.779630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-10T11:35:15.780057Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:35:15.798527Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:35:15.799322Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:15.801824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.802007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T11:35:15.803363Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:15.098327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:15.098424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.098464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:15.098498Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:15.098546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:15.098592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:15.098658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.098747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:15.099094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:15.184527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:15.184605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:15.201301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:15.201596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:15.201780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:15.210390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:15.210601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:15.211399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.211812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:15.214085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.215544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.215613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.215822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:15.215883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.215931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:15.216142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.223620Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:15.357801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:15.358019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-04-10T11:35:15.358231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:15.358494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:15.358558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.360840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.361001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:15.361199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.361246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:15.361277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:15.361303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:15.363512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.363599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:15.363641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:15.365729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.365788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.365834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.365916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.369749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:15.371623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:15.371811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:15.372668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-04-10T11:35:15.372808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.372871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.373118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:15.373170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.373352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:15.373464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:15.375798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.375848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.376086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.376146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:15.376409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.376455Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:15.376574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.376639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.376717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.376798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.376838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:15.376899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.376942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:15.376990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:15.377058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:15.377109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:15.377158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:15.379617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.379732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.379771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ntStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-10T11:35:15.747412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.747526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.747585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:35:15.747901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:35:15.748076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:15.752506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.752568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:15.752899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.752964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:35:15.753646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.753707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:35:15.754287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:15.754393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:15.754435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:15.754494Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:35:15.754540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-10T11:35:15.754637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:35:15.758266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:15.781207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1411 } } 2025-04-10T11:35:15.781276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:15.781426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1411 } } 2025-04-10T11:35:15.781527Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1411 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:15.782516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:15.782571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:15.782708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:15.782764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:15.782855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:15.782955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.783004Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.783057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:15.783109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:35:15.785762Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.786216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.786545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.786609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:15.786727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:15.786766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:15.786811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:15.786863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:15.786927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:15.787005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 102 2025-04-10T11:35:15.787056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:15.787109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:15.787153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:15.787282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:15.789001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:15.789053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:507:2432] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:15.789655Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:15.789943Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 297us result status StatusSuccess 2025-04-10T11:35:15.790556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: 
EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:15.580743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:15.580832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.580870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:15.580905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:15.580953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:15.581013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:15.581084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:15.581168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:15.581503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:15.665611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:15.665694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:15.681444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:15.681759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:15.681948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:15.690760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:15.691022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:15.691826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.692280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:15.694844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.696552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.696629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.696883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:15.696964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.697040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:15.697309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.705892Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:15.892950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:15.893252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.893523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:15.893869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:15.893951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.896641Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.896779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:15.896965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.897047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:15.897084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:15.897117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:15.898792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.898844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:15.898880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:15.900395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.900442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.900497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.900546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.904057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:15.905646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:15.905846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:15.906769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:15.906895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:15.906941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.907216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:15.907271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:15.907441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:15.907514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:15.909245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:15.909289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:15.909454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:15.909492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:15.909688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:15.909730Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:15.909818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.909867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.909915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:15.909950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.909995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:15.910052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:15.910101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:15.910136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:15.910210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:15.910249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:15.910283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:15.912306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.912405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:15.912448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T11:35:16.245733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.245856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:16.245935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-10T11:35:16.247200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:35:16.247423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:35:16.253284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:16.253356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:16.253652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.253699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:35:16.254136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.254219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:35:16.255054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:16.255297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:16.255346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:16.255384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:35:16.255427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:16.255510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:16.256213Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1257 } } 2025-04-10T11:35:16.256271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:16.256382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1257 } } 2025-04-10T11:35:16.256475Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1257 } } 2025-04-10T11:35:16.257065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:16.257109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:16.257259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:16.257389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:16.257478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:16.257539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.257584Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.257631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:16.257674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:35:16.261308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:16.261517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.261615Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.261699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.261737Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:16.261834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:16.261869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:16.261903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:16.261930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:16.261960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:16.262020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T11:35:16.262061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:16.262118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:16.262167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:16.262303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:16.263674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:16.263712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2365] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:16.264091Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:16.264314Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 177us result status StatusSuccess 2025-04-10T11:35:16.264699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 
0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::ReplySizeExceeded >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardTTLTests::ConditionalErase |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |96.3%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::TtlTiersValidation >> TSchemeShardTTLTests::CheckCounters >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:11.298311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:11.298439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:11.298480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:11.298512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:11.299220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:11.299283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:11.299440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:11.299544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:11.300845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:11.380917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:11.380991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:11.395539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:11.395797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:11.395984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:11.406272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:11.406496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:11.410057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.411127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:11.415670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.422517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:11.422591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.423246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:11.423301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:11.423384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:11.423569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.429833Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:11.542556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:11.544041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.544832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:11.545956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:11.546021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.548851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.549011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:11.549190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.549241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:11.549275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:11.549306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:11.551299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.551357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-10T11:35:11.551398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:11.553176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.553221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.553261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.553302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.557936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:11.560010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:11.560875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:11.561623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.561718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:11.561753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.562890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:11.562967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.563794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:11.563946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:11.567816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:11.567878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:11.568084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.568155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:11.568500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.568613Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:11.568692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:11.568731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.568765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:11.568799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.568835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:11.568888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.568935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:11.568984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:11.569046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:11.569080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:11.569120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:11.571007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:11.571095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:11.571129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
lete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.562639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.562745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.562833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.562906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.563081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.563562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.563724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.563908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.563998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.564736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:17.564863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:17.564898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:17.564942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:17.564988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:17.565024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:35:17.565107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2812:4077] message: TxId: 101 2025-04-10T11:35:17.565154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:17.565213Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:17.565261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:17.566659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-10T11:35:17.570464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:17.570527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2813:4078] TestWaitNotification: OK eventTxId 101 2025-04-10T11:35:17.571191Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:17.571443Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 329us result status StatusSuccess 2025-04-10T11:35:17.572093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 
72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1744284917.572836 1769135 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-10T11:35:17.575532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:17.575804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.576273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-10T11:35:17.579073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:17.579230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> KqpLimits::TooBigColumn-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:18.634520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:18.634648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.634702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:18.634764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:18.634815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:18.634865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:18.634937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.635035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:18.635429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:18.719145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:18.719220Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-10T11:35:18.736053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:18.736232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:18.736364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:18.743904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:18.744125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:18.744823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.745210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:18.747177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.748489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.748558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.748747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:18.748788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.748844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:18.749008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.755325Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:18.877873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:18.878070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.878318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:18.878520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:18.878563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.880632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.880767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:18.880917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.880963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:18.881012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:18.881042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:18.882757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.882811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:18.882844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:18.884431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.884477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.884513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.884570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.888166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:18.889752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:18.889945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:18.890826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.890930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:18.890970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.891211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:18.891253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.891415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:18.891491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:18.893353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.893394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.893555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.893602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:18.893805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.893841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:18.893928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.893977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.894031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.894063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.894098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:18.894173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.894212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:18.894242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:18.894296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:18.894334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:18.894364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:18.896498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.896610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.896671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nc, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:35:19.459743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-10T11:35:19.459868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:35:19.460089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-10T11:35:19.460128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-10T11:35:19.460163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-10T11:35:19.460411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.460516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.460598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-10T11:35:19.460644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-10T11:35:19.462600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.462653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T11:35:19.462743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:35:19.462788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:35:19.462836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:35:19.462866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:35:19.462899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready 
parts: 1/1, is published: true 2025-04-10T11:35:19.462966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-04-10T11:35:19.463008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:35:19.463040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-10T11:35:19.463069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T11:35:19.463152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-10T11:35:19.464848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T11:35:19.464927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T11:35:19.465009Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-10T11:35:19.465079Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:35:19.466502Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:35:19.466583Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-04-10T11:35:19.466636Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:35:19.467801Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:35:19.467857Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:35:19.467882Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T11:35:19.467984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:19.468026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2434] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:19.468528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:19.468767Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 244us result status StatusSuccess 2025-04-10T11:35:19.469205Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:18.824682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:18.824772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.824805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:18.824836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:18.824875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:18.824913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:18.824980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.825059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:18.825388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:18.899990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:18.900061Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-04-10T11:35:18.912847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:18.913020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:18.913150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:18.918357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:18.918503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:18.918945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.919205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:18.920448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.921525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.921599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.921737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:18.921776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.921810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:18.921924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.926800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.044534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.044766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.045003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.045238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.045294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.047555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.047711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.047879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.047939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.047973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.048005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.049786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.049837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.049875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.051528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.051569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.051626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.051677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.055159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.056790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.057003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.057910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.058018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.058068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.058335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.058397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.058568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.058665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.060444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.060488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.060660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.060704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.060925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.060984Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.061073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.061130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.061167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.061197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.061236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.061288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.061326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.061376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.061431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.061468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.061498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.063583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.063660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.063688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-10T11:35:19.364056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-04-10T11:35:19.364446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.364552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.364613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-10T11:35:19.364859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-04-10T11:35:19.365003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:19.368281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.368344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:19.368571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.368613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-10T11:35:19.369010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.369079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:35:19.369534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:35:19.369655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-10T11:35:19.369692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-10T11:35:19.369731Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:35:19.369766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:19.369839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready 
parts: 0/1, is published: true 2025-04-10T11:35:19.372508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-10T11:35:19.384255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1085 } } 2025-04-10T11:35:19.384314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:19.384433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1085 } } 2025-04-10T11:35:19.384512Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1085 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-10T11:35:19.385107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:35:19.385161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:19.385258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:35:19.385293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:19.385346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-10T11:35:19.385388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.385431Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.385455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:19.385499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-10T11:35:19.387111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.387939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.388186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.388218Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-10T11:35:19.388288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:35:19.388310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:35:19.388347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-10T11:35:19.388369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:35:19.388393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-10T11:35:19.388436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 104 2025-04-10T11:35:19.388474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-10T11:35:19.388522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-10T11:35:19.388552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-10T11:35:19.388624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:19.389818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:35:19.389851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:442:2413] TestWaitNotification: OK eventTxId 104 2025-04-10T11:35:19.390470Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:19.390683Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 226us result status StatusSuccess 2025-04-10T11:35:19.391068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:19.198999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:19.199089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.199121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:19.199142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:19.199173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:19.199205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:19.199262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.199321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:19.199586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:19.259147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:19.259200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.274208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:19.274517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-10T11:35:19.274667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:19.283069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:19.283313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:19.284086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.284495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:19.286661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.288131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.288218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.288421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:19.288477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.288527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:19.288711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.295547Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.415086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.415294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.415461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.415664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.415715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.417764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.417944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.418181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.418248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.418288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.418325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.420208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.420264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.420299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.421917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.421965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.422011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.422056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.425165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.426844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.427025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.427806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.427913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.427992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.428227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.428271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.428407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.428463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.430354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T11:35:19.430397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.430546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.430585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.430761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.430793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.430878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.430922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.430955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.430979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.431012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.431064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.431107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.431141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.431204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.431234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.431263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.432847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.432941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.432987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
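The two ALTER TABLE proposes rejected later in this test (TxId 102 and 103) pin down the TTL tier rules this suite validates: only the last tier in TTLSettings may carry a Delete action, and EvictToExternalStorage is not accepted for row-oriented tables at all. A tier configuration consistent with both checks, sketched here from the TTLSettings protos quoted in this log rather than taken from the test run itself, would be:

Transaction {
  WorkingDir: "/MyRoot"
  OperationType: ESchemeOpAlterTable
  AlterTable {
    Name: "TTLEnabledTable"
    TTLSettings {
      Enabled {
        ColumnName: "modified_at"
        # A single Delete tier is trivially the last tier and uses no
        # EvictToExternalStorage, so it satisfies both validations quoted below.
        Tiers { ApplyAfterSeconds: 3600 Delete { } }
      }
    }
  }
}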
9.608758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:19.608958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.609016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:35:19.609056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:35:19.609633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.609699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:35:19.610622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:19.610735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:19.610781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:19.610816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:35:19.610851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.611694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:19.611756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:19.611777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:19.611800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:35:19.611850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:19.611934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:35:19.614218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1290 } } 2025-04-10T11:35:19.614259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:19.614366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1290 } } 2025-04-10T11:35:19.614448Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1290 } } 2025-04-10T11:35:19.622910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:19.623001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:19.623154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:19.623199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:19.623279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:19.623323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.623354Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.623396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:19.623426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:35:19.624120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:19.624168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:19.625647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.625780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.626087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.626171Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:19.626312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:19.626366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:19.626408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:19.626439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:19.626477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:35:19.626546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 101 2025-04-10T11:35:19.626605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:19.626651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:19.626681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:19.626813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:19.631159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:19.631247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-10T11:35:19.635532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.635805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.636239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-04-10T11:35:19.638986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.639225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-10T11:35:19.643491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } 
Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.643767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.644176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-04-10T11:35:19.646802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.647011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:19.352987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:19.353105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.353162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:19.353216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:19.353264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:19.353311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:19.353373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.353465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:19.353832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:19.441489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:19.441569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.457818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:19.458117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:19.458343Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:19.466438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:19.466666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:19.467424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.467827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:19.469845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.471242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.471317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.471513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:19.471568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.471617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:19.471796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.479077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.631786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.632053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.632298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.632555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.632614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.635238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.635427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.635638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.635699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-04-10T11:35:19.635739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.635779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.638055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.638167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.638217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.640331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.640392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.640437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.640493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.644492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.647124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.647314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.648275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.648431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.648488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.648775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.648830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.649048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.649151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.651476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-10T11:35:19.651532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.651732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.651797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.652056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.652110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.652238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.652280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.652327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.652364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.652407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.652472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.652517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.652570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.652648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.652689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.652726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.655221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.655358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.655404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:19.655677Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:19.655733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.655841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:19.659061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:19.659671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 
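In the propose that follows, CreateTable is rejected with StatusSchemeError, reason "TTL status must be specified", because its TTLSettings message is empty. Read together with the Enabled { ColumnName: ... } shape used by TtlTiersValidation above, a minimal request that states the status explicitly would take roughly this form (a sketch assembled from protos elsewhere in this output, not emitted by this run):

CreateTable {
  Name: "TTLEnabledTable"
  Columns { Name: "key" Type: "Uint64" }
  Columns { Name: "modified_at" Type: "Timestamp" }
  KeyColumnNames: "key"
  # An empty TTLSettings { } fails the check below; naming the Enabled
  # status and its column is the minimal way to pass it.
  TTLSettings { Enabled { ColumnName: "modified_at" } }
}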
TestModificationResults wait txId: 101 2025-04-10T11:35:19.663679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.664073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.664180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-04-10T11:35:19.664659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-04-10T11:35:19.665117Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:35:19.685532Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:35:19.686427Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:19.689184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.689362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T11:35:19.690804Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:19.132402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:19.132511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.132549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:19.132582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:19.132625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2025-04-10T11:35:19.132671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:19.132729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.132803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:19.133155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:19.207132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:19.207195Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.219584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:19.219857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:19.220030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:19.227406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:19.227627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:19.228268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.228599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:19.230465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.231687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.231754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.231942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:19.231991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.232033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:19.232202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.238382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.345252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.345438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.345614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-04-10T11:35:19.345779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.345818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.347635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.347755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.347878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.347919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.347944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.347967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.349375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.349417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.349441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.350764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.350804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.350839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.350873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.358304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.360145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.360302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.361233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.361335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.361375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.361647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.361707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.361889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.362008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.363908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.363948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.364122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.364161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.364342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.364369Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.364454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.364505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.364546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.364575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.364614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.364665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.364690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.364717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.364769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.364803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.364829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.366912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.367015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.367053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:35:19.677930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-10T11:35:19.688184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:19.689092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:19.690615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1552 } } 2025-04-10T11:35:19.690674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:19.690873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1552 } } 2025-04-10T11:35:19.691027Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1552 } } 2025-04-10T11:35:19.691697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 409 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.691754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-10T11:35:19.691926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 409 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.691985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:19.692083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 409 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.692152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, 
txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.692204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-10T11:35:19.695516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.695904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.708442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.708525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:19.708672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.708717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:19.708811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:19.708869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.708906Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.708950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:19.709026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:19.709084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:35:19.711344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.711878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.711942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-10T11:35:19.712046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-10T11:35:19.712090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-10T11:35:19.712218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 
Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-10T11:35:19.712276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-10T11:35:19.714570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.714644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:19.714753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:19.714792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:19.714844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:19.714915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:19.714969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:19.715060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] message: TxId: 102 2025-04-10T11:35:19.715140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:19.715220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:19.715261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:19.715449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:35:19.715519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:19.717639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:19.717698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:437:2397] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:19.718354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:19.718638Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 279us result status StatusSuccess 2025-04-10T11:35:19.719228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:19.569735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:19.569808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.569833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:19.569855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:19.569886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:19.569923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:19.569965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.570055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:19.570387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:19.648798Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:19.648864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.663566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:19.663775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:19.663942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:19.671034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:19.671253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:19.671835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.672211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:19.674174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.675375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.675441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.675608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:19.675653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.675686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:19.675834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.685386Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.805293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.805465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.805626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.805830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.805873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.807747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.807901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.808068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.808120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.808150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.808171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.809853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.809898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.809926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.811306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.811340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.811368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.811401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.813795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.815048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.815220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.815861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.815944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.815973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.816250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.816283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.816399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.816447Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.817908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.817941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.818068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.818113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.818294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.818321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.818384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.818418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.818445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.818468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.818501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.818542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.818569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.818591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.818632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.818666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.818696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.820242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.820313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.820340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:19.820382Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:19.820411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.820491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, 
txId: 1, subscribers: 0 2025-04-10T11:35:19.822354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:19.822793Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:19.825279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.825521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.825601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-04-10T11:35:19.825882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-04-10T11:35:19.826138Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:35:19.839261Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:35:19.839791Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:19.841617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.841750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T11:35:19.842747Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3504, MsgBus: 23859 2025-04-10T11:34:45.524642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646538025936094:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:45.525050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2gt4/001736/r3tmp/tmp4ufkiG/pdisk_1.dat 2025-04-10T11:34:45.963876Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:45.971672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:45.971800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:45.975518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3504, node 1 2025-04-10T11:34:46.085258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:46.085280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:46.085296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:46.085462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23859 TClient is connected to server localhost:23859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:46.704711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:46.738839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:49.054894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646555205805799:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:49.055007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646555205805807:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:49.055060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:49.059634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:34:49.069511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646555205805813:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:34:49.152694Z node 1 :TX_PROXY ERROR: Actor# [1:7491646555205805864:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:49.447400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6616, MsgBus: 29809 2025-04-10T11:34:50.561839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646560347274941:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001736/r3tmp/tmpvofLxw/pdisk_1.dat 2025-04-10T11:34:50.638404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:34:50.684659Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6616, node 2 2025-04-10T11:34:50.739406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:50.739524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:50.742371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:50.773172Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:50.773196Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:50.773201Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:50.773308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29809 TClient is connected to server localhost:29809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:51.243832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:34:51.254931Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:53.840483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646573232177297:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.840556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646573232177330:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.840691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:53.845152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:34:53.856972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646573232177334:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:34:53.940592Z node 2 :TX_PROXY ERROR: Actor# [2:7491646573232177385:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:53.968543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:34:54.118526Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491646577527144845:2349], SessionActorId: [2:7491646577527144831:2349], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[2:7491646577527144831:2349]. isRollback=0 2025-04-10T11:34:54.118840Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWYyMWY1OGYtYjljOGQ1NzYtNDIyYjU5MzktOWQ4MzQwN2Y=, ActorId: [2:7491646577527144831:2349], ActorState: ExecuteState, TraceId: 01jrfqpgt62zchvnzxbfns1rx7, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7491646577527144846:2349] from: [2:7491646577527144845:2349] 2025-04-10T11:34:54.118927Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7491646577527144846:2349] TxId: 281474976710661. Ctx: { TraceId: 01jrfqpgt62zchvnzxbfns1rx7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWYyMWY1OGYtYjljOGQ1NzYtNDIyYjU5MzktOWQ4MzQwN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-10T11:34:54.119089Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7491646577527144849:2349], TxId: 281474976710661, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWYyMWY1OGYtYjljOGQ1NzYtNDIyYjU5MzktOWQ4MzQwN2Y=. CustomerSuppliedId : . TraceId : 01jrfqpgt62zchvnzxbfns1rx7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7491646577527144846:2349], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:54.119695Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWYyMWY1OGYtYjljOGQ1NzYtNDIyYjU5MzktOWQ4MzQ ... uppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491646597584223716:2404], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T11:34:59.837202Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWQwMjgwZTUtY2YyYzZlMjctMWNhNDQ3M2QtY2FmZDU2MTA=, ActorId: [3:7491646593289255712:2404], ActorState: ExecuteState, TraceId: 01jrfqpnh49685g5p4h6a03b4d, Create QueryResponse for error on request, msg: VERIFY failed (2025-04-10T11:34:59.842668Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:372, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Error: Stream write queries aren't allowed., code: 2029 library/cpp/testing/unittest/registar.cpp:37 RaiseError(): requirement UnittestThread failed 2025-04-10T11:34:59.965264Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646576109384849:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:59.965363Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:10.068287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:35:10.068324Z node 3 :IMPORT WARN: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x196FCB18 1. /-S/util/system/yassert.cpp:55: Panic @ 0x196EADAA 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:37: RaiseError @ 0x19B7A091 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:372: AssertSuccessResult @ 0x18EEB5A6 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:348: CreateSampleTables @ 0x492FEE76 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:544: operator() @ 0x493406A3 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x493406A3 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x493406A3 8. /-S/util/thread/pool.h:71: Process @ 0x493406A3 9. /-S/util/thread/pool.cpp:411: DoExecute @ 0x1970FCC5 10. /-S/util/thread/factory.h:15: Execute @ 0x1970C85C 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x1970C85C 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x19700F64 13. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x193B27F8 14. ??:0: ?? @ 0x7FCFE5BD2AC2 15. ??:0: ?? @ 0x7FCFE5C6484F Trying to start YDB, gRPC: 21053, MsgBus: 9837 2025-04-10T11:35:14.302228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646660983734558:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:14.302396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001736/r3tmp/tmpGzB835/pdisk_1.dat 2025-04-10T11:35:14.644473Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21053, node 1 2025-04-10T11:35:14.709107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:14.709206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:14.711319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:14.721815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:14.721837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:14.721842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:14.721976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9837 TClient is connected to server localhost:9837 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:15.250506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:15.279709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:15.426599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:15.577432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:15.640343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:17.335281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646673868638236:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:17.335719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:17.646816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.684730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.719176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.754515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.786514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.858790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:17.905741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646673868638752:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:17.905821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:17.906041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646673868638757:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:17.909757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:17.919912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646673868638759:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:35:18.005747Z node 1 :TX_PROXY ERROR: Actor# [1:7491646678163606108:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:19.302276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646660983734558:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:19.302356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:19.614596Z node 1 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-04-10T11:35:19.614755Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-04-10T11:35:19.614951Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491646682458573723:2488] TxId: 281474976715671. Ctx: { TraceId: 01jrfqq9dva30sy79fe1gkmgq9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhlNzI2YzMtZGRmMjIzMWMtYTAyY2YzZjMtNjM1NWE2ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-04-10T11:35:19.624310Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDhlNzI2YzMtZGRmMjIzMWMtYTAyY2YzZjMtNjM1NWE2ODI=, ActorId: [1:7491646682458573665:2488], ActorState: ExecuteState, TraceId: 01jrfqq9dva30sy79fe1gkmgq9, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> TColumnEngineTestLogs::IndexTtl >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> TestProgram::CountWithNulls >> TestProgram::CountWithNulls [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] >> TColumnEngineTestLogs::IndexTtl [GOOD] >> TestProgram::JsonValue |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; 
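The EXEC_ERROR above rejects a 20971522-byte column value. That is exactly 2 bytes over 20 MiB (20 * 1024 * 1024 = 20971520), which suggests the test deliberately crosses a 20 MiB per-value cap. Below is a minimal sketch of such a pre-write size check; MaxValueSizeBytes is a hypothetical constant, since the actual datashard limit and its symbol are not visible in this log:

    #include <cstdint>
    #include <optional>
    #include <string>

    // Assumed limit: the log only shows that 20971522 bytes is rejected,
    // which is 2 bytes over 20 MiB (20 * 1024 * 1024 = 20971520).
    constexpr uint64_t MaxValueSizeBytes = 20ull * 1024 * 1024;

    // Returns an error message mirroring the datashard's BAD_ARGUMENT text,
    // or std::nullopt when the value fits under the threshold.
    std::optional<std::string> CheckColumnValueSize(uint64_t valueSize) {
        if (valueSize > MaxValueSizeBytes) {
            return "Transaction write column value of " + std::to_string(valueSize) +
                   " bytes is larger than the allowed threshold";
        }
        return std::nullopt;
    }

    // CheckColumnValueSize(20971522) yields the error text seen above;
    // CheckColumnValueSize(20971520) passes.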
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:17.714331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:17.714445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.714491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:17.714523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:17.714564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:17.714605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:17.714666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.714755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:17.715145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:17.795018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:17.795079Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:17.809706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:17.809971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:17.810141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:17.817269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
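The program_parsed JSON above encodes the CountWithNulls program as a DAG: node 1 fetches the uid column, node 2 assembles it, node 0 applies the aggregation kernel, node 3 projects column 10001, and the red chain N1->N2->N0->N3 in the digraph is the resulting execution order. The sketch below shows how such an order can be derived from the per-node "inputs" lists with Kahn's topological sort; the Node struct is an illustrative stand-in, not YDB's actual graph classes:

    #include <map>
    #include <queue>
    #include <vector>

    // Simplified stand-in for the parsed program graph: each node lists the
    // node ids it consumes, matching the "inputs" arrays in the JSON above.
    struct Node { int id; std::vector<int> inputs; };

    std::vector<int> TopoOrder(const std::vector<Node>& nodes) {
        std::map<int, std::vector<int>> consumers; // producer id -> dependents
        std::map<int, int> pending;                // node id -> unmet inputs
        for (const auto& n : nodes) {
            pending[n.id] = static_cast<int>(n.inputs.size());
            for (int in : n.inputs) consumers[in].push_back(n.id);
        }
        std::queue<int> ready;
        for (const auto& [id, deg] : pending) if (deg == 0) ready.push(id);
        std::vector<int> order;
        while (!ready.empty()) {
            int id = ready.front(); ready.pop();
            order.push_back(id);
            for (int c : consumers[id]) if (--pending[c] == 0) ready.push(c);
        }
        return order;
    }

    // For the program above, {1:[], 2:[1], 0:[2], 3:[0]} yields 1, 2, 0, 3 --
    // the same chain the digraph prints in red.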
2025-04-10T11:35:17.817465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:17.818079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.818495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:17.820396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.821669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:17.821726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.821922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:17.821968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:17.822009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:17.822192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.828543Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:17.958019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:17.958345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.958632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:17.958891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:17.958945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.961833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.961957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:17.962112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.962170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:17.962207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:17.962236Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:17.964782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.964839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:17.964864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:17.966340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.966377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.966406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.966438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.968900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:17.970548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:17.970796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:17.971822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.971968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:17.972044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.972314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:17.972370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.972556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:17.972645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:17.974785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:17.974829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
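The ConditionalErase log traces the schemeshard operation through numeric states: 2 -> 3 after TCreateParts, 3 -> 128 after TConfigureParts, and 128 -> 240 once TEvOperationPlan arrives. A compact sketch of that staged progression follows; only the numbers come from the log, and the enum names are assumptions inferred from the surrounding progress-state messages:

    // Illustrative only: values taken from the "Change state for txid 1:0"
    // lines above; names guessed from TCreateParts / TConfigureParts /
    // TPropose / TDone in the adjacent log records.
    enum class EOpState : int {
        CreateParts    = 2,
        ConfigureParts = 3,
        Propose        = 128,
        Done           = 240,
    };

    EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done;
            case EOpState::Done:           return EOpState::Done;
        }
        return s; // unreachable for valid states
    }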
2025-04-10T11:35:17.974992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.975039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:17.975302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.975345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:17.975438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:17.975487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.975524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:17.975554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.975597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:17.975651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.975684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:17.975715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:17.975773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:17.975809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:17.975840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:17.983518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:17.983684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:17.983732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sage 0.0003 2025-04-10T11:35:21.278493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:21.279168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:21.280486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 6] state 'Ready' dataSize 627 rowCount 2 cpuUsage 0 2025-04-10T11:35:21.399354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T11:35:21.399451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.399585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.399712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640243000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.399809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640243000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.399873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640243000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.399951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640243000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.400024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040243000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.400108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640243000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:21.400902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:35:21.401419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 
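Each conditional-erase request above carries an Expiration threshold (WallClockTimestamp, interpreted per ColumnUnit) plus batch Limits. A sketch of the implied erase predicate, assuming UNIT_MICROSECONDS means the TTL column stores a microsecond epoch value; the type and function names are illustrative, not YDB's API:

    #include <cstdint>

    struct TEraseLimits {
        uint64_t BatchMaxBytes; // 512000 in the requests above
        uint32_t BatchMinKeys;  // 1
        uint32_t BatchMaxKeys;  // 256
    };

    // A row qualifies for conditional erase once its TTL column value falls
    // at or before the schemeshard-supplied wall-clock threshold.
    bool IsExpired(uint64_t ttlColumnMicros, uint64_t wallClockMicros) {
        return ttlColumnMicros <= wallClockMicros;
    }

    // IsExpired(rowValue, 1600466640243000ull) decides erasure for the
    // UNIT_MICROSECONDS tables in the requests above.

Rows that satisfy the predicate are then deleted in batches bounded by BatchMaxBytes / BatchMinKeys / BatchMaxKeys (512000 / 1 / 256 here), which keeps any single erase pass within a fixed I/O budget.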
2025-04-10T11:35:21.401778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:21.402076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-10T11:35:21.402285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:21.402450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-10T11:35:21.402557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.402602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:35:21.402946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.402979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:35:21.408287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.408333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-10T11:35:21.408808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.408839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:21.409591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.409624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:21.409947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.410071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.410116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.410198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.410272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.410298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.412246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.412596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 
2025-04-10T11:35:21.412661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.412851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.412958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.413031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.413056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.413085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.479585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-04-10T11:35:21.479768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2025-04-10T11:35:21.479856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2025-04-10T11:35:21.480018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2025-04-10T11:35:21.480060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2025-04-10T11:35:21.480117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:35:21.480149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:21.480186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:35:21.480217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:21.480251Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 627 row count 2 2025-04-10T11:35:21.480311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2025-04-10T11:35:21.492471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.492521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-10T11:35:21.493968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:21.494077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:21.494120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.248000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:21.494183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8416;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8408;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING; ... 
ALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38392;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); 
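The IsVisible traces follow a classic MVCC rule: a portion is visible at a read snapshot when its minimum snapshot is at or before the read point and it has not been removed as of that point. Portions 1..10 above carry remove_snapshot=(plan_step=1,tx_id=1), which compares <= the read snapshot (plan_step=3,tx_id=1), so they log visible=0; portions 11..20 have no remove snapshot and log visible=1, after which they are portion_selected. A minimal sketch of that check; the field names mirror the log, but this is not the actual portion_info.h code:

    #include <cstdint>
    #include <optional>
    #include <tuple>

    struct TSnapshot {
        uint64_t PlanStep = 0;
        uint64_t TxId = 0;
        bool LessOrEqual(const TSnapshot& o) const {
            return std::tie(PlanStep, TxId) <= std::tie(o.PlanStep, o.TxId);
        }
    };

    bool IsVisible(const TSnapshot& minSnapshot,
                   const std::optional<TSnapshot>& removeSnapshot,
                   const TSnapshot& readSnapshot) {
        // Created at or before the read point...
        if (!minSnapshot.LessOrEqual(readSnapshot)) return false;
        // ...and not removed as of the read point.
        return !removeSnapshot || !removeSnapshot->LessOrEqual(readSnapshot);
    }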
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38328;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::JsonValue [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); 
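The paired trace records above follow a fixed pattern: portion_info.h:377 (component 332) logs an IsVisible decision per portion, comparing the portion's min_schema_snapshot (plan_step=1, tx_id=1) against the read snapshot (plan_step=3, tx_id=1). Every portion here reports visible=1, consistent with snapshots ordering lexicographically by (plan_step, tx_id). A minimal sketch of that comparison, using hypothetical type names rather than the real YDB definitions:

```cpp
#include <cstdint>
#include <tuple>

// Hypothetical stand-ins for the fields printed by portion_info.h:377;
// the actual YDB types in ydb/core/tx/columnshard are richer.
struct TSnapshot {
    uint64_t PlanStep = 0;
    uint64_t TxId = 0;
};

// Snapshots compare lexicographically by (PlanStep, TxId); a portion whose
// min_schema_snapshot is at or before the read snapshot is visible to it.
// In the trace: (1, 1) <= (3, 1), hence visible=1 for all 20 portions.
bool IsVisible(const TSnapshot& minSchemaSnapshot, const TSnapshot& readSnapshot) {
    return std::tie(minSchemaSnapshot.PlanStep, minSchemaSnapshot.TxId)
        <= std::tie(readSnapshot.PlanStep, readSnapshot.TxId);
}
```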
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; 
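The simple_arrays_cache.h records in this output (a single insert_to_cache for key=uint64::0, then repeated slice_from_cache hits) suggest a cache of constant arrays: the first request materializes the array, and later requests slice from the cached copy instead of rebuilding it. A greatly simplified sketch under that assumption; the key format and API here are guesses, not the real simple_arrays_cache.h interface:

```cpp
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <vector>

// Illustrative constant-array cache keyed by "type::value" (e.g. "uint64::0").
class TSimpleArraysCache {
    std::map<std::string, std::shared_ptr<std::vector<uint64_t>>> Cache;

public:
    std::vector<uint64_t> Get(const std::string& key, uint64_t value, size_t records) {
        auto it = Cache.find(key);
        if (it == Cache.end() || it->second->size() < records) {
            // event=insert_to_cache: build (or grow) the constant array once.
            it = Cache.insert_or_assign(
                key, std::make_shared<std::vector<uint64_t>>(records, value)).first;
        }
        // event=slice_from_cache: serve the request from the cached array.
        return {it->second->begin(), it->second->begin() + records};
    }
};
```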
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); 
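Visibility alone does not decide what gets read: every portion in both predicate tests reports visible=1, yet column_engine_logs.cpp:460 (component 334) selects portions 1 through 9 and skips 10 through 20. Predicate pruning is therefore a second, independent filter applied only to visible portions; the StrPK variant shows the identical split and differs only in the primary-key type. A self-contained sketch of that two-stage loop, with the predicate as a hypothetical callback (in these tests it is the primary-key range check):

```cpp
#include <cstdint>
#include <functional>
#include <tuple>
#include <vector>

// Minimal stand-ins mirroring the fields printed in the trace.
struct TSnapshot { uint64_t PlanStep = 0; uint64_t TxId = 0; };
struct TPortion  { uint64_t Id = 0; TSnapshot MinSchemaSnapshot; };

bool IsVisible(const TPortion& p, const TSnapshot& read) {
    return std::tie(p.MinSchemaSnapshot.PlanStep, p.MinSchemaSnapshot.TxId)
        <= std::tie(read.PlanStep, read.TxId);
}

// Two-stage filter matching the paired records above: component 332 logs the
// IsVisible outcome; component 334 then logs portion_selected or
// portion_skipped depending on the predicate.
std::vector<TPortion> SelectPortions(
        const std::vector<TPortion>& portions,
        const TSnapshot& readSnapshot,
        const std::function<bool(const TPortion&)>& matchesPredicate) {
    std::vector<TPortion> selected;
    for (const auto& p : portions) {
        if (!IsVisible(p, readSnapshot)) {
            continue;                       // never logged as selected/skipped
        }
        if (matchesPredicate(p)) {
            selected.push_back(p);          // event=portion_selected
        }                                   // else: event=portion_skipped
    }
    return selected;
}
```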
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, 
label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\" ... 30\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; 
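The graph_constructed record above lays the JSON_VALUE program out as a five-node DAG: a Const node emitting the path "$.key" as column 15, FetchOriginalData and AssembleOriginalData materializing the json_string column (id 5), a Calculation node applying the YQL kernel over columns 5 and 15 to produce column 16, and a final Projection of column 16. The red chain N0->N2->N3->N1->N4 in the digraph is the execution order, and program_parsed repeats the same structure as JSON. A toy walk over exactly that logged edge list (illustrative only, not YDB's executor):

```cpp
#include <cstdio>
#include <map>
#include <string>
#include <vector>

int main() {
    // Nodes as reported by graph_constructed / program_parsed above.
    const std::map<int, std::string> nodes = {
        {0, "Const(\"$.key\") -> column 15"},
        {2, "FetchOriginalData(json_string, id 5)"},
        {3, "AssembleOriginalData(5) -> column 5"},
        {1, "Calculation(kernel over 5,15) -> column 16"},
        {4, "Projection(16)"},
    };
    // The red path in the digraph: N0 -> N2 -> N3 -> N1 -> N4.
    const std::vector<int> order = {0, 2, 3, 1, 4};
    for (int id : order) {
        std::printf("execute N%d: %s\n", id, nodes.at(id).c_str());
    }
    return 0;
}
```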
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ 
"{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; >> TestProgram::CountUIDByVAT >> TestProgram::Like >> TestProgram::CountUIDByVAT [GOOD] >> TestProgram::YqlKernel >> TestProgram::Like [GOOD] >> TestProgram::YqlKernel [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"2,4\",\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N1(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N4->N2->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"2,4","o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; >> TestProgram::SimpleFunction [GOOD] >> TColumnEngineTestLogs::IndexWriteLoadRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N5(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N3(15):{\"i\":\"7,16\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N6(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N4(23):{\"i\":\"17\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N7(23):{\"i\":\"18\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N8(54):{\"i\":\"19,20\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N9(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N1->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; 
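Note: each graph_constructed payload above (everything from "digraph program {" through the closing "};") is standard Graphviz DOT and can be rendered directly when debugging one of these program tests. A minimal sketch in Python, assuming the log was saved to a file and the Graphviz `dot` binary is on PATH (the script and output file names are illustrative, not part of the test harness):

    # render_graph.py - pull the first graph_constructed DOT payload out of a
    # saved log and render it to SVG with the Graphviz CLI. Sketch only.
    import re
    import subprocess
    import sys

    log = open(sys.argv[1], encoding="utf-8", errors="replace").read()
    m = re.search(r"graph_constructed=(digraph program \{.*?\})\s*;", log, re.S)
    if not m:
        sys.exit("no graph_constructed entry found")
    # The captured text is verbatim DOT; hand it to `dot` for rendering.
    subprocess.run(["dot", "-Tsvg", "-o", "program.svg"],
                   input=m.group(1).encode(), check=True)
    print("wrote program.svg")

The red edge chain at the end of each graph (e.g. N0->N2->N3->N1->N4) traces a linearized pass order consistent with the dependency edges; the program_parsed JSON that follows each graph restates the same structure as "edges" plus "nodes".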
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"6":{"p":{"i":"17","p":{"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"3,4\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> 
N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, 
label="N3(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] >> TestProgram::YqlKernelEndsWith [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:17.742252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-04-10T11:35:17.742335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.742369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:17.742396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:17.742434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:17.742475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:17.742520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.742592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:17.742885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:17.817206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:17.817276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:17.833951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:17.834244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:17.834460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:17.843292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:17.843537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:17.844279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.844681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:17.846869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.848329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:17.848401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.848610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:17.848669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:17.848714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:17.848925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.858020Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:17.995914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:17.996177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.996405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:17.996652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:17.996726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.999082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.999240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:17.999417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.999472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:17.999511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:17.999549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:18.001613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.001677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:18.001720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:18.003687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.003741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.003784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.003831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.007772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:18.010175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:18.010392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:18.011455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.011638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:18.011690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.011997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:18.012057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.012243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:18.012356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:18.014751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.014798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.014992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.015044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:18.015291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.015350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:18.015453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.015511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.015560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.015610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.015656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:18.015717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.015762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:18.015796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:18.015869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:18.015912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:18.015946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:18.018348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.018466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.018509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d::TEvNotifyTxCompletionResult> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.191776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.191891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.191980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.192068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.192175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.192264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.197543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.197784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.197896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.197964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.198050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.198166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.198238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.198365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.198411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:35:24.198547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:24.198587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:24.198629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:24.198664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:24.198747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:35:24.198840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2812:4077] message: TxId: 103 2025-04-10T11:35:24.198896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:24.198955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:35:24.198987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:35:24.200580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-10T11:35:24.204064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:35:24.204129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4007:5199] TestWaitNotification: OK eventTxId 103 2025-04-10T11:35:24.204926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:24.205271Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 378us result status StatusSuccess 2025-04-10T11:35:24.205895Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: 
"Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-04-10T11:35:24.209240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:24.209465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-04-10T11:35:24.213355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: 
sparsed columns are disabled, at schemeshard: 72057594046678944 2025-04-10T11:35:24.216811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:24.216988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-10T11:35:24.217354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-10T11:35:24.217397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-10T11:35:24.217943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-10T11:35:24.218056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-10T11:35:24.218091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4343:5534] TestWaitNotification: OK eventTxId 104 |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernelContains >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TestProgram::YqlKernelContains [GOOD] >> KqpLimits::ReplySizeExceeded [GOOD] >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." 
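The bare `>> Suite::Test` lines in this output are scheduling markers and the `>> Suite::Test [VERDICT]` lines are per-test verdicts. A small sketch that tallies verdicts from a saved log (the marker format is inferred from this output, not a stable interface):

    # tally_verdicts.py - count per-test verdicts ([GOOD], [FAIL], ...) in a
    # saved ya make log. Marker format inferred from the log itself.
    import collections
    import re
    import sys

    pattern = re.compile(r">> (\S+::\S+) \[([A-Z]+)\]")
    counts = collections.Counter()
    for line in open(sys.argv[1], encoding="utf-8", errors="replace"):
        for _name, verdict in pattern.findall(line):
            counts[verdict] += 1
    for verdict, n in counts.most_common():
        print(f"{verdict}: {n}")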
} } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; 
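Every TestProgram graph above follows the same four-stage shape: a FetchOriginalData node pulls the raw columns, AssembleOriginalData materializes each one, a Calculation node applies the YQL block kernel element-wise, and Projection keeps only the result column. Below is a minimal, self-contained sketch of the EndsWith and StringContains kernel semantics over the two string columns (ids 7 and 9 in the graphs); it illustrates the semantics only, not the YDB/Arrow block implementation, and all names in it are assumptions.

#include <cstdint>
#include <iostream>
#include <string>
#include <string_view>
#include <vector>

// Element-wise block kernel over two string columns; returns the UInt8
// result column (id 15 in the graphs above).
std::vector<uint8_t> ApplyKernel(const std::vector<std::string>& strings,
                                 const std::vector<std::string>& substrings,
                                 bool endsWith) {
    std::vector<uint8_t> result(strings.size());
    for (size_t i = 0; i < strings.size(); ++i) {
        std::string_view s = strings[i];
        std::string_view sub = substrings[i];
        if (endsWith) {
            result[i] = s.size() >= sub.size() &&
                        s.substr(s.size() - sub.size()) == sub;   // EndsWith
        } else {
            result[i] = s.find(sub) != std::string_view::npos;    // StringContains
        }
    }
    return result;
}

int main() {
    std::vector<std::string> strings    = {"lorem ipsum dolor", "sit amet."};
    std::vector<std::string> substrings = {"ipsum", "amet."};
    for (uint8_t v : ApplyKernel(strings, substrings, /*endsWith=*/true)) {
        std::cout << int(v) << ' ';  // prints: 0 1
    }
    std::cout << '\n';
}

In the YqlKernelEndsWithScalar variant the second input is a Const node ("amet.") broadcast against the column, which is why its graph carries only one FetchOriginalData/AssembleOriginalData pair instead of two.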
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak |96.5%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 15681, MsgBus: 21624 2025-04-10T11:34:19.477779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646425393785119:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:19.477996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001758/r3tmp/tmpxmbre5/pdisk_1.dat 2025-04-10T11:34:19.885350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15681, node 1 2025-04-10T11:34:19.893474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:19.893591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:19.896742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:19.922443Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:34:19.968083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:19.968112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:19.968120Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:19.968292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21624 TClient is connected to server localhost:21624 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:20.599648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.614835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:20.631392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.808535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:20.978207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:21.058007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:22.689621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646438278688801:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:22.689731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.056068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.129964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.199207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.235441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.264684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.339973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:23.394837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646442573656619:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.394942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.395299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646442573656624:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:23.399377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:23.412196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646442573656626:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:23.488727Z node 1 :TX_PROXY ERROR: Actor# [1:7491646442573656680:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:24.404888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:24.478014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646425393785119:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:24.478077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:28.047456Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWIzYTE1N2UtZTUwM2Q5Y2EtMTAyZmJkOGQtMzNmZDg3ZmU=, ActorId: [1:7491646459753527099:2609], ActorState: ExecuteState, TraceId: 01jrfqnps621pp7v092xph3cyc, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001703 > 50331648), code: 2013 Trying to start YDB, gRPC: 3054, MsgBus: 1631 2025-04-10T11:34:28.976429Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646463537032297:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:28.976484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001758/r3tmp/tmpka0t4N/pdisk_1.dat 2025-04-10T11:34:29.087665Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:34:29.114190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:29.114280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:29.115690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3054, node 2 2025-04-10T11:34:29.162083Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:29.162102Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:29.162111Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:29.162280Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1631 TClient is connected to server localhost:1631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:29.689202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:29.696444Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10 ... 
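The (80001703 > 50331648) pair in the error above is the serialized result size versus the reply-size cap, and 50331648 bytes is exactly 48 MiB. The sketch below models that guard under the assumption that it reduces to a simple byte comparison; the constant and function names are hypothetical, not the actual KQP code.

#include <cstdint>
#include <iostream>

// 48 MiB default reply-size cap, matching the 50331648 seen in the log above.
constexpr uint64_t kReplySizeLimitBytes = 48ull * 1024 * 1024;

// Hypothetical guard: a query reply is rejected with "Query result size
// limit exceeded" (code 2013) once the serialized rows exceed the cap.
bool FitsReplyLimit(uint64_t serializedBytes,
                    uint64_t limit = kReplySizeLimitBytes) {
    return serializedBytes <= limit;
}

int main() {
    std::cout << std::boolalpha
              << FitsReplyLimit(80001703)  // false -> the error seen above
              << '\n';
}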
txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:40.611152Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646496255135230:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:40.611237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:41.001727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:50.744952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:34:50.745010Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:15.894074Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODZkMTYxNmYtYjNmODI2ZTUtZTc3Zjk1YTUtNzM0ZWJjZDk=, ActorId: [3:7491646668053831211:2764], ActorState: ExecuteState, TraceId: 01jrfqq61xakpr267hvzbhc6z0, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 95ms exceeded, terminating after 99ms 2025-04-10T11:35:16.064764Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491646668053831311:2764] TxId: 281474976715674. Ctx: { TraceId: 01jrfqq67adn79wg6z8wz735g9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODZkMTYxNmYtYjNmODI2ZTUtZTc3Zjk1YTUtNzM0ZWJjZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 101ms during execution } ] 2025-04-10T11:35:16.065519Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491646668053831326:2797], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . TraceId : 01jrfqq67adn79wg6z8wz735g9. SessionId : ydb://session/3?node_id=3&id=ODZkMTYxNmYtYjNmODI2ZTUtZTc3Zjk1YTUtNzM0ZWJjZDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7491646668053831311:2764], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:35:16.160027Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491646668053831324:2795], TxId: 281474976715674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=ODZkMTYxNmYtYjNmODI2ZTUtZTc3Zjk1YTUtNzM0ZWJjZDk=. TraceId : 01jrfqq67adn79wg6z8wz735g9. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7491646668053831311:2764], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:35:16.160456Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODZkMTYxNmYtYjNmODI2ZTUtZTc3Zjk1YTUtNzM0ZWJjZDk=, ActorId: [3:7491646668053831211:2764], ActorState: ExecuteState, TraceId: 01jrfqq67adn79wg6z8wz735g9, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 101ms during execution Trying to start YDB, gRPC: 16836, MsgBus: 22001 2025-04-10T11:35:17.313620Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646676887369830:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:17.313766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001758/r3tmp/tmptYpSYi/pdisk_1.dat 2025-04-10T11:35:17.448199Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:17.479557Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:17.479669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:17.481589Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16836, node 4 2025-04-10T11:35:17.548127Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:17.548158Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:17.548169Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:17.548372Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22001 TClient is connected to server localhost:22001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:18.127652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:18.135702Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:35:18.148924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:18.227762Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
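The node-3 CANCELLED trace above shows two separate client limits firing in sequence: a cancel-after deadline ("Request canceled after 100ms", which asks the server to stop work) and a slightly longer overall timeout ("Cancelling after 101ms during execution"). Below is a toy model of deadline-checked execution, under the assumption that work is sliced and the deadline is polled between slices; nothing in it is SDK API.

#include <chrono>
#include <iostream>
#include <thread>

using Clock = std::chrono::steady_clock;

// Simulated long-running task that polls a cancellation deadline between
// work slices, the way a query is cancelled once cancel_after expires.
bool RunWithCancelAfter(std::chrono::milliseconds cancelAfter) {
    const auto deadline = Clock::now() + cancelAfter;
    for (int slice = 0; slice < 50; ++slice) {          // ~500ms of "work"
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
        if (Clock::now() >= deadline) {
            return false;  // "Request canceled after 100ms"
        }
    }
    return true;
}

int main() {
    std::cout << (RunWithCancelAfter(std::chrono::milliseconds(100))
                      ? "completed" : "cancelled")
              << '\n';     // prints: cancelled
}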
2025-04-10T11:35:18.443488Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:18.540030Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:21.271251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646694067240797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:21.271371Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:21.317388Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.394538Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.430222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.464811Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.500303Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.540224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:21.585412Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646694067241311:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:21.585497Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:21.585629Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491646694067241316:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:21.589162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:21.599444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491646694067241318:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:35:21.686213Z node 4 :TX_PROXY ERROR: Actor# [4:7491646694067241372:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:22.378571Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491646676887369830:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:22.378915Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:22.761347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-10T11:35:25.725506Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDNjMzJjMDItYzI3NWE5NjQtMjhkMTM3ZDMtN2U5ZmIzZTI=, ActorId: [4:7491646698362208968:2497], ActorState: ExecuteState, TraceId: 01jrfqqfd6dyydr91rp7q7xzf0, Create QueryResponse for error on request, msg: |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> SystemView::StoragePoolsFields >> SystemView::VSlotsFields >> SystemView::PartitionStatsOneSchemeShard >> SystemView::ShowCreateTableDefaultLiteral >> SystemView::CollectPreparedQueries >> SystemView::AuthGroups >> SystemView::Nodes >> DbCounters::TabletsSimple |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestInlineCopyRangeWorks >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.6%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: 2025-04-10T11:33:35.845967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:35.846214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:35.846415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009f0/r3tmp/tmpazOrMF/pdisk_1.dat 2025-04-10T11:33:36.169212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:36.217626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:36.259190Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:36.260468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:36.260626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:36.260819Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:36.273033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:36.355862Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:33:36.355946Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:33:36.356175Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:33:36.492576Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:33:36.492719Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:36.493471Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:33:36.493602Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:36.494043Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:36.494284Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:33:36.494385Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:33:36.496388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:36.496893Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:33:36.497696Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:33:36.497791Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:33:36.537616Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:36.538951Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:36.539509Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:33:36.539855Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:36.592397Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:36.593474Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:36.593658Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:36.595880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:36.595991Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:36.596083Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:36.596663Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:36.596855Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:36.596973Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:33:36.597583Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:36.647374Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:36.647669Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:36.647850Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:33:36.647892Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:36.647931Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:36.647978Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:36.648272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
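The datashard trace that follows walks the proposed scheme transaction through its execution units in order: CheckSchemeTx, StoreSchemeTx, FinishPropose, and finally WaitForPlan, where the operation parks until the coordinator plans it. The sketch below is a toy model of that unit pipeline; the unit names and statuses are taken from the trace, while the scheduling code itself is purely illustrative.

#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct Unit {
    std::string name;
    EStatus status;  // outcome each unit reports in the trace below
};

int main() {
    const std::vector<Unit> pipeline = {
        {"CheckSchemeTx", EStatus::Executed},
        {"StoreSchemeTx", EStatus::DelayComplete},   // completion deferred
        {"FinishPropose", EStatus::DelayComplete},
        {"WaitForPlan",   EStatus::NotReady},        // parks until TEvPlanStep
    };
    for (const auto& unit : pipeline) {
        std::cout << "unit " << unit.name << " -> "
                  << (unit.status == EStatus::Executed        ? "Executed"
                      : unit.status == EStatus::DelayComplete ? "DelayComplete"
                                                              : "NotReady")
                  << '\n';
        if (unit.status == EStatus::NotReady) {
            break;  // operation is not ready; execution resumes after planning
        }
    }
}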
2025-04-10T11:33:36.648328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:36.648779Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:33:36.648893Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:36.648971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:36.649027Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:36.649110Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:36.649157Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:36.649195Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:36.649271Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:36.649332Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:36.649480Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:36.649559Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:36.649611Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:33:36.650115Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:33:36.650208Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:36.650343Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:36.650648Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:36.650733Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:36.650851Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:36.650913Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:36.650978Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:36.651054Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:36.651132Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:36.651516Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:36.651568Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:36.651609Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:36.651660Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:33:36.651721Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:36.651759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:36.651821Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:36.651879Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:36.651915Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:36.652984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:36.653063Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:36.653101Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:36.653163Z node 1 :TX_DATASHARD TRACE: Prop ... ablet: 72075186224037890 2025-04-10T11:35:28.352993Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-04-10T11:35:28.353605Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:835:2694], Recipient [13:666:2570]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-04-10T11:35:28.353689Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-04-10T11:35:28.353792Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:666:2570], Recipient [13:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-04-10T11:35:28.353829Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037888 2025-04-10T11:35:28.353852Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-10T11:35:28.353911Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-04-10T11:35:28.353941Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-10T11:35:28.354247Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-04-10T11:35:28.591594Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1079:2892]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:35:29.158967Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T11:35:29.159091Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715671 ProcessProposeKqpTransaction 2025-04-10T11:35:29.160611Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfqqjkd38je35x7hh4rbf5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGM3OTRlOGUtNTZiZjQ4NmYtM2NkNjFhNDAtZGIxNTQwODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-10T11:35:29.163404Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1707:3414], Recipient [13:796:2664]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-10T11:35:29.163722Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-10T11:35:29.163821Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8038/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:35:29.163914Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-10T11:35:29.164037Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-10T11:35:29.164255Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T11:35:29.164340Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-10T11:35:29.164411Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:29.164479Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-10T11:35:29.164544Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-10T11:35:29.164623Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T11:35:29.164656Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:29.164684Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-10T11:35:29.164711Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-10T11:35:29.164881Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-10T11:35:29.165415Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1707:3414], 0} after executionsCount# 1 2025-04-10T11:35:29.165539Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1707:3414], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-10T11:35:29.165710Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1707:3414], 0} finished in read 2025-04-10T11:35:29.165838Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T11:35:29.165877Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-10T11:35:29.165913Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-10T11:35:29.165950Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-10T11:35:29.166015Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-10T11:35:29.166047Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-10T11:35:29.166085Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-10T11:35:29.166181Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-10T11:35:29.166390Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-10T11:35:29.167863Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1707:3414], Recipient [13:796:2664]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:35:29.167974Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-04-10T11:35:29.699182Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T11:35:29.699300Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715672 ProcessProposeKqpTransaction 2025-04-10T11:35:29.700638Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfqqk4w712hhe73fc2nnxgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2M0OWY2OGItMWI2ODRjYTktNDc1MTVkM2MtZGYzY2I4OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-10T11:35:29.703115Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1738:3439], Recipient [13:1079:2892]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-10T11:35:29.703323Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-10T11:35:29.703420Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8038/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:35:29.703496Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-10T11:35:29.703596Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-04-10T11:35:29.703760Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-10T11:35:29.703837Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-04-10T11:35:29.703899Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:29.703976Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-04-10T11:35:29.704029Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891 2025-04-10T11:35:29.704094Z node 13 :TX_DATASHARD 
TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-10T11:35:29.704122Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:29.704143Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-04-10T11:35:29.704163Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-04-10T11:35:29.704307Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-10T11:35:29.704686Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1738:3439], 0} after executionsCount# 1 2025-04-10T11:35:29.704783Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1738:3439], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-10T11:35:29.704911Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1738:3439], 0} finished in read 2025-04-10T11:35:29.705031Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-10T11:35:29.705058Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-04-10T11:35:29.705084Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-04-10T11:35:29.705116Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-04-10T11:35:29.705172Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-10T11:35:29.705200Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-04-10T11:35:29.705234Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished 2025-04-10T11:35:29.705292Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-10T11:35:29.705470Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-04-10T11:35:29.706591Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1738:3439], Recipient [13:1079:2892]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:35:29.706679Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } >> TKeyValueTest::TestRenameWorks >> TKeyValueTest::TestIncorrectRequestThenResponseError >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-04-10T11:33:34.746380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:34.746744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:34.747006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009fc/r3tmp/tmpOVRops/pdisk_1.dat 2025-04-10T11:33:35.281549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:35.331689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:35.384529Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:35.387316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:35.390349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:35.390551Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:35.403739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:35.487953Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:33:35.488036Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:33:35.489151Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:33:35.629962Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:33:35.630103Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:35.630700Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:33:35.630804Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:35.631066Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:35.631233Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:33:35.631315Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:33:35.633324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:35.633823Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:33:35.634427Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:33:35.634491Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:33:35.663422Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:35.664635Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:35.665140Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:33:35.665416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:35.710358Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:35.710990Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:35.711141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:35.714030Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:35.714178Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:35.714243Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:35.716731Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:35.716956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:35.717111Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:33:35.717795Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:35.792237Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:35.792456Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:35.792604Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:33:35.792647Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:35.792676Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:35.792708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:35.792887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
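The read traces earlier in this log walk the datashard execution-unit pipeline (CheckRead -> BuildAndWaitDependencies -> ExecuteRead -> CompletedOperations): each "Trying to execute" line runs one unit, the unit reports a status (Executed, DelayComplete, DelayCompleteNoMoreRestarts, or not-ready as with WaitForPlan), and the plan either advances, defers the unit's completion to the transaction's Complete phase, or parks the operation until an external event. Below is a minimal C++ sketch of that control flow; EStatus, TUnit, and RunPipeline are illustrative stand-ins, not the actual NKikimr types.

// Minimal sketch of an execution-unit pipeline like the one traced above.
// All names here are illustrative; they are not the real NKikimr classes.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TOperation {
    std::string Id;                         // e.g. "[0:4]"
    std::vector<std::string> CompleteList;  // units whose completion is deferred
};

struct TUnit {
    std::string Name;
    std::function<EStatus(TOperation&)> Execute;
};

// Mirrors the "Trying to execute ... / Execution status ... /
// Advance execution plan ..." lines in the trace.
void RunPipeline(TOperation& op, const std::vector<TUnit>& plan) {
    for (const auto& unit : plan) {
        std::cout << "Trying to execute " << op.Id << " on unit " << unit.Name << "\n";
        EStatus status = unit.Execute(op);
        if (status == EStatus::NotReady) {
            // Like WaitForPlan: park the operation until an external event arrives.
            std::cout << op.Id << " is not ready to execute on unit " << unit.Name << "\n";
            return;
        }
        if (status == EStatus::DelayComplete) {
            // Completed later, in the transaction's Complete phase.
            op.CompleteList.push_back(unit.Name);
        }
        std::cout << "Advance execution plan for " << op.Id
                  << " executing on unit " << unit.Name << "\n";
    }
    std::cout << "Execution plan for " << op.Id << " has finished\n";
}

int main() {
    TOperation op{"[0:4]", {}};
    std::vector<TUnit> plan = {
        {"CheckRead",                [](TOperation&) { return EStatus::Executed; }},
        {"BuildAndWaitDependencies", [](TOperation&) { return EStatus::Executed; }},
        {"ExecuteRead",              [](TOperation&) { return EStatus::Executed; }},
        {"CompletedOperations",      [](TOperation&) { return EStatus::Executed; }},
    };
    RunPipeline(op, plan);
}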
2025-04-10T11:33:35.792926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:35.793245Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:33:35.793318Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:35.793368Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:35.793417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:35.793463Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:35.793499Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:35.793528Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:35.793555Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:35.793592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:35.793689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:35.793727Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:35.793763Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:33:35.794068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:33:35.794118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:35.794305Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:35.794568Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:35.794648Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:35.794744Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:35.794815Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:35.794878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:35.794917Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:35.794967Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:35.795305Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:35.795351Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:35.795385Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:35.795416Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:33:35.795462Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:35.795481Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:35.795520Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:35.795557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:35.795581Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:35.796333Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:35.796378Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:35.796419Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:35.796476Z node 1 :TX_DATASHARD TRACE: Prop ... _DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-04-10T11:35:34.097383Z node 16 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-10T11:35:34.097462Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:35:34.097518Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:34.097549Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:35:34.097576Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:35:34.097637Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-04-10T11:35:34.097744Z node 16 :TX_DATASHARD TRACE: Operation [0:5] at 72075186224037888 aborting because it cannot acquire locks 2025-04-10T11:35:34.097867Z node 16 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-04-10T11:35:34.097998Z node 16 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because it cannot acquire locks 2025-04-10T11:35:34.098058Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:35:34.098090Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:35:34.098118Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:35:34.098170Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:35:34.098377Z node 16 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:35:34.098447Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-04-10T11:35:34.098494Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:35:34.098571Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:34.098635Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:35:34.098807Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-10T11:35:34.098843Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:34.098892Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-10T11:35:34.099010Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:35:34.099084Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:35:34.099174Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-04-10T11:35:34.099393Z node 16 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-04-10T11:35:34.099542Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:34.099950Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:906:2681], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [16:841:2681]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:906:2681].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-04-10T11:35:34.100204Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:899:2681], SessionActorId: [16:841:2681], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:841:2681]. isRollback=0 2025-04-10T11:35:34.100838Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YzRkYzI4ZTEtYWJmMDc4ZWYtNThmZmIzNzMtNjg0NGI4Ng==, ActorId: [16:841:2681], ActorState: ExecuteState, TraceId: 01jrfqqqn33ewhp6n28wvjhxar, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:900:2681] from: [16:899:2681] 2025-04-10T11:35:34.101066Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [16:899:2681], Recipient [16:689:2579]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-10T11:35:34.101120Z node 16 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:35:34.101332Z node 16 :KQP_EXECUTER ERROR: ActorId: [16:900:2681] TxId: 281474976715663. Ctx: { TraceId: 01jrfqqqn33ewhp6n28wvjhxar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YzRkYzI4ZTEtYWJmMDc4ZWYtNThmZmIzNzMtNjg0NGI4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-04-10T11:35:34.101677Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:34.101720Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:34.101793Z node 16 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-10T11:35:34.101927Z node 16 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-10T11:35:34.102054Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-10T11:35:34.102194Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-10T11:35:34.102243Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:35:34.102273Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-10T11:35:34.102305Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:34.102333Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:35:34.102379Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-04-10T11:35:34.102438Z node 16 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-10T11:35:34.102473Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:35:34.102500Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:34.102525Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:35:34.102554Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:35:34.102586Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-10T11:35:34.102703Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-04-10T11:35:34.102791Z node 16 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-10T11:35:34.102898Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:35:34.102994Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:35:34.103034Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:35:34.103130Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:35:34.103198Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 
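The BrokenLockChangesDontLeak trace above shows the full lock-invalidation path: the write aborts with STATUS_LOCKS_BROKEN, the shard erases the lock with a follow-up Op: Rollback write (KqpEraseLock), and the session surfaces ABORTED with issue code 2001 ("Transaction locks invalidated"). Under such an optimistic-locking scheme the usual client-side response is to retry the whole transaction. The sketch below shows that retry loop only; TxResult and RunWithRetries are hypothetical stand-ins, not the real YDB SDK API.

// Hypothetical retry loop for ABORTED / issue code 2001, as implied by the
// trace above. Not the real YDB SDK interface.
#include <chrono>
#include <functional>
#include <stdexcept>
#include <thread>

struct TxResult {
    bool Ok = false;
    int IssueCode = 0;  // 2001 == "Transaction locks invalidated" in the log above
};

constexpr int LOCKS_INVALIDATED = 2001;

TxResult RunWithRetries(const std::function<TxResult()>& txn, int maxAttempts = 5) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        TxResult r = txn();  // reads + writes under one optimistic lock set
        if (r.Ok || r.IssueCode != LOCKS_INVALIDATED) {
            return r;  // success, or a non-retryable failure
        }
        // Locks were broken by a concurrent writer: back off and rerun the
        // transaction from the start so it re-reads fresh data.
        std::this_thread::sleep_for(std::chrono::milliseconds(10 * attempt));
    }
    throw std::runtime_error("transaction kept losing its optimistic locks");
}

int main() {
    int calls = 0;
    TxResult r = RunWithRetries([&] {
        ++calls;
        // First attempt loses its locks, second one succeeds.
        return calls == 1 ? TxResult{false, LOCKS_INVALIDATED} : TxResult{true, 0};
    });
    return r.Ok ? 0 : 1;
}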
2025-04-10T11:35:34.103249Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-10T11:35:34.103284Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:35:34.103316Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:34.103344Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:35:34.103386Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:35:34.103411Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:34.103438Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-10T11:35:34.103492Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:35:34.103525Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:35:34.103561Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:35:34.103645Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:34.103879Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YzRkYzI4ZTEtYWJmMDc4ZWYtNThmZmIzNzMtNjg0NGI4Ng==, ActorId: [16:841:2681], ActorState: ExecuteState, TraceId: 01jrfqqqn33ewhp6n28wvjhxar, Create QueryResponse for error on request, msg: 2025-04-10T11:35:34.105360Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:61:2108], Recipient [16:689:2579]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-04-10T11:35:34.110175Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:913:2731], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:35:34.110275Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:35:34.110370Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:912:2730], serverId# [16:913:2731], sessionId# [0:0:0] 2025-04-10T11:35:34.110620Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:593:2518], Recipient [16:689:2579]: NKikimr::TEvDataShard::TEvGetOpenTxs >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuFields >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> KqpStats::SysViewClientLost [FAIL] >> KqpTypes::DyNumberCompare >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> SystemView::Nodes [GOOD] >> SystemView::ConcurrentScans >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> 
TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:16.491467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:16.491563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:16.491596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:16.491626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:16.491670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:16.491720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:16.491791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:16.491876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:16.492210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:16.574835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:16.574916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:16.589937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:16.590217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:16.590393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:16.597243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:16.597454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:16.598051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.598462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:16.603053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.604401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:16.604476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.604644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:16.604687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:16.604725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:16.604884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.611559Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:16.727829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:16.728061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.728291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:16.728528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:16.728586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.731060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.731212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:16.731403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.731453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:16.731515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:16.731556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:16.733913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.733963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:16.734000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:16.737185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.737234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.737271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.737315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.746527Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:16.749485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:16.749706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:16.750638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.750766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:16.750806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.751047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:16.751105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.751277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:16.751353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:16.753480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:16.753524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:16.753689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.753745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:16.753963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.754000Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:16.754089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:16.754134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.754197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:16.754227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.754266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:16.754316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.754350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:16.754380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:16.754432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:16.754467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:16.754494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:16.756656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:16.756758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:16.756795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... posedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.170723Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.170784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:17.170834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-10T11:35:17.173962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:35:17.174696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.174817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.175108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.175166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-10T11:35:17.175287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:17.175333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:17.175395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-10T11:35:17.175445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:17.175505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-10T11:35:17.175583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 103 2025-04-10T11:35:17.175642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-10T11:35:17.175687Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:35:17.175726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:35:17.175857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:17.177887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:35:17.177935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:503:2464] TestWaitNotification: OK eventTxId 103 2025-04-10T11:35:22.608836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:35:22.608898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:24.400332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0378 2025-04-10T11:35:24.411117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0752 2025-04-10T11:35:24.452250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:35:24.452496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:35:24.452602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:24.452732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:35:24.452771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:24.463305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:27.862338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0108 2025-04-10T11:35:27.873054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0197 2025-04-10T11:35:27.914197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:35:27.914407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:35:27.914499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:27.914636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:35:27.914690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:27.925059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:31.409214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0108 2025-04-10T11:35:31.419709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0197 2025-04-10T11:35:31.460645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:35:31.460783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:35:31.460864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:31.460991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:35:31.461033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:31.471361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:34.941329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0039 2025-04-10T11:35:34.954398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0059 2025-04-10T11:35:34.998420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-10T11:35:34.998604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-10T11:35:34.998677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:34.998776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-10T11:35:34.998814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:35.009203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:38.480462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T11:35:38.480546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:38.480780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:38.480941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:38.481405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:38.482120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:38.482182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:35:38.484821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:38.484957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:38.485019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2025-04-10T11:35:38.485076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:16.679677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:16.679819Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:16.679881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:16.679929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:16.679976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:16.680040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:16.680124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:16.680234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:16.680608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:16.769151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:16.769217Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:16.785438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:16.785744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:16.785916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:16.794034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:16.794284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:16.794963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.795361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:16.797435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.798868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:16.798928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.799112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:16.799162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:16.799205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:16.799378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.806529Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:16.945202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:16.945433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.945640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:16.945866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:16.945918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.948428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.948565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:16.948757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.948804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:16.948839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:16.948872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:16.950834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.950896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:16.950930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:16.952774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.952817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.952854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.952898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.956494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:16.958633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:16.958871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:16.959897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:16.960035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:16.960082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.960387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:16.960442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:16.960626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:16.960721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:16.962990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:16.963050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:16.963243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:16.963281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:16.963491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:16.963538Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:16.963667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:16.963719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.963758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:16.963791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.963825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:16.963880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:16.963924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:16.963951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:16.964028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:16.964069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-10T11:35:16.964117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:16.966391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:16.966511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:16.966553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... e to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:38.299146Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:38.299343Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.311100Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:122:2148] sender: [27:238:2058] recipient: [27:15:2062] 2025-04-10T11:35:38.325772Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:38.326083Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.326393Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:38.326704Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:38.326788Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.332071Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:38.332306Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:38.332696Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.332848Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:38.332954Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:38.333076Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:38.337991Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.338114Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:38.338236Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:38.340925Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.341002Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.341122Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:38.341233Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:38.341509Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:38.343397Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:38.343736Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:38.344925Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:38.345168Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 115964119149 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:38.345315Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:38.345731Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:38.345825Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:38.346426Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:38.346577Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:38.351604Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:38.351724Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:38.352090Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:38.352197Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:205:2207], at schemeshard: 
72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:38.352890Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.353006Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:38.353320Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:38.353411Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:38.353504Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:38.353581Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:38.353670Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:38.353754Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:38.353833Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:38.353922Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:38.354038Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:38.354129Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:38.354226Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:38.355266Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:38.355450Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:38.355521Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:38.355602Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:38.355675Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:38.355846Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:38.358770Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:38.359636Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:38.364341Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: 
UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:38.364932Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:38.365142Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-10T11:35:38.365910Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-10T11:35:38.366461Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] Bootstrap 2025-04-10T11:35:38.403394Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] Become StateWork (SchemeCache [27:273:2264]) 2025-04-10T11:35:38.404977Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:38.407241Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:38.407568Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T11:35:38.409370Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
>> SystemView::PartitionStatsOneSchemeShard [GOOD]
>> SystemView::PartitionStatsOneSchemeShardDataQuery
>> SystemView::CollectScanQueries [GOOD]
>> SystemView::AuthUsers
>> SystemView::AuthGroups [GOOD]
>> SystemView::AuthGroups_Access
>> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD]
>> TKeyValueTest::TestRewriteThenLastValue
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD]
Test command err: 2025-04-10T11:33:36.135507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:36.135712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:36.135930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e4/r3tmp/tmpKTBGRp/pdisk_1.dat 2025-04-10T11:33:36.519318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:36.554615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:36.596158Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:36.596929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:36.597045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:36.597155Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:36.608674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:36.689113Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:33:36.689185Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:33:36.689386Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:33:36.810518Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:33:36.810608Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:36.811103Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:33:36.811179Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:36.811443Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:36.811573Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:33:36.811636Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:33:36.813122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:36.813474Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:33:36.814056Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:33:36.814107Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:33:36.855591Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:36.856896Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:36.857482Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:33:36.857776Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:36.907749Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:36.908598Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:36.908777Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:36.910731Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:36.910852Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:36.910925Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:36.911372Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:36.911503Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:36.911606Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:33:36.922534Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:36.977327Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:36.977549Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:36.977768Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:33:36.977813Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:36.977846Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:36.977885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:36.978102Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T11:33:36.978173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:36.978544Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:33:36.978649Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:36.978714Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:36.978760Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:36.978801Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:36.978836Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:36.978869Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:36.978929Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:36.978978Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:36.979112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:36.979172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:36.979214Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:33:36.979607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:33:36.979650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:36.979792Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:36.980067Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:36.980173Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:36.980283Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:36.980350Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:36.980387Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:36.980418Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:36.980449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:36.980718Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:36.980759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:36.980794Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:36.980827Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:33:36.980894Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:36.980926Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:36.980957Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:36.980986Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:36.981042Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:36.982482Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:33:36.982537Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:33:36.993359Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... w KQP executer: [13:993:2684] isRollback: 1 2025-04-10T11:35:38.185895Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, ActorId: [13:839:2684], ActorState: ExecuteState, TraceId: 01jrfqqvv68jykcy50ss67hnqj, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T11:35:38.186510Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Resolved key sets: 0 2025-04-10T11:35:38.186624Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:38.186665Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-04-10T11:35:38.186786Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback, immediate: 1 2025-04-10T11:35:38.186872Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-10T11:35:38.186931Z node 13 :KQP_EXECUTER INFO: ActorId: [13:993:2684] TxId: 281474976715665. 
Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T11:35:38.186973Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-10T11:35:38.187015Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-10T11:35:38.187053Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T11:35:38.187282Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:993:2684], Recipient [13:962:2778]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 993 RawX2: 55834577532 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-10T11:35:38.187320Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:35:38.187431Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:962:2778], Recipient [13:962:2778]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:38.187462Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:35:38.187526Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:35:38.187679Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-10T11:35:38.187755Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-10T11:35:38.187801Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-10T11:35:38.187831Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-10T11:35:38.187856Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:35:38.187884Z node 13 :TX_DATASHARD TRACE: Trying to execute 
[0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:35:38.187920Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-10T11:35:38.187964Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-10T11:35:38.187997Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-10T11:35:38.188022Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:35:38.188045Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-10T11:35:38.188068Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-10T11:35:38.188151Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-10T11:35:38.188280Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-04-10T11:35:38.188384Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:35:38.188461Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-10T11:35:38.188488Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-10T11:35:38.188514Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:35:38.188539Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-10T11:35:38.188598Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-10T11:35:38.188712Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-10T11:35:38.188741Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:35:38.188767Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:35:38.188795Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:35:38.188840Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-10T11:35:38.188871Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:35:38.188897Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-10T11:35:38.188958Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:35:38.189009Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 
2025-04-10T11:35:38.189048Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:38.189184Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-10T11:35:38.189341Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T11:35:38.189443Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqvv68jykcy50ss67hnqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T11:35:38.189597Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jrfqqvv68jykcy50ss67hnqj, EndCleanup, isFinal: 0 2025-04-10T11:35:38.189806Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=MzJlMWMyYjgtYjhjMjY4OTktYjNjZWU3ZjAtY2IzMjAzMTc=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jrfqqvv68jykcy50ss67hnqj, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:57:2104] 2025-04-10T11:35:38.459938Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1009:2811], Recipient [13:962:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:35:38.460087Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:35:38.460216Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1008:2810], serverId# [13:1009:2811], sessionId# [0:0:0] 2025-04-10T11:35:38.460522Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [13:593:2518], Recipient [13:962:2778]: NKikimr::TEvDataShard::TEvGetOpenTxs
>> KeyValueReadStorage::ReadError [GOOD]
>> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD]
>> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:18.596575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:18.596680Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.596723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:18.596763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:18.596817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:18.596866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:18.596952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.597087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:18.597471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:18.687580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:18.687655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:18.704093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:18.704408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:18.704612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:18.713395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:18.713642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:18.714481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.714922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:18.718940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.720407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.720494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.720721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:18.720782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.720836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:18.721045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.728531Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:18.884745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:18.885061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.885301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:18.885538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:18.885600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.888145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.888302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:18.888500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.888556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:18.888598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:18.888634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:18.890640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.890702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:18.890742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:18.892653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.892704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.892744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.892794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.896552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:18.898510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:18.898731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:18.899754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.899899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:18.899953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.900260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:18.900317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:18.900542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:18.900632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:18.902887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.902938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.903145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.903197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:18.903446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.903504Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:18.903620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.903672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.903718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:18.903753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.903798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:18.903856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:18.903899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:18.903934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:18.904008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:18.904048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-04-10T11:35:18.904082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:18.906403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.906521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:18.906565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2025-04-10T11:35:40.360845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-10T11:35:40.361017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.361065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-10T11:35:40.363336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.363534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.363585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:40.363666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-10T11:35:40.363846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:40.371439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:35:40.371625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-10T11:35:40.372303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:40.372419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:40.372482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 
2025-04-10T11:35:40.372739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-10T11:35:40.372846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-10T11:35:40.382125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:40.382253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:40.382642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:40.382706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:35:40.383271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.383351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:35:40.384588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:40.384698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:35:40.384770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:35:40.384835Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-10T11:35:40.384889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:40.385029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:35:40.385799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1272 } } 2025-04-10T11:35:40.385845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:40.385990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1272 } } 2025-04-10T11:35:40.386141Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME 
Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1272 } } 2025-04-10T11:35:40.386912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:40.386971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:40.387141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:40.387200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:40.387330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 4294969591 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-10T11:35:40.387425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:40.387483Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.387523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:40.387568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-10T11:35:40.391783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:35:40.392113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.392383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.392541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:35:40.392591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-10T11:35:40.392706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:40.392754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:40.392819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-10T11:35:40.392860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:40.392917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-10T11:35:40.393031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:336:2314] 
message: TxId: 102 2025-04-10T11:35:40.393115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-10T11:35:40.393168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:35:40.393201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:35:40.393348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:40.395603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:35:40.395662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:615:2569] TestWaitNotification: OK eventTxId 102 2025-04-10T11:35:40.396165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:40.396239Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-04-10T11:35:40.398165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:40.398280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:40.398343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.038500Z, at schemeshard: 72057594046678944 2025-04-10T11:35:40.398410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-04-10T11:35:41.225013Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:35:41.225104Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744284941222 ErrorReason# 2025-04-10T11:35:41.234654Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:35:41.234768Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1744284941234 ErrorReason# 2025-04-10T11:35:41.241213Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:35:41.241284Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1744284941241 ErrorReason# >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TResourceBroker::TestQueueWithConfigure >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-04-10T11:35:43.260147Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-10T11:35:43.263124Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeWorks >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: 2025-04-10T11:33:42.383952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:33:42.384228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:33:42.384438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000984/r3tmp/tmp0iaUMn/pdisk_1.dat 2025-04-10T11:33:42.790989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:33:42.832740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:42.873484Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:33:42.874990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:42.875181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:42.875356Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:33:42.887369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:33:42.972918Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:33:42.972998Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:33:42.973212Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:33:43.161064Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:33:43.161196Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:33:43.161883Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:33:43.162000Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:33:43.163411Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:33:43.163646Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:33:43.163770Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:33:43.165806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:33:43.166353Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:33:43.167183Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:33:43.167263Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:33:43.203622Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:33:43.204995Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:33:43.205520Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:33:43.205841Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:43.263061Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:33:43.263965Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:43.264158Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:33:43.266173Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:33:43.266296Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:33:43.266367Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:33:43.266869Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:33:43.267032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:33:43.267141Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:33:43.267699Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:33:43.338112Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:33:43.338405Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:33:43.338655Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:33:43.338722Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:33:43.338767Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:33:43.338812Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:33:43.339148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
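
Aside on the datashard trace above: the actor sits in a StateWork handler and dispatches each incoming event by its type (TEvProgressTransaction, TEvServerConnected, TEvProposeTransaction, ...), logging a "received event" / "processing event" pair for each. A minimal sketch of that dispatch pattern, using invented types rather than the real YDB actor API:

    // Sketch only: hypothetical event ids standing in for values like 2146435072.
    #include <cstdio>
    #include <functional>
    #include <map>
    #include <string>

    enum class EEventType { ProgressTransaction, ServerConnected, ProposeTransaction };

    class TStateWork {
    public:
        // Register one handler per event type.
        void On(EEventType type, std::function<void()> fn) { Handlers[type] = std::move(fn); }

        // Dispatch an incoming event; unknown events are only logged.
        void Handle(EEventType type, const std::string& sender) {
            std::printf("StateWork, received event from %s\n", sender.c_str());
            auto it = Handlers.find(type);
            if (it != Handlers.end()) {
                it->second();
            } else {
                std::printf("StateWork, ignoring unknown event\n");
            }
        }

    private:
        std::map<EEventType, std::function<void()>> Handlers;
    };

    int main() {
        TStateWork state;
        state.On(EEventType::ProgressTransaction,
                 [] { std::printf("processing event TEvPrivate::TEvProgressTransaction\n"); });
        state.Handle(EEventType::ProgressTransaction, "[1:666:2570]");
        return 0;
    }

The real actor system delivers numeric event type ids and pattern-matches on them; the map here only stands in for that dispatch.
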
2025-04-10T11:33:43.339208Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:33:43.339626Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:33:43.339736Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:33:43.339816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:33:43.339860Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:33:43.339927Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:33:43.339968Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:33:43.340005Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:33:43.340041Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:33:43.340128Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:33:43.340283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:43.340350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:33:43.340402Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:33:43.340843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:33:43.340914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:33:43.341084Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:33:43.341327Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:33:43.341402Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:33:43.341506Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:33:43.341603Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:33:43.341657Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:33:43.341694Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:33:43.341734Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:43.342084Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:33:43.342136Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:33:43.342203Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:33:43.342255Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:33:43.342342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:33:43.342385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:33:43.342423Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:33:43.342478Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:33:43.342527Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:33:43.343573Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:33:43.343626Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:33:43.343666Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:33:43.343746Z node 1 :TX_DATASHARD TRACE: Prop ... eration: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } 2025-04-10T11:35:41.525810Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:35:41.526027Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-04-10T11:35:41.526163Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-10T11:35:41.526442Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:872:2685], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [14:844:2685]Got WRONG SHARD STATE for table `/Root/table`. ShardID=72075186224037888, Sink=[14:872:2685].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2025-04-10T11:35:41.526668Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:865:2685], SessionActorId: [14:844:2685], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/table`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[14:844:2685]. isRollback=0 2025-04-10T11:35:41.527228Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=M2UyYjI4MDEtMmNjZDJmYi00NWY4YmNkLWUxOTFlODkx, ActorId: [14:844:2685], ActorState: ExecuteState, TraceId: 01jrfqqz3f06j62kj3nceept0j, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [14:998:2685] from: [14:865:2685] 2025-04-10T11:35:41.527484Z node 14 :KQP_EXECUTER ERROR: ActorId: [14:998:2685] TxId: 281474976715665. Ctx: { TraceId: 01jrfqqz3f06j62kj3nceept0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=M2UyYjI4MDEtMmNjZDJmYi00NWY4YmNkLWUxOTFlODkx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-04-10T11:35:41.527872Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [14:865:2685], Recipient [14:690:2580]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-04-10T11:35:41.527913Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:35:41.528002Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-04-10T11:35:41.528042Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-10T11:35:41.528230Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=M2UyYjI4MDEtMmNjZDJmYi00NWY4YmNkLWUxOTFlODkx, ActorId: [14:844:2685], ActorState: ExecuteState, TraceId: 01jrfqqz3f06j62kj3nceept0j, Create QueryResponse for error on request, msg: ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR 2025-04-10T11:35:41.530677Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:594:2519], Recipient [14:690:2580]: NActors::TEvents::TEvPoison 2025-04-10T11:35:41.531265Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-10T11:35:41.531356Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-10T11:35:41.552759Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:1002:2814], Recipient [14:1004:2815]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:35:41.560612Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:1002:2814], Recipient [14:1004:2815]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:35:41.560847Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:1002:2814], Recipient [14:1004:2815]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:35:41.565420Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1004:2815] 2025-04-10T11:35:41.565845Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:41.574999Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:41.576427Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:35:41.579343Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:35:41.579467Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:35:41.579573Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:35:41.580387Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:35:41.580836Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:35:41.580935Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:35:41.581038Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-04-10T11:35:41.581239Z node 14 
:TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:35:41.581337Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:35:41.581547Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:1018:2822] 2025-04-10T11:35:41.581624Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:35:41.581694Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-04-10T11:35:41.581766Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:41.582335Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:61:2108], Recipient [14:1004:2815]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-04-10T11:35:41.582670Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:1004:2815], Recipient [14:1004:2815]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:35:41.582728Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:35:41.583602Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:1004:2815], Recipient [14:1004:2815]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-04-10T11:35:41.583677Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-04-10T11:35:41.584622Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:1004:2815]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 600} 2025-04-10T11:35:41.584681Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:35:41.584758Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 600 2025-04-10T11:35:41.584838Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:41.586001Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:35:41.586097Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-04-10T11:35:41.586279Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-10T11:35:41.586360Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-10T11:35:41.586441Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-10T11:35:41.586940Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:1004:2815]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 400 NextReadStep# 600 ReadStep# 600 } 2025-04-10T11:35:41.587010Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:35:41.587124Z node 14 
:TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 400 next step 600 2025-04-10T11:35:41.587335Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1004:2815], Recipient [14:904:2728]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-10T11:35:41.587391Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:35:41.587500Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-10T11:35:41.587697Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-10T11:35:41.587817Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 500:281474976715663 at 72075186224037889 2025-04-10T11:35:41.587950Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:35:41.588092Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-10T11:35:41.588416Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:35:41.588558Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:904:2728], Recipient [14:1004:2815]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-10T11:35:41.588605Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-10T11:35:41.588684Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-10T11:35:41.588798Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 
72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-04-10T11:35:43.857461Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-04-10T11:35:43.857713Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-04-10T11:35:43.857895Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:18.771673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:18.771776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.771813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:18.771845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:18.771886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:18.771924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:18.771984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:18.772065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:18.772389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:18.859399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:18.859492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:18.875461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
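
Aside on the TResourceBroker::TestQueueWithConfigure errors above ("task 'compaction1' uses unknown queue 'queue_default1'", "task 'unknown' is required", "task 'unknown' uses unknown queue 'queue_default'"): the broker rejects a configuration when any task references a queue that was never declared, or when the mandatory catch-all task named "unknown" is missing. A sketch of that validation under assumed config structs (the real config is a protobuf; the names here are illustrative):

    #include <cstdio>
    #include <set>
    #include <string>
    #include <vector>

    struct TTaskConfig { std::string Name; std::string Queue; };
    struct TBrokerConfig {
        std::vector<std::string> Queues;
        std::vector<TTaskConfig> Tasks;
    };

    // Returns an empty string on success, or an error message on failure.
    std::string Validate(const TBrokerConfig& config) {
        std::set<std::string> queues(config.Queues.begin(), config.Queues.end());
        bool hasUnknownTask = false;
        for (const auto& task : config.Tasks) {
            if (!queues.count(task.Queue))
                return "task '" + task.Name + "' uses unknown queue '" + task.Queue + "'";
            if (task.Name == "unknown")
                hasUnknownTask = true;
        }
        // The broker needs a catch-all task for unclassified submissions.
        if (!hasUnknownTask)
            return "task 'unknown' is required";
        return "";
    }

    int main() {
        TBrokerConfig bad{{"queue_default"}, {{"compaction1", "queue_default1"}}};
        // Prints: task 'compaction1' uses unknown queue 'queue_default1'
        std::printf("%s\n", Validate(bad).c_str());
        return 0;
    }
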
2025-04-10T11:35:18.875745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:18.875939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:18.887871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:18.888116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:18.888762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:18.889167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:18.891080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.892432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:18.892506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:18.892701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:18.892752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:18.892796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:18.893009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:18.899413Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.032131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.032366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.032576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.032811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.032874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.036128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.036268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.036461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.036514Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.036564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.036605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.039282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.039339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.039376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.041268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.041318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.041361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.041407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.044982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.046918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.047120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.048093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.048223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.048272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.048554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.048611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.048769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.048879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.050962Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.051005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.051181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.051228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.051458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.051499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.051585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.051670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.051711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.051743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.051785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.051845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.051883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.051915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.051973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.052006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.052054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.054373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.054494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.054534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944, LocalPathId: 1] 2025-04-10T11:35:44.399152Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:35:44.399409Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:44.399465Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-10T11:35:44.399510Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-10T11:35:44.399653Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:44.399699Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-10T11:35:44.402132Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:44.402533Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:44.402571Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:44.402611Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-10T11:35:44.402650Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:44.405027Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:44.405128Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-10T11:35:44.405162Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-10T11:35:44.405199Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-10T11:35:44.405241Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:35:44.405342Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-10T11:35:44.406877Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1212 } } 
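
Aside on the numeric state changes in the schemeshard traces (2 -> 3, 3 -> 128, 128 -> 240 in this run, and 128 -> 129 -> 240 in the table-creation trace earlier): each suboperation advances through a fixed progression of states, and the log records every transition. A sketch that models that progression as an explicit transition table; the state names are guesses inferred from the surrounding log lines, not YDB's actual identifiers:

    #include <cstdio>
    #include <map>
    #include <set>

    enum class EOpState {
        CreateParts = 2,     // create shards (none needed -> advance immediately)
        ConfigureParts = 3,  // push configuration to datashards
        Propose = 128,       // wait for the coordinator plan step
        WaitParts = 129,     // wait for TEvSchemaChanged from every shard
        Done = 240,
    };

    static const std::map<EOpState, std::set<EOpState>> Allowed = {
        {EOpState::CreateParts, {EOpState::ConfigureParts}},
        {EOpState::ConfigureParts, {EOpState::Propose}},
        {EOpState::Propose, {EOpState::WaitParts, EOpState::Done}},
        {EOpState::WaitParts, {EOpState::Done}},
    };

    // Apply a transition only if the table permits it.
    bool Advance(EOpState& current, EOpState next) {
        auto it = Allowed.find(current);
        if (it == Allowed.end() || !it->second.count(next))
            return false;
        std::printf("Change state %d -> %d\n", (int)current, (int)next);
        current = next;
        return true;
    }

    int main() {
        EOpState s = EOpState::Propose;
        Advance(s, EOpState::WaitParts); // 128 -> 129, as in the trace
        Advance(s, EOpState::Done);      // 129 -> 240
        return 0;
    }

Encoding the progression as a table makes an illegal jump a detectable error rather than a silent state overwrite, which is presumably why the real schemeshard logs every transition it applies.
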
2025-04-10T11:35:44.406931Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:44.407082Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1212 } } 2025-04-10T11:35:44.407179Z node 28 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1212 } } 2025-04-10T11:35:44.408923Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 120259086585 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:44.408990Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-10T11:35:44.409106Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 120259086585 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:44.409145Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:35:44.409219Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 120259086585 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:35:44.409273Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:44.409310Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:44.409347Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:35:44.409393Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:35:44.410471Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:44.412506Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:35:44.412828Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:44.414119Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:44.414290Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:44.414357Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:44.414463Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:44.414610Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:44.414669Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:44.414701Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:44.414744Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:35:44.414830Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:338:2317] message: TxId: 101 2025-04-10T11:35:44.414881Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:44.414924Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:44.414955Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:44.415123Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:35:44.417143Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:44.417190Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:339:2318] TestWaitNotification: OK eventTxId 101 2025-04-10T11:35:44.417700Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:44.417924Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 271us result status StatusSuccess 2025-04-10T11:35:44.418443Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.6%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::ConcurrentScans [GOOD] >> SystemView::GroupsFields >> SelfHealActorTest::NoMoreThanOneReplicating >> TPipeCacheTest::TestIdleRefresh >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] Leader for TabletID 9437184 is [1:165:2141] sender: [1:166:2058] recipient: [1:157:2137] Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:162:2095] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:162:2095] Leader for TabletID 9437185 is [2:186:2098] sender: [2:187:2049] recipient: [2:162:2095] Leader for TabletID 9437184 is [1:165:2141] sender: [1:209:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:186:2098] sender: [1:211:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:186:2098] sender: [2:213:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:186:2098] sender: [2:214:2049] recipient: [2:154:2094] Leader for TabletID 9437185 is [2:186:2098] sender: [1:217:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:186:2098] sender: [2:218:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:186:2098] sender: [2:220:2049] recipient: [2:219:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [2:222:2049] recipient: [2:219:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [1:251:2058] recipient: [1:15:2062] >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> 
TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] >> TPipeCacheTest::TestTabletNode [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] 2025-04-10T11:35:19.234418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:19.234525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.234567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:19.234601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:19.234649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:19.234677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:19.234731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:19.234808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:19.235155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:19.317452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:19.317507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.327511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:19.327624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:19.327822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:19.331833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:19.332080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:19.332745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.333005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:19.337138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.338670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
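The describe result above shows a TTL config on a pgint8 column with ColumnUnit: UNIT_NANOSECONDS and ExpireAfterSeconds: 3600. Below is a minimal sketch of how such a unit-qualified integer column could map to an expiration decision; this is illustrative only — the real logic lives in YDB's C++ schemeshard/datashard code, the unit names are copied from the log, and the epoch-offset interpretation of UNIT_* values is an assumption.

    import time

    # Divisors converting a raw integer column value to seconds, keyed by
    # the ColumnUnit names visible in the log (epoch-offset semantics and
    # the MILLI/MICRO entries are illustrative assumptions).
    UNIT_DIVISORS = {
        "UNIT_SECONDS": 1,
        "UNIT_MILLISECONDS": 10**3,
        "UNIT_MICROSECONDS": 10**6,
        "UNIT_NANOSECONDS": 10**9,
    }

    def is_expired(raw_value: int, column_unit: str, expire_after_seconds: int,
                   now: float | None = None) -> bool:
        """Return True if a row whose TTL column holds raw_value is past its TTL."""
        now = time.time() if now is None else now
        value_seconds = raw_value / UNIT_DIVISORS[column_unit]
        return value_seconds + expire_after_seconds <= now

    # A row stamped two hours ago (as a nanosecond epoch value), with
    # ExpireAfterSeconds: 3600, is already past the deletion boundary:
    now = time.time()
    stamp_ns = int((now - 7200) * 10**9)
    print(is_expired(stamp_ns, "UNIT_NANOSECONDS", 3600, now))  # True
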
2025-04-10T11:35:19.338745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.338850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:19.338895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.338955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:19.339139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.345549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-10T11:35:19.482351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:19.482615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.482852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:19.483102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:19.483160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.485288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.485428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:19.485630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.485691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:19.485728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:19.485763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:19.487751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.487814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:19.487863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:19.489602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.489650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.489695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.489743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.493494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:19.495687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:19.495895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:19.496939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:19.497113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 4294969444 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:19.497170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.497432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:19.497482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:19.497673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:19.497764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:19.499817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:19.499879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:19.500065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:19.500118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:19.500351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:19.500394Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:19.500513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.500551Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.500588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:19.500617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.500664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:19.500721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:19.500758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:19.500807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:19.500883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:19.500923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:19.500957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:19.502945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.503068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:19.503122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
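The schemeshard entries in this output share one shape: ISO timestamp, node id, :COMPONENT, severity, free-form message. When triaging a run like this one it can help to pull those fields out mechanically; the regex below is inferred from the lines above, not an official ya/YDB log-format spec.

    import re

    # Pattern inferred from entries such as
    # "2025-04-10T11:35:19.489695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ..."
    LOG_RE = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)"
        r" node (?P<node>\d+)"
        r" :(?P<component>\S+)"
        r" (?P<severity>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):"
        r" (?P<message>.*)"
    )

    def parse_entry(line: str):
        m = LOG_RE.match(line)
        return m.groupdict() if m else None

    entry = parse_entry(
        "2025-04-10T11:35:19.489695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: "
        "TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1"
    )
    print(entry["component"], entry["severity"])  # FLAT_TX_SCHEMESHARD DEBUG
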
Progress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.918413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:46.918469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 107:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:35:46.918611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-10T11:35:46.918661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:35:46.918706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-10T11:35:46.918749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:35:46.918794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-10T11:35:46.918873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:340:2319] message: TxId: 107 2025-04-10T11:35:46.918932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-10T11:35:46.918972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-10T11:35:46.919011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-10T11:35:46.919167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:35:46.919219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-10T11:35:46.919581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:35:46.919635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-10T11:35:46.919700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:46.922284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-10T11:35:46.922351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1355:3256] 2025-04-10T11:35:46.922895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-10T11:35:47.022333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:47.022560Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:47.023058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-10T11:35:47.023148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:47.023516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-10T11:35:47.023597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-10T11:35:47.110639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-10T11:35:47.110767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:47.110896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:47.111057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744298209686847 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:47.111160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1744298209686847 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-10T11:35:47.111693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:35:47.112203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:35:47.112747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:47.112790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-10T11:35:47.113056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-10T11:35:47.113088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-10T11:35:47.116756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at 
schemeshard: 72057594046678944 2025-04-10T11:35:47.116918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-10T11:35:47.117025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:47.117073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-10T16:16:49.686847Z, at schemeshard: 72057594046678944 2025-04-10T11:35:47.117133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:47.117196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-10T11:35:47.117224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-10T16:16:49.686847Z, at schemeshard: 72057594046678944 2025-04-10T11:35:47.117251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-10T11:35:47.138728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:47.193065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:47.193207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:47.193278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-10T11:35:47.193367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:47.193625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-10T11:35:47.193699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-04-10T11:35:47.224889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-10T11:35:47.291040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-04-10T11:35:47.291221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 
72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-10T11:35:47.291312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-04-10T11:35:47.291403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T11:35:47.291671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-04-10T11:35:47.291732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:17.705580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:17.705669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.705709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:17.705743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:17.705790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:17.705845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:17.705934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:17.706025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:17.706419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:17.796913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:17.796966Z node 1 :IMPORT WARN: Table profiles were not loaded 
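The CheckCounters output samples the SchemeShard/NumShardsByTtlLag histogram three times, and the two shards move between lag buckets as their last conditional erase ages and then succeeds again. A small sketch of the bucket assignment implied by the paired Ranges/Values lines follows; treating each Range as an upper bound on lag in seconds is an assumption read off the snapshots, not confirmed semantics.

    import bisect

    # Upper bounds of the buckets printed in the log; the trailing "inf"
    # bucket catches everything beyond 86400 s.
    BUCKET_BOUNDS = [0, 900, 1800, 3600, 7200, 14400, 28800, 57600, 86400]

    def bucket_counts(shard_lags_seconds):
        """Count shards per lag bucket, mirroring the Values: ... lines."""
        values = [0] * (len(BUCKET_BOUNDS) + 1)
        for lag in shard_lags_seconds:
            values[bisect.bisect_left(BUCKET_BOUNDS, lag)] += 1
        return values

    # Two shards whose last conditional erase was ~15 min ago land in the
    # "900" bucket, matching "Values: 0 Values: 2 Values: 0 ..." above.
    print(bucket_counts([880, 895]))  # [0, 2, 0, 0, 0, 0, 0, 0, 0, 0]
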
2025-04-10T11:35:17.813699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:17.813980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:17.814188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:17.822819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:17.823090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:17.823870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.824286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:17.826835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.828262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:17.828343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.828538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:17.828591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:17.828654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:17.828866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.836788Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:17.965285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:17.965543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.965761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:17.965988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:17.966037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.968621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.968763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:17.968947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.969016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:17.969072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:17.969116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:17.971011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.971072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:17.971108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:17.972958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.973020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.973063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.973110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.976653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:17.978558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:17.978772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:17.979729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:17.979849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:17.979894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.980181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:17.980236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:17.980402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:17.980473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:17.982588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:17.982631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:17.982807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:17.982860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:17.983076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:17.983118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:17.983204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:17.983268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.983340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:17.983371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.983417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:17.983476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:17.983532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:17.983567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:17.983626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:17.983668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:17.983699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:17.991723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:17.991865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:17.991910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
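Both test logs follow the same publish/ack handshake: the operation records "Publication details: tx, [pathId], version", then each TEvUpdateAck whose Version is at least the expected one marks that path as published, and once nothing is pending the waiters are satisfied. A toy model of that bookkeeping is sketched below; the real logic is TTxAckPublishToSchemeBoard in YDB's C++ schemeshard, so the names and structure here are purely illustrative.

    # Minimal model of the publish/ack bookkeeping seen in the log.
    class Publication:
        def __init__(self, tx_id, expected):  # expected: {path_id: version}
            self.tx_id = tx_id
            self.pending = dict(expected)

        def handle_ack(self, path_id, version):
            """Drop a path once the schemeboard acks a recent-enough version."""
            if path_id in self.pending and version >= self.pending[path_id]:
                del self.pending[path_id]
            return not self.pending  # True => publication done, notify waiters

    pub = Publication(tx_id=1, expected={1: 3})   # "tx: 1, [LocalPathId: 1], 3"
    print(pub.handle_ack(path_id=1, version=3))   # True: satisfy the waiter
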
44 2025-04-10T11:35:46.347701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.349227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.349598Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.349747Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.353674Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.353869Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.353992Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.354255Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.354926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.355432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.356429Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.356622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.358320Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.358508Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.358713Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.358810Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.358920Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359017Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359397Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359468Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359601Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:46.359856Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:35:46.360000Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:46.360046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:46.360097Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-10T11:35:46.360139Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:46.360178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-10T11:35:46.360271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2771:4036] message: TxId: 101 2025-04-10T11:35:46.360329Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-10T11:35:46.360400Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:35:46.360445Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:35:46.361732Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-10T11:35:46.366132Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:35:46.366236Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2772:4037] TestWaitNotification: OK eventTxId 101 2025-04-10T11:35:46.366934Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:35:46.367279Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 375us result status StatusSuccess 2025-04-10T11:35:46.367924Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> 
TAsyncIndexTests::SplitMainWithReboots[TabletReboots] >> TLocksTest::BrokenSameKeyLock >> SystemView::AuthUsers [GOOD] >> SystemView::AuthUsers_LockUnlock >> SystemView::SystemViewFailOps [GOOD] >> SystemView::TabletsFields >> TLocksFatTest::PointSetBreak >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows 2025-04-10 11:35:45,762 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:35:45,994 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1682690 46.2M 46.1M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001ea6/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_t 1683261 1.8G 1.8G 1.4G └─ ydb-core-tx-datashard-ut_read_iterator --trace-path-append /home/runner/.ya/build/build_root/2gt4/001ea6/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/te Test command err: 2025-04-10T11:25:50.386408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:25:50.386560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:25:50.386714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea6/r3tmp/tmpsJMwjh/pdisk_1.dat 2025-04-10T11:25:50.811363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:25:50.862042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:25:50.905668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:25:50.906253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:25:50.918109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:25:51.010343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:25:51.061592Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:25:51.062678Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:25:51.063107Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:25:51.063345Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:25:51.113400Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:25:51.114286Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:25:51.114393Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:25:51.116046Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:25:51.116149Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:25:51.116208Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:25:51.116595Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:25:51.116725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:25:51.116809Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:25:51.127652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:25:51.164163Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:25:51.164365Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:25:51.164505Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:25:51.164562Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:25:51.164597Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:25:51.164628Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.164858Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.164908Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.165232Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:25:51.165348Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:25:51.165420Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:51.165457Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:25:51.165530Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:25:51.165567Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:25:51.165605Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:25:51.165643Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:25:51.165697Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:25:51.165832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.165880Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.165923Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:25:51.166315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:25:51.166357Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:25:51.166477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:25:51.166707Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:25:51.166755Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:25:51.166849Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:25:51.166926Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:25:51.166968Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:25:51.167003Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
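The TX_DATASHARD trace here walks a proposed scheme transaction through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), advancing while a unit reports an executed/delayed-complete status and parking when one is not ready. The driver loop below is a simplification of that control flow, under the assumption that the statuses behave exactly as the trace suggests; unit names and statuses are taken verbatim from the lines above.

    UNITS = ["CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"]
    ADVANCE = {"ExecutedNoMoreRestarts", "DelayCompleteNoMoreRestarts",
               "DelayComplete"}

    def run_pipeline(statuses):
        """Advance through units until one is not ready (e.g. WaitForPlan)."""
        for unit in UNITS:
            status = statuses[unit]
            print(f"execute {unit} -> {status}")
            if status not in ADVANCE:
                return unit  # parked here until e.g. a plan step arrives
        return None

    parked = run_pipeline({
        "CheckSchemeTx": "ExecutedNoMoreRestarts",
        "StoreSchemeTx": "DelayCompleteNoMoreRestarts",
        "FinishPropose": "DelayComplete",
        "WaitForPlan": "NotReady",
    })
    print("waiting in:", parked)  # waiting in: WaitForPlan
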
2025-04-10T11:25:51.167034Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.167317Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:25:51.167359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:25:51.167406Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:25:51.167445Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.167501Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:25:51.167533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:25:51.167563Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:25:51.167596Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.167656Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:25:51.169168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:25:51.169215Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:25:51.179951Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:25:51.180043Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:25:51.180082Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:25:51.180129Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:25:51.180198Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:25:51.329621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.329673Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:25:51.329707Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:25:51.330101Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:25:51.330140Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:25:51.330303Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:25:51.330362Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:25:51.330401Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:25:51.330437Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:25:51.334646Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:25:51.334703Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:25:51.335435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.335469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:25:51.335506Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:25:5 ... LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL\");", parameters: 0b 2025-04-10T11:34:50.815153Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jrfqp4vh1tz5dqz5ys71f7fq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=NGYwYTU4N2UtMTQwMjY1NDctNGIxOTAyMWEtYTQyYWU0NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:34:59.929678Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfqpdva084zxazzekxb22w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=NDc4N2IwNi02OWExYWY3My1jNmY1ZDI0MS00Mzc3NmU2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:08.496722Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfqpppe30dfjma266eh41xe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZTM1ZTE4NTMtNjRjYTc4OTQtZGUyNzgxMGItOTMzOTUzNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-10T11:35:10.662412Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:35:10.662480Z node 11 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1505 LastUpdateTime: 1505 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4995 Memory: 4320316 Storage: 16778723 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 11 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1505 LastUpdateTime: 1505 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1389 Memory: 126364 Storage: 20973341 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 11 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T11:35:13.492136Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfqq3j8b3kqp4vncj1tyg7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=OGZmYTE1MjYtNTEwMDQ1NmYtOWU5ZDNhZTQtZDc2NzkzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:19.451627Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:35:19.451826Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:35:19.451986Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ea6/r3tmp/tmpoommaD/pdisk_1.dat 2025-04-10T11:35:19.839771Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:35:19.875451Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:19.915720Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:19.915934Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:19.927898Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:20.012900Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:35:20.323184Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:20.323373Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:20.323501Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:20.332463Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:35:20.504216Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:35:20.540584Z node 12 :TX_PROXY ERROR: Actor# [12:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:25.435571Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqqag06jwahv7y99y774yz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=YTdkMTNiMTMtYTBmYmYyYmItNWJlN2IyYjUtNzFiMWZmMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:29.735545Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqqfj2cyb15t2c65xf9vvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=YzdmYThiMmUtYmI1NjFiYjUtZjg0NWFkOTUtYWZiYTIzNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:34.542425Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqqkry61fzb8efavwyt244, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=N2M0MjVhYjUtMmQwNmFlNTQtMWE0Zjg5MTgtOTFhMjVjODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:39.014861Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqqrfk56snm21rm9nptgx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=NjUxNDIxMzgtNTdhNzVhNmQtNzE4YTM4NS04MjQ1MjFhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:43.586490Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqqwv508rp0r15j8wayt2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=ODA4NTQ5ZTQtZDFlYzE3ODUtNzZlMTdmZWItYTEyZTdjN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001ea6/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001ea6/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> BsControllerTest::TestLocalBrokenRelocation >> TKeyValueTest::TestWriteLongKey [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema >> SystemView::AuthGroups_Access [GOOD] >> SystemView::AuthGroups_ResultOrder >> TFlatTest::SelectBigRangePerf >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink |96.7%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> SystemView::TopPartitionsByCpuFields [GOOD] >> SystemView::TopPartitionsByCpuTables >> TLocksTest::Range_CorrectNullDot >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsFollowers >> TFlatTest::WriteSplitByPartialKeyAndRead >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> SystemView::GroupsFields [GOOD] >> SystemView::Describe >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 62918, MsgBus: 29250 2025-04-10T11:34:46.500430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646543800191696:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:46.502663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001732/r3tmp/tmpx1PDAz/pdisk_1.dat 2025-04-10T11:34:46.954334Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62918, node 1 2025-04-10T11:34:46.964067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:46.964218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:34:46.967366Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:34:46.967456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:47.078888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:47.078909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:47.078915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:47.079020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29250 TClient is connected to server localhost:29250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:34:47.809985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:47.833251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:47.985480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:48.150531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:48.255210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:50.040311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646560980062643:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.040424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.349421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.389720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.423902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.462438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.494596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.566019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:50.641575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646560980063159:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.641664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.644645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646560980063164:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:50.651000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:50.667762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646560980063166:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:50.731574Z node 1 :TX_PROXY ERROR: Actor# [1:7491646560980063219:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:51.502285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646543800191696:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:51.502368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:51.694022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:01.926649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:35:01.926703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:20.831596Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284920820, txId: 281474976710672] shutting down 2025-04-10T11:35:20.887161Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-10T11:35:22.093730Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284922086, txId: 281474976710674] shutting down 2025-04-10T11:35:23.313379Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284923306, txId: 281474976710676] shutting down 2025-04-10T11:35:24.465492Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284924460, txId: 281474976710678] shutting down 2025-04-10T11:35:25.743404Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284925733, txId: 281474976710680] shutting down 2025-04-10T11:35:26.916416Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284926910, txId: 281474976710682] shutting down 2025-04-10T11:35:28.092480Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284928087, txId: 281474976710684] shutting down 2025-04-10T11:35:29.256893Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284929249, txId: 281474976710686] shutting down 2025-04-10T11:35:30.480915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284930474, txId: 281474976710688] shutting down 2025-04-10T11:35:31.688678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284931675, txId: 281474976710690] shutting down 2025-04-10T11:35:32.915682Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284932906, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: E ... SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2FjYTNlNDQtOTBiOGUwMzgtMzlkMDc4YmMtNWMxYmY4OTE=, ActorId: [2:7491646788500057240:2497], ActorState: ExecuteState, TraceId: 01jrfqr1c89aaqgj0sgr7srymv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-10T11:35:43.813491Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491646788500057302:2510], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-04-10T11:35:43.813761Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2FjYTNlNDQtOTBiOGUwMzgtMzlkMDc4YmMtNWMxYmY4OTE=, ActorId: [2:7491646788500057240:2497], ActorState: ExecuteState, TraceId: 01jrfqr1ddb1hgk3ctw42zha75, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 19439, MsgBus: 2743 2025-04-10T11:35:44.860674Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646791090862476:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:44.860924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001732/r3tmp/tmpXHSFGv/pdisk_1.dat 2025-04-10T11:35:44.992804Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:45.022352Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:45.022454Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:45.023785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19439, node 3 2025-04-10T11:35:45.082657Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:45.082689Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:45.082701Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:45.082833Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2743 TClient is connected to server localhost:2743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:45.645169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:45.652664Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:35:45.662483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:45.778866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:45.975411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:46.099795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:48.638193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646808270733348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:48.638306Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:48.689707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.729646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.766432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.800916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.830950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.874632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-10T11:35:48.921166Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646808270733856:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:48.921276Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491646808270733861:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:48.921287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:48.925672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-10T11:35:48.936711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491646808270733863:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-10T11:35:49.020074Z node 3 :TX_PROXY ERROR: Actor# [3:7491646812565701215:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:49.862279Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646791090862476:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:49.862370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> BsControllerTest::TestLocalBrokenRelocation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:35:20.497123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:20.497225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:20.497268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:20.497305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:20.497357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:20.497427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:20.497497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:20.497596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:20.497996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:20.576569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:35:20.576650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:20.593907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:20.594215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:20.594414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:20.602055Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:20.602310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:20.603001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:20.603402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:20.605488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:20.607016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:20.607099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:20.607321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:20.607401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:20.607452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:20.607660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.615319Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:35:20.754952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:20.755199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.755477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:20.755718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:20.755772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.758602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:20.758784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:20.758996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.759057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:20.759100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-10T11:35:20.759136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:20.761585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.761650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:20.761688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:20.763881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.763928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.763969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:20.764016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:20.773400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:20.775828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:20.776047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:20.777060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:20.777221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:20.777290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:20.777580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:20.777635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:20.777818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:20.777911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:20.780399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:20.780447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-10T11:35:20.780631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:20.780700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:20.781053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:20.781103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:20.781218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:20.781254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:20.781294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:20.781328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:20.781367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:20.781432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:20.781474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:20.781507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:20.781591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:20.781638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:20.781674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:20.784111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:20.784229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:20.784270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:54.468772Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:54.469139Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.481211Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:124:2150] sender: [37:240:2058] recipient: [37:15:2062] 2025-04-10T11:35:54.495267Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:54.495631Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.495932Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:54.496244Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:54.496341Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.499783Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:54.499963Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:54.500290Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.500398Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:54.500484Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:54.500566Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:54.502952Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.503034Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:54.503106Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:54.505108Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.505175Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.505266Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:54.505378Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-10T11:35:54.505649Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:54.507621Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:54.507960Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:54.509099Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:54.509347Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 158913792109 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:54.509458Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:54.509852Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:54.509950Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:54.510417Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:54.510580Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:54.513103Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:54.513206Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:54.513603Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:54.513710Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:54.514493Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.514602Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:54.514896Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:54.514978Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:54.515076Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:54.515161Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-04-10T11:35:54.515247Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:35:54.515333Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:54.515420Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:35:54.515492Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:35:54.515618Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:35:54.515703Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:35:54.515789Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:35:54.516642Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:54.516853Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:35:54.516943Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:35:54.517061Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:35:54.517157Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:54.517343Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:35:54.520892Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-10T11:35:54.521836Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:35:54.527049Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:54.527817Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:35:54.528050Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-10T11:35:54.529004Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To 
enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-10T11:35:54.529634Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] Bootstrap 2025-04-10T11:35:54.575721Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] Become StateWork (SchemeCache [37:275:2266]) 2025-04-10T11:35:54.577751Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:35:54.580869Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:54.581199Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-10T11:35:54.581965Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-04-10T11:35:50.104791Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-10T11:35:50.105519Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:35:50.106705Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-10T11:35:50.106751Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:35:50.106831Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-10T11:35:50.106858Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:35:50.106919Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-10T11:35:50.106954Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:35:50.107142Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-10T11:35:50.107169Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-10T11:35:50.107234Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-10T11:35:50.107265Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:35:50.107374Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-10T11:35:50.107403Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:35:50.107468Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-10T11:35:50.107507Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:35:50.107546Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-10T11:35:50.107592Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:35:50.107642Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-10T11:35:50.107673Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:35:50.107743Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-10T11:35:50.107770Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-10T11:35:50.107811Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-10T11:35:50.107847Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-10T11:35:50.107900Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-10T11:35:50.107922Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:35:50.107961Z 14 00h00m00.000000s :BS_NODE 
DEBUG: [14] Bootstrap 2025-04-10T11:35:50.107983Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:35:50.108022Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-10T11:35:50.108045Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:35:50.108086Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-10T11:35:50.108107Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-10T11:35:50.108146Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-10T11:35:50.108172Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-10T11:35:50.108213Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-10T11:35:50.108236Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-10T11:35:50.108300Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-10T11:35:50.108325Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-10T11:35:50.108385Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-10T11:35:50.108421Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-10T11:35:50.108463Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-10T11:35:50.108489Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-10T11:35:50.108530Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-10T11:35:50.108552Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-10T11:35:50.108606Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-10T11:35:50.108629Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-10T11:35:50.108682Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-10T11:35:50.108708Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-10T11:35:50.108747Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-10T11:35:50.108770Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-10T11:35:50.108814Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-10T11:35:50.108835Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-10T11:35:50.108887Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-10T11:35:50.108911Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-10T11:35:50.108944Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-10T11:35:50.108976Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-10T11:35:50.109011Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-10T11:35:50.109031Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-10T11:35:50.109068Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-10T11:35:50.109099Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-10T11:35:50.109155Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-10T11:35:50.109179Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-10T11:35:50.109215Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-10T11:35:50.109240Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-10T11:35:50.109288Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-10T11:35:50.109321Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-10T11:35:50.109368Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-10T11:35:50.109390Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-10T11:35:50.109444Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-10T11:35:50.109470Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-10T11:35:50.109516Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-10T11:35:50.109550Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 
2025-04-10T11:35:50.138422Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-10T11:35:50.140049Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-10T11:35:50.140133Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-10T11:35:50.140197Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-10T11:35:50.140259Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-10T11:35:50.140299Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-10T11:35:50.140344Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-10T11:35:50.140374Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-10T11:35:50.140402Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-10T11:35:50.140428Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-10T11:35:50.140453Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-10T11:35:50.140481Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-10T11:35:50.140507Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-10T11:35:50.140532Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-10T11:35:50.140568Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-10T11:35:50.140620Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-10T11:35:50.140660Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-10T11:35:50.140729Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-10T11:35:50.140794Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-10T11:35:50.140858Z 20 00h00m00.000000s :BS_NODE DEBUG: 
[20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-10T11:35:50.140896Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-10T11:35:50.140946Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-10T11:35:50.141003Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-10T11:35:50.141042Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-10T11:35:50.141091Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-10T11:35:50.141131Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-10T11:35:50.141168Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-10T11:35:50.141206Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-10T11:35:50.141249Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-10T11:35:50.141287Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-10T11:35:50.141326Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-10T11:35:50.141368Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-10T11:35:50.141413Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-10T11:35:50.141475Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-10T11:35:50.141527Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-04-10T11:35:54.368205Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-04-10T11:35:54.368252Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-04-10T11:35:54.368321Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-04-10T11:35:54.368369Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-04-10T11:35:54.369208Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-10T11:35:54.369280Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-04-10T11:35:54.369359Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-04-10T11:35:54.369442Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-04-10T11:35:54.369506Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-04-10T11:35:54.369557Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-04-10T11:35:54.369606Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-04-10T11:35:54.369656Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-04-10T11:35:54.369705Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-04-10T11:35:54.369760Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-04-10T11:35:54.369807Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-04-10T11:35:54.369853Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-04-10T11:35:54.369906Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-04-10T11:35:54.369969Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-04-10T11:35:54.370028Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-04-10T11:35:54.370078Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-04-10T11:35:54.370141Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-04-10T11:35:54.370781Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:35:54.370847Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-04-10T11:35:54.370899Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-04-10T11:35:54.370951Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-04-10T11:35:54.371024Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-04-10T11:35:54.371080Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-04-10T11:35:54.371197Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-04-10T11:35:54.371247Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-04-10T11:35:54.371303Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-04-10T11:35:54.371356Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-04-10T11:35:54.371408Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-04-10T11:35:54.371454Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-04-10T11:35:54.371937Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-10T11:35:54.372005Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-04-10T11:35:54.372067Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-04-10T11:35:54.372114Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-04-10T11:35:54.372161Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-04-10T11:35:54.372211Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-04-10T11:35:54.372263Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-04-10T11:35:54.372331Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-04-10T11:35:54.372389Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-04-10T11:35:54.376178Z 5 01h25m01.144560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.376800Z 4 01h25m02.250560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.377278Z 4 01h25m02.950560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.377769Z 7 01h25m03.243560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-04-10T11:35:54.378239Z 2 01h25m03.332560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.378709Z 7 01h25m03.337560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-04-10T11:35:54.379109Z 10 01h25m03.499560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-04-10T11:35:54.379513Z 7 01h25m03.596560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-04-10T11:35:54.379956Z 10 01h25m04.405560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-04-10T11:35:54.380388Z 4 01h25m04.473560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.380802Z 7 01h25m04.531560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-04-10T11:35:54.381228Z 10 01h25m04.928560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-04-10T11:35:54.383416Z 2 01h25m05.038560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.383920Z 5 01h25m05.089560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.384357Z 4 01h25m05.100560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-04-10T11:35:54.384789Z 8 01h25m05.769560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-04-10T11:35:54.385639Z 4 01h25m11.577560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] 
status changed to READY 2025-04-10T11:35:54.386692Z 1 01h25m11.578072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.386780Z 1 01h25m11.578072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-04-10T11:35:54.386915Z 8 01h25m11.915560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-04-10T11:35:54.387785Z 1 01h25m11.916072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.387839Z 1 01h25m11.916072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-04-10T11:35:54.388859Z 7 01h25m16.059560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-04-10T11:35:54.389673Z 1 01h25m16.060072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.389725Z 1 01h25m16.060072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-04-10T11:35:54.389879Z 2 01h25m19.075560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-04-10T11:35:54.390719Z 1 01h25m19.076072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.390774Z 1 01h25m19.076072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-04-10T11:35:54.390877Z 10 01h25m19.245560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-04-10T11:35:54.391674Z 1 01h25m19.246072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.391727Z 1 01h25m19.246072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-04-10T11:35:54.392288Z 10 01h25m20.289560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2025-04-10T11:35:54.393206Z 1 01h25m20.290072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.393256Z 1 01h25m20.290072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-04-10T11:35:54.393388Z 5 01h25m21.432560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-04-10T11:35:54.394236Z 1 01h25m21.433072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.394287Z 1 01h25m21.433072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2025-04-10T11:35:54.394730Z 4 01h25m26.106560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-04-10T11:35:54.395570Z 1 01h25m26.107072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.395636Z 1 01h25m26.107072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-04-10T11:35:54.395738Z 7 01h25m26.231560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-04-10T11:35:54.396495Z 1 01h25m26.232072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.396543Z 1 01h25m26.232072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-04-10T11:35:54.397173Z 7 01h25m27.518560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-04-10T11:35:54.397932Z 1 01h25m27.519072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.397992Z 1 01h25m27.519072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-04-10T11:35:54.398121Z 5 01h25m28.744560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-04-10T11:35:54.398915Z 1 01h25m28.745072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.398977Z 1 01h25m28.745072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-04-10T11:35:54.400314Z 4 01h25m32.942560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-04-10T11:35:54.401071Z 1 01h25m32.943072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2025-04-10T11:35:54.401118Z 1 01h25m32.943072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-04-10T11:35:54.401230Z 2 01h25m33.703560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-04-10T11:35:54.402012Z 1 01h25m33.704072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.402065Z 1 01h25m33.704072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-04-10T11:35:54.403636Z 10 01h25m37.365560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-04-10T11:35:54.404520Z 1 01h25m37.366072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.404572Z 1 01h25m37.366072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-04-10T11:35:54.404681Z 4 01h25m37.595560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-04-10T11:35:54.405485Z 1 01h25m37.596072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.405533Z 1 01h25m37.596072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-04-10T11:35:54.405649Z 7 01h25m38.910560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-04-10T11:35:54.406608Z 1 01h25m38.911072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:35:54.406658Z 1 01h25m38.911072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::PartitionStatsTtlFields >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> TFlatTest::SelectRangeBothLimit [GOOD] >> TFlatTest::CopyCopiedTableAndRead >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> TFlatTest::CopyTableAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-04-10T11:35:50.915345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646815439825236:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:50.915438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd3/r3tmp/tmpg8PAIS/pdisk_1.dat 2025-04-10T11:35:51.417484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:51.417589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:51.420152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:51.444870Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7365 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:51.710098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:51.728034Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:51.770464Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:35:51.776971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... insert finished 34988 usec 15132 usec 12024 usec 12500 usec 13090 usec 12680 usec 11690 usec 12236 usec 12510 usec 12184 usec 2025-04-10T11:35:54.639868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646835396521196:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:54.640007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd3/r3tmp/tmpSuHyp5/pdisk_1.dat 2025-04-10T11:35:54.855058Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:54.858589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:54.858664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:54.860241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30618 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:55.074077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:55.089717Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:55.101780Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:35:55.108665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |96.7%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::AuthGroups_ResultOrder [GOOD] >> SystemView::AuthGroups_TableRange >> BsControllerTest::SelfHealMirror3dc |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SystemView::Describe [GOOD] >> SystemView::DescribeSystemFolder >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> SystemView::ShowCreateTableDefaultLiteral [GOOD] >> SystemView::ShowCreateTableColumn >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TLocksTest::BrokenLockUpdate >> SystemView::AuthUsers_LockUnlock [GOOD] >> SystemView::AuthUsers_Access >> TLocksFatTest::LocksLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-04-10T11:35:54.336193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646835451172543:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:54.337257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fcd/r3tmp/tmpZvHSms/pdisk_1.dat 2025-04-10T11:35:54.955187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:54.962579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:54.962689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:54.971286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17698 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:55.308435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:55.342004Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:55.350601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284955469 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-04-10T11:35:55.609190Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:35:55.610794Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:35:55.610834Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:35:55.734743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:35:55.737930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:35:55.738339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:35:55.738407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:35:55.738837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:35:55.738973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:35:55.740040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:35:55.740173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-10T11:35:55.740371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:35:55.740416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-10T11:35:55.740822Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 }
2025-04-10T11:35:55.740973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 }
2025-04-10T11:35:55.741644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601
2025-04-10T11:35:55.741735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601
2025-04-10T11:35:55.741798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897
2025-04-10T11:35:55.741821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0
2025-04-10T11:35:55.741849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0
waiting...
2025-04-10T11:35:55.744911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897
2025-04-10T11:35:55.744950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0
2025-04-10T11:35:55.745097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897
2025-04-10T11:35:55.745124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480
2025-04-10T11:35:55.745173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897
2025-04-10T11:35:55.745464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897
2025-04-10T11:35:55.745488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0
2025-04-10T11:35:55.745567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897
2025-04-10T11:35:55.745591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480
2025-04-10T11:35:55.745618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897
2025-04-10T11:35:55.745656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3
2025-04-10T11:35:55.746230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:35:55.746325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:35:55.746458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:35:55.746510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710668:0, at schemeshard: 72057594046644480
2025-04-10T11:35:55.746597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037889 splitOp: 281474976710668:0 alterVersion: 1 at tablet: 72057594046644480
2025-04-10T11:35:55.746781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 splitOp: 281474976710668:0 alterVersion: 1 at tablet: 72057594046644480
2025-04-10T11:35:55.747374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72075186224037889 cookie: 72057594046644480:2 msg type: 269553152
2025-04-10T11:3 ...
224037889 State: 4
2025-04-10T11:36:00.101714Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:00.103455Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T11:36:00.103821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646856507869542 RawX2: 4503612512274685 } TabletId: 72075186224037891 State: 4
2025-04-10T11:36:00.103882Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:00.104700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646857430806515 RawX2: 4503608217307463 } TabletId: 72075186224037890 State: 4
2025-04-10T11:36:00.106079Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:00.106358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:00.106895Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline
2025-04-10T11:36:00.107487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:00.107851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480
2025-04-10T11:36:00.108236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5
2025-04-10T11:36:00.108979Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
2025-04-10T11:36:00.109468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:00.109578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:00.110519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1
2025-04-10T11:36:00.110559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888
2025-04-10T11:36:00.110669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480
2025-04-10T11:36:00.110935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4
2025-04-10T11:36:00.111157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480
2025-04-10T11:36:00.111283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3
2025-04-10T11:36:00.111323Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found
2025-04-10T11:36:00.111820Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found
2025-04-10T11:36:00.111848Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found
2025-04-10T11:36:00.112594Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-04-10T11:36:00.112624Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found
2025-04-10T11:36:00.113706Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889
2025-04-10T11:36:00.113768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480
2025-04-10T11:36:00.110627Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-04-10T11:36:00.110680Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline
2025-04-10T11:36:00.111714Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888
2025-04-10T11:36:00.114968Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891
2025-04-10T11:36:00.115584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2
2025-04-10T11:36:00.115824Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found
2025-04-10T11:36:00.115829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480
2025-04-10T11:36:00.115871Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found
2025-04-10T11:36:00.115896Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found
2025-04-10T11:36:00.115913Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-04-10T11:36:00.116007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1
2025-04-10T11:36:00.116061Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop
2025-04-10T11:36:00.116127Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7491646857430806143:2720], serverId# [2:7491646857430806144:2721], sessionId# [0:0:0]
2025-04-10T11:36:00.116194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5
2025-04-10T11:36:00.116205Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888
2025-04-10T11:36:00.116217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892
2025-04-10T11:36:00.116256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2
2025-04-10T11:36:00.116268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889
2025-04-10T11:36:00.116311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480
2025-04-10T11:36:00.116338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480
2025-04-10T11:36:00.116389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-04-10T11:36:00.117129Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline
2025-04-10T11:36:00.117162Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop
2025-04-10T11:36:00.117205Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
2025-04-10T11:36:00.117216Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7491646857430806821:3249], serverId# [2:7491646857430806825:3253], sessionId# [0:0:0]
2025-04-10T11:36:00.117410Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-04-10T11:36:00.118231Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890
2025-04-10T11:36:00.119011Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop
2025-04-10T11:36:00.119069Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7491646857430806823:3251], serverId# [2:7491646857430806824:3252], sessionId# [0:0:0]
2025-04-10T11:36:00.119126Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-10T11:36:00.119718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4
2025-04-10T11:36:00.119754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891
2025-04-10T11:36:00.119795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3
2025-04-10T11:36:00.119811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890
2025-04-10T11:36:00.119853Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
2025-04-10T11:36:00.120220Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found
2025-04-10T11:36:00.120272Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888
2025-04-10T11:36:00.120409Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888
2025-04-10T11:36:00.122376Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890
2025-04-10T11:36:00.122450Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890
2025-04-10T11:36:00.123758Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-10T11:36:00.123812Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892
2025-04-10T11:36:00.155422Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found
2025-04-10T11:36:00.157902Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found
2025-04-10T11:36:00.154440Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop
2025-04-10T11:36:00.154500Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7491646857430806161:2735], serverId# [3:7491646856507869424:2175], sessionId# [0:0:0]
2025-04-10T11:36:00.154584Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-10T11:36:00.155648Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889
2025-04-10T11:36:00.156506Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop
2025-04-10T11:36:00.156550Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7491646857430806822:3250], serverId# [3:7491646856507869636:2336], sessionId# [0:0:0]
2025-04-10T11:36:00.156606Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891
2025-04-10T11:36:00.157459Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890
2025-04-10T11:36:00.158858Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889
2025-04-10T11:36:00.158938Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889
2025-04-10T11:36:00.160428Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891
2025-04-10T11:36:00.160479Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891
>> TFlatTest::CopyTableAndAddFollowers [GOOD]
>> TFlatTest::CopyCopiedTableAndDropFirstCopy
>> TFlatTest::CopyTableAndDropOriginal [GOOD]
>> SystemView::TabletsRanges [GOOD]
>> SystemView::TabletsRangesPredicateExtractDisabled
>> TFlatTest::CopyTableAndDropCopy [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD]
Test command err:
2025-04-10T11:35:49.668602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646814260095323:2130];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:35:49.670456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd8/r3tmp/tmp6RlZxL/pdisk_1.dat
2025-04-10T11:35:50.005268Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:35:50.047038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:35:50.047122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:35:50.049896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:26959
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:35:50.281887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:35:50.309925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:50.458684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:50.506730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:54.669129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646814260095323:2130];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:35:54.669235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-10T11:35:58.071375Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646851838770253:2058];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:35:58.071405Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd8/r3tmp/tmp5kzJJE/pdisk_1.dat
2025-04-10T11:35:58.241195Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:35:58.265476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:35:58.265569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:35:58.267721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:23516
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:35:58.475244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:58.485027Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:58.498845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:58.580600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:35:58.655465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:36:01.810859Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646865748625119:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:36:01.810922Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd8/r3tmp/tmpaKKTvZ/pdisk_1.dat
2025-04-10T11:36:01.941158Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:36:01.954585Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:36:01.954684Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:36:01.956658Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:28074
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:36:02.141889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:36:02.160716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:36:02.216845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:36:02.274040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
>> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD]
>> TFlatTest::Mix_DML_DDL
>> TKeyValueTest::TestCopyRangeWorks [GOOD]
>> TKeyValueTest::TestCopyRangeWorksNewApi
>> TLocksTest::BrokenSameKeyLock [GOOD]
>> TLocksTest::BrokenSameShardLock
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD]
Test command err:
2025-04-10T11:35:59.473148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646856030509181:2061];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:35:59.473763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc6/r3tmp/tmpjG2QDM/pdisk_1.dat
2025-04-10T11:35:59.875626Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:35:59.900852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:35:59.900926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:35:59.902394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:29499
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-10T11:36:00.196068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-10T11:36:00.231139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T11:36:00.395172Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000
2025-04-10T11:36:00.400744Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.022s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000
2025-04-10T11:36:00.434443Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000
2025-04-10T11:36:00.440104Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000
Copy TableOld to Table
2025-04-10T11:36:00.600345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480
2025-04-10T11:36:00.600573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:36:00.600938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480
2025-04-10T11:36:00.600983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0
2025-04-10T11:36:00.600994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3
2025-04-10T11:36:00.601053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1
2025-04-10T11:36:00.601070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2
2025-04-10T11:36:00.601208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3
2025-04-10T11:36:00.601313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-10T11:36:00.601864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2
2025-04-10T11:36:00.601894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4
2025-04-10T11:36:00.602335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480
2025-04-10T11:36:00.602463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table
2025-04-10T11:36:00.602651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480
2025-04-10T11:36:00.602689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2]
2025-04-10T11:36:00.602764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4]
2025-04-10T11:36:00.602815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480
2025-04-10T11:36:00.602849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646856030509700:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2
waiting...
2025-04-10T11:36:00.602869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646856030509700:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4
2025-04-10T11:36:00.602992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-10T11:36:00.603033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480
2025-04-10T11:36:00.603351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 }
2025-04-10T11:36:00.603491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 }
2025-04-10T11:36:00.605366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676
2025-04-10T11:36:00.605459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676
2025-04-10T11:36:00.605472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676
2025-04-10T11:36:00.605487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6
2025-04-10T11:36:00.605535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3
2025-04-10T11:36:00.605795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676
2025-04-10T11:36:00.605836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676
2025-04-10T11:36:00.605843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676
2025-04-10T11:36:00.605852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1
2025-04-10T11:36:00.605860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5
2025-04-10T11:36:00.605898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true
2025-04-10T11:36:00.606031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601
2025-04-10T11:36:00.606100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601
2025-04-10T11:36:00.606141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897
2025-04-10T11:36:00.607428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0
2025-04-10T11:36:00.607446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0
2025-04-10T11:36:00.607626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710676, at schemeshard: 72057594046644480
2025-04-10T11:36:00.607640Z no ...
-04-10T11:36:03.547583Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state
2025-04-10T11:36:03.548974Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-04-10T11:36:03.549042Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed
2025-04-10T11:36:03.549068Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-04-10T11:36:03.549116Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state
Check that tablet 72075186224037888 was deleted
2025-04-10T11:36:03.550408Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T11:36:03.550527Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T11:36:03.550612Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7491646872488214497:2374], serverId# [2:7491646872488214504:2676], sessionId# [0:0:0]
2025-04-10T11:36:03.550644Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7491646872488214499:2376], serverId# [2:7491646872488214505:2677], sessionId# [0:0:0]
2025-04-10T11:36:03.550695Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T11:36:03.550759Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480
2025-04-10T11:36:03.550806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646872488213974 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4
2025-04-10T11:36:03.550856Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:03.551070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646872488214288 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4
2025-04-10T11:36:03.551104Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:03.551207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646872488214288 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4
2025-04-10T11:36:03.551230Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:03.551304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646872488214281 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4
2025-04-10T11:36:03.551325Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:03.551395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646872488213973 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4
2025-04-10T11:36:03.551415Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480
2025-04-10T11:36:03.551564Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline
2025-04-10T11:36:03.551573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:03.552030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:03.552061Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline
2025-04-10T11:36:03.552080Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline
2025-04-10T11:36:03.552080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:03.552113Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline
2025-04-10T11:36:03.552113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:03.552144Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-04-10T11:36:03.552148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480
2025-04-10T11:36:03.554823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480
2025-04-10T11:36:03.555185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2
2025-04-10T11:36:03.555458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480
2025-04-10T11:36:03.555661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2
2025-04-10T11:36:03.555807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480
2025-04-10T11:36:03.555956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480
2025-04-10T11:36:03.556130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1
2025-04-10T11:36:03.556304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480
2025-04-10T11:36:03.556459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1
2025-04-10T11:36:03.556595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480
2025-04-10T11:36:03.556613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480
2025-04-10T11:36:03.556661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2
2025-04-10T11:36:03.556691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480
2025-04-10T11:36:03.556713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-04-10T11:36:03.556988Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop
2025-04-10T11:36:03.557018Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop
2025-04-10T11:36:03.557034Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop
2025-04-10T11:36:03.557047Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop
2025-04-10T11:36:03.559103Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
2025-04-10T11:36:03.559132Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found
2025-04-10T11:36:03.559146Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-04-10T11:36:03.559162Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found
2025-04-10T11:36:03.559207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1
2025-04-10T11:36:03.559232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888
2025-04-10T11:36:03.559518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4
2025-04-10T11:36:03.559533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891
2025-04-10T11:36:03.559567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4
2025-04-10T11:36:03.559593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3
2025-04-10T11:36:03.559603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890
2025-04-10T11:36:03.559634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2
2025-04-10T11:36:03.559655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889
2025-04-10T11:36:03.559701Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
2025-04-10T11:36:03.561415Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890
2025-04-10T11:36:03.561524Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890
2025-04-10T11:36:03.563100Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889
2025-04-10T11:36:03.563157Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889
2025-04-10T11:36:03.564436Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888
2025-04-10T11:36:03.564484Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888
2025-04-10T11:36:03.565700Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891
2025-04-10T11:36:03.565744Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891
Check that tablet 72075186224037889 was deleted
Check that tablet 72075186224037890 was deleted
2025-04-10T11:36:03.854331Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888)
2025-04-10T11:36:03.854859Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889)
2025-04-10T11:36:03.855206Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890)
Check that tablet 72075186224037891 was deleted
2025-04-10T11:36:03.863052Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891)
>> TKeyValueTest::TestRenameWorksNewApi [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142]
2025-04-10T11:35:16.651369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:35:16.651453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:35:16.651489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:35:16.651524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:35:16.651562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:35:16.651589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:35:16.651636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:35:16.651706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:35:16.652058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:35:16.733782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-10T11:35:16.733837Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062]
2025-04-10T11:35:16.749734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:35:16.749974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:35:16.750195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:35:16.756899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:35:16.757203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:35:16.757819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:35:16.758465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:35:16.761190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:35:16.762461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:35:16.762513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:35:16.762672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:35:16.762715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:35:16.762751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:35:16.762903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212]
Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212]
2025-04-10T11:35:16.769325Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062]
2025-04-10T11:35:16.901061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:35:16.901341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.901639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:35:16.901859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:35:16.901927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.904269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:35:16.904419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:35:16.904589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.904638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:35:16.904674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:35:16.904705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:35:16.906692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.906748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:35:16.906781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:35:16.908504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.908542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.908589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:35:16.908648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:35:16.911944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:35:16.913801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:35:16.913969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:35:16.914939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:35:16.915084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:35:16.915160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:35:16.915402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:35:16.915452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:35:16.915606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:35:16.915764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:35:16.917737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:35:16.917781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:35:16.917963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:35:16.918001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:35:16.918281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:35:16.918335Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:35:16.918424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:35:16.918459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:35:16.918494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025 ... 51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:36:07.065875Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 219043334252 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:36:07.065935Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944
2025-04-10T11:36:07.066194Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129
2025-04-10T11:36:07.066308Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004
2025-04-10T11:36:07.071796Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:36:07.071850Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-10T11:36:07.072100Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:36:07.072137Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:205:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3
2025-04-10T11:36:07.072594Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944
2025-04-10T11:36:07.072664Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1003
2025-04-10T11:36:07.073446Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-04-10T11:36:07.073529Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-04-10T11:36:07.073563Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003
2025-04-10T11:36:07.073598Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4
2025-04-10T11:36:07.073631Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-04-10T11:36:07.073724Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true
2025-04-10T11:36:07.074608Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 988 } }
2025-04-10T11:36:07.074650Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0
2025-04-10T11:36:07.074775Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 988 } }
2025-04-10T11:36:07.074864Z node 51 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 988 } }
2025-04-10T11:36:07.075411Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2
2025-04-10T11:36:07.075449Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0
2025-04-10T11:36:07.075550Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2
2025-04-10T11:36:07.075590Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-04-10T11:36:07.075659Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 219043334412 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2
2025-04-10T11:36:07.075710Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-04-10T11:36:07.075746Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944
2025-04-10T11:36:07.075792Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944
2025-04-10T11:36:07.075833Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240
2025-04-10T11:36:07.078853Z node 51
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:36:07.080054Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:07.080208Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:07.080587Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:07.080629Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-10T11:36:07.080725Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T11:36:07.080768Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T11:36:07.080807Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T11:36:07.080836Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T11:36:07.080868Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-10T11:36:07.080905Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T11:36:07.080941Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-10T11:36:07.080988Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-10T11:36:07.081101Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-10T11:36:07.084468Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-10T11:36:07.084520Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-10T11:36:07.084884Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-10T11:36:07.084983Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-10T11:36:07.085016Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:452:2425] TestWaitNotification: OK eventTxId 1003 2025-04-10T11:36:07.085349Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:36:07.085515Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 192us result status StatusSuccess 2025-04-10T11:36:07.085830Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:107:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:108:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:88:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:209 ... recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:81:2111] Leader for TabletID 72057594037927937 is [15:81:2111] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:136:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:82:2113] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:86:2057] recipient: [18:82:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:85:2114] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:139:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:140:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:86:2057] recipient: [21:84:2114] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:88:2057] recipient: [21:84:2114] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:87:2115] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:107:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:108:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:89:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:90:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:145:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:90:2057] recipient: [24:88:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:88:2118] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:145:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-04-10T11:35:50.591452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646819237359599:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:50.593111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd7/r3tmp/tmp6yJITI/pdisk_1.dat 2025-04-10T11:35:50.946059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:50.987126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:50.987229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:50.992818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23056 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:51.238376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:51.256115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:51.273471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284951388 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284951479 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-04-10T11:35:51.455056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1744284951535 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1744284951570 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-04-10T11:35:51.554512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1744284951661 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1744284951710 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-04-10T11:35:51.691605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710665 CreateStep: 1744284951759 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710666 CreateStep: 1744284951794 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildIn ... 46644480 2025-04-10T11:36:04.712601Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-04-10T11:36:04.713101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:36:04.713178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:36:04.713278Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-04-10T11:36:04.713305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-10T11:36:04.713318Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-10T11:36:04.713326Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-04-10T11:36:04.713411Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-10T11:36:04.713424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-10T11:36:04.713443Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-10T11:36:04.713455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-10T11:36:04.713463Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-04-10T11:36:04.713471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-04-10T11:36:04.713494Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-10T11:36:04.713510Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491646879251756902:2402] message: TxId: 281474976715686 2025-04-10T11:36:04.713532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-10T11:36:04.713548Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-04-10T11:36:04.713560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-04-10T11:36:04.713681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-04-10T11:36:04.720285Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7491646879251757017:3013], serverId# [2:7491646879251757018:3014], sessionId# [0:0:0] 2025-04-10T11:36:04.720440Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.722498Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.722579Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.726705Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7491646879251757027:3020], serverId# [2:7491646879251757028:3021], sessionId# [0:0:0] 2025-04-10T11:36:04.726829Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.728516Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.728569Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.732169Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.733521Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.733581Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.737041Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.739205Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.739300Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.743547Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.745632Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.745701Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.747897Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:36:04.748364Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:36:04.748405Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-10T11:36:04.750784Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.752203Z node 2 :TX_DATASHARD 
DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.752257Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.754895Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:36:04.758691Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:36:04.758746Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-10T11:36:04.762595Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.768039Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.768147Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.774834Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.776478Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.776526Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.780315Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.781683Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.781744Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.785173Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.786513Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.786572Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.790219Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.791753Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.791814Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.794647Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:36:04.795797Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:36:04.795817Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-10T11:36:04.798329Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.801869Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-04-10T11:36:04.803420Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.803512Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037895 2025-04-10T11:36:04.804138Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-10T11:36:04.804154Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-10T11:36:04.815662Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.824279Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.824392Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.829293Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.831210Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.831263Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:36:04.834648Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-10T11:36:04.836862Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-10T11:36:04.836925Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-10T11:36:04.840191Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-10T11:36:04.841574Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-10T11:36:04.841625Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-04-10T11:36:04.842772Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-04-10T11:36:04.843203Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-04-10T11:36:04.843580Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-10T11:36:04.844021Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-10T11:36:04.844442Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-10T11:36:04.844855Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> SelfHealActorTest::SingleErrorDisk >> SelfHealActorTest::SingleErrorDisk [GOOD] >> TLocksTest::Range_Pinhole >> TFlatTest::ShardUnfreezeNonFrozen |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-04-10T11:35:58.559304Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646850385437260:2244];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:58.559757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc9/r3tmp/tmpmk97YL/pdisk_1.dat 2025-04-10T11:35:58.981214Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:58.993590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:58.993722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:58.995275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64611 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:59.244578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:59.259120Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:59.265884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:35:59.272508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:59.417056Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:35:59.429582Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.006s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:35:59.456612Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:35:59.473665Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.011s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-04-10T11:35:59.584805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:35:59.585590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:35:59.586170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-10T11:35:59.586215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-10T11:35:59.586225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:35:59.586276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-10T11:35:59.586396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-10T11:35:59.586542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-10T11:35:59.586690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:35:59.587630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:35:59.587671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 waiting... 
2025-04-10T11:35:59.588190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-10T11:35:59.588294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-10T11:35:59.588426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:35:59.588444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:35:59.588565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-10T11:35:59.588618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:35:59.588634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646850385437570:2241], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-04-10T11:35:59.588647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646850385437570:2241], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-04-10T11:35:59.588691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T11:35:59.588718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-10T11:35:59.589035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T11:35:59.589159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-10T11:35:59.592226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-04-10T11:35:59.592307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-04-10T11:35:59.592329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 
2025-04-10T11:35:59.592348Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-10T11:35:59.592363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:35:59.592515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-04-10T11:35:59.592568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-04-10T11:35:59.592575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-04-10T11:35:59.592584Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-10T11:35:59.592591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-04-10T11:35:59.592638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715676, ready parts: 0/1, is published: true 2025-04-10T11:35:59.592867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-10T11:35:59.592939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-10T11:35:59.593004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 0, tablet: 72057594037968897 2025-04-10T11:35:59.593045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2025-04-10T11:35:59.593057Z node 1 : ... 
2 2025-04-10T11:36:06.689565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:36:06.689618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:36:06.690041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-10T11:36:06.690063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-10T11:36:06.690100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-10T11:36:06.690117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:36:06.690125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:36:06.690939Z node 3 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-10T11:36:06.691284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:36:06.691456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-10T11:36:06.691587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-10T11:36:06.691729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 1 2025-04-10T11:36:06.691860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:36:06.691885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 5], at schemeshard: 72057594046644480 2025-04-10T11:36:06.691933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:36:06.692218Z node 3 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:36:06.692292Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-10T11:36:06.692325Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-10T11:36:06.692360Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7491646884952110586:2386], serverId# [3:7491646884952110587:2387], sessionId# [0:0:0] 2025-04-10T11:36:06.692383Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-10T11:36:06.692400Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-10T11:36:06.692428Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-10T11:36:06.692444Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-10T11:36:06.692488Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-04-10T11:36:06.692512Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 
72075186224037893, clientId# [3:7491646884952111109:2744], serverId# [3:7491646884952111111:2746], sessionId# [0:0:0] 2025-04-10T11:36:06.692788Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-10T11:36:06.692906Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-10T11:36:06.693308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:36:06.693331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:36:06.693366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-10T11:36:06.693383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-10T11:36:06.693409Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:36:06.694271Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-10T11:36:06.694328Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-10T11:36:06.694746Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-10T11:36:06.694792Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-10T11:36:06.694811Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-10T11:36:06.694823Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-10T11:36:06.695535Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-10T11:36:06.695586Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-10T11:36:06.696578Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7491646884952111223:2393], serverId# [3:7491646884952111227:2845], sessionId# [0:0:0] 2025-04-10T11:36:06.696745Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037893 2025-04-10T11:36:06.696789Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-10T11:36:06.706817Z node 3 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:36:06.707024Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-04-10T11:36:06.707400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646884952110474 RawX2: 4503612512274683 } TabletId: 72075186224037889 State: 4 2025-04-10T11:36:06.707459Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:36:06.707957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491646884952110779 RawX2: 4503612512274742 } TabletId: 72075186224037891 State: 4 2025-04-10T11:36:06.707993Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 
2025-04-10T11:36:06.708329Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-10T11:36:06.708351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:36:06.708504Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:36:06.708869Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-10T11:36:06.710025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:36:06.710279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:36:06.710285Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-10T11:36:06.710311Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-10T11:36:06.710469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:36:06.710600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-10T11:36:06.710702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:36:06.710716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-10T11:36:06.710732Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-10T11:36:06.710766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:36:06.710781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:36:06.710796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:36:06.710802Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-10T11:36:06.711210Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-10T11:36:06.711228Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-10T11:36:06.711492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:36:06.711506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:36:06.711597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:36:06.711616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:36:06.711641Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 
0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:36:06.712331Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-10T11:36:06.712382Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-10T11:36:06.976182Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted Check that tablet 72075186224037888 was deleted Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-04-10T11:36:06.982895Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) 2025-04-10T11:36:06.983366Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-10T11:36:06.983792Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-04-10T11:36:06.984219Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-04-10T11:36:06.984638Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TLocksFatTest::RangeSetBreak >> SystemView::TopPartitionsByCpuTables [GOOD] >> SystemView::TopPartitionsByCpuRanges >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::DescribeAccessDenied >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SystemView::AuthGroups_TableRange [GOOD] >> SystemView::AuthGroupMembers_Access ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-04-10T11:36:07.967361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646889125962280:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:07.967401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbf/r3tmp/tmpULPPap/pdisk_1.dat 2025-04-10T11:36:08.468674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:08.468752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:08.478756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:08.505092Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21684 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:08.782327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:08.793954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:08.801648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:09.008445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
<main>: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-04-10T11:36:09.052631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:09.088510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:09.118469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
<main>:5:24: Error: At function: AsList
<main>:5:32: Error: At function: SetResult
<main>:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TObjectStorageListingTest::Split >> TFlatTest::LargeDatashardReplyDistributed >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-04-10T11:35:29.912092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646728450991183:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:29.912171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f5f/r3tmp/tmpdL9BT5/pdisk_1.dat 2025-04-10T11:35:30.380035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.435312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.435460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.437183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2909, node 1 2025-04-10T11:35:30.758743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.758771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.758778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.758908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.187810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:33.063101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646745630861016:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.063219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.063664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646745630861028:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.068139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:35:33.083255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646745630861030:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:35:33.148704Z node 1 :TX_PROXY ERROR: Actor# [1:7491646745630861081:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:33.918344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqqn5x3jevtsaz4kfqakrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZkMGZmYi1hNzc2MGU3Ny0zNTU3NDA2My01NTlhM2I4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:33.971904Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646745630861119:2341], owner: [1:7491646745630861114:2339], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:33.975479Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646745630861119:2341], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:33.980452Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646745630861119:2341], row count: 0, finished: 1 2025-04-10T11:35:33.980528Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646745630861119:2341], owner: [1:7491646745630861114:2339], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:33.995330Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284933911, txId: 281474976710660] shutting down 2025-04-10T11:35:34.914300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646728450991183:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:34.914397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:35.164639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfqqrvab2bydbny9c0msrz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5YzcwNjctMjhjMGQ0MjctYjU4ODUwMTItOTUyNTlmY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:35:35.167745Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646754220795757:2356], owner: [1:7491646754220795754:2354], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:35.170945Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646754220795757:2356], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:35.171185Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646754220795757:2356], row count: 0, finished: 1 2025-04-10T11:35:35.171235Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646754220795757:2356], owner: [1:7491646754220795754:2354], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:35.173559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284935163, txId: 281474976710662] shutting down 2025-04-10T11:35:36.297045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqqszt2mxv9e9akjh22nnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxYWNhOTktNmFjYTc5ODYtMjY4NmVhNDktMjdjNTEyYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.299317Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646758515763097:2370], owner: [1:7491646758515763093:2368], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:36.300422Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646758515763097:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:36.330225Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646758515763097:2370], row count: 1, finished: 1 2025-04-10T11:35:36.330295Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646758515763097:2370], owner: [1:7491646758515763093:2368], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-10T11:35:36.333309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284936294, txId: 281474976710664] shutting down 2025-04-10T11:35:36.996249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646755800820764:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:36.996321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f5f/r3tmp/tmpajNlHm/pdisk_1.dat 2025-04-10T11:35:37.109692Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9181, node 2 2025-04-10T11:35:37.155223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:37.155325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:37.157564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:37.194716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-10T11:35:37.194738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:37.194745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:37.194878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:37.429608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-0 ... t, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:05.375074Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7491646880149583151:2424], row count: 3, finished: 1 2025-04-10T11:36:05.375113Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7491646880149583151:2424], owner: [10:7491646880149583147:2422], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.378209Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284965328, txId: 281474976715673] shutting down 2025-04-10T11:36:05.554052Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfqrpg8cggwdf8x29pq9ej7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=N2U5MzMxZGYtZmRiNDRkYzgtOWQyOTM2YTYtYjA3MzQzYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:36:05.556241Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7491646880149583186:2435], owner: [10:7491646880149583183:2433], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.557315Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7491646880149583186:2435], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:05.557842Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7491646880149583186:2435], row count: 3, finished: 1 2025-04-10T11:36:05.557878Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7491646880149583186:2435], owner: [10:7491646880149583183:2433], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.561416Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284965552, txId: 281474976715675] shutting down 2025-04-10T11:36:05.772271Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfqrpp58659tqhh9jt5x63x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzEyMTM1MDAtYTk0MWRkZTEtNTE0Y2MyN2EtZjI1NzlhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:05.774743Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7491646880149583219:2444], owner: [10:7491646880149583215:2442], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.775499Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7491646880149583219:2444], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:05.776061Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7491646880149583219:2444], row count: 4, finished: 1 2025-04-10T11:36:05.776097Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7491646880149583219:2444], owner: [10:7491646880149583215:2442], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.779139Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284965770, txId: 281474976715677] shutting down 2025-04-10T11:36:05.956419Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrfqrpwqbvg8s0b44042xbjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzk0MWJlMDYtYTRiNjIyNGUtMjQwMWEyZDAtOGZiNjJiMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:36:05.958529Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7491646880149583254:2454], owner: [10:7491646880149583250:2452], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.959231Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7491646880149583254:2454], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:05.959953Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7491646880149583254:2454], row count: 4, finished: 1 2025-04-10T11:36:05.959982Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7491646880149583254:2454], owner: [10:7491646880149583250:2452], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:05.965539Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284965952, txId: 281474976715679] shutting down 2025-04-10T11:36:07.098407Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7491646891450333053:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:07.098497Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f5f/r3tmp/tmpYKabEB/pdisk_1.dat 2025-04-10T11:36:07.311229Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:07.316430Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:07.316538Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:07.318841Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18764, node 11 2025-04-10T11:36:07.417175Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:07.417196Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:07.417204Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:07.417329Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:36:07.901772Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:07.912458Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:11.306239Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:11.415261Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491646908630203165:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:11.415350Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7491646908630203173:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:11.415417Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:11.419865Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:36:11.432896Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7491646908630203179:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:36:11.503992Z node 11 :TX_PROXY ERROR: Actor# [11:7491646908630203230:2493] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:11.786892Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfqrwcj8tcvbdyaypmfgtpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=M2E5MjRkMWItYzBjYWFjZDAtZmFlNWNkNjctYTA2OTBhMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:11.825601Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7491646908630203278:2365], owner: [11:7491646908630203277:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:11.826217Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7491646908630203278:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:11.826859Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7491646908630203278:2365], row count: 4, finished: 1 2025-04-10T11:36:11.826891Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7491646908630203278:2365], owner: [11:7491646908630203277:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:11.827037Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7491646908630203284:2368], owner: [11:7491646908630203277:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:11.828982Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7491646908630203284:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:11.829388Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7491646908630203284:2368], row count: 4, finished: 1 2025-04-10T11:36:11.829422Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7491646908630203284:2368], owner: [11:7491646908630203277:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:36:11.832253Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284971783, txId: 281474976710661] shutting down >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] 
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:78:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:84:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:84:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TObjectStorageListingTest::CornerCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:50:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:146:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:90:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:91:2057] recipient: [26:89:2118] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:93:2057] recipient: [26:89:2118] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:92:2119] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:146:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> BsControllerTest::SelfHealMirror3dc [GOOD] >> BsControllerTest::TestLocalSelfHeal |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2025-04-10T11:36:01.609276Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-10T11:36:01.609340Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:36:01.609445Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-10T11:36:01.609473Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:36:01.609530Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-10T11:36:01.609555Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:36:01.609601Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-10T11:36:01.609633Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:36:01.609677Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 
2025-04-10T11:36:01.609697Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-10T11:36:01.609744Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-10T11:36:01.609770Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:36:01.609858Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-10T11:36:01.609878Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:36:01.609912Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-10T11:36:01.609935Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:36:01.609972Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-10T11:36:01.610008Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:36:01.610063Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-10T11:36:01.610092Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:36:01.610163Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-10T11:36:01.610187Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-10T11:36:01.610221Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-10T11:36:01.610241Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-10T11:36:01.610301Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-10T11:36:01.610326Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:36:01.610364Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-10T11:36:01.610384Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:36:01.610417Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-10T11:36:01.610441Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:36:01.610482Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-10T11:36:01.610503Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-10T11:36:01.610536Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-10T11:36:01.610558Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-10T11:36:01.610595Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-10T11:36:01.610616Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-10T11:36:01.610670Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-10T11:36:01.610693Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-10T11:36:01.610729Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-10T11:36:01.610775Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-10T11:36:01.610815Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-10T11:36:01.610836Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-10T11:36:01.610882Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-10T11:36:01.610903Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-10T11:36:01.610937Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-10T11:36:01.610970Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-10T11:36:01.611010Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-10T11:36:01.611030Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-10T11:36:01.611064Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-10T11:36:01.611086Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-10T11:36:01.611134Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-10T11:36:01.611157Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-10T11:36:01.611210Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-10T11:36:01.611238Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-10T11:36:01.611272Z 28 00h00m00.000000s :BS_NODE DEBUG: 
[28] Bootstrap 2025-04-10T11:36:01.611291Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-10T11:36:01.611330Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-10T11:36:01.611350Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-10T11:36:01.611387Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-10T11:36:01.611410Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-10T11:36:01.611449Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-10T11:36:01.611481Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-10T11:36:01.611523Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-10T11:36:01.611545Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-10T11:36:01.611581Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-10T11:36:01.611608Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-10T11:36:01.611652Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-10T11:36:01.611676Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-10T11:36:01.611730Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-10T11:36:01.611751Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-10T11:36:01.611785Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-10T11:36:01.611815Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-10T11:36:01.628931Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-10T11:36:01.630501Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-10T11:36:01.630571Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-10T11:36:01.630630Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-10T11:36:01.630687Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-10T11:36:01.630735Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-10T11:36:01.630775Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-10T11:36:01.630817Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-10T11:36:01.630860Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-10T11:36:01.630900Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-10T11:36:01.630944Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-10T11:36:01.630991Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-10T11:36:01.631033Z 
13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-10T11:36:01.631073Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-10T11:36:01.631119Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-10T11:36:01.631162Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-10T11:36:01.631205Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-10T11:36:01.631246Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-10T11:36:01.631320Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-10T11:36:01.631383Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-10T11:36:01.631426Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-10T11:36:01.631471Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-10T11:36:01.631516Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-10T11:36:01.631559Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-10T11:36:01.631602Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-10T11:36:01.631644Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-10T11:36:01.631691Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-10T11:36:01.631731Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-10T11:36:01.631775Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-10T11:36:01.631816Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-10T11:36:01.631856Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-10T11:36:01.631899Z 32 00h00m00.000000s 
:BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-10T11:36:01.631943Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-10T11:36:01.631987Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-10T11:36:01.632045Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 0:0] 2025-04-10T11:36:15.418823Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-10T11:36:15.418870Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] VDiskId# [8000000d:2:0:1:0] -> [8000000d:3:0:1:0] 2025-04-10T11:36:15.418949Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-10T11:36:15.418991Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] VDiskId# [8000000d:2:0:2:0] -> [8000000d:3:0:2:0] 2025-04-10T11:36:15.419075Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:15.419114Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000000d:3:1:0:0] PDiskId# 1002 VSlotId# 1009 created 2025-04-10T11:36:15.419176Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000000d:3:1:0:0] status changed to INIT_PENDING 2025-04-10T11:36:15.419245Z 24 05h45m00.119456s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.419326Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-10T11:36:15.419377Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] VDiskId# [8000000d:2:0:0:0] -> [8000000d:3:0:0:0] 2025-04-10T11:36:15.419463Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-10T11:36:15.419515Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [8000000d:2:2:1:0] -> [8000000d:3:2:1:0] 2025-04-10T11:36:15.419604Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:15.419652Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [8000000d:2:2:2:0] -> [8000000d:3:2:2:0] 2025-04-10T11:36:15.419735Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-10T11:36:15.419782Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] VDiskId# [8000000d:2:1:1:0] -> [8000000d:3:1:1:0] 2025-04-10T11:36:15.419911Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-10T11:36:15.419965Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] VDiskId# [80000055:3:2:0:0] -> [80000055:4:2:0:0] 2025-04-10T11:36:15.420053Z 17 05h45m00.119456s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:15.420103Z 17 05h45m00.119456s :BS_NODE DEBUG: [17] VDiskId# [80000055:3:1:2:0] -> [80000055:4:1:2:0] 2025-04-10T11:36:15.420191Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-10T11:36:15.420240Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000055:3:0:1:0] -> [80000055:4:0:1:0] 2025-04-10T11:36:15.420338Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-10T11:36:15.420380Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] VDiskId# [80000055:4:1:0:0] PDiskId# 1000 VSlotId# 1009 created 2025-04-10T11:36:15.420443Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] VDiskId# [80000055:4:1:0:0] status changed to INIT_PENDING 2025-04-10T11:36:15.420544Z 5 05h45m00.119456s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:15.420595Z 5 05h45m00.119456s :BS_NODE DEBUG: [5] VDiskId# [80000055:3:0:2:0] -> [80000055:4:0:2:0] 2025-04-10T11:36:15.420665Z 24 
05h45m00.119456s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.420741Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-10T11:36:15.420791Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000055:3:2:1:0] -> [80000055:4:2:1:0] 2025-04-10T11:36:15.420882Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-10T11:36:15.420930Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] VDiskId# [80000055:3:0:0:0] -> [80000055:4:0:0:0] 2025-04-10T11:36:15.421037Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:15.421084Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [80000055:3:2:2:0] -> [80000055:4:2:2:0] 2025-04-10T11:36:15.421175Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-10T11:36:15.421226Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000055:3:1:1:0] -> [80000055:4:1:1:0] 2025-04-10T11:36:15.421361Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-10T11:36:15.421405Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] VDiskId# [8000007d:2:1:2:0] -> [8000007d:3:1:2:0] 2025-04-10T11:36:15.421485Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-04-10T11:36:15.421528Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] VDiskId# [8000007d:2:2:0:0] -> [8000007d:3:2:0:0] 2025-04-10T11:36:15.421654Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-10T11:36:15.421709Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] VDiskId# [8000007d:2:0:1:0] -> [8000007d:3:0:1:0] 2025-04-10T11:36:15.421795Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-10T11:36:15.421841Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] VDiskId# [8000007d:2:0:2:0] -> [8000007d:3:0:2:0] 2025-04-10T11:36:15.421927Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:15.421964Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000007d:3:1:0:0] PDiskId# 1001 VSlotId# 1009 created 2025-04-10T11:36:15.422024Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000007d:3:1:0:0] status changed to INIT_PENDING 2025-04-10T11:36:15.422093Z 24 05h45m00.119456s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.422185Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-10T11:36:15.422234Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] VDiskId# [8000007d:2:0:0:0] -> [8000007d:3:0:0:0] 2025-04-10T11:36:15.422315Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-10T11:36:15.422360Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [8000007d:2:2:1:0] -> [8000007d:3:2:1:0] 2025-04-10T11:36:15.422444Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:15.422491Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [8000007d:2:2:2:0] -> [8000007d:3:2:2:0] 2025-04-10T11:36:15.422575Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-10T11:36:15.422624Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] VDiskId# [8000007d:2:1:1:0] -> [8000007d:3:1:1:0] 2025-04-10T11:36:15.422753Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-10T11:36:15.422802Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] VDiskId# [8000006d:2:1:2:0] -> [8000006d:3:1:2:0] 2025-04-10T11:36:15.422881Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-04-10T11:36:15.422923Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] VDiskId# [8000006d:2:2:0:0] -> [8000006d:3:2:0:0] 2025-04-10T11:36:15.423006Z 3 05h45m00.119456s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-10T11:36:15.423049Z 3 05h45m00.119456s 
:BS_NODE DEBUG: [3] VDiskId# [8000006d:2:0:1:0] -> [8000006d:3:0:1:0] 2025-04-10T11:36:15.423124Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-10T11:36:15.423168Z 6 05h45m00.119456s :BS_NODE DEBUG: [6] VDiskId# [8000006d:2:0:2:0] -> [8000006d:3:0:2:0] 2025-04-10T11:36:15.423248Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:15.423283Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000006d:3:1:0:0] PDiskId# 1003 VSlotId# 1009 created 2025-04-10T11:36:15.423345Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [8000006d:3:1:0:0] status changed to INIT_PENDING 2025-04-10T11:36:15.423411Z 24 05h45m00.119456s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.423482Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-10T11:36:15.423529Z 9 05h45m00.119456s :BS_NODE DEBUG: [9] VDiskId# [8000006d:2:0:0:0] -> [8000006d:3:0:0:0] 2025-04-10T11:36:15.423608Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-10T11:36:15.423653Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [8000006d:2:2:1:0] -> [8000006d:3:2:1:0] 2025-04-10T11:36:15.423775Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:15.423823Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [8000006d:2:2:2:0] -> [8000006d:3:2:2:0] 2025-04-10T11:36:15.423904Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-10T11:36:15.423947Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] VDiskId# [8000006d:2:1:1:0] -> [8000006d:3:1:1:0] 2025-04-10T11:36:15.429736Z 23 05h45m01.340456s :BS_NODE DEBUG: [23] VDiskId# [8000000d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.430347Z 23 05h45m01.746456s :BS_NODE DEBUG: [23] VDiskId# [8000005d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.431051Z 23 05h45m02.338456s :BS_NODE DEBUG: [23] VDiskId# [8000003d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.431713Z 23 05h45m03.631456s :BS_NODE DEBUG: [23] VDiskId# [8000007d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.432449Z 20 05h45m04.759456s :BS_NODE DEBUG: [20] VDiskId# [8000004d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.432817Z 21 05h45m04.980456s :BS_NODE DEBUG: [21] VDiskId# [80000055:4:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.434247Z 23 05h45m05.348456s :BS_NODE DEBUG: [23] VDiskId# [8000001d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.434979Z 23 05h45m05.370456s :BS_NODE DEBUG: [23] VDiskId# [8000006d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.435630Z 24 05h45m05.850456s :BS_NODE DEBUG: [24] VDiskId# [8000002d:3:1:0:0] status changed to REPLICATING 2025-04-10T11:36:15.436336Z 23 05h45m08.786456s :BS_NODE DEBUG: [23] VDiskId# [8000000d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.437591Z 24 05h45m08.786968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.437648Z 24 05h45m08.786968s :BS_NODE DEBUG: [24] VDiskId# [8000000d:2:1:0:0] destroyed 2025-04-10T11:36:15.438197Z 21 05h45m12.256456s :BS_NODE DEBUG: [21] VDiskId# [80000055:4:1:0:0] status changed to READY 2025-04-10T11:36:15.439088Z 24 05h45m12.256968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.439144Z 24 05h45m12.256968s :BS_NODE DEBUG: [24] VDiskId# [80000055:3:1:0:0] destroyed 2025-04-10T11:36:15.439289Z 23 05h45m12.781456s :BS_NODE DEBUG: [23] VDiskId# [8000001d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.440386Z 24 05h45m12.781968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 
2025-04-10T11:36:15.440440Z 24 05h45m12.781968s :BS_NODE DEBUG: [24] VDiskId# [8000001d:2:1:0:0] destroyed 2025-04-10T11:36:15.440613Z 23 05h45m14.660456s :BS_NODE DEBUG: [23] VDiskId# [8000006d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.441685Z 24 05h45m14.660968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.441734Z 24 05h45m14.660968s :BS_NODE DEBUG: [24] VDiskId# [8000006d:2:1:0:0] destroyed 2025-04-10T11:36:15.442817Z 23 05h45m19.816456s :BS_NODE DEBUG: [23] VDiskId# [8000003d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.443923Z 24 05h45m19.816968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.443974Z 24 05h45m19.816968s :BS_NODE DEBUG: [24] VDiskId# [8000003d:2:1:0:0] destroyed 2025-04-10T11:36:15.446062Z 24 05h45m31.833456s :BS_NODE DEBUG: [24] VDiskId# [8000002d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.447427Z 24 05h45m31.833968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.447483Z 24 05h45m31.833968s :BS_NODE DEBUG: [24] VDiskId# [8000002d:2:1:0:0] destroyed 2025-04-10T11:36:15.448116Z 23 05h45m35.554456s :BS_NODE DEBUG: [23] VDiskId# [8000005d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.449185Z 24 05h45m35.554968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.449240Z 24 05h45m35.554968s :BS_NODE DEBUG: [24] VDiskId# [8000005d:2:1:0:0] destroyed 2025-04-10T11:36:15.449395Z 23 05h45m36.224456s :BS_NODE DEBUG: [23] VDiskId# [8000007d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.450480Z 24 05h45m36.224968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.450536Z 24 05h45m36.224968s :BS_NODE DEBUG: [24] VDiskId# [8000007d:2:1:0:0] destroyed 2025-04-10T11:36:15.450681Z 20 05h45m36.387456s :BS_NODE DEBUG: [20] VDiskId# [8000004d:3:1:0:0] status changed to READY 2025-04-10T11:36:15.451430Z 24 05h45m36.387968s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:15.451481Z 24 05h45m36.387968s :BS_NODE DEBUG: [24] VDiskId# [8000004d:2:1:0:0] destroyed >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> SystemView::AuthUsers_Access [GOOD] >> SystemView::AuthUsers_ResultOrder >> TFlatTest::ShardFreezeRejectBadProtobuf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 
2025-04-10T11:35:13.649868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:13.649957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:13.649996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:13.650032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:13.650073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:13.650099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:13.650171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:13.650251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:13.650608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:13.741546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:35:13.741616Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:35:13.756619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:13.756833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:13.756985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:13.763130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:13.763388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:13.763861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:13.764386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:13.767180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:13.768280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:13.768325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:13.768513Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:13.768545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:13.768572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:13.768695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:35:13.774450Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:35:13.893170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:13.893467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.893687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:13.893919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:13.893993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.896551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:13.896701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:13.896894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.896946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:13.896997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:13.897037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:13.899231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.899286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:13.899336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:13.901316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T11:35:13.901356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.901414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:13.901472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:13.905088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:13.907060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:13.907271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:13.908175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:13.908303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:13.908347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:13.908591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:13.908641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:13.908800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:13.908873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:13.912114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:13.912175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:13.912366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:13.912408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:13.912695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:13.912745Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:13.912844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:13.912882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:13.912920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... LAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-10T11:36:17.740432Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:36:17.740609Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2025-04-10T11:36:17.740740Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:36:17.740808Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:36:17.741156Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.743523Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.746378Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:17.746566Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:17.746835Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:17.746884Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:17.747090Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-10T11:36:17.747253Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:17.747297Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-04-10T11:36:17.747349Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-10T11:36:17.747602Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:17.747658Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:36:17.747768Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:17.747808Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 
72057594046678944 2025-04-10T11:36:17.747857Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-10T11:36:17.748989Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.749097Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.749134Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:36:17.749175Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-10T11:36:17.749220Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-10T11:36:17.750342Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.750441Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.750477Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-10T11:36:17.750522Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:36:17.750559Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:36:17.750646Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-10T11:36:17.754379Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-10T11:36:17.754448Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:17.754499Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-10T11:36:17.754623Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T11:36:17.754665Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T11:36:17.754713Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-10T11:36:17.754754Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-10T11:36:17.754815Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-10T11:36:17.754881Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready 
parts: 1/1 2025-04-10T11:36:17.754936Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-10T11:36:17.754977Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-10T11:36:17.755121Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:36:17.755162Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-10T11:36:17.755869Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:36:17.755928Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-10T11:36:17.756007Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-10T11:36:17.758136Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.758372Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-10T11:36:17.761410Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-10T11:36:17.761817Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-10T11:36:17.761865Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-10T11:36:17.762317Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-10T11:36:17.762456Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-10T11:36:17.762512Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:471:2444] TestWaitNotification: OK eventTxId 1003 2025-04-10T11:36:17.763067Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:36:17.763292Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 263us result status StatusSuccess 2025-04-10T11:36:17.763844Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 
TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> BsControllerTest::TestLocalSelfHeal [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-04-10T11:36:11.206124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646909507110534:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:11.206191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb9/r3tmp/tmpm96Mwz/pdisk_1.dat 2025-04-10T11:36:11.585338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:11.672055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:11.672152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:11.674677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30104 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:11.942111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:11.966078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.109996Z node 1 :TX_PROXY ERROR: Actor# [1:7491646913802078452:2363] txid# 281474976715659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb9/r3tmp/tmpRES4x0/pdisk_1.dat 2025-04-10T11:36:15.052633Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:15.055665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:15.085416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:15.085492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:15.087710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13216 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:36:15.276001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:15.288545Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:36:15.292357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:15.369827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... Error 128: Table is frozen. Only unfreeze alter is allowed 2025-04-10T11:36:15.384109Z node 2 :TX_PROXY ERROR: Actor# [2:7491646925394028026:2396] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } 2025-04-10T11:36:15.386132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:15.401679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TLocksTest::Range_GoodLock0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-04-10T11:36:16.853308Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-10T11:36:16.853375Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:36:16.853485Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-10T11:36:16.853511Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:36:16.853570Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-10T11:36:16.853599Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:36:16.853639Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-10T11:36:16.853672Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:36:16.853719Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-10T11:36:16.853740Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-10T11:36:16.853790Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-10T11:36:16.853814Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:36:16.853858Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-10T11:36:16.853881Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:36:16.853917Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-10T11:36:16.853939Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:36:16.853974Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-10T11:36:16.854005Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:36:16.854062Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-10T11:36:16.854092Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:36:16.854142Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-10T11:36:16.854208Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 
2025-04-10T11:36:16.854247Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-10T11:36:16.854293Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-10T11:36:16.854347Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-10T11:36:16.854368Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:36:16.854404Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-10T11:36:16.854424Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:36:16.854458Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-10T11:36:16.854482Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:36:16.854522Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-10T11:36:16.854541Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-10T11:36:16.854575Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-10T11:36:16.854595Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-10T11:36:16.854633Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-10T11:36:16.854653Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-10T11:36:16.854709Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-10T11:36:16.854732Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-10T11:36:16.854789Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-10T11:36:16.854831Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-10T11:36:16.854872Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-10T11:36:16.854899Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-10T11:36:16.854943Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-10T11:36:16.854964Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-10T11:36:16.855004Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-10T11:36:16.855020Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-10T11:36:16.855048Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-10T11:36:16.855062Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-10T11:36:16.855087Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-10T11:36:16.855107Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-10T11:36:16.855159Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-10T11:36:16.855189Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-10T11:36:16.855244Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-10T11:36:16.855273Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-10T11:36:16.855310Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-10T11:36:16.855331Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-10T11:36:16.855371Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-10T11:36:16.855392Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-10T11:36:16.855426Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-10T11:36:16.855449Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-10T11:36:16.855515Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-10T11:36:16.855536Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-10T11:36:16.855578Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-10T11:36:16.855597Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-10T11:36:16.855638Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-10T11:36:16.855658Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-10T11:36:16.855710Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-10T11:36:16.855730Z 34 
00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-10T11:36:16.855792Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-10T11:36:16.855814Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-10T11:36:16.855851Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-10T11:36:16.855888Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-10T11:36:16.874824Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-10T11:36:16.876409Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-10T11:36:16.876485Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-10T11:36:16.876547Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-10T11:36:16.876607Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-10T11:36:16.876656Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-10T11:36:16.876700Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-10T11:36:16.876749Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-10T11:36:16.876792Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-10T11:36:16.876837Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-10T11:36:16.876880Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-10T11:36:16.876925Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-10T11:36:16.876989Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-10T11:36:16.877032Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-10T11:36:16.877075Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-10T11:36:16.877122Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-10T11:36:16.877166Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-10T11:36:16.877222Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] 
ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-10T11:36:16.877284Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-10T11:36:16.877349Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-10T11:36:16.877390Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-10T11:36:16.877441Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-10T11:36:16.877482Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-10T11:36:16.877522Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-10T11:36:16.877572Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-10T11:36:16.877617Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-10T11:36:16.877660Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-10T11:36:16.877699Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-10T11:36:16.877745Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-10T11:36:16.877786Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-10T11:36:16.877826Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-10T11:36:16.877868Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-10T11:36:16.877913Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-10T11:36:16.877969Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-10T11:36:16.878020Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
2:0:0] -> [80000047:2:2:0:0] 2025-04-10T11:36:19.019236Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.019271Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000047:2:2:1:0] PDiskId# 1000 VSlotId# 1009 created 2025-04-10T11:36:19.019356Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000047:2:2:1:0] status changed to INIT_PENDING 2025-04-10T11:36:19.019510Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:19.019558Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] VDiskId# [80000037:1:1:0:0] -> [80000037:2:1:0:0] 2025-04-10T11:36:19.019644Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-04-10T11:36:19.019695Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] VDiskId# [80000037:1:2:2:0] -> [80000037:2:2:2:0] 2025-04-10T11:36:19.019809Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-10T11:36:19.019859Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] VDiskId# [80000037:1:1:1:0] -> [80000037:2:1:1:0] 2025-04-10T11:36:19.019982Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:19.020040Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] VDiskId# [80000037:1:0:0:0] -> [80000037:2:0:0:0] 2025-04-10T11:36:19.020124Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:19.020167Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] VDiskId# [80000037:1:1:2:0] -> [80000037:2:1:2:0] 2025-04-10T11:36:19.020256Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-10T11:36:19.020304Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] VDiskId# [80000037:1:0:1:0] -> [80000037:2:0:1:0] 2025-04-10T11:36:19.020390Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-04-10T11:36:19.020435Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] VDiskId# [80000037:1:0:2:0] -> [80000037:2:0:2:0] 2025-04-10T11:36:19.020522Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:19.020576Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] VDiskId# [80000037:1:2:0:0] -> [80000037:2:2:0:0] 2025-04-10T11:36:19.020686Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.020794Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000037:2:2:1:0] PDiskId# 1002 VSlotId# 1009 created 2025-04-10T11:36:19.020868Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000037:2:2:1:0] status changed to INIT_PENDING 2025-04-10T11:36:19.021035Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:19.021085Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] VDiskId# [80000027:1:1:0:0] -> [80000027:2:1:0:0] 2025-04-10T11:36:19.021184Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-04-10T11:36:19.021233Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] VDiskId# [80000027:1:2:2:0] -> [80000027:2:2:2:0] 2025-04-10T11:36:19.021312Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-10T11:36:19.021356Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] VDiskId# [80000027:1:1:1:0] -> [80000027:2:1:1:0] 2025-04-10T11:36:19.021473Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:19.021528Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] VDiskId# [80000027:1:0:0:0] -> [80000027:2:0:0:0] 2025-04-10T11:36:19.021609Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:19.021654Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] VDiskId# [80000027:1:1:2:0] -> [80000027:2:1:2:0] 2025-04-10T11:36:19.021756Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-10T11:36:19.021844Z 8 
00h05m00.102048s :BS_NODE DEBUG: [8] VDiskId# [80000027:1:0:1:0] -> [80000027:2:0:1:0] 2025-04-10T11:36:19.021929Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-04-10T11:36:19.021980Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] VDiskId# [80000027:1:0:2:0] -> [80000027:2:0:2:0] 2025-04-10T11:36:19.022074Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:19.022124Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] VDiskId# [80000027:1:2:0:0] -> [80000027:2:2:0:0] 2025-04-10T11:36:19.022245Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.022288Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000027:2:2:1:0] PDiskId# 1003 VSlotId# 1009 created 2025-04-10T11:36:19.022378Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000027:2:2:1:0] status changed to INIT_PENDING 2025-04-10T11:36:19.022561Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:19.022620Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] VDiskId# [80000017:1:1:0:0] -> [80000017:2:1:0:0] 2025-04-10T11:36:19.022722Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-04-10T11:36:19.022770Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] VDiskId# [80000017:1:2:2:0] -> [80000017:2:2:2:0] 2025-04-10T11:36:19.022864Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-10T11:36:19.022912Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] VDiskId# [80000017:1:1:1:0] -> [80000017:2:1:1:0] 2025-04-10T11:36:19.023012Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:19.023071Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] VDiskId# [80000017:1:0:0:0] -> [80000017:2:0:0:0] 2025-04-10T11:36:19.023170Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:19.023213Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] VDiskId# [80000017:1:1:2:0] -> [80000017:2:1:2:0] 2025-04-10T11:36:19.023299Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-10T11:36:19.023356Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] VDiskId# [80000017:1:0:1:0] -> [80000017:2:0:1:0] 2025-04-10T11:36:19.023440Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-04-10T11:36:19.023535Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] VDiskId# [80000017:1:0:2:0] -> [80000017:2:0:2:0] 2025-04-10T11:36:19.023619Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:19.023664Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] VDiskId# [80000017:1:2:0:0] -> [80000017:2:2:0:0] 2025-04-10T11:36:19.023760Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.023798Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000017:2:2:1:0] PDiskId# 1000 VSlotId# 1010 created 2025-04-10T11:36:19.023861Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000017:2:2:1:0] status changed to INIT_PENDING 2025-04-10T11:36:19.024013Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:19.024070Z 17 00h05m00.102048s :BS_NODE DEBUG: [17] VDiskId# [80000007:1:1:0:0] -> [80000007:2:1:0:0] 2025-04-10T11:36:19.024169Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-04-10T11:36:19.024215Z 35 00h05m00.102048s :BS_NODE DEBUG: [35] VDiskId# [80000007:1:2:2:0] -> [80000007:2:2:2:0] 2025-04-10T11:36:19.024297Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-10T11:36:19.024346Z 20 00h05m00.102048s :BS_NODE DEBUG: [20] VDiskId# [80000007:1:1:1:0] -> [80000007:2:1:1:0] 2025-04-10T11:36:19.024450Z 5 00h05m00.102048s :BS_NODE 
DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:19.024512Z 5 00h05m00.102048s :BS_NODE DEBUG: [5] VDiskId# [80000007:1:0:0:0] -> [80000007:2:0:0:0] 2025-04-10T11:36:19.024599Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:19.024654Z 23 00h05m00.102048s :BS_NODE DEBUG: [23] VDiskId# [80000007:1:1:2:0] -> [80000007:2:1:2:0] 2025-04-10T11:36:19.024753Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-10T11:36:19.024801Z 8 00h05m00.102048s :BS_NODE DEBUG: [8] VDiskId# [80000007:1:0:1:0] -> [80000007:2:0:1:0] 2025-04-10T11:36:19.024902Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-04-10T11:36:19.024974Z 11 00h05m00.102048s :BS_NODE DEBUG: [11] VDiskId# [80000007:1:0:2:0] -> [80000007:2:0:2:0] 2025-04-10T11:36:19.025070Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:19.025115Z 29 00h05m00.102048s :BS_NODE DEBUG: [29] VDiskId# [80000007:1:2:0:0] -> [80000007:2:2:0:0] 2025-04-10T11:36:19.025213Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.025255Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000007:2:2:1:0] PDiskId# 1002 VSlotId# 1010 created 2025-04-10T11:36:19.025315Z 32 00h05m00.102048s :BS_NODE DEBUG: [32] VDiskId# [80000007:2:2:1:0] status changed to INIT_PENDING 2025-04-10T11:36:19.031218Z 32 00h05m01.149048s :BS_NODE DEBUG: [32] VDiskId# [80000007:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.031779Z 32 00h05m01.659048s :BS_NODE DEBUG: [32] VDiskId# [80000047:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.032555Z 32 00h05m01.779048s :BS_NODE DEBUG: [32] VDiskId# [80000077:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.033475Z 32 00h05m02.274048s :BS_NODE DEBUG: [32] VDiskId# [80000067:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.034541Z 32 00h05m03.166048s :BS_NODE DEBUG: [32] VDiskId# [80000037:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.035413Z 32 00h05m04.692048s :BS_NODE DEBUG: [32] VDiskId# [80000027:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.037217Z 32 00h05m05.345048s :BS_NODE DEBUG: [32] VDiskId# [80000057:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.038181Z 32 00h05m05.866048s :BS_NODE DEBUG: [32] VDiskId# [80000017:2:2:1:0] status changed to REPLICATING 2025-04-10T11:36:19.039198Z 32 00h05m09.215048s :BS_NODE DEBUG: [32] VDiskId# [80000077:2:2:1:0] status changed to READY 2025-04-10T11:36:19.040897Z 32 00h05m09.215560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.040991Z 32 00h05m09.215560s :BS_NODE DEBUG: [32] VDiskId# [80000077:1:2:1:0] destroyed 2025-04-10T11:36:19.041508Z 32 00h05m12.733048s :BS_NODE DEBUG: [32] VDiskId# [80000047:2:2:1:0] status changed to READY 2025-04-10T11:36:19.043253Z 32 00h05m12.733560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.043313Z 32 00h05m12.733560s :BS_NODE DEBUG: [32] VDiskId# [80000047:1:2:1:0] destroyed 2025-04-10T11:36:19.043530Z 32 00h05m14.388048s :BS_NODE DEBUG: [32] VDiskId# [80000057:2:2:1:0] status changed to READY 2025-04-10T11:36:19.045131Z 32 00h05m14.388560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.045185Z 32 00h05m14.388560s :BS_NODE DEBUG: [32] VDiskId# [80000057:1:2:1:0] destroyed 2025-04-10T11:36:19.046400Z 32 00h05m21.938048s :BS_NODE DEBUG: [32] VDiskId# [80000027:2:2:1:0] status changed to READY 2025-04-10T11:36:19.048084Z 32 00h05m21.938560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.048141Z 32 
00h05m21.938560s :BS_NODE DEBUG: [32] VDiskId# [80000027:1:2:1:0] destroyed 2025-04-10T11:36:19.048341Z 32 00h05m23.575048s :BS_NODE DEBUG: [32] VDiskId# [80000037:2:2:1:0] status changed to READY 2025-04-10T11:36:19.049897Z 32 00h05m23.575560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.049950Z 32 00h05m23.575560s :BS_NODE DEBUG: [32] VDiskId# [80000037:1:2:1:0] destroyed 2025-04-10T11:36:19.050571Z 32 00h05m24.546048s :BS_NODE DEBUG: [32] VDiskId# [80000017:2:2:1:0] status changed to READY 2025-04-10T11:36:19.052246Z 32 00h05m24.546560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.052321Z 32 00h05m24.546560s :BS_NODE DEBUG: [32] VDiskId# [80000017:1:2:1:0] destroyed 2025-04-10T11:36:19.053214Z 32 00h05m29.374048s :BS_NODE DEBUG: [32] VDiskId# [80000067:2:2:1:0] status changed to READY 2025-04-10T11:36:19.055035Z 32 00h05m29.374560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.055092Z 32 00h05m29.374560s :BS_NODE DEBUG: [32] VDiskId# [80000067:1:2:1:0] destroyed 2025-04-10T11:36:19.055568Z 32 00h05m29.992048s :BS_NODE DEBUG: [32] VDiskId# [80000007:2:2:1:0] status changed to READY 2025-04-10T11:36:19.057660Z 32 00h05m29.992560s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-04-10T11:36:19.057716Z 32 00h05m29.992560s :BS_NODE DEBUG: [32] VDiskId# [80000007:1:2:1:0] destroyed >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TObjectStorageListingTest::SuffixColumns [GOOD] >> BsControllerTest::DecommitRejected >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> TCdcStreamTests::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-04-10T11:31:48.354736Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.521328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.551145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.551588Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.564252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.564486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.564851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.564982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.565115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.565255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.565393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.565526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.565636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.565746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.565848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.565988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.606176Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.606558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.606637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.606865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.608577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.608640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.608744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.608832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.608884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.608916Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.609143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.609272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.609308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.609419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.609492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.609546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.609578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.609654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.609717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.609760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.609827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.609870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.609901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.610348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73; 2025-04-10T11:31:48.610443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:31:48.611002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=469; 2025-04-10T11:31:48.611132Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-04-10T11:31:48.611366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.611432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.611473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.611698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.611767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.611990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.612043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.612290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.612335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.612367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.612485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.612531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.612579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-10T11:36:19.740769Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10777:12404];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:19.743980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10777:12404];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> BsControllerTest::DecommitRejected [GOOD] >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-04-10T11:36:21.842724Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-10T11:36:21.842783Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:36:21.842883Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-10T11:36:21.842906Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:36:21.842960Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-10T11:36:21.842987Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:36:21.843197Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-10T11:36:21.843233Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:36:21.843288Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-10T11:36:21.844505Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-10T11:36:21.844603Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-10T11:36:21.844674Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:36:21.844778Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-10T11:36:21.844831Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:36:21.844920Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-10T11:36:21.844981Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:36:21.845037Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-10T11:36:21.845067Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:36:21.845153Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-10T11:36:21.845181Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:36:21.845252Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-10T11:36:21.845284Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-10T11:36:21.845338Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-10T11:36:21.845388Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] 
Connect 2025-04-10T11:36:21.845445Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-10T11:36:21.845475Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:36:21.845569Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-10T11:36:21.845599Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:36:21.845648Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-10T11:36:21.845701Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:36:21.861949Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2025-04-10T11:36:21.862809Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2025-04-10T11:36:21.862899Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2025-04-10T11:36:21.862950Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20] 2025-04-10T11:36:21.863012Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2025-04-10T11:36:21.863066Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2025-04-10T11:36:21.863117Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2025-04-10T11:36:21.863164Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2025-04-10T11:36:21.863238Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2025-04-10T11:36:21.863292Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2025-04-10T11:36:21.863346Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2025-04-10T11:36:21.863396Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2025-04-10T11:36:21.863473Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2025-04-10T11:36:21.863550Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 2025-04-10T11:36:21.863602Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2025-04-10T11:36:21.924422Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:36:21.924519Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:36:21.924584Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:36:21.924633Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:36:21.924680Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 
2025-04-10T11:36:21.924756Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:36:21.924814Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:36:21.924893Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:36:21.925065Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:36:21.925132Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:36:21.925184Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-04-10T11:36:21.925234Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-04-10T11:36:21.925291Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:36:21.925358Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:36:21.925409Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:36:21.928448Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:581:60] Status# OK ClientId# [1:581:60] ServerId# [1:610:61] PipeClient# [1:581:60] 2025-04-10T11:36:21.928518Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-04-10T11:36:21.933230Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:582:21] Status# OK ClientId# [2:582:21] ServerId# [1:611:62] PipeClient# [2:582:21] 2025-04-10T11:36:21.933307Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-04-10T11:36:21.933365Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:583:21] Status# OK ClientId# [3:583:21] ServerId# [1:612:63] PipeClient# [3:583:21] 2025-04-10T11:36:21.933396Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-04-10T11:36:21.933459Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:584:21] Status# OK ClientId# [4:584:21] ServerId# [1:613:64] PipeClient# [4:584:21] 2025-04-10T11:36:21.933507Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-04-10T11:36:21.933553Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:585:21] Status# OK ClientId# [5:585:21] ServerId# [1:614:65] PipeClient# [5:585:21] 2025-04-10T11:36:21.933580Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-04-10T11:36:21.933620Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:586:21] Status# OK ClientId# [6:586:21] ServerId# [1:615:66] PipeClient# [6:586:21] 2025-04-10T11:36:21.933656Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-04-10T11:36:21.933703Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:587:21] Status# OK ClientId# [7:587:21] ServerId# [1:616:67] PipeClient# [7:587:21] 2025-04-10T11:36:21.933757Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2025-04-10T11:36:21.933820Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:588:21] Status# OK ClientId# [8:588:21] ServerId# [1:617:68] PipeClient# [8:588:21] 2025-04-10T11:36:21.933860Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2025-04-10T11:36:21.933907Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:589:21] Status# OK ClientId# [9:589:21] ServerId# [1:618:69] PipeClient# [9:589:21] 2025-04-10T11:36:21.933938Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-04-10T11:36:21.933984Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:590:21] Status# OK ClientId# [10:590:21] ServerId# [1:619:70] PipeClient# [10:590:21] 2025-04-10T11:36:21.934017Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-04-10T11:36:21.934069Z 11 00h00m00.100000s :BS_NODE DEBUG: 
[11] ClientConnected Sender# [11:591:21] Status# OK ClientId# [11:591:21] ServerId# [1:620:71] PipeClient# [11:591:21] 2025-04-10T11:36:21.934096Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-04-10T11:36:21.935031Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:592:21] Status# OK ClientId# [12:592:21] ServerId# [1:621:72] PipeClient# [12:592:21] 2025-04-10T11:36:21.935111Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-04-10T11:36:21.935180Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:593:21] Status# OK ClientId# [13:593:21] ServerId# [1:622:73] PipeClient# [13:593:21] 2025-04-10T11:36:21.935224Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-04-10T11:36:21.935265Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:594:21] Status# OK ClientId# [14:594:21] ServerId# [1:623:74] PipeClient# [14:594:21] 2025-04-10T11:36:21.935288Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-04-10T11:36:21.935320Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:595:21] Status# OK ClientId# [15:595:21] ServerId# [1:624:75] PipeClient# [15:595:21] 2025-04-10T11:36:21.935341Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-04-10T11:36:21.938015Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:36:21.938083Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-10T11:36:21.959869Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-04-10T11:36:21.961005Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-10T11:36:21.961070Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-10T11:36:21.961158Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2025-04-10T11:36:21.961279Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-10T11:36:21.961317Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-10T11:36:21.961361Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-04-10T11:36:21.961451Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-10T11:36:21.961485Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-10T11:36:21.961526Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-04-10T11:36:21.961624Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:21.961671Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-10T11:36:21.961733Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-04-10T1 ... 
2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.243090Z 11 00h01m05.392512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to REPLICATING 2025-04-10T11:36:22.243393Z 1 00h01m05.392512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.243630Z 2 00h01m06.214512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to REPLICATING 2025-04-10T11:36:22.244059Z 1 00h01m06.214512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.244276Z 13 00h01m06.653024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to REPLICATING 2025-04-10T11:36:22.244667Z 1 00h01m06.653024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.244939Z 1 00h01m10.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.245540Z 14 00h01m14.659512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2025-04-10T11:36:22.245990Z 1 00h01m14.659512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.246517Z 11 00h01m16.697512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2025-04-10T11:36:22.246779Z 1 00h01m16.697512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.246978Z 3 00h01m17.005512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2025-04-10T11:36:22.247401Z 1 00h01m17.005512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] 
Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.247615Z 13 00h01m18.575024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2025-04-10T11:36:22.248074Z 1 00h01m18.575024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.248754Z 7 00h01m18.575536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-10T11:36:22.248813Z 7 00h01m18.575536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-04-10T11:36:22.249092Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-10T11:36:22.249352Z 14 00h01m26.595536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2025-04-10T11:36:22.249736Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2025-04-10T11:36:22.250461Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.250530Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-04-10T11:36:22.250828Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.250875Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2025-04-10T11:36:22.250913Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.250965Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-04-10T11:36:22.251064Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.251099Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-04-10T11:36:22.251130Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.251157Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 
2025-04-10T11:36:22.251189Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.251230Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-04-10T11:36:22.251262Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.251289Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-04-10T11:36:22.251316Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-10T11:36:22.251342Z 1 00h01m26.595536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-04-10T11:36:22.253867Z 1 00h01m26.596048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-10T11:36:22.253944Z 1 00h01m26.596048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-04-10T11:36:22.254551Z 1 00h01m26.596048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-04-10T11:36:22.254593Z 1 00h01m26.596048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2025-04-10T11:36:22.254709Z 8 00h01m26.596048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-10T11:36:22.254750Z 8 00h01m26.596048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-04-10T11:36:22.254880Z 2 00h01m26.596048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-10T11:36:22.254964Z 2 00h01m26.596048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-04-10T11:36:22.255044Z 3 00h01m26.596048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-10T11:36:22.255097Z 3 00h01m26.596048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-04-10T11:36:22.255172Z 4 00h01m26.596048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-10T11:36:22.255212Z 4 00h01m26.596048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2025-04-10T11:36:22.255301Z 5 00h01m26.596048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-10T11:36:22.255346Z 5 00h01m26.596048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2025-04-10T11:36:22.255431Z 6 00h01m26.596048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-10T11:36:22.255474Z 6 00h01m26.596048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-04-10T11:36:22.255528Z 9 00h01m26.596048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-10T11:36:22.255607Z 13 00h01m26.596048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-10T11:36:22.255653Z 13 00h01m26.596048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-04-10T11:36:22.255720Z 14 00h01m26.596048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-10T11:36:22.255763Z 14 00h01m26.596048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-04-10T11:36:22.255858Z 15 00h01m26.596048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-10T11:36:22.255897Z 15 00h01m26.596048s :BS_NODE 
DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-04-10T11:36:22.255969Z 15 00h01m26.596048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-04-10T11:36:22.257170Z 13 00h01m27.451512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2025-04-10T11:36:22.257804Z 2 00h01m28.430512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2025-04-10T11:36:22.258410Z 15 00h01m28.877048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-04-10T11:36:22.259469Z 1 00h01m30.840512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2025-04-10T11:36:22.260552Z 10 00h01m34.880512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-04-10T11:36:22.261052Z 12 00h01m36.924512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2025-04-10T11:36:22.261535Z 15 00h01m38.407512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2025-04-10T11:36:22.264090Z 15 00h01m57.888048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-04-10T11:36:22.265007Z 9 00h01m57.888560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-10T11:36:22.265067Z 9 00h01m57.888560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-04-10T11:36:13.776495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646916072596502:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:13.776601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb4/r3tmp/tmpUli165/pdisk_1.dat 2025-04-10T11:36:14.204576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:14.215211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:14.215308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:14.217595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28803, node 1 2025-04-10T11:36:14.339109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:14.339140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:14.339148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:14.339329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13579 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:14.799410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.846691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.863222Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.875219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284975083 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284975083 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... 
(TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb4/r3tmp/tmpBqHZEl/pdisk_1.dat 2025-04-10T11:36:18.014282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:18.049752Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:18.072657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:18.072753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:18.074326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1336, node 2 2025-04-10T11:36:18.175648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:18.175671Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:18.175679Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:18.175790Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5643 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:18.435844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.446528Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.468740Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:18.480783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:19.054022Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7491646941810923104:2484], Recipient [2:7491646937515955133:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-04-10T11:36:19.054068Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-10T11:36:19.054331Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-10T11:36:19.054610Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-04-10T11:36:19.054666Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-04-10T11:36:19.054707Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-04-10T11:36:19.054748Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-04-10T11:36:19.054777Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-04-10T11:36:19.054877Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-04-10T11:36:19.071263Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7491646941810923108:2485], Recipient [2:7491646937515955133:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-04-10T11:36:19.071299Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-10T11:36:19.071469Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 
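The TEvObjectStorageListingRequest handling traced above is an S3-style list over a sorted path column: rows under PathColumnPrefix "Music/AC DC/" are walked from the start-after key up to MaxKeys, and a path that still contained the delimiter past the prefix would be folded into a common prefix (none here, since every file sits directly under the prefix). A toy model of that fold, assuming plain sorted string keys instead of the datashard's composite (hash, bucket, path, version) keys, and one entry per distinct path — the shard, by contrast, returns one entry per version, hence "contents: 5" above:

    def list_objects(keys, prefix, delimiter, start_after="", max_keys=10):
        """Toy S3-style listing: returns (contents, common_prefixes)."""
        contents, common = [], []
        for key in sorted(keys):
            if len(contents) + len(common) >= max_keys:
                break
            if not key.startswith(prefix) or key <= start_after:
                continue
            rest = key[len(prefix):]
            cut = rest.find(delimiter)
            if cut >= 0:
                # Fold everything below "<prefix><head><delimiter>" into one entry.
                folded = prefix + rest[:cut + 1]
                if folded not in common:
                    common.append(folded)
            else:
                contents.append(key)
        return contents, common

    keys = ["Music/AC DC/Shoot to Thrill.mp3",
            "Music/AC DC/Thunderstruck.mp3"]
    print(list_objects(keys, "Music/AC DC/", "/",
                       start_after="Music/AC DC/Shoot to Thrill.mp3"))
    # -> (['Music/AC DC/Thunderstruck.mp3'], [])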
2025-04-10T11:36:19.071675Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-04-10T11:36:19.071707Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-04-10T11:36:19.071782Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TObjectStorageListingTest::Decimal [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:35:11.543577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:11.543678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:11.543717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:11.543753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:11.543806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:11.543838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:11.543907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:11.543990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:11.544362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:11.629412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" 
AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:35:11.629482Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:35:11.648185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:11.648459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:11.648673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:11.657473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:11.657779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:11.658570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.659340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:11.662780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.664313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:11.664375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.664565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:11.664621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:11.664679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:11.664854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:35:11.672555Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:35:11.807458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:11.807693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.807880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:11.808108Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:11.808176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.811082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.811243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:11.811399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.811443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:11.811477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:11.811507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:11.813349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.813405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:11.813439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:11.814918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.814956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.814998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.815047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.818007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:11.819898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:11.820090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:11.821211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:11.821345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:11.821392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.821717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:11.821783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:11.821970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:11.822072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:11.824398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:11.824446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:11.824630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:11.824675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:11.824936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:11.825005Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:11.825102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:11.825138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:11.825179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
lPathId: 3] 2025-04-10T11:36:22.937922Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:22.937977Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:206:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-04-10T11:36:22.938031Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:206:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-04-10T11:36:22.938432Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-10T11:36:22.938499Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-04-10T11:36:22.940752Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.941015Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.941066Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-10T11:36:22.941116Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-10T11:36:22.941165Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2025-04-10T11:36:22.942797Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.942922Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.942969Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-10T11:36:22.943021Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-10T11:36:22.943075Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:36:22.943169Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-04-10T11:36:22.946684Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1321 } } 2025-04-10T11:36:22.946756Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-10T11:36:22.946955Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1321 } } 2025-04-10T11:36:22.947100Z node 72 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1321 } } 2025-04-10T11:36:22.949861Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-10T11:36:22.949926Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-10T11:36:22.950119Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-10T11:36:22.950226Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:36:22.950354Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-10T11:36:22.950431Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:22.950478Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-10T11:36:22.950525Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:36:22.950637Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2025-04-10T11:36:22.952379Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.952515Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-10T11:36:22.955346Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-10T11:36:22.955572Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-10T11:36:22.956013Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-10T11:36:22.956072Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 
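Each suboperation above moves through SchemeShard's numeric state machine — this create-table leg logs "Change state for txid 1002:0 129 -> 240", and the earlier subdomain alter walked 2 -> 3 -> 128 -> 240. When a dump like this needs auditing, the chains can be scraped back out; a throwaway sketch (the message shape and state numbers come from the lines above; nothing here is a SchemeShard API):

    import re
    from collections import defaultdict

    CHANGE = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

    def state_chains(lines):
        chains = defaultdict(list)
        for line in lines:
            for tx, src, dst in CHANGE.findall(line):
                if not chains[tx]:
                    chains[tx].append(src)   # seed the chain with the first source state
                chains[tx].append(dst)
        return dict(chains)

    log = [
        "INFO: Change state for txid 1:0 2 -> 3",
        "INFO: Change state for txid 1:0 3 -> 128",
        "INFO: Change state for txid 1:0 128 -> 240",
        "INFO: Change state for txid 1002:0 129 -> 240",
    ]
    print(state_chains(log))
    # -> {'1:0': ['2', '3', '128', '240'], '1002:0': ['129', '240']}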
2025-04-10T11:36:22.956205Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-10T11:36:22.956262Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-10T11:36:22.956321Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-10T11:36:22.956363Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-10T11:36:22.956412Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-04-10T11:36:22.956465Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-10T11:36:22.956518Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-04-10T11:36:22.956558Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-04-10T11:36:22.956711Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-04-10T11:36:22.960724Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-04-10T11:36:22.960786Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-04-10T11:36:22.961288Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-04-10T11:36:22.961418Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-04-10T11:36:22.961463Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:409:2382] TestWaitNotification: OK eventTxId 1002 2025-04-10T11:36:22.962176Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:36:22.962438Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 336us result status StatusSuccess 2025-04-10T11:36:22.963095Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> TAsyncIndexTests::OnlineBuild >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScriptingQueries >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> DstCreator::WithIntermediateDir >> DstCreator::WithSyncIndexAndIntermediateDir >> DstCreator::ReplicationModeMismatch >> DstCreator::ExistingDst >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-04-10T11:36:15.423242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646926764193322:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:15.423295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fae/r3tmp/tmpTQMwVR/pdisk_1.dat 2025-04-10T11:36:15.945719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:15.961879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:15.961973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:15.964411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1178, node 1 2025-04-10T11:36:16.045875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:16.045905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:16.045944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:16.046135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15513 WaitRootIsUp 'dc-1'... 
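Buried in the describe result above is the assertion the reboot test actually cares about: TTLSettings survived with ColumnName "modified_at", ExpireAfterSeconds 3600, and a matching Delete tier at ApplyAfterSeconds 3600. The eviction rule those numbers encode is plain cutoff arithmetic; a sketch, purely illustrative — the real deletion runs shard-side in the background:

    from datetime import datetime, timedelta, timezone

    def is_expired(modified_at, expire_after_seconds, now=None):
        """Row is eligible for TTL deletion once modified_at + TTL <= now."""
        now = now or datetime.now(timezone.utc)
        return modified_at + timedelta(seconds=expire_after_seconds) <= now

    now = datetime(2025, 4, 10, 11, 36, tzinfo=timezone.utc)
    old = now - timedelta(hours=2)      # written two hours ago -> expired
    new = now - timedelta(minutes=10)   # written recently      -> kept
    print(is_expired(old, 3600, now), is_expired(new, 3600, now))
    # -> True False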
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:16.341248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:16.355557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:16.378746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:19.409031Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646940667142436:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:19.409083Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fae/r3tmp/tmpWN15qo/pdisk_1.dat 2025-04-10T11:36:19.648995Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:19.651726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:19.651850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:19.659230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10626, node 2 2025-04-10T11:36:19.782827Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:19.782848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:19.782856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:19.782966Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17572 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:20.055143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:20.066645Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:20.087702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... >> DstCreator::GlobalConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-04-10T11:31:48.419762Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.567070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.593184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.593540Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:48.603939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.604205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.604468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.604583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.604691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.604810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.604955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.605114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.605243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.605352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.605457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.605557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.638031Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.638481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.638549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.638796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.638989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.639086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.639138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.639233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.639300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.639362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.639396Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.639622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.639706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.639757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.639795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.639912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.639984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.640036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.640073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.640161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.640214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.640249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.640311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.640352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.640381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.640834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-10T11:31:48.640929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:31:48.641049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=57; 2025-04-10T11:31:48.641186Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-10T11:31:48.641391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.641452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.641494Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.641712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.641765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.641804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.641997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.642062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.642101Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.642344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:48.642410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.642442Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.642583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.642622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.642677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
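The normalizer parade at the start of this columnshard trace is a fixed, ordered init pipeline — Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks — each logging normalizer_init/normalizer_finished with its sequence id and a "last=0" checkpoint. A toy of that resumable pass (stage names and seq ids are copied from the log; the control flow is invented):

    NORMALIZERS = [
        (1, "Granules"), (2, "Chunks"), (4, "TablesCleaner"),
        (6, "CleanGranuleId"), (8, "CleanInsertionDedup"),
        (9, "GCCountersNormalizer"), (10, "RestorePortionFromChunks"),
        (11, "SyncPortionFromChunks"), (13, "SyncMinSnapshotFromChunks"),
        (15, "RestoreV1Chunks_V2"), (16, "RestoreV2Chunks"),
    ]

    def run_normalizers(last_saved_id=0):
        # Skip stages already checkpointed, run the rest in declared order.
        for seq_id, name in NORMALIZERS:
            if seq_id <= last_saved_id:
                continue
            print(f"normalizer_init;last={last_saved_id};seq_id={seq_id};type={name}")
            # ... real work would scan/migrate local tablet data here ...
            print(f"normalizer_finished;description=CLASS_NAME={name};id={seq_id}")

    run_normalizers(last_saved_id=0)   # fresh start: all eleven stages run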
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-10T11:36:23.257256Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10778:12405];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:23.259399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10778:12405];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TAsyncIndexTests::OnlineBuild [GOOD] >> DstCreator::WithSyncIndex >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:25.179158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:25.179262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:25.179309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:25.179348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:25.179396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:25.179426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:25.179496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-10T11:36:25.179588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:25.179983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:25.270388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:25.270466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.289839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:25.290170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:25.290374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:25.300460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:25.300749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:25.301664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:25.302094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:25.304669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:25.306308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:25.306402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:25.306654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:25.306724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:25.306775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:25.306992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.317883Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:25.477682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:25.477905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.478084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:25.478358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:25.478421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.483655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:25.483812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:25.484066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.484138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:25.484197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:25.484249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:25.486781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.486845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:25.486891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:25.488842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.488921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.489026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:25.489087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:25.492808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:25.495062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:25.495249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:25.496371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:25.496505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:25.496547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:25.496771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-10T11:36:25.496816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:25.496980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:25.497046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:25.498811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:25.498848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:25.499031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:25.499091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:25.499442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:25.499492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:25.499604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:25.499644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:25.499687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:25.499719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:25.499758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:25.499805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:25.499844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:25.499877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:25.499945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:25.499987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:25.500021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:25.502544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:25.502706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:25.502775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-04-10T11:36:26.150384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-10T11:36:26.150439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-10T11:36:26.150499Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-04-10T11:36:26.150579Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-10T11:36:26.150880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:36:26.150927Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-10T11:36:26.151000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:36:26.151112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:26.153441Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:36:26.153523Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 
281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:36:26.153920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-10T11:36:26.154054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:36:26.154207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-10T11:36:26.154243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-10T11:36:26.154290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-10T11:36:26.154554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:26.154634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:26.154679Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-10T11:36:26.154734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-10T11:36:26.156966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-10T11:36:26.157019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-10T11:36:26.157114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:36:26.157165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:36:26.157199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-10T11:36:26.157226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:36:26.157252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-10T11:36:26.157304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-04-10T11:36:26.157342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-10T11:36:26.157386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710760:0 2025-04-10T11:36:26.157439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-10T11:36:26.157497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-10T11:36:26.159169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-10T11:36:26.159231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-10T11:36:26.159372Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-10T11:36:26.159459Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:36:26.161243Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:36:26.161338Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:36:26.161387Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-10T11:36:26.162977Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-10T11:36:26.163055Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-10T11:36:26.163121Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-10T11:36:26.163234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:36:26.163272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:473:2433] TestWaitNotification: OK eventTxId 102 >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> SystemView::AuthGroupMembers_Access [GOOD] >> SystemView::AuthGroupMembers_ResultOrder >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> BsControllerTest::SelfHealBlock4Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-04-10T11:36:19.162104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646942162727272:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:19.162259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fac/r3tmp/tmpuLnqLB/pdisk_1.dat 2025-04-10T11:36:19.630235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:19.639881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:19.639973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:19.643746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9838 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:19.938273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:19.965438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:20.128851Z node 1 :TX_PROXY ERROR: Actor# [1:7491646946457695257:2365] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-10T11:36:20.131902Z node 1 :TX_PROXY ERROR: Actor# [1:7491646946457695270:2371] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-04-10T11:36:20.135093Z node 1 :TX_PROXY ERROR: Actor# [1:7491646946457695276:2376] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-10T11:36:20.137932Z node 1 :TX_PROXY ERROR: Actor# [1:7491646946457695282:2381] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-10T11:36:22.806789Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646954973093882:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:22.806906Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fac/r3tmp/tmpZQ6aUV/pdisk_1.dat 2025-04-10T11:36:22.983692Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:23.002719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:23.002795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:23.006923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14726 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:23.200306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.208223Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.225557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TCdcStreamTests::VirtualTimestamps >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> DstCreator::Basic >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> SystemView::AuthUsers_ResultOrder [GOOD] >> SystemView::AuthUsers_TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-04-10T11:36:25.429766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646966523701187:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:25.429847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014a2/r3tmp/tmpJcLaUB/pdisk_1.dat 2025-04-10T11:36:25.909423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.922482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.923058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.933859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20226 TServer::EnableGrpc on GrpcPort 21312, node 1 2025-04-10T11:36:26.218895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:26.218918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:26.218936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:26.219090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:26.754567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.770680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:26.781460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284987123 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986808 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284987123 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-04-10T11:36:27.215922Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:27.216091Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:27.216120Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:27.217200Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:28.737818Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284987123, tx_id: 281474976710658 } } } 2025-04-10T11:36:28.738384Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:28.740995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T11:36:28.747290Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:36:28.747318Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T11:36:28.786738Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T11:36:28.788168Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988824 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { 
Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForc ... 943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-04-10T11:36:28.801014Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 
1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls request: /Root/Dir/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988824 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988824 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988824 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... 
(TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988824 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" >> DstCreator::WithSyncIndex [GOOD] >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-04-10T11:36:12.201688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646911659459281:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:12.201747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb6/r3tmp/tmpF62K1Y/pdisk_1.dat 2025-04-10T11:36:12.540284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:12.621616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:12.621755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:12.624118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3425 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:36:12.803985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:12.837070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.962867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:13.006392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:36:17.202398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646911659459281:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:17.202500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb6/r3tmp/tmpqmtqq7/pdisk_1.dat 2025-04-10T11:36:21.605092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:21.606554Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:21.635469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:21.635591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:21.637309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:21.851868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:21.857634Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:21.873139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:21.924014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.010500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-04-10T11:36:26.557122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646970335603896:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:26.557185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00149b/r3tmp/tmpiNsXtE/pdisk_1.dat 2025-04-10T11:36:27.012771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:27.019138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:27.019254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:27.024691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11919 TServer::EnableGrpc on GrpcPort 62550, node 1 2025-04-10T11:36:27.259364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:27.259381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:27.259385Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:27.259491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:36:27.627894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.652491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284987977 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284987690 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284987977 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-04-10T11:36:28.004744Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:28.004978Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:28.005020Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:28.005811Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:29.886377Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284987977, tx_id: 281474976710658 } } } 2025-04-10T11:36:29.886847Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:29.890297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:29.893312Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:36:29.893336Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T11:36:29.934097Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T11:36:29.935496Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284989972 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { 
Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Gener ... 
pliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-04-10T11:36:29.952434Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex 
CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284989972 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284989972 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284989972 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284989972 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> TLocksTest::BrokenSameShardLock [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> DstCreator::NonExistentSrc >> SystemView::CollectScriptingQueries [GOOD] >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DstCreator::WithAsyncIndex [GOOD] >> DstCreator::EmptyReplicationConfig [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] >> THiveTest::TestCheckSubHiveMigration [GOOD] >> 
THiveTest::TestCheckSubHiveMigrationManyTablets >> DstCreator::SameOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-04-10T11:31:58.959559Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:59.104915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:59.130626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:59.130917Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:59.140872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:59.141101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:59.141351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:59.141462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:59.141559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:59.141666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:59.141840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:59.141966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:59.142082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:59.142415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.142538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:59.142644Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:59.173497Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:59.173723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:59.173775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:59.173961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.174106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:59.174217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:59.174262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:59.174344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:59.174402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:59.174444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:59.174471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:59.174682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:59.174753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:59.174790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:59.174821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:59.174896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:59.174941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:59.174996Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:59.175032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:59.175106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:59.175152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:59.175182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:59.175231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:59.175265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:59.175291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:59.175651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-10T11:31:59.175743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T11:31:59.175814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-10T11:31:59.175914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-10T11:31:59.176091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:59.176147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:59.176180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:59.176407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:59.176452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.176480Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:59.176633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:59.176677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:59.176703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:59.176869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:59.176916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:59.176953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:59.177113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:59.177160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:59.177200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-10T11:36:31.766269Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:10778:12405];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:31.769008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10778:12405];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-04-10T11:35:49.169041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646810946707450:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:49.169368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmplb6NlV/pdisk_1.dat 2025-04-10T11:35:49.467037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:49.499993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:49.500110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:49.502236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16618 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:35:49.769909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:49.814295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:49.940072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:35:50.017829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:52.582825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646826013126074:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:52.587051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpDGIxV1/pdisk_1.dat 2025-04-10T11:35:52.814908Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:52.890704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:52.890801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:52.893207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16538 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:53.043835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:53.070560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:53.144327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:53.224618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:56.442076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646842295883259:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:56.443443Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpNWpVNR/pdisk_1.dat 2025-04-10T11:35:56.625270Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:56.628547Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:56.628633Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:56.629940Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26964 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:56.841018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:56.850789Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:56.860054Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:35:56.864919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:56.946654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:57.000184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:36:00.334019Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646858712478910:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:00.334124Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpCg4wJ2/pdisk_1.dat 2025-04-10T11:36:00.448563Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:00.477165Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:00.477250Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:00.478652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28724 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:00.697005Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubD ... ecting -> Connected TClient is connected to server localhost:26513 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:36:12.734525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:12.751754Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.813656Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.888747Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:17.199897Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646931842300936:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:17.200112Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpbsFp4e/pdisk_1.dat 2025-04-10T11:36:17.527545Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:17.549518Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:17.549639Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:17.552017Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28822 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:17.832862Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:17.841458Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:36:17.867155Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:17.965207Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.053595Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.525122Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646955035726063:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:22.525965Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpNzuX89/pdisk_1.dat 2025-04-10T11:36:22.700686Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:22.733231Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:22.733339Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:22.734927Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14226 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:23.040159Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.052570Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.067796Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:23.159692Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.234942Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.353756Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646977212889507:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:27.353882Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fdc/r3tmp/tmpHaxYrK/pdisk_1.dat 2025-04-10T11:36:27.533189Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:27.554237Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:27.554342Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:27.556066Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30185 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:27.819520Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.826623Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.840267Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.912755Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:27.987821Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-04-10T11:36:25.430446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646965738458882:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:25.430567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014b0/r3tmp/tmp4uVFNH/pdisk_1.dat 2025-04-10T11:36:25.964129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.964231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.966561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.969852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21254 TServer::EnableGrpc on GrpcPort 7730, node 1 2025-04-10T11:36:26.225086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:26.225125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:26.225133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:26.225302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:26.750128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:26.767124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:26.785315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.916485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986808 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284986990 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986808 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284986990 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:26.956890Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.957054Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.957145Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:26.957733Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:28.684486Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284986913, tx_id: 281474976710658 } } } 2025-04-10T11:36:28.684871Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:28.687331Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:28.689462Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284986990 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIop ... 68897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62271 TServer::EnableGrpc on GrpcPort 63578, node 2 2025-04-10T11:36:29.650908Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:29.650932Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:29.650940Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:29.651071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62271 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:29.983548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:29.997689Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:36:30.005129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.062707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990035 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990175 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990035 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990175 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:30.138686Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.138845Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.138864Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:30.139451Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:32.593517Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284990091, tx_id: 281474976715658 } } } 2025-04-10T11:36:32.593807Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:32.595359Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:32.596337Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990175 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:36:32.596509Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-04-10T11:35:30.296020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646730430709631:2244];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:30.296127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f98/r3tmp/tmpoRpoLs/pdisk_1.dat 2025-04-10T11:35:30.761646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.761757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.766280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:30.782926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32599, node 1 2025-04-10T11:35:30.829547Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:35:30.841348Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:35:30.881200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.881232Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.881286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.881458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.271106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.301235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:31.326037Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491646734278021590:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.326137Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:31.327509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.327608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.334633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.336881Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 waiting... 2025-04-10T11:35:31.360300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.360384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.362656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:35:31.393628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.409806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.630117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.660390Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646737034207679:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.660942Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:31.668148Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646737375916521:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.668209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:31.704703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.705531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.705598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.712068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 waiting... 
2025-04-10T11:35:31.713447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.735783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.740447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.750986Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:35:31.762732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:34.707081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:34.891914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646747610580013:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:34.892003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:34.892143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646747610580025:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:34.896621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-10T11:35:34.938047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646747610580027:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:35:35.007405Z node 1 :TX_PROXY ERROR: Actor# [1:7491646747610580100:3014] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:35.293305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646730430709631:2244];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:35.293385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:35.569310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqqrq984fkgcfzn6dzkpt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY4YTVjM2ItOTE0MTE4MzQtY2ZkYjE0MzUtMjlkM2QwNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:35.625181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:35.914916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfqqsn76btzetj416hhbq74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY4YTVjM2ItOTE0MTE4MzQtY2ZkYjE0MzUtMjlkM2QwNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:35.941529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:35:36.119274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jrfqqsvs82jznk6dmgw08pzb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY4YTVjM2ItOTE0MTE4MzQtY2ZkYjE0MzUtMjlkM2QwNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.299907Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jrfqqsyk615c1g71qsvaa69b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU0MzdiZWUtOTQxNTNjNzEtZTkzZjhhY2ItN2JhZjhkODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.313260Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646756200514991:2380], owner: [1:7491646756200514987:2378], scan id: 0, table id: [72057594046644480:2:0:nodes] 2025-04-10T11:35:36.314733Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646756200514991:2380], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057 ... xist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:20.315929Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqs4pf9gdy331yz5dzqpf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=MmIzNWVhOGMtMmI1ZWMyYTEtMmU0NjNiYy1jNjQyZGQyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:36:20.352239Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-10T11:36:20.757595Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqs5bq83ka307atzweh192, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=MmIzNWVhOGMtMmI1ZWMyYTEtMmU0NjNiYy1jNjQyZGQyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:20.804217Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-10T11:36:21.366041Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfqs6118bymxgwrr3zs4m2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=MmIzNWVhOGMtMmI1ZWMyYTEtMmU0NjNiYy1jNjQyZGQyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:21.443557Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied 2025-04-10T11:36:21.464216Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied 2025-04-10T11:36:21.475551Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
: Error: Access denied 2025-04-10T11:36:21.496390Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied 2025-04-10T11:36:21.519212Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied 2025-04-10T11:36:21.538915Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2025-04-10T11:36:21.582509Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 29 2025-04-10T11:36:21.583048Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:21.583193Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28 2025-04-10T11:36:21.583671Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:21.583863Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 30 2025-04-10T11:36:21.584323Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:21.587584Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-10T11:36:21.588248Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:21.595969Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[27:7491646918824163152:2107], Type=268959746 2025-04-10T11:36:21.596026Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[27:7491646918824163152:2107], Type=268959746 2025-04-10T11:36:21.596049Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[27:7491646918824163152:2107], Type=268959746 2025-04-10T11:36:21.596088Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[27:7491646918824163152:2107], Type=268959746 2025-04-10T11:36:24.810619Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7491646962628372339:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:24.810716Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f98/r3tmp/tmpDQtghL/pdisk_1.dat 2025-04-10T11:36:24.994535Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.047701Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.047864Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.050293Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25405, node 31 2025-04-10T11:36:25.128921Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:25.128971Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:25.128984Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:25.129176Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:25.648212Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.662464Z node 31 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.674983Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:29.810739Z node 31 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[31:7491646962628372339:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:29.810863Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:36:30.605556Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7491646988398176845:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:30.605556Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7491646988398176839:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:30.605650Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:30.612030Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:36:30.634353Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7491646988398176853:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:36:30.722492Z node 31 :TX_PROXY ERROR: Actor# [31:7491646988398176904:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:30.832110Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqsf4b2jbnn05vk0qjardc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=Y2U4NGIyZTgtNWMyNGY0NGQtOTVlNTA5M2YtM2FlYzMzNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:31.105071Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfqsffd2p13r0dfp4stgkv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=YzlmNGJlZGUtMTZjY2Q4MTYtMjM3MzAzY2EtNjdlM2QzOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:31.146526Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284991148, txId: 281474976710662] shutting down 2025-04-10T11:36:31.407796Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqsfph11amnrtqs5t61n51, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=NTQ4MjI0YWYtODQxYWZiMC0yMDAyOWYxOC1iNzAzNDUwYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:31.410918Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7491646992693144332:2372], owner: [31:7491646992693144328:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:36:31.411546Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7491646992693144332:2372], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:31.412165Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491646992693144332:2372], row count: 2, finished: 1 2025-04-10T11:36:31.412216Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7491646992693144332:2372], owner: [31:7491646992693144328:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:36:31.415186Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284991406, txId: 281474976710664] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-04-10T11:36:25.438377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646966711585207:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:25.438908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014b7/r3tmp/tmpi7hHLv/pdisk_1.dat 2025-04-10T11:36:25.963922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.964089Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.964594Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.969864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11519 TServer::EnableGrpc on GrpcPort 1266, node 1 2025-04-10T11:36:26.266892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:26.266931Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:26.266946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:26.267142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:26.738378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.765817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284986913 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986801 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284986913 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-10T11:36:27.013374Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:27.013511Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:27.013559Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:27.014140Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:28.644250Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284986913, tx_id: 281474976710658 } } } 2025-04-10T11:36:28.644501Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:28.645981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:28.646629Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:36:28.646651Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T11:36:28.672954Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-10T11:36:28.672986Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988712 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-10T11:36:29.286876Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646986511224875:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:29.286975Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014b7/r3tmp/tmp80JgOT/pdisk_1.dat 2025-04-10T11:36:29.405694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:29.432176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:29.432266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:29.433588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30090 TServer::EnableGrpc on GrpcPort 26071, node 2 2025-04-10T11:36:29.644619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:29.644645Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:29.644653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:29.644751Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30090 WaitRootIsUp 'Root'... 
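The DstCreator trace above follows one happy-path round trip: TEvDescribeTableResponse, TEvAllocateTxIdResult, a proposed ESchemeOpCreateTable answered with StatusAccepted, a tx subscription, and TEvNotifyTxCompletionResult ending in "Success: dstPathId# ...". The sketch below compresses that sequence into plain functions under stated assumptions: the types and helpers (TTableDesc, DescribeSrcTable, and so on) are invented for illustration and are not the NKikimr::NReplication classes; only the step order and the txid value are taken from the log.

```cpp
// Hedged sketch of the DstCreator happy path traced above.
#include <iostream>
#include <optional>
#include <string>

struct TTableDesc { std::string Name; };

// Each function stands in for one actor round-trip from the log.
std::optional<TTableDesc> DescribeSrcTable() { return TTableDesc{"Table"}; }
unsigned long long AllocateTxId() { return 281474976710659ULL; } // txid from the log
bool ProposeCreateTable(unsigned long long txId, const TTableDesc&) {
    std::cout << "TEvModifySchemeTransactionResult Status# StatusAccepted txid# "
              << txId << "\n";
    return true;
}
void SubscribeAndWait(unsigned long long txId) {
    std::cout << "TEvNotifyTxCompletionResult TxId: " << txId << "\n";
}

int main() {
    auto desc = DescribeSrcTable();          // TEvDescribeTableResponse
    if (!desc) return 1;
    auto txId = AllocateTxId();              // TEvAllocateTxIdResult
    if (ProposeCreateTable(txId, *desc)) {   // scheme op proposed and accepted
        SubscribeAndWait(txId);              // completion notification
        std::cout << "Success: dst table created\n";
    }
}
```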
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:29.964470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:29.971202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:36:29.974280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.056901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990014 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990168 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990014 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990168 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:30.140782Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.141033Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.141057Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:30.142122Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:32.790282Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284990084, tx_id: 281474976715658 } } } 2025-04-10T11:36:32.790583Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:32.792203Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:32.793187Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990168 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:36:32.793424Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-04-10T11:36:25.430063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646966611977932:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:25.430124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014c1/r3tmp/tmpBEWsvR/pdisk_1.dat 2025-04-10T11:36:25.900465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:25.922484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.923050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.932253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24357 TServer::EnableGrpc on GrpcPort 12340, node 1 2025-04-10T11:36:26.218642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:26.218666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:26.218674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:26.218800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:26.754226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.772207Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:26.775690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
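The KeyColumnNameMismatch failure above ends with "status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value": the existing /Root/Dst declares KeyColumnNames: "value" while the source keys on "key". A minimal sketch of such a positional key-column comparison, assuming plain string vectors rather than the real schema types:

```cpp
// Illustrative check behind the "Key column name mismatch" error:
// key columns are compared position by position between source and destination.
// Assumed logic, not the actual DstCreator code.
#include <iostream>
#include <string>
#include <vector>

std::string CheckKeyColumns(const std::vector<std::string>& expected,
                            const std::vector<std::string>& got) {
    if (expected.size() != got.size())
        return "Key column count mismatch";
    for (size_t i = 0; i < expected.size(); ++i) {
        if (expected[i] != got[i])
            return "Key column name mismatch: position: " + std::to_string(i) +
                   ", expected: " + expected[i] + ", got: " + got[i];
    }
    return {}; // empty string means the key layouts agree
}

int main() {
    // Source keys the replica must match vs. what /Root/Dst actually declares
    // (KeyColumnNames: "value" in the description above).
    auto err = CheckKeyColumns({"key"}, {"value"});
    if (!err.empty())
        std::cout << "status# StatusSchemeError, reason# " << err << "\n";
}
```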
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284986913 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986815 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284986913 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-10T11:36:26.966082Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.966238Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.966271Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:26.966915Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:28.401967Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284986913, tx_id: 281474976710658 } } } 2025-04-10T11:36:28.403135Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:28.405033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-10T11:36:28.405907Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:36:28.405931Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T11:36:28.441524Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T11:36:28.441576Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284988481 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) 2025-04-10T11:36:29.205200Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646986153057569:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:29.205297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014c1/r3tmp/tmpjBRdH4/pdisk_1.dat 2025-04-10T11:36:29.304674Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:29.341330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:29.341422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:29.346990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17215 TServer::EnableGrpc on GrpcPort 13560, node 2 2025-04-10T11:36:29.545100Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:29.545121Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:29.545132Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:29.545277Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:29.886491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:29.893364Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:36:29.898546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744284990308 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284989937 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744284990308 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-04-10T11:36:30.318313Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.318494Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.318512Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:30.319174Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:32.670619Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284990308, tx_id: 281474976715658 } } } 2025-04-10T11:36:32.670994Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:32.672887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:32.674316Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-10T11:36:32.674339Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-10T11:36:32.712412Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-10T11:36:32.712437Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284992751 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-04-10T11:36:25.448710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646967087900851:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:25.449042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0014cf/r3tmp/tmpzKn5A8/pdisk_1.dat 2025-04-10T11:36:25.940986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:25.941159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:25.944490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:25.970418Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65003 TServer::EnableGrpc on GrpcPort 29210, node 1 2025-04-10T11:36:26.219968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:26.219998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:26.220010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:26.220126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:26.773894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.810644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:26.820906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
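Both failing runs in this section take the same branch: the CreateTable proposal comes back with StatusAlreadyExists ("path exist, request accepts it"), so DstCreator describes the existing destination and validates it against the source. In the EmptyReplicationConfig run that follows, the described table carries no ReplicationConfig block at all (compare the earlier dump, which has ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ... }). A sketch of that validation under stated assumptions; TExistingTable is invented, and the "Empty replication config" wording is an assumption, since this section is cut off before the test prints its final error:

```cpp
// Hedged sketch of validating an already-existing destination table.
// Not actual YDB code; the error strings are assumptions for illustration.
#include <iostream>
#include <optional>
#include <string>

struct TExistingTable {
    std::string Path;
    std::optional<std::string> ReplicationMode; // e.g. "REPLICATION_MODE_READ_ONLY"
};

std::string ValidateExistingDst(const TExistingTable& t) {
    if (!t.ReplicationMode)
        return "Empty replication config"; // assumed wording
    if (*t.ReplicationMode != "REPLICATION_MODE_READ_ONLY")
        return "Replication mode mismatch";
    return {};
}

int main() {
    // The final dump in this section shows /Root/Dst without any ReplicationConfig.
    TExistingTable dst{"/Root/Dst", std::nullopt};
    auto err = ValidateExistingDst(dst);
    if (!err.empty())
        std::cout << "status# StatusSchemeError, reason# " << err << "\n";
}
```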
2025-04-10T11:36:26.944446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986836 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284987011 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284986836 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284987011 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:26.982003Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.982067Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:26.982084Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:26.982969Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:28.729871Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284986941, tx_id: 281474976710658 } } } 2025-04-10T11:36:28.730241Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:28.731890Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:28.733995Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284987011 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteI ... 
Disconnected 2025-04-10T11:36:29.698560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:29.704415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27120 TServer::EnableGrpc on GrpcPort 25424, node 2 2025-04-10T11:36:29.958128Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:29.958164Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:29.958169Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:29.958282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:30.293805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.299983Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:36:30.302818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.371927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990441 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284990343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990441 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-10T11:36:30.402687Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.402925Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:30.402948Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:30.403466Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:32.706345Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284990413, tx_id: 281474976715658 } } } 2025-04-10T11:36:32.706692Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:32.708116Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:32.709391Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284990441 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 
ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480
2025-04-10T11:36:32.709568Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config
>> TCdcStreamTests::TopicPartitions [GOOD]
>> TCdcStreamTests::ReplicationAttribute
>> TCdcStreamTests::DropStream [GOOD]
>> TCdcStreamTests::AlterStreamImplShouldFail
>> TVectorIndexTests::CreateTableWithError
>> DstCreator::ColumnsSizeMismatch
|96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> DstCreator::NonExistentSrc [GOOD]
>> DstCreator::KeyColumnsSizeMismatch
>> BsControllerTest::SelfHealBlock4Plus2 [GOOD]
>> TCdcStreamTests::ReplicationAttribute [GOOD]
>> TCdcStreamTests::RebootSchemeShard
>> TCdcStreamTests::AlterStreamImplShouldFail [GOOD]
>> TCdcStreamTests::DropStreamImplShouldFail
>> TVectorIndexTests::CreateTableWithError [GOOD]
>> TKeyValueTest::TestGetStatusWorks [GOOD]
>> TLocksTest::Range_EmptyKey [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD]
Test command err: 2025-04-10T11:36:28.351132Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-10T11:36:28.351191Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-10T11:36:28.351275Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-10T11:36:28.351318Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-10T11:36:28.351379Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-10T11:36:28.351400Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-10T11:36:28.351435Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-10T11:36:28.351456Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-10T11:36:28.351490Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-10T11:36:28.351510Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-10T11:36:28.351568Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-10T11:36:28.351589Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-10T11:36:28.351622Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-10T11:36:28.351643Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-10T11:36:28.351685Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-10T11:36:28.351706Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-10T11:36:28.351738Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-10T11:36:28.351759Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-10T11:36:28.351807Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-10T11:36:28.351831Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-10T11:36:28.351881Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-10T11:36:28.351909Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-10T11:36:28.351947Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap
2025-04-10T11:36:28.351969Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-10T11:36:28.352021Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-10T11:36:28.352043Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-10T11:36:28.352077Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-10T11:36:28.352098Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-10T11:36:28.352132Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-10T11:36:28.352152Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-10T11:36:28.352189Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-10T11:36:28.352209Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-10T11:36:28.352247Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-10T11:36:28.352268Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-10T11:36:28.352339Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-10T11:36:28.352383Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-10T11:36:28.352436Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-10T11:36:28.352459Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-10T11:36:28.352492Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-10T11:36:28.352512Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-10T11:36:28.352565Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-10T11:36:28.352588Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-10T11:36:28.352628Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-10T11:36:28.352648Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-10T11:36:28.352683Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-10T11:36:28.352713Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-10T11:36:28.352755Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-10T11:36:28.352776Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-10T11:36:28.352832Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-10T11:36:28.352857Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-10T11:36:28.352893Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-10T11:36:28.352919Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-10T11:36:28.352980Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-10T11:36:28.353006Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-10T11:36:28.353045Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-10T11:36:28.353070Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-10T11:36:28.353105Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-10T11:36:28.353126Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-10T11:36:28.353161Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-10T11:36:28.353183Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-10T11:36:28.353221Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-10T11:36:28.353241Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-10T11:36:28.353278Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-10T11:36:28.353299Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-10T11:36:28.368198Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2025-04-10T11:36:28.369400Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# 
[2:2158:37] 2025-04-10T11:36:28.369459Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2025-04-10T11:36:28.369514Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2025-04-10T11:36:28.369557Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2025-04-10T11:36:28.369599Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2025-04-10T11:36:28.369653Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2025-04-10T11:36:28.369695Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2025-04-10T11:36:28.369737Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2025-04-10T11:36:28.369780Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2025-04-10T11:36:28.369819Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2025-04-10T11:36:28.369855Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2025-04-10T11:36:28.369895Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2025-04-10T11:36:28.369935Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2025-04-10T11:36:28.369994Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2025-04-10T11:36:28.370032Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2025-04-10T11:36:28.370069Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2025-04-10T11:36:28.370115Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2025-04-10T11:36:28.370186Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2025-04-10T11:36:28.370241Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2025-04-10T11:36:28.370280Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2025-04-10T11:36:28.370329Z 22 
00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2025-04-10T11:36:28.370377Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2025-04-10T11:36:28.370415Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2025-04-10T11:36:28.370451Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2025-04-10T11:36:28.370492Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2025-04-10T11:36:28.370543Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2025-04-10T11:36:28.370583Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2025-04-10T11:36:28.370621Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2025-04-10T11:36:28.370668Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2025-04-10T11:36:28.370733Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2025-04-10T11:36:28.370773Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2025-04-10T11:36:28.516778Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.115900s 2025-04-10T11:36:28.516920Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.116068s 2025-04-10T11:36:28.527046Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2257:73] expected 1 current 0 2025-04-10T11:36:28.527123Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2258:38] expected 1 current 0 2025-04-10T11:36:28.527156Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2259:38] expected 1 current 0 2025-04-10T11:36:28.527191Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2260:38] expected 1 current 0 2025-04-10T11:36:28.527236Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2261:38] expected 1 current 0 2025-04-10T11:36:28.527274Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2262:38] expected 1 current 0 2025-04-10T11:36:28.527321Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7 ... 
5m00.117408s :BS_NODE DEBUG: [28] VDiskId# [80000033:3:0:3:0] -> [80000033:4:0:3:0] 2025-04-10T11:36:36.114266Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:36.114310Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] VDiskId# [80000033:3:0:4:0] -> [80000033:4:0:4:0] 2025-04-10T11:36:36.114362Z 30 05h15m00.117408s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.114424Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:36:36.114468Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] VDiskId# [80000033:3:0:6:0] -> [80000033:4:0:6:0] 2025-04-10T11:36:36.114578Z 18 05h15m00.117408s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-10T11:36:36.114622Z 18 05h15m00.117408s :BS_NODE DEBUG: [18] VDiskId# [80000012:5:0:1:0] -> [80000012:6:0:1:0] 2025-04-10T11:36:36.114699Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-10T11:36:36.114730Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000012:6:0:5:0] PDiskId# 1001 VSlotId# 1014 created 2025-04-10T11:36:36.114783Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000012:6:0:5:0] status changed to INIT_PENDING 2025-04-10T11:36:36.114858Z 20 05h15m00.117408s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-10T11:36:36.114903Z 20 05h15m00.117408s :BS_NODE DEBUG: [20] VDiskId# [80000012:5:0:3:0] -> [80000012:6:0:3:0] 2025-04-10T11:36:36.114977Z 23 05h15m00.117408s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-10T11:36:36.115021Z 23 05h15m00.117408s :BS_NODE DEBUG: [23] VDiskId# [80000012:5:0:6:0] -> [80000012:6:0:6:0] 2025-04-10T11:36:36.115091Z 24 05h15m00.117408s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-10T11:36:36.115133Z 24 05h15m00.117408s :BS_NODE DEBUG: [24] VDiskId# [80000012:5:0:7:0] -> [80000012:6:0:7:0] 2025-04-10T11:36:36.115205Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:36.115248Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] VDiskId# [80000012:5:0:0:0] -> [80000012:6:0:0:0] 2025-04-10T11:36:36.115300Z 30 05h15m00.117408s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.115363Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:36:36.115407Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] VDiskId# [80000012:5:0:4:0] -> [80000012:6:0:4:0] 2025-04-10T11:36:36.115478Z 14 05h15m00.117408s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-10T11:36:36.115524Z 14 05h15m00.117408s :BS_NODE DEBUG: [14] VDiskId# [80000012:5:0:2:0] -> [80000012:6:0:2:0] 2025-04-10T11:36:36.115631Z 17 05h15m00.117408s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:36.115677Z 17 05h15m00.117408s :BS_NODE DEBUG: [17] VDiskId# [80000023:3:0:1:0] -> [80000023:4:0:1:0] 2025-04-10T11:36:36.115754Z 18 05h15m00.117408s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-10T11:36:36.115802Z 18 05h15m00.117408s :BS_NODE DEBUG: [18] VDiskId# [80000023:3:0:7:0] -> [80000023:4:0:7:0] 2025-04-10T11:36:36.115880Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-10T11:36:36.115913Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000023:4:0:5:0] PDiskId# 1001 VSlotId# 1015 created 2025-04-10T11:36:36.115967Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000023:4:0:5:0] status changed to INIT_PENDING 2025-04-10T11:36:36.116042Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-10T11:36:36.116086Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] VDiskId# [80000023:3:0:0:0] -> [80000023:4:0:0:0] 2025-04-10T11:36:36.116160Z 27 05h15m00.117408s :BS_NODE DEBUG: [27] 
NodeServiceSetUpdate 2025-04-10T11:36:36.116203Z 27 05h15m00.117408s :BS_NODE DEBUG: [27] VDiskId# [80000023:3:0:2:0] -> [80000023:4:0:2:0] 2025-04-10T11:36:36.116270Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-10T11:36:36.116316Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] VDiskId# [80000023:3:0:3:0] -> [80000023:4:0:3:0] 2025-04-10T11:36:36.116387Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:36.116432Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] VDiskId# [80000023:3:0:4:0] -> [80000023:4:0:4:0] 2025-04-10T11:36:36.116488Z 30 05h15m00.117408s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.116551Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:36:36.116594Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] VDiskId# [80000023:3:0:6:0] -> [80000023:4:0:6:0] 2025-04-10T11:36:36.116719Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-10T11:36:36.116755Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000013:4:0:5:0] PDiskId# 1001 VSlotId# 1016 created 2025-04-10T11:36:36.116806Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000013:4:0:5:0] status changed to INIT_PENDING 2025-04-10T11:36:36.116886Z 22 05h15m00.117408s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-10T11:36:36.116936Z 22 05h15m00.117408s :BS_NODE DEBUG: [22] VDiskId# [80000013:3:0:1:0] -> [80000013:4:0:1:0] 2025-04-10T11:36:36.117034Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-10T11:36:36.117078Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] VDiskId# [80000013:3:0:0:0] -> [80000013:4:0:0:0] 2025-04-10T11:36:36.117151Z 27 05h15m00.117408s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-10T11:36:36.117198Z 27 05h15m00.117408s :BS_NODE DEBUG: [27] VDiskId# [80000013:3:0:2:0] -> [80000013:4:0:2:0] 2025-04-10T11:36:36.117273Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-10T11:36:36.117318Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] VDiskId# [80000013:3:0:3:0] -> [80000013:4:0:3:0] 2025-04-10T11:36:36.117392Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:36.117436Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] VDiskId# [80000013:3:0:4:0] -> [80000013:4:0:4:0] 2025-04-10T11:36:36.117493Z 30 05h15m00.117408s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.117557Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:36:36.117599Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] VDiskId# [80000013:3:0:6:0] -> [80000013:4:0:6:0] 2025-04-10T11:36:36.117709Z 17 05h15m00.117408s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-10T11:36:36.117757Z 17 05h15m00.117408s :BS_NODE DEBUG: [17] VDiskId# [80000003:3:0:1:0] -> [80000003:4:0:1:0] 2025-04-10T11:36:36.117834Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-10T11:36:36.117869Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000003:4:0:5:0] PDiskId# 1001 VSlotId# 1017 created 2025-04-10T11:36:36.117923Z 19 05h15m00.117408s :BS_NODE DEBUG: [19] VDiskId# [80000003:4:0:5:0] status changed to INIT_PENDING 2025-04-10T11:36:36.118005Z 2 05h15m00.117408s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-10T11:36:36.118058Z 2 05h15m00.117408s :BS_NODE DEBUG: [2] VDiskId# [80000003:3:0:7:0] -> [80000003:4:0:7:0] 2025-04-10T11:36:36.118135Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-10T11:36:36.118204Z 25 05h15m00.117408s :BS_NODE DEBUG: [25] VDiskId# [80000003:3:0:0:0] -> [80000003:4:0:0:0] 2025-04-10T11:36:36.118282Z 
27 05h15m00.117408s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-10T11:36:36.118327Z 27 05h15m00.117408s :BS_NODE DEBUG: [27] VDiskId# [80000003:3:0:2:0] -> [80000003:4:0:2:0] 2025-04-10T11:36:36.118398Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-10T11:36:36.118442Z 28 05h15m00.117408s :BS_NODE DEBUG: [28] VDiskId# [80000003:3:0:3:0] -> [80000003:4:0:3:0] 2025-04-10T11:36:36.118515Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-04-10T11:36:36.118559Z 29 05h15m00.117408s :BS_NODE DEBUG: [29] VDiskId# [80000003:3:0:4:0] -> [80000003:4:0:4:0] 2025-04-10T11:36:36.118615Z 30 05h15m00.117408s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.118678Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-10T11:36:36.118721Z 31 05h15m00.117408s :BS_NODE DEBUG: [31] VDiskId# [80000003:3:0:6:0] -> [80000003:4:0:6:0] 2025-04-10T11:36:36.124010Z 19 05h15m01.713408s :BS_NODE DEBUG: [19] VDiskId# [8000001b:5:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.124840Z 19 05h15m02.273408s :BS_NODE DEBUG: [19] VDiskId# [8000003b:6:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.125653Z 19 05h15m02.694408s :BS_NODE DEBUG: [19] VDiskId# [80000003:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.126470Z 19 05h15m02.696408s :BS_NODE DEBUG: [19] VDiskId# [80000033:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.127263Z 19 05h15m02.946408s :BS_NODE DEBUG: [19] VDiskId# [80000012:6:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.128034Z 19 05h15m03.523408s :BS_NODE DEBUG: [19] VDiskId# [80000023:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.128777Z 19 05h15m04.346408s :BS_NODE DEBUG: [19] VDiskId# [8000000b:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.129538Z 19 05h15m04.350408s :BS_NODE DEBUG: [19] VDiskId# [8000002b:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.130329Z 19 05h15m04.397408s :BS_NODE DEBUG: [19] VDiskId# [80000013:4:0:5:0] status changed to REPLICATING 2025-04-10T11:36:36.131924Z 19 05h15m08.201408s :BS_NODE DEBUG: [19] VDiskId# [8000003b:6:0:5:0] status changed to READY 2025-04-10T11:36:36.133373Z 30 05h15m08.201920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.133440Z 30 05h15m08.201920s :BS_NODE DEBUG: [30] VDiskId# [8000003b:5:0:5:0] destroyed 2025-04-10T11:36:36.133851Z 19 05h15m10.355408s :BS_NODE DEBUG: [19] VDiskId# [8000000b:4:0:5:0] status changed to READY 2025-04-10T11:36:36.135203Z 30 05h15m10.355920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.135267Z 30 05h15m10.355920s :BS_NODE DEBUG: [30] VDiskId# [8000000b:3:0:5:0] destroyed 2025-04-10T11:36:36.135413Z 19 05h15m10.711408s :BS_NODE DEBUG: [19] VDiskId# [8000001b:5:0:5:0] status changed to READY 2025-04-10T11:36:36.136611Z 30 05h15m10.711920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.136664Z 30 05h15m10.711920s :BS_NODE DEBUG: [30] VDiskId# [8000001b:4:0:5:0] destroyed 2025-04-10T11:36:36.137413Z 19 05h15m15.419408s :BS_NODE DEBUG: [19] VDiskId# [80000023:4:0:5:0] status changed to READY 2025-04-10T11:36:36.138670Z 30 05h15m15.419920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.138728Z 30 05h15m15.419920s :BS_NODE DEBUG: [30] VDiskId# [80000023:3:0:5:0] destroyed 2025-04-10T11:36:36.139120Z 19 05h15m21.580408s :BS_NODE DEBUG: [19] VDiskId# [80000012:6:0:5:0] status changed to READY 2025-04-10T11:36:36.140316Z 30 05h15m21.580920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.140370Z 30 
05h15m21.580920s :BS_NODE DEBUG: [30] VDiskId# [80000012:5:0:5:0] destroyed 2025-04-10T11:36:36.141426Z 19 05h15m28.720408s :BS_NODE DEBUG: [19] VDiskId# [80000033:4:0:5:0] status changed to READY 2025-04-10T11:36:36.142638Z 30 05h15m28.720920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.142699Z 30 05h15m28.720920s :BS_NODE DEBUG: [30] VDiskId# [80000033:3:0:5:0] destroyed 2025-04-10T11:36:36.143642Z 19 05h15m32.134408s :BS_NODE DEBUG: [19] VDiskId# [80000013:4:0:5:0] status changed to READY 2025-04-10T11:36:36.144780Z 30 05h15m32.134920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.144831Z 30 05h15m32.134920s :BS_NODE DEBUG: [30] VDiskId# [80000013:3:0:5:0] destroyed 2025-04-10T11:36:36.145446Z 19 05h15m37.622408s :BS_NODE DEBUG: [19] VDiskId# [80000003:4:0:5:0] status changed to READY 2025-04-10T11:36:36.146573Z 30 05h15m37.622920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.146621Z 30 05h15m37.622920s :BS_NODE DEBUG: [30] VDiskId# [80000003:3:0:5:0] destroyed 2025-04-10T11:36:36.146739Z 19 05h15m37.690408s :BS_NODE DEBUG: [19] VDiskId# [8000002b:4:0:5:0] status changed to READY 2025-04-10T11:36:36.147786Z 30 05h15m37.690920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-10T11:36:36.147836Z 30 05h15m37.690920s :BS_NODE DEBUG: [30] VDiskId# [8000002b:3:0:5:0] destroyed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:36.506470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:36.506556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:36.506646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:36.506678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:36.506717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:36.506742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:36.506791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:36.506851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:36.507163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:36.580468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:36.580524Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:36:36.598773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:36.599023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:36.599211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:36.608027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:36.608242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:36.608935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:36.609332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:36.611223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:36.612506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:36.612574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:36.612767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:36.612813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:36.612857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:36.613024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.619502Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:36.740616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:36.740889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.741134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:36.741401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:36.741474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.743790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:36.743975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:36.744220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.744282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:36.744323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:36.744381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:36.746332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.746405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:36.746446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:36.748295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.748339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.748397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:36.748457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:36.752028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:36.754530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:36.754699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:36.755747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:36.755870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:36.755922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:36.756211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:36.756265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:36.756447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:36.756542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:36.758565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:36.758635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:36.758812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:36.758852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:36.759135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:36.759180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:36.759292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:36.759325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:36.759363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:36.759391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:36.759427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:36.759464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:36.759495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:36.759526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:36.759583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:36.759617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:36.759646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:36.761840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:36.761962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:36.762003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-10T11:36:36.762040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-10T11:36:36.762081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:36.762192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-10T11:36:36.765528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-10T11:36:36.766003Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-10T11:36:36.770206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:36.770639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-10T11:36:36.770823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-10T11:36:36.770862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-10T11:36:36.771193Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-04-10T11:36:36.787325Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-04-10T11:36:36.788172Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:36:36.791531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:36.791696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-10T11:36:36.794094Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-10T11:36:36.797275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:36.797661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-10T11:36:36.797791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-10T11:36:36.797850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-10T11:36:36.799904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:36.800064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors
TestModificationResult got TxId: 102, wait until txId: 102
>> DstCreator::SameOwner [GOOD]
>> DstCreator::SamePartitionCount
>> TFlatTest::LargeDatashardReplyRW [GOOD]
>> SystemView::TopPartitionsByCpuRanges [GOOD]
>> SystemView::TopPartitionsByCpuFollowers
>> TBlockBlobStorageTest::DelayedErrorsNotIgnored
|96.9%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log}
|96.9%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log}
>> DstCreator::CannotFindColumn [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:76:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:78:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:80:2057] recipient: [35:79:2110] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:82:2057] recipient: [35:79:2110] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! 
new actor is[35:81:2111] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:135:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:81:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:136:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:83:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:85:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! 
new actor is[38:84:2113] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:138:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:81:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:82:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:82:2112] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2112] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:86:2057] recipient: [40:82:2112] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! 
new actor is[40:85:2113] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] >> TSubscriberTest::NotifyUpdate >> TSubscriberTest::Sync >> TSubscriberTest::InvalidNotification >> TSubscriberTest::ReconnectOnFailure >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-04-10T11:36:38.400189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:38.403512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:38.403637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:38.403683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:38.403745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:38.405337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:38.405401Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.405445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-10T11:36:38.405500Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.409927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-10T11:36:38.410037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-04-10T11:36:38.410092Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
[main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-04-10T11:36:38.445792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:38.447872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:38.447970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:38.448003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:38.448061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:38.448127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:38.448156Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.448187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-10T11:36:38.448215Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.448371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-04-10T11:36:38.448408Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-04-10T11:36:30.537046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646987751094146:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:30.538710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001495/r3tmp/tmpfdk1Ms/pdisk_1.dat 2025-04-10T11:36:30.928795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:30.933835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-10T11:36:30.934003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:30.936912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24635 TServer::EnableGrpc on GrpcPort 6876, node 1 2025-04-10T11:36:31.188755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:31.188783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:31.188792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:31.188931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:31.597743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:31.638455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:31.646316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284991764 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284991666 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744284991764 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-10T11:36:31.798674Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:31.798870Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:31.798899Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:31.799556Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:33.590724Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284991764, tx_id: 281474976710658 } } } 2025-04-10T11:36:33.591054Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:33.592687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:33.593984Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-10T11:36:33.593998Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-10T11:36:33.621961Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-10T11:36:33.621995Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284993661 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-10T11:36:34.486479Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647005724725316:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:34.487342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001495/r3tmp/tmpABvpzj/pdisk_1.dat 2025-04-10T11:36:34.676041Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:34.701337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:34.701418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:34.703144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2045 TServer::EnableGrpc on GrpcPort 6943, node 2 2025-04-10T11:36:34.958821Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:34.958844Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:34.958858Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:34.958984Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2045 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:35.313261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:35.351372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:35.429892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284995362 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284995502 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284995362 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284995502 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:35.466084Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:35.466209Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:35.466225Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:35.466743Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:37.596785Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284995467, tx_id: 281474976715658 } } } 2025-04-10T11:36:37.597140Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:37.598560Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:37.599661Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284995502 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:36:37.599895Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-04-10T11:36:38.474049Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:38.477885Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:38.478012Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:38.478124Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:38.478309Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-10T11:36:38.478393Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-10T11:36:38.478436Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.478496Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-10T11:36:38.478548Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.481370Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-10T11:36:38.481453Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.481525Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-10T11:36:38.481560Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.481600Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-10T11:36:38.481634Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.493044Z node 2 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:38.493277Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-10T11:36:38.493350Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.493487Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:38.493553Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:38.493688Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-10T11:36:38.493726Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.493795Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-10T11:36:38.493840Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.494464Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-10T11:36:38.494609Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-04-10T11:36:38.494938Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-04-10T11:36:38.409349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:38.410897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-10T11:36:38.411003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-10T11:36:38.411034Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-10T11:36:38.411077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-10T11:36:38.411111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-10T11:36:38.411164Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.411274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-10T11:36:38.411332Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:38.411432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-10T11:36:38.411524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-10T11:36:38.411580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-10T11:36:38.411621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-10T11:36:38.411661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-10T11:36:38.411698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-10T11:36:38.411718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-10T11:36:38.411759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-10T11:36:38.411794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:36:38.411822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-10T11:36:38.411852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 
1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:36:38.411891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-10T11:36:38.411913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> YdbSdkSessions::TestActiveSessionCountAfterBadSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-04-10T11:35:54.055048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646833868286468:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:54.055156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpFDzCGj/pdisk_1.dat 2025-04-10T11:35:54.529218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:54.547776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:54.547860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:54.550260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27748 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:54.894114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:54.943831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:54.974854Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:35:54.985356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:55.189588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:55.261508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:57.823229Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646848784247623:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:57.823315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpbbUZJO/pdisk_1.dat 2025-04-10T11:35:57.943772Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:57.973973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:57.974050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:57.975548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14775 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:35:58.180421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:35:58.210411Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:35:58.221148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:58.324981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:58.406502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:01.417703Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646866419960614:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:01.417837Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpDGYcqj/pdisk_1.dat 2025-04-10T11:36:01.529889Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:01.557434Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:01.557525Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:01.558908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20411 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:01.752950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:01.769932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:01.820173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:01.897083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:05.370767Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646883036944785:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:05.371753Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpjrxg2k/pdisk_1.dat 2025-04-10T11:36:05.542831Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:05.574556Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:05.574646Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:05.576138Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27546 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 20 ... ecting -> Connected TClient is connected to server localhost:17881 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:18.628845Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.638916Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:18.657021Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.741931Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.825050Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.769299Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491646953914538051:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:22.769359Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmphJhsVo/pdisk_1.dat 2025-04-10T11:36:23.012302Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:23.033026Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:23.033286Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:23.034454Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61103 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:23.309935Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:23.331273Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.417432Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:23.481554Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.677271Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491646974800244674:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:27.677337Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpMbPp7T/pdisk_1.dat 2025-04-10T11:36:27.872828Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:27.874387Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:27.874501Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:27.878741Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17889 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:28.157991Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.165693Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.185608Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.255345Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.329737Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:32.567219Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491646997429390956:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:32.567268Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fd0/r3tmp/tmpRbWC3P/pdisk_1.dat 2025-04-10T11:36:32.784125Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:32.803858Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:32.803964Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:32.805544Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30472 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:33.115898Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:33.127941Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:33.152494Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:33.235010Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:33.328410Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-04-10T11:36:13.980219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646914393230357:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:13.980289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb1/r3tmp/tmp4atg1K/pdisk_1.dat 2025-04-10T11:36:14.545237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:14.566194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:14.566294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:14.568081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3872 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:14.846069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.870247Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.888690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.980554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646914393230357:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:18.980623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:36:25.899697Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) 2025-04-10T11:36:25.915988Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1744284985331:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-04-10T11:36:25.919754Z node 1 :TX_PROXY ERROR: Actor# [1:7491646965932844393:5954] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-04-10T11:36:25.919871Z node 1 :TX_PROXY ERROR: Actor# [1:7491646965932844393:5954] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-04-10T11:36:26.585488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646971346723824:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:26.587082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fb1/r3tmp/tmp7Zqy9j/pdisk_1.dat 2025-04-10T11:36:26.735694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:26.760386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:26.760605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:26.763726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4747 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:26.972704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:26.997224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:31.580688Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646971346723824:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:31.580751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:36:37.258122Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976711361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-10T11:36:37.269632Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976711361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-10T11:36:37.272663Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976711361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-10T11:36:37.272833Z node 2 :TX_PROXY ERROR: Actor# [2:7491647014296403210:5922] txid# 281474976711361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> SystemView::AuthGroupMembers_ResultOrder [GOOD] >> SystemView::AuthGroupMembers_TableRange >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap |97.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> DstCreator::KeyColumnsSizeMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:78:2110] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:81:2111] Leader for TabletID 72057594037927937 is [29:81:2111] sender: [29:135:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:81:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! 
new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:136:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:83:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:84:2057] recipient: [31:82:2113] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:86:2057] recipient: [31:82:2113] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:85:2114] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:139:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:83:2057] recipient: [32:82:2113] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:84:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:86:2057] recipient: [32:82:2113] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:85:2114] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:139:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:85:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! 
new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:140:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:87:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:88:2057] recipient: [34:86:2116] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:90:2057] recipient: [34:86:2116] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! new actor is[34:89:2117] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:143:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:87:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:88:2057] recipient: [35:86:2116] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:90:2057] recipient: [35:86:2116] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:89:2117] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:143:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! 
new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:90:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:92:2057] recipient: [37:91:2119] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:94:2057] recipient: [37:91:2119] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:93:2120] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:147:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:91:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:92:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:94:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:93:2120] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:147:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:89:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:92:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:93:2057] recipient: [39:91:2119] Leader for TabletID 72057594037927937 is [39:94:2120] sender: [39:95:2057] recipient: [39:91:2119] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! 
new actor is[39:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-04-10T11:36:10.862190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646901760779102:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:10.862740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmpCIbyAP/pdisk_1.dat 2025-04-10T11:36:11.320864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:11.329118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:11.329241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:11.331169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32123 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:11.595836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:11.620345Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:11.636152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:11.789786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:11.868737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.419052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646920700937899:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:14.424386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmpn7s2vy/pdisk_1.dat 2025-04-10T11:36:14.618542Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:14.649942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:14.650061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:14.651632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8577 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:14.958486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.966611Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:14.977011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:15.039049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:15.117463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:18.611937Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646936804550859:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmp91BOHS/pdisk_1.dat 2025-04-10T11:36:18.655677Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:36:18.732712Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:18.751024Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:18.751108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:18.752336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3481 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-10T11:36:18.963196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:36:18.970639Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:18.985061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:19.073871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T11:36:19.152846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:22.506244Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646953610869796:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:22.506388Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmpwfC1wE/pdisk_1.dat 2025-04-10T11:36:22.632271Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:22.647442Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:22.647550Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:22.649223Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5323 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:22.871616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:22.895262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.972836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:23.057785Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
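The operation ids in these runs (281474976710657, 281474976715657, ...) all sit just above 2^48 = 281474976710656, which suggests they are allocated from a range whose high bits are fixed; that is an inference from the numbers alone, not a statement about the allocation scheme. A quick check of the offsets:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // 2^48 as a 64-bit constant.
        constexpr uint64_t kBase = 1ULL << 48;           // 281474976710656
        constexpr uint64_t kOpId = 281474976715657ULL;   // opId seen above
        // Prints base=281474976710656 offset=5001 for this opId.
        std::printf("base=%llu offset=%llu\n",
                    (unsigned long long)kBase,
                    (unsigned long long)(kOpId - kBase));
        return 0;
    }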
2025-04-10T11:36:26.396739Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491646972565411033:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:26.396835Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmpegOhBM/pdisk_1.dat 2025-04-10T11:36:26.581270Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:26.609728Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:26.609820Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:26.612502Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8907 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:26.914208Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.921897Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:26.933015Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:36:26.939475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.016159Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:27.073755Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:30.671617Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7491646990920061148:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:30.671672Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmp0pNA2z/pdisk_1.dat 2025-04-10T11:36:30.823648Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:30.828238Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:30.828302Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:30.831220Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14912 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:31.067338Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:31.075126Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:31.082762Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:36:31.087002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:31.160419Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:31.216361Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:34.844241Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491647006575823386:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:34.844290Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fbc/r3tmp/tmp9k0eOV/pdisk_1.dat 2025-04-10T11:36:35.064448Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:35.078300Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:35.078396Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:35.080739Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16410 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:35.338959Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:35.349890Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:35.360809Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:36:35.367010Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:35.448699Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:35.519504Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
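Each node bring-up above logs the same Hive progression for the joining node: VolatileState: Unknown -> Disconnected -> Connecting -> Connected. Below is a minimal sketch of such a transition table; the enum and function names are hypothetical and only mirror the state names printed in the HIVE log lines, not Hive's actual implementation:

    #include <stdexcept>

    // States exactly as they appear in the HIVE warnings above.
    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    // Hypothetical transition step: advance along the only progression
    // the log shows; Connected is terminal for a healthy node.
    EVolatileState Advance(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        throw std::logic_error("unreachable");
    }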
>> TSubscriberTest::NotifyDelete |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::StrongNotificationAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:101:2135] ... blocking block result NO_GROUP for [1:103:2135] ... blocking block result NO_GROUP for [1:104:2135] ... blocking block result NO_GROUP for [1:102:2135] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-04-10T11:36:32.822432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646997307686370:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:32.823036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001489/r3tmp/tmpZ2KtYg/pdisk_1.dat 2025-04-10T11:36:33.251823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:33.251960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:33.254598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:33.258375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18446 TServer::EnableGrpc on GrpcPort 13888, node 1 2025-04-10T11:36:33.496631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:33.496681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:33.496703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:33.496889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:36:33.891269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284993976 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284993976 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-04-10T11:36:33.938701Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:33.938907Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:33.938927Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:33.939657Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:35.838442Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-04-10T11:36:35.838506Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-04-10T11:36:36.542473Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647015677900350:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:36.546732Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001489/r3tmp/tmpKRygug/pdisk_1.dat 2025-04-10T11:36:36.666000Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:36.680500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:36.680630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:36.682496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24027 TServer::EnableGrpc on GrpcPort 12997, node 2 2025-04-10T11:36:36.876616Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:36.876641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:36.876647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:36.876748Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:24027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:37.147155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:37.155892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:37.260303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284997196 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284997329 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284997196 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284997329 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:37.290093Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:37.290210Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:37.290222Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:37.290648Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:39.561549Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284997259, tx_id: 281474976715658 } } } 2025-04-10T11:36:39.561929Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:39.563346Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:39.565392Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744284997329 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:36:39.565573Z node 
2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-04-10T11:36:40.892619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:40.894214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-10T11:36:40.894369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-10T11:36:40.894402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-10T11:36:40.894438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-10T11:36:40.894474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-10T11:36:40.894526Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.894620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-10T11:36:40.894667Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.894937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-04-10T11:36:40.895003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-04-10T11:36:40.895037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-04-10T11:36:40.895094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 
18446744073709551615 }: sender# [1:36:2066] 2025-04-10T11:36:40.895137Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.895180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-04-10T11:36:40.895225Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.895299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-04-10T11:36:40.895320Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-04-10T11:36:41.001288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:41.003622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-04-10T11:36:41.003761Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-04-10T11:36:41.003807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-04-10T11:36:41.003866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-04-10T11:36:41.003918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-04-10T11:36:41.004004Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] 
AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:41.004150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-04-10T11:36:41.004207Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:41.004331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-10T11:36:41.004421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-10T11:36:41.004499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-10T11:36:41.004546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-10T11:36:41.004617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-10T11:36:41.004656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-10T11:36:41.004685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-10T11:36:41.004728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-10T11:36:41.004772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-10T11:36:41.004816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-10T11:36:41.004847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-10T11:36:41.004895Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-10T11:36:41.004918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-10T11:36:40.960062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:40.962754Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:40.962864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:40.962913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:40.962980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:40.963077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:40.963120Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.963163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-10T11:36:40.963208Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.963604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:40.963678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:40.963736Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:40.963899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:40.963984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:40.964029Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::SyncPartial >> DstCreator::SamePartitionCount [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSubscriberTest::SyncPartial [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> SplitterBasic::LimitExceed [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> EntityId::Order >> Cache::Test1 [GOOD] >> 
Cache::Test2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-04-10T11:36:42.025265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-10T11:36:42.027487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-10T11:36:42.027588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-10T11:36:42.027641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-10T11:36:42.027706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:42.027785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:42.027826Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:42.027879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-10T11:36:42.027926Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:42.028150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-10T11:36:42.028287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-04-10T11:36:42.028358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-04-10T11:36:42.028405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-04-10T11:36:42.028459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-10T11:36:42.028500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-10T11:36:42.028576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-04-10T11:36:42.028613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-04-10T11:36:42.028657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-10T11:36:42.028709Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:42.028749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-04-10T11:36:42.028779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-04-10T11:36:42.028807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-04-10T11:36:42.028844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-04-10T11:36:42.028932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-04-10T11:36:42.029049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-04-10T11:36:42.029072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-04-10T11:36:42.029129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-04-10T11:36:42.029177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-04-10T11:36:42.029222Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-04-10T11:36:42.029295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-04-10T11:36:42.029325Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T11:36:42.029367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-10T11:36:42.029403Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-10T11:36:42.029444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-04-10T11:36:42.029468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-04-10T11:36:42.029561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-04-10T11:36:42.029651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-04-10T11:36:42.029675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-04-10T11:36:42.029706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-04-10T11:36:42.029729Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-10T11:36:42.029760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-04-10T11:36:42.029820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-04-10T11:36:42.029839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-04-10T11:36:42.029890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-10T11:36:42.029924Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> StatisticsSaveLoad::Simple |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |97.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> SystemView::AuthUsers_TableRange [GOOD] >> SystemView::ShowCreateTableColumn [GOOD] >> SystemView::ShowCreateTable >> TVectorIndexTests::CreateTablePrefix |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-04-10T11:36:33.985132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647000658362283:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:33.985388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001487/r3tmp/tmpClFwsB/pdisk_1.dat 2025-04-10T11:36:34.316710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:34.364962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:34.365110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:34.367352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28431 TServer::EnableGrpc on GrpcPort 2843, node 1 2025-04-10T11:36:34.594964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:34.594988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:34.594995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:34.595120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28431 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:35.000481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
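The DstCreator traces in this section follow one pattern: describe the source table, propose CreateTable for the destination (tolerating StatusAlreadyExists), then describe the destination and compare it against the source. The KeyColumnsSizeMismatch run earlier fails exactly that comparison, logging "Key columns size mismatch: expected: 1, got: 2" after seeing KeyColumnNames: "key", "value" on the destination. The sketch below shows only the shape of such a key-column check; the struct and function are hypothetical illustrations, not the controller's actual code:

    #include <optional>
    #include <string>
    #include <vector>

    // Hypothetical descriptor: just the key column names of a table.
    struct TTableKeys {
        std::vector<std::string> KeyColumnNames;
    };

    // Returns an error string in the spirit of the log message above,
    // or std::nullopt when the destination key matches the source key.
    std::optional<std::string> CheckKeyColumns(const TTableKeys& src,
                                               const TTableKeys& dst) {
        if (src.KeyColumnNames.size() != dst.KeyColumnNames.size()) {
            return "Key columns size mismatch: expected: " +
                   std::to_string(src.KeyColumnNames.size()) +
                   ", got: " + std::to_string(dst.KeyColumnNames.size());
        }
        for (size_t i = 0; i < src.KeyColumnNames.size(); ++i) {
            if (src.KeyColumnNames[i] != dst.KeyColumnNames[i]) {
                return "Key column name mismatch at position " +
                       std::to_string(i);
            }
        }
        return std::nullopt;
    }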
2025-04-10T11:36:35.018992Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:36:35.025045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:35.030004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284995138 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284995054 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284995138 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-10T11:36:35.194596Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:35.194756Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:35.194773Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:35.197787Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:37.174563Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284995138, tx_id: 281474976710659 } } } 2025-04-10T11:36:37.174918Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:37.176555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:36:37.177229Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T11:36:37.177266Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2025-04-10T11:36:37.205070Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-04-10T11:36:37.205107Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1744284997245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-10T11:36:37.930106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647019074783387:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:37.930288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001487/r3tmp/tmpwGrrkU/pdisk_1.dat 2025-04-10T11:36:38.060334Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:38.074054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:38.074185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:38.079447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23258 TServer::EnableGrpc on GrpcPort 11248, node 2 2025-04-10T11:36:38.274424Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:38.274448Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:38.274455Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:38.274582Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:36:38.622231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:36:38.632242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744284998736 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284998673 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744284998736 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-10T11:36:38.710848Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:38.711007Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:38.711021Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:38.711519Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:41.637826Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284998736, tx_id: 281474976715658 } } } 2025-04-10T11:36:41.639107Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:41.640610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:41.641917Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-10T11:36:41.641942Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-04-10T11:36:41.671019Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-10T11:36:41.671052Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744284998736 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744285001711 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) >> DstCreator::ColumnTypeMismatch [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-04-10T11:31:51.453857Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:51.573518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:51.597617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:51.597888Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:51.605704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:51.605906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:51.606123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:51.606314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:51.606413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:51.606529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:51.606665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:51.606782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:51.606883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:51.606992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.607077Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:51.607193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:51.643387Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:51.643774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:51.643826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:51.644026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.644178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:51.644259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:51.644299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:51.644401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:51.644464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:51.644502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:51.644528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:51.644724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:51.644790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:51.644829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:51.644856Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:51.644939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:51.644991Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:51.645043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:51.645102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:51.645171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:51.645208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:51.645232Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:51.645274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:51.645325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:51.645359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:51.645731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-10T11:31:51.645810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-10T11:31:51.645917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-04-10T11:31:51.646012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-10T11:31:51.646201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:51.646259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:51.646290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:51.646479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:51.646529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.646559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:51.646743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:51.646788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:51.646833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:51.647026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:51.647065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:51.647092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:51.647225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:51.647267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:51.647312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-10T11:36:42.168675Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11071:12698];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:42.171284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11071:12698];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-04-10T11:36:36.456714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647015980072214:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:36.456829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001443/r3tmp/tmpOVP8vG/pdisk_1.dat 2025-04-10T11:36:36.791964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:36.859240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:36.859445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:36.861734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1606 TServer::EnableGrpc on GrpcPort 30994, node 1 2025-04-10T11:36:37.048366Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:37.048399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:37.048415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:37.048575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:1606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:37.408962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:37.434425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:37.553201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284997469 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284997623 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744284997469 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284997623 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-10T11:36:37.588685Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:37.588882Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:37.588911Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:37.589666Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:39.483882Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744284997539, tx_id: 281474976710658 } } } 2025-04-10T11:36:39.484259Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:39.486072Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:39.488153Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284997623 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 720575940 ... 
1:36:40.405826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:40.407443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3373 TServer::EnableGrpc on GrpcPort 11869, node 2 2025-04-10T11:36:40.597848Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:40.597879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:40.597895Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:40.598022Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:40.887602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:40.898072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:40.933308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744285000941 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744285001004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744285000941 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744285001004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-10T11:36:40.966802Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:40.966968Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-10T11:36:40.966979Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-10T11:36:40.967415Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-10T11:36:43.266122Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1744285000969, tx_id: 281474976715658 } } } 2025-04-10T11:36:43.266489Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-10T11:36:43.268436Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-10T11:36:43.269538Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1744285001004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-10T11:36:43.269762Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] >> TVectorIndexTests::CreateTablePrefix [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] |97.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthUsers_TableRange [GOOD] Test command err: 2025-04-10T11:35:29.912143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646728273168997:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:29.912228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f74/r3tmp/tmpmCAgNs/pdisk_1.dat 2025-04-10T11:35:30.376703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.418982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.419114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.424582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62669, node 1 2025-04-10T11:35:30.754865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.754898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.754911Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.755062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.211680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.230250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:35:31.243344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:32.910918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646741158071624:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:32.911810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:32.912248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646741158071636:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:32.920089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:35:32.932794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646741158071638:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:35:33.018630Z node 1 :TX_PROXY ERROR: Actor# [1:7491646745453038987:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:33.925085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqqps74nxh0kh6tz32fq23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU1YzUzOWUtYTYxN2IyZmUtYjk2NGRlNjctYjM5M2E2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:34.195290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfqqr1h62tn0zaa7zmwpq27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdjNDkzODQtMzllZTE4NS0xMDg1NTRhZS0zZDE5N2Q3Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:34.395622Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jrfqqr264ecgd0bxwq78h4d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAxNzhmNi1lYzkzMzI3ZC05NGE4YmM2Yi00ZTg5MDRkMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:34.402712Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646749748006385:2366], owner: [1:7491646749748006381:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:35:34.406949Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646749748006385:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:34.409860Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646749748006385:2366], row count: 2, finished: 1 2025-04-10T11:35:34.409903Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646749748006385:2366], owner: [1:7491646749748006381:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:35:34.415505Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284934394, txId: 281474976710663] shutting down 2025-04-10T11:35:35.270579Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646752808405383:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:35.270631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f74/r3tmp/tmpmrh7Bc/pdisk_1.dat 2025-04-10T11:35:35.383818Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14822, node 2 2025-04-10T11:35:35.423001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:35.423083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:35.424209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-04-10T11:35:35.490777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:35.490808Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:35.490816Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:35.490932Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:35.763697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:35.769861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:35:35.774504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:38.620454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646765693308001:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:38.620510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646765693307993:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:38.620564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:38.623672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:35:38.633742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491646765693308007:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:35:38.722486Z node 2 :TX_PROXY ERROR: Actor# [2:7491646765693308058:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts i ... atedAt: 1744284998993786 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"rflp8jr7pAXaqVOF1mTFT28rcnhmt4pqZSNWWO6GMww=\",\"salt\":\"2yN0P6eDPOQpAAL4IvMTKQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998977293 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"/C1Z89qcQG1vKg2Sn7DShTUMCJV73CVeQRa/pLMHzL4=\",\"salt\":\"l3SWkC5LqigtU6CW1wllzg==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998962469 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"CNdowshekbNz35L/F111vzFPmT4y2B03uKZjj5LQdrA=\",\"salt\":\"F2pYzCWkUQlYvccSguiU8A==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998947131 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"nmdH3ah6essKjlPSNDDAlA3bESo10J2VlwPthH/zpKY=\",\"salt\":\"/KsX530RJioMFfHPhlLnQw==\",\"type\":\"argon2id\"}" } 2025-04-10T11:36:39.906906Z node 23 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [23:7491647025881493965:2423], row count: 2, finished: 1 2025-04-10T11:36:39.906936Z node 23 :SYSTEM_VIEWS INFO: Scan finished, actor: [23:7491647025881493965:2423], owner: [23:7491647025881493962:2421], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-10T11:36:39.910067Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284999899, txId: 281474976710682] shutting down 2025-04-10T11:36:39.910887Z node 23 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [23:7491646991521753720:2184], database# , query hash# 8862277434384952876, cpu time# 151253 2025-04-10T11:36:39.921464Z node 25 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-10T11:36:39.921355Z 2025-04-10T11:36:40.143548Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710685. Ctx: { TraceId: 01jrfqsr7kakphkg4xca27bka0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=NjY3ZmQzNzItZGFmNDM0MWEtNjVmMzMwOTAtZmUxMzMzZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:36:40.146873Z node 23 :SYSTEM_VIEWS INFO: Scan started, actor: [23:7491647030176461299:2432], owner: [23:7491647030176461296:2430], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-10T11:36:40.149373Z node 23 :SYSTEM_VIEWS INFO: Scan prepared, actor: [23:7491647030176461299:2432], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:40.149441Z node 23 :SYSTEM_VIEWS TRACE: Sending list users request 2025-04-10T11:36:40.149859Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Execute at schemeshard: 72057594046644480 2025-04-10T11:36:40.150186Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Complete, result: Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998993786 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"rflp8jr7pAXaqVOF1mTFT28rcnhmt4pqZSNWWO6GMww=\",\"salt\":\"2yN0P6eDPOQpAAL4IvMTKQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998977293 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"/C1Z89qcQG1vKg2Sn7DShTUMCJV73CVeQRa/pLMHzL4=\",\"salt\":\"l3SWkC5LqigtU6CW1wllzg==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998962469 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"CNdowshekbNz35L/F111vzFPmT4y2B03uKZjj5LQdrA=\",\"salt\":\"F2pYzCWkUQlYvccSguiU8A==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998947131 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"nmdH3ah6essKjlPSNDDAlA3bESo10J2VlwPthH/zpKY=\",\"salt\":\"/KsX530RJioMFfHPhlLnQw==\",\"type\":\"argon2id\"}" }, at schemeshard: 72057594046644480 2025-04-10T11:36:40.150591Z node 23 :SYSTEM_VIEWS TRACE: Got list users response Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998993786 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"rflp8jr7pAXaqVOF1mTFT28rcnhmt4pqZSNWWO6GMww=\",\"salt\":\"2yN0P6eDPOQpAAL4IvMTKQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998977293 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"/C1Z89qcQG1vKg2Sn7DShTUMCJV73CVeQRa/pLMHzL4=\",\"salt\":\"l3SWkC5LqigtU6CW1wllzg==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998962469 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"CNdowshekbNz35L/F111vzFPmT4y2B03uKZjj5LQdrA=\",\"salt\":\"F2pYzCWkUQlYvccSguiU8A==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998947131 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"nmdH3ah6essKjlPSNDDAlA3bESo10J2VlwPthH/zpKY=\",\"salt\":\"/KsX530RJioMFfHPhlLnQw==\",\"type\":\"argon2id\"}" } 2025-04-10T11:36:40.150690Z node 23 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [23:7491647030176461299:2432], row count: 2, finished: 1 2025-04-10T11:36:40.150723Z node 23 :SYSTEM_VIEWS INFO: Scan finished, actor: [23:7491647030176461299:2432], owner: [23:7491647030176461296:2430], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-10T11:36:40.156449Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285000138, txId: 281474976710684] shutting down 2025-04-10T11:36:40.157099Z node 23 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [23:7491646991521753720:2184], 
database# , query hash# 13069672625607331218, cpu time# 210861 2025-04-10T11:36:40.351453Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710687. Ctx: { TraceId: 01jrfqsrfgdat95r7jzjv49k28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=NjNhN2RhNGYtZGI4NjkzN2ItNWNjODMxYWItYThkYjUzYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:40.354332Z node 23 :SYSTEM_VIEWS INFO: Scan started, actor: [23:7491647030176461334:2441], owner: [23:7491647030176461331:2439], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-10T11:36:40.355005Z node 23 :SYSTEM_VIEWS INFO: Scan prepared, actor: [23:7491647030176461334:2441], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:36:40.355073Z node 23 :SYSTEM_VIEWS TRACE: Sending list users request 2025-04-10T11:36:40.355398Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Execute at schemeshard: 72057594046644480 2025-04-10T11:36:40.355664Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Complete, result: Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998993786 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"rflp8jr7pAXaqVOF1mTFT28rcnhmt4pqZSNWWO6GMww=\",\"salt\":\"2yN0P6eDPOQpAAL4IvMTKQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998977293 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"/C1Z89qcQG1vKg2Sn7DShTUMCJV73CVeQRa/pLMHzL4=\",\"salt\":\"l3SWkC5LqigtU6CW1wllzg==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998962469 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"CNdowshekbNz35L/F111vzFPmT4y2B03uKZjj5LQdrA=\",\"salt\":\"F2pYzCWkUQlYvccSguiU8A==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998947131 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"nmdH3ah6essKjlPSNDDAlA3bESo10J2VlwPthH/zpKY=\",\"salt\":\"/KsX530RJioMFfHPhlLnQw==\",\"type\":\"argon2id\"}" }, at schemeshard: 72057594046644480 2025-04-10T11:36:40.355937Z node 23 :SYSTEM_VIEWS TRACE: Got list users response Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998993786 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"rflp8jr7pAXaqVOF1mTFT28rcnhmt4pqZSNWWO6GMww=\",\"salt\":\"2yN0P6eDPOQpAAL4IvMTKQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998977293 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"/C1Z89qcQG1vKg2Sn7DShTUMCJV73CVeQRa/pLMHzL4=\",\"salt\":\"l3SWkC5LqigtU6CW1wllzg==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998962469 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"CNdowshekbNz35L/F111vzFPmT4y2B03uKZjj5LQdrA=\",\"salt\":\"F2pYzCWkUQlYvccSguiU8A==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1744284998947131 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"nmdH3ah6essKjlPSNDDAlA3bESo10J2VlwPthH/zpKY=\",\"salt\":\"/KsX530RJioMFfHPhlLnQw==\",\"type\":\"argon2id\"}" } 2025-04-10T11:36:40.355986Z node 23 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [23:7491647030176461334:2441], row count: 1, finished: 1 2025-04-10T11:36:40.356019Z node 23 :SYSTEM_VIEWS INFO: Scan finished, actor: [23:7491647030176461334:2441], owner: 
[23:7491647030176461331:2439], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-10T11:36:40.358537Z node 23 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [23:7491646991521753720:2184], database# , query hash# 11995873958551672460, cpu time# 169165 2025-04-10T11:36:40.359324Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285000350, txId: 281474976710686] shutting down 2025-04-10T11:36:40.372697Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 25 2025-04-10T11:36:40.373602Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:40.373788Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 26 2025-04-10T11:36:40.373977Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:40.375398Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-10T11:36:40.376079Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:40.374179Z node 25 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:40.376915Z node 27 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:40.395565Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 24 2025-04-10T11:36:40.396681Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:36:40.399688Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[27:7491646996737908503:2099], Type=268959746 2025-04-10T11:36:40.399730Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 2025-04-10T11:36:40.399755Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 2025-04-10T11:36:40.399782Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 2025-04-10T11:36:40.399811Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 2025-04-10T11:36:40.399837Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 2025-04-10T11:36:40.399863Z node 23 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[24:7491646996887074140:2104], Type=268959746 |97.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:44.058371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:44.058479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:44.058532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:44.058583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:44.058629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:44.058658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:44.058724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:44.058795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:44.059153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:44.148988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:44.149042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:44.165287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:44.165624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:44.165814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:44.173483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:44.173714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:44.174325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:44.174700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:44.176678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:44.178084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:44.178174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-10T11:36:44.178369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:44.178433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:44.178478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:44.178674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.185832Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:44.316378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:44.316615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.316835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:44.317108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:44.317177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.319480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:44.319613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:44.319821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.319869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:44.319903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:44.319972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:44.321790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.321851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:44.321884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:44.323939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.323997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.324053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:44.324109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:44.334637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:44.337341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:44.337580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:44.338950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:44.339182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:44.339251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:44.339636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:44.339709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:44.339934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:44.340048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:44.342900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:44.342962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:44.343181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:44.343237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:44.343588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.343653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:44.343790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:44.343837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:44.343890Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:44.343932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:44.343981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:44.344033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:44.344089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:44.344129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:44.344209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:44.344265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:44.344310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:44.347289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:44.347441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:44.347506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ration: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:44.961198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:44.961332Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:36:44.961368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:36:44.961706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.961820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:44.961854Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-10T11:36:44.961894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:36:44.962292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.962376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:44.962404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:44.962431Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-04-10T11:36:44.962464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-10T11:36:44.962528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-04-10T11:36:44.967194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-10T11:36:44.967260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:44.967674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:36:44.967830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-04-10T11:36:44.967881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-04-10T11:36:44.967923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-04-10T11:36:44.967972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-04-10T11:36:44.968021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-04-10T11:36:44.969123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-04-10T11:36:44.969213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:44.969477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-10T11:36:44.969581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-04-10T11:36:44.969622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 102 ready parts: 3/5 2025-04-10T11:36:44.969660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-04-10T11:36:44.969692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-04-10T11:36:44.969722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-04-10T11:36:44.971008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T11:36:44.971052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:44.971244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:36:44.971346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-04-10T11:36:44.971376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-04-10T11:36:44.971431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-04-10T11:36:44.971461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-04-10T11:36:44.971489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-04-10T11:36:44.971853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.972049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.972086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.972130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.972420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:36:44.972461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:44.972671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:36:44.972777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-04-10T11:36:44.972805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-10T11:36:44.972836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-04-10T11:36:44.972861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-10T11:36:44.972886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-04-10T11:36:44.972982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:447:2393] message: TxId: 102 2025-04-10T11:36:44.973048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-04-10T11:36:44.973107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:36:44.973154Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:36:44.973263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:36:44.973308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T11:36:44.973330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T11:36:44.973360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:36:44.973384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-10T11:36:44.973431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-10T11:36:44.973564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:36:44.973599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-10T11:36:44.973621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-10T11:36:44.973668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:36:44.973689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:4 2025-04-10T11:36:44.973708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:4 2025-04-10T11:36:44.973753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-10T11:36:44.974188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.974254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.974304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.974772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.974849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.974906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:44.977800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:36:44.977867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:589:2528] TestWaitNotification: OK eventTxId 102 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TTabletPipeTest::TestSendBeforeBootTarget >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] |97.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-04-10T11:36:46.921430Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:45.844637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:45.844771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:45.844828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:45.844866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:45.844910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:45.844957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:45.845016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:45.845089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:45.845430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:45.933050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:45.933110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:45.949529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-10T11:36:45.949810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:45.949976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:45.959725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:45.960002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:45.960670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:45.961124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:45.963329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:45.964707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:45.964780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:45.964996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:45.965050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:45.965090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:45.965258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:45.972157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:46.124184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:46.124437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.124674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:46.124975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:46.125039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.127936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:46.128092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:46.128360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.128440Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:46.128479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:46.128529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:46.131015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.131079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:46.131117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:46.133119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.133170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.133234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.133294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.144307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:46.147056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:46.147242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:46.148354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:46.148488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:46.148541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.148843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:46.148898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.149119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:46.149201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:46.151426Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:46.151476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:46.151683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:46.151739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:46.152006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.152053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:46.152167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:46.152203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.152241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:46.152272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.152306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:46.152349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.152388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:46.152419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:46.152478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:46.152517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:46.152548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:46.155006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:46.155128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:46.155168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:46.543335Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:36:46.543572Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 228us result status StatusSuccess 2025-04-10T11:36:46.544003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:46.544699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:36:46.544900Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 214us result status StatusSuccess 2025-04-10T11:36:46.545317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TLocksTest::BrokenNullLock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-04-10T11:36:47.276591Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-04-10T11:36:48.142011Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
>> TUniqueIndexTests::CreateTable [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [27:56:2097] sender: [27:89:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:92:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:93:2057] recipient: [27:91:2119] Leader for TabletID 72057594037927937 is [27:94:2120] sender: [27:95:2057] recipient: [27:91:2119] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:50:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:78:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:82:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:81:2111] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:76:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:78:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:79:2110] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:82:2057] recipient: [31:79:2110] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:81:2111] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:135:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:77:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:81:2057] recipient: [32:79:2110] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:83:2057] recipient: [32:79:2110] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:82:2111] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:136:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:82:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:86:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:85:2114] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:80:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:83:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:82:2113] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:86:2057] recipient: [34:82:2113] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! 
new actor is[34:85:2114] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:81:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:85:2057] recipient: [35:83:2113] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:87:2057] recipient: [35:83:2113] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:86:2114] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:140:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:87:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:86:2116] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:90:2057] recipient: [36:86:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:89:2117] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:84:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:86:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:87:2116] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:90:2057] recipient: [37:87:2116] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:89:2117] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:50:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:85:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:87:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:89:2057] recipient: [38:88:2116] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:91:2057] recipient: [38:88:2116] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:90:2117] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:144:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:51:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:48.190321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:48.190412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:48.190465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:48.190502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:48.190546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:48.190574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:48.190634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:48.190725Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:48.191035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:48.272371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:48.272441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:48.292478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:48.292801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:48.293017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:48.301105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:48.301337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:48.302011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:48.302435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:48.304528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:48.305846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:48.305923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:48.306099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:48.306177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:48.306223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:48.306385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.313405Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:48.453335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:48.453548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.453773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:48.454005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:48.454049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T11:36:48.456062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:48.456179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:48.456377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.456426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:48.456460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:48.456507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:48.458136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.458286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:48.458324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:48.459721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.459769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.459822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:48.459871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:48.462892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:48.464654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:48.464789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:48.465650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:48.465756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:48.465796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:48.466025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-10T11:36:48.466067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:48.466271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:48.466373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:48.468424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:48.468467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:48.468636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:48.468688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:48.468932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.468994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:48.469108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:48.469166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:48.469204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:48.469235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:48.469273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:48.469311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:48.469341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:48.469369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:48.469429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:48.469458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:48.469482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:48.471650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:48.471758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:48.471806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:48.771859Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:36:48.771899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-10T11:36:48.771968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-10T11:36:48.772453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:36:48.772484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-10T11:36:48.772605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:36:48.772639Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-10T11:36:48.772703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-10T11:36:48.772746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:48.772773Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.772808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-10T11:36:48.772853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-10T11:36:48.776663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:36:48.777206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:36:48.777402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:36:48.779491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-10T11:36:48.779601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:36:48.779710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.779824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:36:48.780176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.780365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-10T11:36:48.780427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-10T11:36:48.780538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:36:48.780581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:36:48.780623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-10T11:36:48.780688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-10T11:36:48.780735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-10T11:36:48.781039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-10T11:36:48.781072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-10T11:36:48.781152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:36:48.781180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:36:48.781216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-10T11:36:48.781239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:36:48.781266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-10T11:36:48.781353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-04-10T11:36:48.781417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-10T11:36:48.781474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-10T11:36:48.781517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-10T11:36:48.781656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:36:48.781704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-10T11:36:48.781727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-10T11:36:48.781756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:36:48.781777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-10T11:36:48.781796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-10T11:36:48.781848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:36:48.784028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-10T11:36:48.784090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-04-10T11:36:48.784610Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:36:48.784865Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 277us result status StatusSuccess 2025-04-10T11:36:48.785704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-04-10T11:36:04.612466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646878335242170:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:04.614545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmp5FAcQj/pdisk_1.dat 2025-04-10T11:36:04.989910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:05.044109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:05.044231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:05.045873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24427 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:05.314420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-10T11:36:05.361616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:05.500742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:05.545392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:08.191316Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646893645666278:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:08.191356Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpRLjOGW/pdisk_1.dat 2025-04-10T11:36:08.392716Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:08.401773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:08.401867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:08.403772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4558 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:08.720800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:08.734618Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:08.754717Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:36:08.762775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:08.849594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:08.912402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.117207Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646910381036021:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:12.117343Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpAS2gRo/pdisk_1.dat 2025-04-10T11:36:12.221205Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:12.254169Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:12.254265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:12.255643Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15922 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:12.427109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.435084Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.454293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:12.526474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:12.597233Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:16.003988Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646925133682632:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:16.004047Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpYKxUqQ/pdisk_1.dat 2025-04-10T11:36:16.206858Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:16.221386Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:16.221475Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:16.223860Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2805 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025 ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:29.954760Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1076 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T11:36:30.194811Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:30.217616Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.273458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:30.350090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:34.243961Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491647008050128598:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:34.244021Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpiQJO3T/pdisk_1.dat 2025-04-10T11:36:34.460851Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:34.494274Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:34.494396Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:34.496094Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21663 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:34.816722Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:34.824769Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:34.841512Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:34.937978Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:35.009226Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:38.644366Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491647025443879177:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:38.644568Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpV4vyDm/pdisk_1.dat 2025-04-10T11:36:38.785790Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:38.817382Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:38.817487Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:38.819327Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21095 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:39.071954Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:39.093658Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:39.156770Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:39.212725Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:42.968533Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491647042305341482:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:42.968638Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fc2/r3tmp/tmpmqTZII/pdisk_1.dat 2025-04-10T11:36:43.116511Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:43.137938Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:43.138050Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:43.140139Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6673 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:43.509048Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:43.520079Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:43.536372Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:43.630522Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:43.704748Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:109:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:128:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:168:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:187:2057] recipient: [1:14:2061] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TVectorIndexTests::CreateTable >> TDatabaseResolverTests::ClickHouseNative |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> YdbSdkSessions::TestSessionPool |97.2%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.2%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::PostgreSQL >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] >> TVectorIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-04-10T11:36:51.179699Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. 
>> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-04-10T11:36:39.780577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647026200428370:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:39.787013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f7e/r3tmp/tmpqkvBnk/pdisk_1.dat 2025-04-10T11:36:40.147447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:40.159542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:40.159635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:40.165843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62551, node 1 2025-04-10T11:36:40.272658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:40.272687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:40.272698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:40.272874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:40.616605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:42.885621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647039085331277:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.885621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647039085331268:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.885734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.889996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:36:42.890504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647039085331311:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.890555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647039085331314:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.890665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:42.895293Z node 1 :TX_PROXY ERROR: Actor# [1:7491647039085331317:2641] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T11:36:42.910446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647039085331282:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:36:42.910513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647039085331316:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:36:42.978054Z node 1 :TX_PROXY ERROR: Actor# [1:7491647039085331436:2717] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:42.999806Z node 1 :TX_PROXY ERROR: Actor# [1:7491647039085331453:2726] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f7e/r3tmp/tmpaOALj4/pdisk_1.dat 2025-04-10T11:36:45.104095Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:45.187415Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:45.222757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:45.222847Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:45.226586Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13076, node 4 2025-04-10T11:36:45.364298Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:45.364321Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:45.364329Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:45.364468Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:45.641438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:48.297731Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491647065731173976:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:48.297842Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:48.332724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:49.079873Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491647070026141650:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:49.080018Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:49.080385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491647070026141655:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:49.080628Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmM4ZTdiNWMtNjA4MTM0MGMtNmUwMjU2N2UtYWQ3YzEyMTE=, ActorId: [4:7491647070026141646:2483], ActorState: ExecuteState, TraceId: 01jrfqt15qdjre9rwbqh2ab5wr, Create QueryResponse for error on request, msg: 2025-04-10T11:36:49.084280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:36:49.117050Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491647070026141657:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:36:49.191365Z node 4 :TX_PROXY ERROR: Actor# [4:7491647070026141757:2825] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:49.194426Z node 4 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 50 >> StatisticsSaveLoad::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:50.605505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:50.605619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:50.605672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:50.605767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:50.605826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:50.605860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:50.605973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:50.606078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:50.606511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:50.706040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:50.706111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:50.723873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:50.724151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:50.724327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:50.732459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:50.732723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:50.733534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:50.734030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-04-10T11:36:50.736009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:50.737266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:50.737348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:50.737552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:50.737645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:50.737711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:50.737923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.744884Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:50.905909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:50.906200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.906418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:50.906665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:50.906724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.908856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:50.909014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:50.909224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.909295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:50.909341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:50.909403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:50.911358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.911419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:50.911460Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-10T11:36:50.913305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.913359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:50.913422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:50.913484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:50.917507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:50.919582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:50.919775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:50.920766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:50.920902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:50.920976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:50.921262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:50.921317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:50.921508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:50.921609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:50.923718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:50.923769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:50.923933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:50.923994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:50.924286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-10T11:36:50.924339Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:50.924472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:50.924512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:50.924556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:50.924593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:50.924636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:50.924679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:50.924715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:50.924746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:50.924817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:50.924860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:50.924896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:50.927431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:50.927552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:50.927604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:36:51.382335Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:36:51.382357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-10T11:36:51.383348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.383450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.383488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.384277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.384363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.384382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.384403Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-10T11:36:51.384423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:51.384825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-10T11:36:51.384876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:51.385211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-10T11:36:51.385338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-10T11:36:51.385378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-10T11:36:51.385413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-10T11:36:51.385442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-10T11:36:51.385470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: false 2025-04-10T11:36:51.385690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.385739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, 
cookie: 102 2025-04-10T11:36:51.385756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.387402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.387509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.387540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.387721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.387761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.387777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.387795Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:36:51.387823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:36:51.388219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.388278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:36:51.388302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:36:51.388328Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-10T11:36:51.388357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-10T11:36:51.388411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-10T11:36:51.389808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-10T11:36:51.389863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:51.390070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:36:51.390175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress 
is 3/4 2025-04-10T11:36:51.390208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-10T11:36:51.390238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-10T11:36:51.390278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-10T11:36:51.390303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-10T11:36:51.391331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.391593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.391623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:51.391767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:36:51.391839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-10T11:36:51.391860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:36:51.391886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-10T11:36:51.391909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:36:51.391937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-10T11:36:51.391987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:412:2369] message: TxId: 102 2025-04-10T11:36:51.392029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-10T11:36:51.392071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:36:51.392102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:36:51.392174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:36:51.392214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T11:36:51.392228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T11:36:51.392254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-10T11:36:51.392267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-10T11:36:51.392279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-10T11:36:51.392336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:36:51.392360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-10T11:36:51.392374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-10T11:36:51.392400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 
2025-04-10T11:36:51.393085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.393124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.393178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.393221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.393252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.394583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.394657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-10T11:36:51.395987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:36:51.396028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:531:2481] TestWaitNotification: OK eventTxId 102 >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-04-10T11:31:57.747022Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:57.873169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:57.904338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:57.904701Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:57.918141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:57.918481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:57.918757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:57.918887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:57.919007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:57.919131Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:57.919282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:57.919420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:57.919533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:57.919643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:57.919743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:57.919886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:57.952464Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:57.952758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:57.952824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:57.953027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:57.953268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:57.953373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:57.953430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:57.953532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:57.953614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:57.953692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:57.953732Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:57.953975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:57.954052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:57.954100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:57.954132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:57.954264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:57.954343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:57.954445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:57.954484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:57.954579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:57.954634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:57.954665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:57.954725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:57.954767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:57.954802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:57.955309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-10T11:31:57.955413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:31:57.955489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-10T11:31:57.955613Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-10T11:31:57.955810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:57.955871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:57.955914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:57.956139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:57.956194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:57.956229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:57.956404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:57.956457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:57.956512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:57.956735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:57.956805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:57.956847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:57.956987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:57.957375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:57.957439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-10T11:36:50.518950Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11072:12699];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:50.521180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11072:12699];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> SystemView::AuthGroupMembers_TableRange [GOOD] >> SystemView::AuthOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-04-10T11:36:46.803254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647056518310567:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:46.803336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f63/r3tmp/tmp3NrvXz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23389, node 1 2025-04-10T11:36:47.246668Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:47.250698Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:36:47.250727Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:36:47.264702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:47.264846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:47.270901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:47.270923Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:47.270933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:47.271042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:36:47.291277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24306 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:47.575931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:49.823309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647069403213474:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:49.823434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:50.068108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:50.241186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647073698180947:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:50.241332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:50.241422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647073698180952:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:50.244965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:36:50.284078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647073698180954:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:36:50.353426Z node 1 :TX_PROXY ERROR: Actor# [1:7491647073698181048:2809] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:50.468740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqt2a0emx79pqhpzmwrdxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThhMDI1NjMtNjQyNmZjZjQtNWFlZTUwNTEtNTg4NThhNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:36:50.613888Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfqt2nm8arc7gzv80b633h1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE3NGI4YjItYjE4ZTFiMjYtN2ZhYTE2YTktNzUyOTZhMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest |97.3%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-04-10T11:36:52.403774Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> DoubleIndexedTests::TestMerge [GOOD] >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> DoubleIndexedTests::TestErase [GOOD] >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> DoubleIndexedTests::TestFind [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream |97.3%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestFind [GOOD] |97.3%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest |97.3%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ArrowInferenceTest::csv_simple [GOOD] >> ArrowInferenceTest::tsv_simple [GOOD] |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/object_storage/inference/ut/gtest >> ArrowInferenceTest::tsv_simple [GOOD] |97.3%| [TS] {RESULT} ydb/core/external_sources/object_storage/inference/ut/gtest >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> UtilString::ShrinkToFit [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> TMemoryPoolTest::AllocOneByte [GOOD] >> TMemoryPoolTest::AppendString [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_util/unittest >> TMemoryPoolTest::AppendString [GOOD] |97.3%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> SystemView::TopPartitionsByCpuFollowers [GOOD] >> SystemView::TopPartitionsByTliFields >> YdbSdkSessions::TestSessionPool [GOOD] >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelTests::Smoke >> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD] >> TParseParamsTests::CreateUser [GOOD] >> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD] >> TParseParamsTests::DeleteMessageBatchRequest [GOOD] >> TParseParamsTests::MessageBody [GOOD] >> TParseParamsTests::SendMessageBatchRequest [GOOD] >> TParseParamsTests::DeleteQueueBatchRequest [GOOD] >> TParseParamsTests::PurgeQueueBatchRequest [GOOD] >> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD] >> TParseParamsTests::UnnumberedAttribute [GOOD] >> TParseParamsTests::UnnumberedAttributeName [GOOD] >> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD] >> TParseParamsTests::FailsOnInvalidGroupId [GOOD] >> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD] >> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD] >> TParseParamsTests::FailsOnInvalidWaitTime [GOOD] >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-04-10T11:36:50.656979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647072902307037:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:50.657068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f5c/r3tmp/tmp7fNuOC/pdisk_1.dat 2025-04-10T11:36:50.984112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22425, node 1 2025-04-10T11:36:51.046735Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:51.046933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:51.056290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:51.065214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:51.065252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:51.065259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:51.065406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:51.365693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:53.252484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647085787209945:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.252505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647085787209953:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.252605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.256211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:36:53.273202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647085787209959:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:36:53.370467Z node 1 :TX_PROXY ERROR: Actor# [1:7491647085787210036:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> StatisticsSaveLoad::Simple [GOOD] |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |97.3%| [TS] {RESULT} ydb/core/ymq/ut/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-04-10T11:36:46.752068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:36:46.752454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:46.752510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c51/r3tmp/tmpv7KISp/pdisk_1.dat 2025-04-10T11:36:47.263306Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17089, node 1 2025-04-10T11:36:47.811687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:47.811756Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:47.811795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:47.812263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:36:47.824515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:36:47.926272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:47.926874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:47.941879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11038 2025-04-10T11:36:48.477287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:36:51.528832Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:36:51.568188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:51.568291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:51.598005Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:36:51.600587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:51.839603Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.841870Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.842669Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.842899Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.843030Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.843317Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.843422Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.843510Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:51.843597Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:36:52.005796Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:52.005888Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:52.019863Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:52.176655Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:52.246533Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:36:52.246641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:36:52.290027Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:36:52.294439Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:36:52.294752Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:36:52.294828Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:36:52.294898Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:36:52.294969Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:36:52.295030Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:36:52.295101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:36:52.295750Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:36:52.323676Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:36:52.323825Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1875:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:36:52.330806Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2606] 2025-04-10T11:36:52.338869Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1917:2623] 2025-04-10T11:36:52.339718Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1917:2623], schemeshard id = 72075186224037897 2025-04-10T11:36:52.343123Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:36:52.373584Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:36:52.373657Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:36:52.373833Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:36:52.429120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:36:52.448287Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:36:52.448516Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:36:52.658388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:36:52.863348Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:36:52.943285Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:36:53.574927Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:36:53.575497Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:36:53.599086Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:36:53.609659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2260:3086], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.609798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2276:3091], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.609882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.618348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-10T11:36:53.668955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2280:3094], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:36:53.957249Z node 1 :TX_PROXY ERROR: Actor# [1:2370:3123] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:54.484298Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2392:3135]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:36:54.484575Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:36:54.484698Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2394:3137] 2025-04-10T11:36:54.485509Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2394:3137] 2025-04-10T11:36:54.486224Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2395:2845] 2025-04-10T11:36:54.486562Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2394:3137], server id = [2:2395:2845], tablet id = 72075186224037894, status = OK 2025-04-10T11:36:54.486737Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2395:2845], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:36:54.486819Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:36:54.487054Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:36:54.487167Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2392:3135], StatRequests.size() = 1 2025-04-10T11:36:54.950701Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YTU3NDY2YWEtZWMxNWI3MmItY2IwYzY2ZTctYzJiMzMwYzk=, TxId: 2025-04-10T11:36:54.950811Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YTU3NDY2YWEtZWMxNWI3MmItY2IwYzY2ZTctYzJiMzMwYzk=, TxId: 2025-04-10T11:36:54.952084Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:36:54.954957Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-10T11:36:55.043853Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2428:3158]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:36:55.044142Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:36:55.044197Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2428:3158], StatRequests.size() = 1 2025-04-10T11:36:55.275713Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NzNhYjZhNGUtODVmM2E3ODgtNzdkYzBhMzctY2RiMDM3NDE=, TxId: 01jrfqt7667eqrd13ewpgctev9 2025-04-10T11:36:55.276684Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NzNhYjZhNGUtODVmM2E3ODgtNzdkYzBhMzctY2RiMDM3NDE=, TxId: 01jrfqt7667eqrd13ewpgctev9 2025-04-10T11:36:55.279433Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:36:55.283202Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-10T11:36:55.302862Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZWM5YWM4YzgtNmI3ZjQ4N2EtNTNjOTdiN2ItOGUxYzhlZQ==, TxId: 01jrfqt77m0hf18ayydbc1j6py 2025-04-10T11:36:55.302985Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZWM5YWM4YzgtNmI3ZjQ4N2EtNTNjOTdiN2ItOGUxYzhlZQ==, TxId: 01jrfqt77m0hf18ayydbc1j6py >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> RangeOps::Intersection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-04-10T11:32:06.605321Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:06.728546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:06.753748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:06.754088Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:06.763641Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:06.763909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:06.764269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:06.764430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:06.764547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:06.764671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:06.764859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:06.765006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:06.765181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:06.765291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.765404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:06.765500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:06.810767Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:06.811044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:06.811108Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:06.811388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.811583Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:06.811683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:06.811731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:06.811845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:06.811933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:06.811980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:06.812013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:06.812213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.812294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:06.812359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:06.812398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:06.812506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:06.812570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:06.812622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:06.812653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:06.812730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:06.812770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:06.812819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-10T11:32:06.812889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:06.812930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:06.812967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:06.813411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-10T11:32:06.813506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:32:06.813614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-04-10T11:32:06.813729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-10T11:32:06.813942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:06.814005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:06.814043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:06.814283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:06.814339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.814376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.814546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:06.814592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:06.814628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:06.814826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:06.814869Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:06.814909Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:06.815066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:06.815120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:06.815185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9
;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:
7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;c
hunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-10T11:36:57.186096Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11072:12699];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:57.189228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11072:12699];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TGRpcRateLimiterTest::CreateResource >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] 
result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 
10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |97.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-04-10T11:32:00.618627Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:00.737200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:00.762082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:00.762399Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:00.770946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:00.771169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:00.771408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:00.771528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:00.771633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:00.771751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:00.771873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:00.772010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:00.772139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:00.772246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-04-10T11:32:00.772348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:00.772459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:00.807320Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:00.807700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:00.807760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:00.807961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:00.808141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:00.808221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:00.808267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:00.808360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:00.808435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:00.808481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:00.808509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:00.808751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:00.808814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:00.808854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:00.808883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:00.808981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:00.809042Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:00.809110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:00.809144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:00.809222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:00.809274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:00.809308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:00.809361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:00.809400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:00.809432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:00.809809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-10T11:32:00.809903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-10T11:32:00.809994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-10T11:32:00.810093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-04-10T11:32:00.810323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:00.810387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:00.810424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:00.810646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:00.810692Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:00.810724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:00.810882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:00.810927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:00.810957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:00.811139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:00.811177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:00.811204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:00.811362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:00.811405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:00.811449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-10T11:36:57.486193Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11155:12782];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:36:57.488835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11155:12782];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> MediatorTimeCast::ReadStepSubscribe ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2025-04-10T11:36:43.151713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647043543493612:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:43.152026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f71/r3tmp/tmpwPCmrZ/pdisk_1.dat 2025-04-10T11:36:43.624001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:43.624231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:43.648643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:43.665789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12117, node 1 2025-04-10T11:36:43.683667Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:36:43.683737Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:36:43.747169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:43.747192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:43.747201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:43.747343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2981 WaitRootIsUp 'Root'... 
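The compaction trace above dumps per-column chunk metadata as repeated `column_id:<id>;chunk_idx:<n>;blob_range:[NO_BLOB:<offset>:<size>]` triplets before summarizing the switched portions (SPLIT_COMPACTED and INSERTED). A minimal offline log-analysis sketch — not YDB code; the entry layout is inferred purely from the trace above — that tallies chunk count and total bytes per column from such a dump:

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <regex>
#include <string>

// Aggregates per-column chunk statistics from a columnshard compaction trace
// fed on stdin. The "column_id:...;chunk_idx:...;blob_range:[NO_BLOB:off:size]"
// layout is inferred from the log above.
int main() {
    const std::regex entry(
        R"(column_id:(\d+);chunk_idx:(\d+);blob_range:\[NO_BLOB:(\d+):(\d+)\])");

    struct TStats { uint64_t Chunks = 0; uint64_t Bytes = 0; };
    std::map<uint64_t, TStats> perColumn;

    std::string line;
    while (std::getline(std::cin, line)) {
        for (auto it = std::sregex_iterator(line.begin(), line.end(), entry);
             it != std::sregex_iterator(); ++it) {
            const uint64_t columnId = std::stoull((*it)[1].str());
            const uint64_t size = std::stoull((*it)[4].str());
            perColumn[columnId].Chunks += 1;
            perColumn[columnId].Bytes += size;
        }
    }
    for (const auto& [id, s] : perColumn) {
        std::cout << "column_id=" << id << " chunks=" << s.Chunks
                  << " bytes=" << s.Bytes << "\n";
    }
    return 0;
}
```

Run against the dump above, it would show, for example, column 8 with 70 chunks mostly around 2696 bytes plus two larger tail chunks (8352/8336) — a shape consistent with fixed-row-count chunking, though that interpretation is an inference from the sizes alone.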
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:44.062062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:48.151327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491647043543493612:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:48.151428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:36:51.496690Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491647081037998647:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:51.496791Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f71/r3tmp/tmpNVEl7a/pdisk_1.dat 2025-04-10T11:36:51.613822Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:51.646516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:51.646657Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:51.649675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27863, node 4 2025-04-10T11:36:51.703456Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:51.703482Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:51.703492Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:51.703632Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7177 WaitRootIsUp 'Root'... 
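The `WaitRootIsUp 'Root'...` / `TClient::Ls` exchange above is the test harness polling the scheme root until the describe comes back with `StatusCode: SUCCESS`. A minimal sketch of that retry loop; `TLsResult` and `DescribeRoot()` are hypothetical stand-ins for the test client call, not real YDB APIs:

```cpp
#include <chrono>
#include <iostream>
#include <thread>

// Hypothetical describe result, mirroring only the field the wait loop needs
// from the TClient::Ls response above (StatusCode: SUCCESS).
struct TLsResult {
    bool Success = false;
};

// Hypothetical stand-in for TClient::Ls("/Root"); this stub reports the root
// as ready on the third poll so the sketch is self-contained and runnable.
TLsResult DescribeRoot() {
    static int calls = 0;
    return TLsResult{++calls >= 3};
}

bool WaitRootIsUp(int maxAttempts = 100) {
    using namespace std::chrono_literals;
    std::cout << "WaitRootIsUp 'Root'...\n";
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (DescribeRoot().Success) {
            std::cout << "WaitRootIsUp 'Root' success.\n";
            return true;
        }
        std::this_thread::sleep_for(100ms);  // back off before re-asking
    }
    return false;
}

int main() {
    return WaitRootIsUp() ? 0 : 1;
}
```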
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:36:51.924802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:56.496906Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491647081037998647:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:56.497000Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] Test command err: 2025-04-10T11:36:59.087921Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-10T11:36:59.088127Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-10T11:36:59.088288Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-10T11:36:59.088310Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-10T11:36:59.088333Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-10T11:36:59.089512Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-10T11:36:59.096853Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-10T11:36:59.096923Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-10T11:36:59.096972Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.103010Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.103068Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-10T11:36:59.103114Z node 1 
:STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.103210Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.103243Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-10T11:36:59.103283Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.103314Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-10T11:36:59.103353Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.103376Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-10T11:36:59.103398Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-10T11:36:59.103451Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.103474Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-10T11:36:59.103583Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-10T11:36:59.103619Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-04-10T11:36:59.103652Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-10T11:36:59.103677Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-04-10T11:36:59.103699Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-10T11:36:59.103752Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-10T11:36:59.103779Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-10T11:36:59.187416Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-10T11:36:59.187602Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-10T11:36:59.187717Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-10T11:36:59.187755Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-10T11:36:59.187801Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-10T11:36:59.187865Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for 
TEvGetCheckpointsMetadataRequest (storage) 2025-04-10T11:36:59.188047Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-10T11:36:59.188079Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-10T11:36:59.188106Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.188248Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.188284Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-10T11:36:59.188319Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.188404Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.188437Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-10T11:36:59.188488Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.188561Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-10T11:36:59.188620Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.188657Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-10T11:36:59.188704Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-10T11:36:59.188820Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.188902Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-10T11:36:59.189052Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-10T11:36:59.189111Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-04-10T11:36:59.189150Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-10T11:36:59.189190Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-04-10T11:36:59.189245Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-10T11:36:59.189311Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-10T11:36:59.189353Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-10T11:36:59.278897Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] 
TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-10T11:36:59.279080Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-10T11:36:59.279201Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-10T11:36:59.279238Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-10T11:36:59.279274Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-10T11:36:59.279359Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-10T11:36:59.279580Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-10T11:36:59.279643Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-10T11:36:59.279685Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.279854Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.279925Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-10T11:36:59.279982Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.280154Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.280209Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-10T11:36:59.280261Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.280299Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-10T11:36:59.280349Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.280386Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-10T11:36:59.280416Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-10T11:36:59.280482Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.280531Z node 3 :STREAMS_C ... 
:2] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.363268Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Task state saved, need 0 more acks 2025-04-10T11:36:59.363295Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-10T11:36:59.363370Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.363398Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-10T11:36:59.363485Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 1 2025-04-10T11:36:59.363511Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:6:2053], need 1 more acks 2025-04-10T11:36:59.363538Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 3 2025-04-10T11:36:59.363563Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:8:2055], need 0 more acks 2025-04-10T11:36:59.363582Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-10T11:36:59.363644Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCompleteCheckpointResponse 2025-04-10T11:36:59.363670Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint completed 2025-04-10T11:36:59.363698Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-10T11:36:59.363725Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.363781Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.363805Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.363873Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.363900Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 2 more acks 2025-04-10T11:36:59.363927Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.363960Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2025-04-10T11:36:59.363994Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.364024Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2025-04-10T11:36:59.364054Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 
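The coordinator traces above follow a fixed bookkeeping pattern: after barrier injection the coordinator counts `TEvSaveTaskStateResult` acks down ("need 2 more acks" → "need 0 more acks"), flips the checkpoint to 'PendingCommit', then counts `TEvStateCommitted` acks before marking it 'Completed'. A minimal sketch of that ack countdown — the types and names are illustrative, not the actual NKikimr classes:

```cpp
#include <cassert>
#include <iostream>

// Illustrative model of the checkpoint status transitions visible in the log:
// Pending -> PendingCommit -> Completed, each gated on an ack countdown.
enum class ECheckpointStatus { Pending, PendingCommit, Completed };

class TCheckpointTracker {
public:
    TCheckpointTracker(int saveAcks, int commitAcks)
        : SaveAcksLeft(saveAcks), CommitAcksLeft(commitAcks) {}

    // Called on each TEvSaveTaskStateResult with status OK.
    void OnTaskStateSaved() {
        assert(Status == ECheckpointStatus::Pending);
        if (--SaveAcksLeft == 0) {
            Status = ECheckpointStatus::PendingCommit;
            std::cout << "Got all acks, changing status to 'PendingCommit'\n";
        } else {
            std::cout << "Task state saved, need " << SaveAcksLeft << " more acks\n";
        }
    }

    // Called on each TEvStateCommitted while states are being committed.
    void OnStateCommitted() {
        assert(Status == ECheckpointStatus::PendingCommit);
        if (--CommitAcksLeft == 0) {
            Status = ECheckpointStatus::Completed;
            std::cout << "Got all acks, changing status to 'Completed'\n";
        }
    }

private:
    ECheckpointStatus Status = ECheckpointStatus::Pending;
    int SaveAcksLeft;
    int CommitAcksLeft;
};

int main() {
    // The traces above wait for 3 save acks but only 2 commit acks
    // (tasks 1 and 3 report TEvStateCommitted).
    TCheckpointTracker cp(3, 2);
    cp.OnTaskStateSaved();   // need 2 more acks
    cp.OnTaskStateSaved();   // need 1 more acks
    cp.OnTaskStateSaved();   // -> PendingCommit
    cp.OnStateCommitted();   // need 1 more acks
    cp.OnStateCommitted();   // -> Completed
    return 0;
}
```

The node-5 trace further below shows the failure branch of the same bookkeeping: a `STORAGE_ERROR` save ack aborts the checkpoint instead of advancing it, and the coordinator then aborts it in storage once all acks for the aborted checkpoint arrive.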
2025-04-10T11:36:59.364112Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.364151Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-10T11:36:59.364274Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-04-10T11:36:59.364310Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-04-10T11:36:59.364351Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-04-10T11:36:59.364389Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-04-10T11:36:59.364423Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-10T11:36:59.364460Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-04-10T11:36:59.364484Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint completed 2025-04-10T11:36:59.364524Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-10T11:36:59.364563Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.364607Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.364672Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.364729Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.364759Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 2 more acks 2025-04-10T11:36:59.364786Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.364807Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-04-10T11:36:59.364833Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.364864Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-04-10T11:36:59.364902Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-10T11:36:59.364969Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-10T11:36:59.364993Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-10T11:36:59.365076Z node 4 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-04-10T11:36:59.365118Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-04-10T11:36:59.365151Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-04-10T11:36:59.365185Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-04-10T11:36:59.365209Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-10T11:36:59.365253Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-04-10T11:36:59.365291Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint completed 2025-04-10T11:36:59.446002Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-10T11:36:59.446115Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-10T11:36:59.446238Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-10T11:36:59.446269Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-10T11:36:59.446301Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-10T11:36:59.446383Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-10T11:36:59.446538Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-10T11:36:59.446561Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-10T11:36:59.446585Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.446680Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.446710Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-10T11:36:59.446746Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-10T11:36:59.446827Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-10T11:36:59.446856Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-10T11:36:59.446890Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-10T11:36:59.446929Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-10T11:36:59.446987Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-10T11:36:59.447015Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-10T11:36:59.447060Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-04-10T11:36:59.447127Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-04-10T11:36:59.447164Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: [my-graph-id.42] [42:1] Checkpoint aborted 2025-04-10T11:36:59.447203Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-10T11:36:59.447253Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-10T11:36:59.447320Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-04-10T11:36:59.447352Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |97.4%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters |97.4%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSentinelTests::PDiskUnknownState [GOOD] >> TSentinelTests::PDiskErrorState >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ExternalSourceBuilderTest::ValidateName [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithoutCondition [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithCondition [GOOD] >> ExternalSourceBuilderTest::ValidateUnsupportedField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldOnCondition [GOOD] >> IcebergDdlTest::HiveCatalogWithS3Test [GOOD] >> IcebergDdlTest::HadoopCatalogWithS3Test [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::FailedOptionalTypeValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] >> DataShardBackgroundCompaction::ShouldCompact >> TTxDataShardBuildIndexScan::RunScan |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::WildcardsValidation [GOOD] |97.4%| [TS] {RESULT} ydb/core/external_sources/ut/unittest >> 
TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] |97.4%| [TS] {RESULT} ydb/library/yql/providers/s3/provider/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] >> test_query_cache.py::TestQueryCache::test >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25828, MsgBus: 15986 2025-04-10T11:31:11.893930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491645619701472989:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:11.897872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001569/r3tmp/tmpkTDVRD/pdisk_1.dat 2025-04-10T11:31:12.799336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:31:12.799463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:31:12.889140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:31:12.893550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25828, node 1 2025-04-10T11:31:13.182691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:31:13.182715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:31:13.182722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:31:13.182894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15986 TClient is connected to server localhost:15986 WaitRootIsUp 'Root'... 
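The `TCollectingS3ListingStrategyTests` names above spell out the contract being tested: collect paths across all iterations, fail the whole listing when more records arrive than the limit allows, propagate an issue from any iteration, and convert iterator exceptions into issues. A sketch of that contract under assumed types — the real strategy lives in ydb/library/yql/providers/s3/provider, and none of these names are the actual classes:

```cpp
#include <functional>
#include <optional>
#include <stdexcept>
#include <string>
#include <variant>
#include <vector>

// Either the collected object paths or an error issue, mimicking the
// success/issue outcomes exercised by the tests above.
using TListingResult = std::variant<std::vector<std::string>, std::string>;

// One iteration's output: a path batch or an issue; std::nullopt = exhausted.
using TBatch = std::variant<std::vector<std::string>, std::string>;
using TBatchSource = std::function<std::optional<TBatch>()>;

TListingResult CollectPaths(TBatchSource nextBatch, size_t limit) {
    std::vector<std::string> collected;
    try {
        while (auto batch = nextBatch()) {
            if (auto* issue = std::get_if<std::string>(&*batch)) {
                return *issue;  // any iteration's issue fails the whole strategy
            }
            auto& paths = std::get<std::vector<std::string>>(*batch);
            for (auto& p : paths) {
                if (collected.size() >= limit) {
                    return "limit exceeded: more than " + std::to_string(limit) +
                           " objects";
                }
                collected.push_back(std::move(p));
            }
        }
    } catch (const std::exception& e) {
        // "IfExceptionIsReturnedFromIterator...": convert it to an issue.
        return std::string("iterator exception: ") + e.what();
    }
    return collected;
}
```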
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:31:14.306678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:31:14.350583Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:31:16.908082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491645619701472989:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:31:16.908486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; CREATE TABLE `/Root/ColumnShard1` (Col1 Int64 NOT NULL, Col2 Int32 NOT NULL, PRIMARY KEY (Col1)) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000); 2025-04-10T11:31:16.954005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491645641176310016:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:16.954116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:31:17.969423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:31:18.298936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:18.299173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:18.299458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:18.299579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:18.299683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:18.299796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:18.299926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:18.300021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:18.300128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:18.300223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:18.300323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:18.300433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491645649766244710:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
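The `CREATE TABLE /Root/ColumnShard1 ... WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000)` statement above is ordinary scheme DDL; from a driver program it would typically be sent as a scheme query. A sketch using the YDB C++ SDK's table client — the header paths and exact API surface here are from memory, so treat them as assumptions to check against the SDK version you build with:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <iostream>

// Sends the column-store DDL seen in the log as a scheme query.
// Endpoint and database values are placeholders; adjust for your cluster.
int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")   // placeholder endpoint
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    auto sessionResult = client.CreateSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        std::cerr << sessionResult.GetIssues().ToString() << std::endl;
        return 1;
    }
    auto session = sessionResult.GetSession();

    const char* ddl = R"(
        CREATE TABLE `/Root/ColumnShard1` (
            Col1 Int64 NOT NULL,
            Col2 Int32 NOT NULL,
            PRIMARY KEY (Col1)
        )
        PARTITION BY HASH(Col1)
        WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000);
    )";

    auto status = session.ExecuteSchemeQuery(ddl).GetValueSync();
    if (!status.IsSuccess()) {
        std::cerr << status.GetIssues().ToString() << std::endl;
        return 1;
    }
    driver.Stop(true);
    return 0;
}
```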
2025-04-10T11:31:18.318957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:18.319031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:18.319148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:18.319182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:18.319376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:18.319407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:18.319506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:18.319535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:18.319602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:18.319627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:18.319669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:18.319697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:18.320481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:18.320527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:18.320713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:18.320742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
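The TTxUpdateSchema trace above runs the tablet's normalizers strictly in sequence, logging `normalizer_finished` for one before `normalizer_switched` to the next (Granules → Chunks → TablesCleaner → ... → RestoreV2Chunks, then `normalization_finished`). A toy sequential runner that reproduces that event shape — the chain names are taken from the log, but the code itself is illustrative only:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Runs normalizers one after another, emitting the finished/switched event
// pairs visible in the TTxUpdateSchema::Execute trace above.
struct TNormalizer {
    std::string Name;
    std::function<void()> Run;  // the actual repair work
};

void RunNormalizationChain(const std::vector<TNormalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        chain[i].Run();
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << chain[i].Name << ";\n";
        if (i + 1 < chain.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1].Name << ";\n";
        } else {
            std::cout << "event=normalization_finished;\n";
        }
    }
}

int main() {
    std::vector<TNormalizer> chain;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner",
                             "CleanGranuleId", "CleanInsertionDedup",
                             "GCCountersNormalizer", "RestorePortionFromChunks",
                             "SyncPortionFromChunks", "SyncMinSnapshotFromChunks",
                             "RestoreV1Chunks_V2", "RestoreV2Chunks"}) {
        chain.push_back({name, [] { /* no-op in this sketch */ }});
    }
    RunNormalizationChain(chain);
    return 0;
}
```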
2025-04-10T11:31:18.320873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:18.320898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:18.321087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:18.321114Z node 1 :TX_COLUMN ... stract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:36:45.322962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:36:45.323025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:36:45.323387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:36:45.323447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:36:45.323733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:36:45.323793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:36:45.396594Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.404755Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.405317Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.413711Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.414297Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.422789Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.423740Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.432475Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.432475Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.440379Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715658; 2025-04-10T11:36:45.467790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491647052386511981:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:45.467883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:45.468210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491647052386511986:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:45.472971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:36:45.487085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491647052386511988:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:36:45.565806Z node 2 :TX_PROXY ERROR: Actor# [2:7491647052386512039:2591] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:36:45.671490Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715662; 2025-04-10T11:36:45.704958Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976715664; Trying to start YDB, gRPC: 21064, MsgBus: 17688 2025-04-10T11:36:47.811931Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491647060766154578:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:47.812015Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001569/r3tmp/tmporHyi6/pdisk_1.dat 2025-04-10T11:36:47.974812Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:47.982822Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:47.982953Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:47.986588Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21064, node 3 2025-04-10T11:36:48.050632Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:48.050697Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:48.050726Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:48.050976Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17688 TClient is connected to server localhost:17688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:36:48.637777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:52.622386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491647082240991721:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:52.622522Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:52.669780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:36:52.860814Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491647060766154578:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:52.887237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:36:52.959182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-10T11:36:53.243220Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491647086535960412:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.243309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.243442Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491647086535960417:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:36:53.248660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-10T11:36:53.263743Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491647086535960419:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T11:36:53.348390Z node 3 :TX_PROXY ERROR: Actor# [3:7491647086535960486:3259] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> HttpRouter::Basic [GOOD]
>> KesusProxyTest::ReconnectsWithKesusWhenNotConnected
>> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD]
>> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD]
>> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD]
>> KesusProxyTest::SubscribesOnResource
|97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD]
|97.4%| [TS] {RESULT} ydb/core/public_http/ut/unittest
>> KesusProxyTest::SubscribesOnResource [GOOD]
>> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD]
>> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD]
>> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD]
>> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD]
>> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD]
>> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD]
>> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD]
>> KesusProxyTest::AllocatesResourceWithKesus [GOOD]
>> KesusProxyTest::DisconnectsDuringActiveSession
>> DataCleanup::ForceDataCleanup
>> KesusProxyTest::DisconnectsDuringActiveSession [GOOD]
>> KesusProxyTest::AllocatesResourceOffline [GOOD]
>> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD]
>> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD]
>> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD]
>> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD]
>> TQuoterServiceTest::StaticRateLimiter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD]
Test command err:
2025-04-10T11:36:21.062242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646951276874482:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:21.062400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpUH1KSI/pdisk_1.dat 2025-04-10T11:36:21.551312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:21.551471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:21.553140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:36:21.595759Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11332 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:21.857353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:21.881266Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:21.896805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.040190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:22.102211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:24.710985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646961891389300:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:24.711116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmp1kecGW/pdisk_1.dat 2025-04-10T11:36:24.842281Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:24.869561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:24.869660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:24.871424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23261 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:25.099705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.106532Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.124664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.201170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:25.260353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.171496Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646981362791153:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:28.171609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpbgjaYr/pdisk_1.dat 2025-04-10T11:36:28.260622Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:28.311076Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:28.311163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:28.312507Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23127 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:28.483590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.490578Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.498551Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:36:28.502948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:28.554922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:28.601721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T11:36:31.865602Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491646995455743047:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:31.865699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpmRg5pD/pdisk_1.dat 2025-04-10T11:36:32.020579Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:32.041176Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:32.041263Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:32.044603Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11154 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:44.574676Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28604 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:44.847448Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:44.854907Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:44.866022Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:36:44.872071Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:44.965823Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:45.026480Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:48.926632Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7491647066788133403:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:48.926699Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpajcqOS/pdisk_1.dat 2025-04-10T11:36:49.119252Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:49.143063Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:49.143173Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:49.144875Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12616 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:49.407332Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:49.434240Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:49.507053Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T11:36:49.594269Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:53.218421Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7491647086687120294:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:53.218531Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpd0hTYV/pdisk_1.dat 2025-04-10T11:36:53.328720Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:53.353589Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:53.353691Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:53.354786Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6865 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:53.587947Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:53.602983Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:53.677638Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:53.737791Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:36:57.685472Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491647103184828212:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:57.685595Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001fa9/r3tmp/tmpEU00Ts/pdisk_1.dat 2025-04-10T11:36:57.836103Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:57.859972Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:57.860085Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:57.863607Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32433 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:36:58.118411Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:36:58.142286Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:58.208979Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:36:58.265146Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD]
>> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump]
>> StatisticsSaveLoad::Delete [GOOD]
>> SystemView::AuthOwners [GOOD]
>> MediatorTimeCast::ReadStepSubscribe [GOOD]
>> MediatorTimeCast::GranularTimecast
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD]
Test command err:
2025-04-10T11:36:55.977633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:36:55.978091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:36:55.978196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000c23/r3tmp/tmpcSrCY2/pdisk_1.dat 2025-04-10T11:36:56.396666Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3420, node 1 2025-04-10T11:36:56.639032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:36:56.639108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:36:56.639144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:36:56.639667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:36:56.642883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:36:56.730614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:36:56.730746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:36:56.744887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24801 2025-04-10T11:36:57.345747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:00.430635Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-10T11:37:00.463217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:00.463324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:00.492001Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:37:00.494730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:00.747473Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.748133Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.748867Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749063Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749474Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749565Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749644Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.749739Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-10T11:37:00.916768Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:00.916883Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:00.930512Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:01.070054Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:01.127176Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-10T11:37:01.127264Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-10T11:37:01.176096Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-10T11:37:01.177164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-10T11:37:01.177377Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-10T11:37:01.177442Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-10T11:37:01.177505Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-10T11:37:01.177588Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-10T11:37:01.177654Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-10T11:37:01.177705Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-10T11:37:01.178220Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-10T11:37:01.203723Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:37:01.203871Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-10T11:37:01.213816Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-04-10T11:37:01.218998Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2616] 2025-04-10T11:37:01.219373Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2616], schemeshard id = 72075186224037897 2025-04-10T11:37:01.230044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-10T11:37:01.248730Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-10T11:37:01.248806Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-10T11:37:01.248885Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-10T11:37:01.267313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-10T11:37:01.275414Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-10T11:37:01.275558Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-10T11:37:01.490845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-10T11:37:01.658876Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-10T11:37:01.737166Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:37:02.538497Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-10T11:37:02.538841Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:37:02.555642Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-10T11:37:02.559003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2263:3090], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:02.559098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2279:3095], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:02.559167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:02.564207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-10T11:37:02.604134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2283:3098], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:37:02.886118Z node 1 :TX_PROXY ERROR: Actor# [1:2371:3126] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:03.234115Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2393:3138]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:37:03.234355Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-10T11:37:03.234443Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2395:3140] 2025-04-10T11:37:03.234527Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2395:3140] 2025-04-10T11:37:03.235116Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2396:2843] 2025-04-10T11:37:03.235375Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2395:3140], server id = [2:2396:2843], tablet id = 72075186224037894, status = OK 2025-04-10T11:37:03.235590Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2396:2843], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-10T11:37:03.235673Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-10T11:37:03.235875Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-10T11:37:03.235957Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2393:3138], StatRequests.size() = 1 2025-04-10T11:37:03.367809Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=N2Y4NTQxZjgtZmI4Nzg1ZWUtNGE3ZTkwOTEtOGE4NzQ2N2I=, TxId: 2025-04-10T11:37:03.367887Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=N2Y4NTQxZjgtZmI4Nzg1ZWUtNGE3ZTkwOTEtOGE4NzQ2N2I=, TxId: 2025-04-10T11:37:03.368780Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-10T11:37:03.371141Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-10T11:37:03.393252Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2429:3161]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:37:03.393399Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-10T11:37:03.393442Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2429:3161], StatRequests.size() = 1 2025-04-10T11:37:03.545234Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZGZhYTRjOTgtMzI0Yzc3MmQtMjc4OTYzYTUtZjgxMjg5Njk=, TxId: 2025-04-10T11:37:03.545312Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZGZhYTRjOTgtMzI0Yzc3MmQtMjc4OTYzYTUtZjgxMjg5Njk=, TxId: 2025-04-10T11:37:03.546531Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-10T11:37:03.549462Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-10T11:37:03.598779Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2461:3176]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-10T11:37:03.598992Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-10T11:37:03.599040Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2461:3176], StatRequests.size() = 1 2025-04-10T11:37:03.723254Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NzlhZDEwZjItZGQ5YzVlOTctZWMxMGQzOTctMzUwNTY2NmE=, TxId: 01jrfqtfeq9nhqymf4y9fqdbzj 2025-04-10T11:37:03.723406Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=NzlhZDEwZjItZGQ5YzVlOTctZWMxMGQzOTctMzUwNTY2NmE=, TxId: 01jrfqtfeq9nhqymf4y9fqdbzj
|97.4%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|97.4%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthOwners [GOOD]
Test command err:
2025-04-10T11:35:30.274896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646729821249169:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:30.274994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f86/r3tmp/tmpQd28LM/pdisk_1.dat 2025-04-10T11:35:30.767033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.768196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.768284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.774275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8142, node 1 2025-04-10T11:35:30.889592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.889613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.889619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.889735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.270084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:31.409584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:35:31.409793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.409870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-10T11:35:31.409985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-10T11:35:31.410136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:35:31.410387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:35:31.410405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.410462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:35:31.410482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:35:31.414426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-10T11:35:31.414563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-10T11:35:31.414791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:35:31.414812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:35:31.414951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:35:31.415035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:35:31.415064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646729821249772:2392], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-10T11:35:31.415108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7491646729821249772:2392], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-10T11:35:31.415154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at 
schemeshard: 72057594046644480 2025-04-10T11:35:31.415172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.415217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-10T11:35:31.415244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-04-10T11:35:31.418316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:31.419810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.419868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.419879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-10T11:35:31.419895Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-10T11:35:31.419913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T11:35:31.422630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.422786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.422800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-10T11:35:31.422817Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-10T11:35:31.422837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-10T11:35:31.422903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-10T11:35:31.423784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:35:31.423816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-10T11:35:31.423850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-10T11:35:31.423973Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-10T11:35:31.424116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:35:31.425278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.425501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-10T11:35:31.426878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744284931473, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:35:31.427101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744284931473 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:35:31.427173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-10T11:35:31.427388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-10T11:35:31.427457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-10T11:35:31.427651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:35:31.427712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:35:31.427759Z node 1 :FLAT_TX_SCHEMESHARD INF ... 
se ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user4 },{ Sid: user3 }] Groups: [{ Sid: group1 Members: [] }] } Children [Dir3,Dir4,Table2] }] } 2025-04-10T11:37:01.238905Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.238985Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.239173Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3 TableId: [72075186224037894:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir33,SubDir34] }] } 2025-04-10T11:37:01.239238Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.239788Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3/SubDir33 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.240257Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3/SubDir33 TableId: [72075186224037894:4:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-10T11:37:01.240304Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.240993Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 
0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3/SubDir34 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.243448Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir3/SubDir34 TableId: [72075186224037894:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-10T11:37:01.243522Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.243608Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.244128Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4 TableId: [72075186224037894:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir45,SubDir46] }] } 2025-04-10T11:37:01.244168Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.244239Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4/SubDir45 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.245845Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4/SubDir45 TableId: [72075186224037894:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 
Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-10T11:37:01.245894Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.246008Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4/SubDir46 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.246419Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Dir4/SubDir46 TableId: [72075186224037894:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-10T11:37:01.246464Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.246675Z node 36 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Table2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-10T11:37:01.246959Z node 36 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant2/Table2 TableId: [72075186224037894:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037895 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037897 Mediators: 72075186224037898 SchemeShard: 72075186224037894 SysViewProcessor: 72075186224037899 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-10T11:37:01.246997Z node 36 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [36:7491647122056166668:2409], row count: 1, finished: 0 2025-04-10T11:37:01.247139Z node 36 :SYSTEM_VIEWS INFO: Scan finished, actor: [36:7491647122056166668:2409], owner: [36:7491647122056166664:2407], scan id: 0, table id: [72075186224037894:1:0:auth_owners] 2025-04-10T11:37:01.248453Z node 36 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [36:7491647087696426230:2212], database# , query hash# 18177464083368258369, cpu time# 116246 2025-04-10T11:37:01.249181Z node 36 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285021234, txId: 
281474976715693] shutting down 2025-04-10T11:37:01.285325Z node 37 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:37:01.285574Z node 40 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:37:01.286395Z node 36 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 38 2025-04-10T11:37:01.291177Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:01.291388Z node 36 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 37 2025-04-10T11:37:01.291851Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:01.294069Z node 36 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 40 2025-04-10T11:37:01.295046Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:01.295303Z node 36 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 39 2025-04-10T11:37:01.295480Z node 36 :HIVE WARN: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Connected -> Disconnected |97.4%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> TRangeTreap::Simple [GOOD] >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> TRangeTreap::Sequential >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource >> DataShardReassign::AutoReassignOnYellowFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:29.074314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:29.074406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:29.074472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:29.074504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:29.074540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:29.074564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:29.074618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:29.074696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:29.075030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:29.158298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:29.158361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:29.173788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:29.174065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:29.174244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:29.181279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:29.181508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:29.182178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.182590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:29.184577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.185996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:29.186071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.186306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:29.186360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:29.186399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:29.186607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.192958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:29.314885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:29.315139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.315385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:29.315628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:29.315690Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.319554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.319710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:29.319925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.319996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:29.320049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:29.320107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:29.322646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.322716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:29.322754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:29.324860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.324916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.324985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.325050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.329566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:29.331840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:29.332036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:29.332847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.332972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:29.333022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.333276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:29.333319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.333453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:29.333511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:29.335240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:29.335275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:29.335407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.335434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:29.335595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.335623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:29.335708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:29.335743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.335771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:29.335827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.335850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:29.335888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.335913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:29.335933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:29.335982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:29.336006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:29.336030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:29.337924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:29.338032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:29.338066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1165Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-04-10T11:37:00.931201Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-04-10T11:37:00.931235Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-04-10T11:37:00.931264Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-04-10T11:37:00.931294Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-04-10T11:37:00.931882Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.932063Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.932131Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-10T11:37:00.932207Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2025-04-10T11:37:00.932293Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2025-04-10T11:37:00.933661Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.933752Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.933786Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-10T11:37:00.933817Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2025-04-10T11:37:00.933850Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2025-04-10T11:37:00.933937Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-04-10T11:37:00.939281Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.940208Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-10T11:37:00.954014Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1467 
} } 2025-04-10T11:37:00.954071Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-10T11:37:00.954252Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1467 } } 2025-04-10T11:37:00.954415Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1467 } } 2025-04-10T11:37:00.955752Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 750 RawX2: 81604381263 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-10T11:37:00.955840Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-10T11:37:00.956088Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 750 RawX2: 81604381263 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-10T11:37:00.956167Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2025-04-10T11:37:00.956323Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 750 RawX2: 81604381263 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-10T11:37:00.956416Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-04-10T11:37:00.956485Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-10T11:37:00.956561Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-04-10T11:37:00.956640Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2025-04-10T11:37:00.959503Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-10T11:37:00.961138Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-10T11:37:00.961708Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 
2025-04-10T11:37:00.961786Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-04-10T11:37:00.962024Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-04-10T11:37:00.962088Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-10T11:37:00.962190Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-04-10T11:37:00.962262Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-10T11:37:00.962329Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-04-10T11:37:00.962410Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-10T11:37:00.962475Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-10T11:37:00.962537Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-10T11:37:00.962620Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2025-04-10T11:37:00.962677Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2025-04-10T11:37:00.962696Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2025-04-10T11:37:00.962767Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2025-04-10T11:37:00.962795Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2025-04-10T11:37:00.962813Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2025-04-10T11:37:00.962846Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-10T11:37:03.803092Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-10T11:37:03.803556Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 492us result status StatusNameConflict 2025-04-10T11:37:03.803848Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-10T11:37:06.352527Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 
SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-10T11:37:06.352983Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 444us result status StatusNameConflict 2025-04-10T11:37:06.353207Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TSequence::SequencesIndex >> TQuoterServiceTest::StaticMultipleAndResources |97.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> test_query_cache.py::TestQueryCache::test [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-04-10T11:32:11.018049Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:11.134341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:11.154865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:11.155168Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:11.163801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:11.164035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:11.164280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:11.164403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:11.164509Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:11.164625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:11.164750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:11.164879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:11.165017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:11.165153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.165251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:11.165348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:11.210735Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:11.210973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:11.211018Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:11.211163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:11.211282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:11.211349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:11.211380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:11.211443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:11.211516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:11.211550Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:11.211573Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:11.211707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:11.211762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:11.211794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:11.211816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:11.211901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:11.211945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:11.211984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:11.212016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:11.212079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:11.212115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:11.212143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:11.212196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:11.212230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:11.212250Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:11.212579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-10T11:32:11.212671Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-10T11:32:11.212750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-10T11:32:11.212852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T11:32:11.213074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:11.213139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:11.213178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:11.213390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:11.213437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.213469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.213644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:11.213702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:11.213734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:11.213946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:11.213991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:11.214025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:11.214164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:11.214208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:11.214268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-10T11:37:07.418033Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11154:12781];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-04-10T11:37:07.420472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11154:12781];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TxKeys::ComparePointKeys >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> BufferWithGaps::IsReadable [GOOD] >> TBatchedVecTest::TestOutputTOutputType [GOOD] >> BufferWithGaps::Basic [GOOD] >> TBatchedVecTest::TestToStringInt [GOOD] >> PtrTest::Test1 [GOOD] >> MediatorTimeCast::GranularTimecast [GOOD] >> DataCleanup::ForceDataCleanup [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/base/ut/gtest >> PtrTest::Test1 [GOOD] |97.5%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> SystemView::TopPartitionsByTliFields [GOOD] >> SystemView::TabletsShards >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: 2025-04-10T11:37:03.244520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:03.244731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:03.244880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a30/r3tmp/tmpFzChtp/pdisk_1.dat 2025-04-10T11:37:03.699959Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-04-10T11:37:03.700549Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:03.708296Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-04-10T11:37:03.740786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:03.783742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:03.784159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:03.796536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:03.881397Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-04-10T11:37:03.987003Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-04-10T11:37:04.168947Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-04-10T11:37:04.301375Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-04-10T11:37:04.428102Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-04-10T11:37:04.567183Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-04-10T11:37:04.613068Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-04-10T11:37:04.749529Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-04-10T11:37:04.876428Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-04-10T11:37:04.881158Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-04-10T11:37:04.903479Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:04.904372Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-04-10T11:37:04.916866Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-04-10T11:37:05.021733Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-04-10T11:37:05.117030Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-04-10T11:37:05.264974Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-04-10T11:37:05.391322Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-04-10T11:37:05.540447Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-04-10T11:37:05.688617Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-04-10T11:37:09.134701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:09.134936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:37:09.135207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a30/r3tmp/tmpx9n8Pw/pdisk_1.dat 2025-04-10T11:37:09.459282Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-10T11:37:09.459975Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-04-10T11:37:09.460027Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-10T11:37:09.460069Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:650:2548] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-04-10T11:37:09.460367Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:09.460496Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-04-10T11:37:09.460622Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-04-10T11:37:09.460835Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-10T11:37:09.460898Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-04-10T11:37:09.460958Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:653:2550] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-04-10T11:37:09.461160Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-04-10T11:37:09.461324Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-10T11:37:09.461384Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-04-10T11:37:09.461423Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:654:2551] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-04-10T11:37:09.461585Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-04-10T11:37:09.484247Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:09.521762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:09.521905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:09.533266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:09.608225Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-04-10T11:37:09.608313Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-04-10T11:37:09.713747Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-04-10T11:37:09.713820Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-04-10T11:37:09.894900Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-04-10T11:37:09.894997Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-04-10T11:37:09.982969Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-04-10T11:37:10.079133Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing: ... OR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.264950Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.275794Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.286541Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.297312Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.318890Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.340261Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.363484Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2025-04-10T11:37:10.375167Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-04-10T11:37:10.375328Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-10T11:37:10.375380Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-10T11:37:10.375835Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:10.376111Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-10T11:37:10.376151Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-10T11:37:10.376597Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:10.376667Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-10T11:37:10.376690Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-10T11:37:10.377384Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-10T11:37:10.377449Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to 
Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 10 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-10T11:37:10.377472Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-10T11:37:10.381493Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-10T11:37:10.381871Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-10T11:37:10.404635Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-10T11:37:10.415708Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-10T11:37:10.426719Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... tablet3 at 3500 |97.5%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> test_commit.py::TestCommit::test_commit >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:33:25.290165Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:33:25.290279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:25.290322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:33:25.290361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:33:25.290416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:33:25.290448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:33:25.290537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:25.290627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:33:25.291010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:25.378984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:33:25.379078Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:33:25.397162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:25.397495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:33:25.397699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:33:25.406133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:33:25.406561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:33:25.407360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:25.408023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:33:25.411448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:25.412986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:25.413079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:25.413272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-10T11:33:25.413326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:25.413371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:33:25.413568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:33:25.422410Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:33:25.586635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:33:25.586902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.587173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:33:25.587468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:33:25.587531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.590409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:25.590575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:33:25.590802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.590853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:33:25.590895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:33:25.590924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:33:25.593474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.593545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:33:25.593587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:33:25.595932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-10T11:33:25.596006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.596061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:25.596137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:33:25.600061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:25.602673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:33:25.602900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:33:25.604123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:25.604307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:33:25.604379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:25.604697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:33:25.604777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:25.604972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:25.605089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:25.607935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:25.608017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:25.608218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:25.608265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:25.608537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:25.608607Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:25.608712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:25.608747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:25.608785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:25.608834Z no ... ct: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: 
"compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:12.267594Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:37:12.267869Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 294us result status StatusSuccess 2025-04-10T11:37:12.268789Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 
CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:12.279905Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:841:2672] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:37:12.280016Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:780:2672] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:37:12.280204Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:841:2672] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285032254945 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285032254945 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285032254945 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:37:12.282592Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:841:2672] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T11:37:12.282712Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:780:2672] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> TMemoryController::Counters >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TTxDataShardReshuffleKMeansScan::BadRequest >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> Login::TestSuccessfulLogin1 >> Login::TestSuccessfulLogin1 [GOOD] >> Login::TestFailedLogin1 >> Login::TestFailedLogin1 [GOOD] >> Login::TestFailedLogin2 >> Login::TestFailedLogin2 [GOOD] >> Login::TestFailedLogin3 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> Login::TestFailedLogin3 [GOOD] >> Login::TestModifyUser >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> Login::TestModifyUser [GOOD] >> Login::TestGroups >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> 
Login::TestGroups [GOOD] >> Login::TestTokenWithGroups [GOOD] >> Login::TestCreateTokenForExternalAuth >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> Login::TestCreateTokenForExternalAuth [GOOD] >> Login::SanitizeJwtToken [GOOD] >> Login::CheckTimeOfUserCreating >> DataCleanup::ForceDataCleanupWithoutCompaction [GOOD] >> DataCleanup::MultipleDataCleanups >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> Login::CheckTimeOfUserCreating [GOOD] >> Login::CannotCheckLockoutNonExistentUser >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> Login::CannotCheckLockoutNonExistentUser [GOOD] >> Login::AccountLockoutAndAutomaticallyUnlock >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-04-10T11:37:11.125411Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:37:11.147060Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:37:11.150570Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-04-10T11:37:11.151526Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:11.173197Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:37:11.278468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:37:11.278534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:11.287369Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:11.288047Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:11.293076Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:37:11.293168Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:37:11.293238Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:37:11.294602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:11.294870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:11.294950Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2148] in generation 2 2025-04-10T11:37:11.379756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:11.420428Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:37:11.421747Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:11.421886Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:217:2215] 2025-04-10T11:37:11.421929Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:37:11.421964Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:37:11.422003Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:11.422302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:11.422373Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:11.423284Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:37:11.423384Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:37:11.423446Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:37:11.423507Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:11.423616Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:37:11.423649Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:37:11.423698Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:37:11.423756Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:37:11.423798Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:37:11.423907Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:11.423949Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:11.423997Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2025-04-10T11:37:11.427470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-10T11:37:11.427532Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:37:11.427627Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-10T11:37:11.427801Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-10T11:37:11.427846Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-10T11:37:11.427901Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-10T11:37:11.427993Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:37:11.428038Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-10T11:37:11.428072Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-10T11:37:11.428118Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:37:11.428425Z node 1 :TX_DATASHARD TRACE: 
Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-10T11:37:11.428462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-10T11:37:11.428495Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-10T11:37:11.428525Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:37:11.428573Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-10T11:37:11.428601Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-10T11:37:11.428634Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-10T11:37:11.428663Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-10T11:37:11.428687Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-10T11:37:11.441165Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-10T11:37:11.441247Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-10T11:37:11.441295Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-10T11:37:11.441357Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-10T11:37:11.442245Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:11.444823Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:223:2221], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:11.444886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:11.444968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:222:2220], serverId# [1:223:2221], sessionId# [0:0:0] 2025-04-10T11:37:11.445121Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-10T11:37:11.445154Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:37:11.445365Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-10T11:37:11.445425Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:37:11.445467Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-10T11:37:11.445522Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-10T11:37:11.449416Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-10T11:37:11.449489Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:11.449747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:11.449787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:11.449851Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:37:11.449910Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:11.449945Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:37:11.449988Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-10T11:37:11.450024Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-10T11:37:11.450072Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:37:11.450113Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-10T11:37:11.450172Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-10T11:37:11.450232Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-10T11:37:11.450452Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-10T11:37:11.450495Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:37:11.450537Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-10T11:37:11.450565Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-10T11:37:11.450589Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-10T11:37:11.450690Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-10T11:37:11.450718Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-10T11:37:11.450753Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-10T11:37:11.450792Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-10T11:37:11.450853Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-10T11:37:11.450903Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-10T11:37:11.450954Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
25-04-10T11:37:15.120245Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-04-10T11:37:15.120261Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:37:15.120279Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-04-10T11:37:15.120293Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-04-10T11:37:15.120306Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-04-10T11:37:15.120490Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-04-10T11:37:15.120520Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-04-10T11:37:15.120551Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-04-10T11:37:15.120579Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-04-10T11:37:15.120603Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-10T11:37:15.120618Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-04-10T11:37:15.120641Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-04-10T11:37:15.120684Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:15.120712Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-10T11:37:15.120742Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:37:15.120771Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:37:15.121987Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:131:2154]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-10T11:37:15.122024Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:37:15.122063Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-10T11:37:15.122101Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:15.124795Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-04-10T11:37:15.124881Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-04-10T11:37:15.124978Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:37:15.125602Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:37:15.125645Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-04-10T11:37:15.125684Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:37:15.125728Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-10T11:37:15.125756Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-04-10T11:37:15.125828Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client 
[5:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:37:15.125890Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-10T11:37:15.125970Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:15.126613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 131 RawX2: 21474838634 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-04-10T11:37:15.126748Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:227:2225], Recipient [5:131:2154]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:229:2226] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:37:15.126798Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:37:15.126887Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:121:2147], Recipient [5:131:2154]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-10T11:37:15.126925Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:37:15.126964Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-10T11:37:15.127024Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-10T11:37:15.127430Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:99:2134], Recipient [5:131:2154]: NActors::TEvents::TEvPoison 2025-04-10T11:37:15.127822Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-04-10T11:37:15.127928Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-04-10T11:37:15.139091Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:232:2227], Recipient [5:235:2228]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:37:15.145834Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:232:2227], Recipient [5:235:2228]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:37:15.146013Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:232:2227], Recipient [5:235:2228]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:37:15.149412Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:235:2228] 2025-04-10T11:37:15.149661Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:15.154305Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-10T11:37:15.182375Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:15.182502Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:15.184444Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:37:15.184549Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:37:15.184616Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:37:15.185100Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:15.185331Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:15.185411Z node 5 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [5:278:2228] in generation 3 2025-04-10T11:37:15.198007Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:15.198204Z node 5 :TX_DATASHARD INFO: Switched to work 
state Ready tabletId 9437184 2025-04-10T11:37:15.198380Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-10T11:37:15.198574Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:15.198906Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:283:2267] 2025-04-10T11:37:15.198966Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:37:15.199049Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-10T11:37:15.199108Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:15.199529Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-10T11:37:15.199717Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-10T11:37:15.199975Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:235:2228], Recipient [5:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:15.200051Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:15.200475Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:37:15.200605Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:37:15.200814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 235 RawX2: 21474838708 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-10T11:37:15.200975Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:235:2228]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-10T11:37:15.201041Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-10T11:37:15.201096Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-10T11:37:15.201148Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:15.201285Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:235:2228]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-10T11:37:15.201349Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-10T11:37:15.201403Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-10T11:37:15.201525Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:37:15.201589Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:15.201654Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:37:15.201703Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:37:15.201755Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:37:15.201803Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:37:15.201970Z node 5 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-10T11:37:15.202168Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:281:2265], Recipient [5:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:37:15.202219Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:37:15.202358Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:121:2147], Recipient [5:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-10T11:37:15.202417Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:37:15.202472Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-10T11:37:15.202553Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-10T11:37:15.215671Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:281:2265], Recipient [5:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:281:2265] ServerId: [5:285:2269] } 2025-04-10T11:37:15.215756Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: 2025-04-10T11:37:10.331490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:10.331766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:10.332017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000896/r3tmp/tmp8L0STj/pdisk_1.dat 2025-04-10T11:37:10.812042Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-10T11:37:10.812125Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.818389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:10.819371Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-10T11:37:10.819472Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.820318Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.820409Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.820504Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-10T11:37:10.831664Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T11:37:10.831735Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.833418Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T11:37:10.833533Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.834065Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.834176Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.834326Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-04-10T11:37:10.834559Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T11:37:10.834618Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.834824Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T11:37:10.834910Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.835196Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.835394Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.835508Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-10T11:37:10.835681Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T11:37:10.835735Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.836821Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T11:37:10.836876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.837085Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.837121Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.837177Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-10T11:37:10.839856Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-10T11:37:10.839932Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.840009Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:10.840057Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-04-10T11:37:10.850939Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-10T11:37:10.851045Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.851378Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:10.851455Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.872045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:10.873130Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-04-10T11:37:10.873276Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.876576Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:10.876663Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.876812Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-04-10T11:37:10.876854Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.882088Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:10.882192Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.894613Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:8:0:0:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:10.894734Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} commited cookie 1 for step 8 2025-04-10T11:37:10.894895Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-04-10T11:37:10.894952Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.895316Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:10.895404Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:10.918660Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:37:10.918876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-04-10T11:37:10.918946Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:10.919173Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-10T11:37:10.919239Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTx ... er{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.191124Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.191480Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.191531Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.191592Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-04-10T11:37:14.214637Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-04-10T11:37:14.214719Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.214889Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T11:37:14.214943Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.225656Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.225792Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-04-10T11:37:14.349846Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:37:14.349940Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-10T11:37:14.350102Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.350175Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.350596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.350704Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.350817Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-04-10T11:37:14.486990Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:37:14.487081Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.487227Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.487266Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.487638Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.487738Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.487843Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-04-10T11:37:14.623867Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:37:14.623962Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.624129Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.624203Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.624679Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.624735Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.624826Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-04-10T11:37:14.761529Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:37:14.761627Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.761771Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.761835Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.762257Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.762325Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.762406Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-04-10T11:37:14.774165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:37:14.774234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:14.799933Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-04-10T11:37:14.800050Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.800134Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:14.800188Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.894362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:37:14.894447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:37:14.894559Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-04-10T11:37:14.894626Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.894734Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-04-10T11:37:14.894806Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:37:14.894847Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-10T11:37:14.894882Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:37:14.894916Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:37:14.895005Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:14.895082Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.895274Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:37:14.957516Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-10T11:37:14.957612Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:14.957761Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T11:37:14.957827Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:14.958246Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.958302Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-10T11:37:14.958390Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 
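The reassign test above is waiting for exactly this reaction: once a periodic blobstorage status check reports a "light yellow" (low free space) flag on some channels, the datashard is expected to request reassignment of those channels (the captured TEvReassignTablet below). A minimal C++ sketch of that pattern follows; every name in it is hypothetical and stands in for the real NKikimr events and actors, it is not the actual API:

#include <cstdint>
#include <cstdio>
#include <vector>

// All names below are illustrative placeholders, not the NKikimr API.
struct TChannelStatus {
    uint32_t Channel = 0;
    bool LightYellowMove = false;  // "low disk space, prefer moving data away"
};

// Stand-in for emitting a reassignment request (the TEvReassignTablet above).
static void RequestReassign(const std::vector<uint32_t>& channels) {
    std::printf("reassign channels:");
    for (uint32_t ch : channels) std::printf(" %u", ch);
    std::printf("\n");
}

// Collect every channel flagged light-yellow and, if any, ask to move them.
static void OnBlobstorageStatus(const std::vector<TChannelStatus>& status) {
    std::vector<uint32_t> yellow;
    for (const auto& s : status) {
        if (s.LightYellowMove) yellow.push_back(s.Channel);
    }
    if (!yellow.empty()) RequestReassign(yellow);
}

int main() {
    // Mirrors the log line below: channels 0, 1 and 2 went light yellow.
    OnBlobstorageStatus({{0, true}, {1, true}, {2, true}});
    return 0;
}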
2025-04-10T11:37:15.085012Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest >> SystemView::ShowCreateTable [GOOD] >> SystemView::QueryStats >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |97.5%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] 2025-04-10 11:37:16,687 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |97.5%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> SystemView::TabletsShards [GOOD] >> TRangeTreap::Sequential [GOOD] >> TRangeTreap::Random >> TRangeTreap::Random [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD] Test command err: NOTE: building treap of size 1000000 got height 50 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 33013387 comparisons (33.013387 per op) NOTE: building treap of size 9256 got height 29 and needed 10479 ops (9735 inserts 265 updates 479 deletes) and 200005 comparisons (19.08626777 per op) Checking point 422 ... found 402 ranges, needed 954 comparisons (2.373134328 per range) Checking point 2944 ... found 1970 ranges, needed 5983 comparisons (3.037055838 per range) Checking point 5197 ... 
found 2453 ranges, needed 8897 comparisons (3.626987362 per range) Checking point 4318 ... found 2402 ranges, needed 8372 comparisons (3.485428809 per range) Checking point 7835 ... found 2383 ranges, needed 8678 comparisons (3.6416282 per range) Checking point 2283 ... found 1692 ranges, needed 4742 comparisons (2.802600473 per range) Checking point 780 ... found 666 ranges, needed 1615 comparisons (2.424924925 per range) Checking point 4678 ... found 2435 ranges, needed 8770 comparisons (3.60164271 per range) Checking point 4609 ... found 2449 ranges, needed 8761 comparisons (3.577378522 per range) Checking point 1340 ... found 1092 ranges, needed 2786 comparisons (2.551282051 per range) |97.5%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2025-04-10T11:35:29.912144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646729125392717:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:29.912206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f50/r3tmp/tmpnvuaxA/pdisk_1.dat 2025-04-10T11:35:30.402002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.407772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.407882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.445936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63458, node 1 2025-04-10T11:35:30.753083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.753126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.753133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.753283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
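The TRangeTreap figures above (comparisons per insert and per range found) come from counting key comparisons while exercising the tree. A self-contained toy version of that measurement, assuming nothing about the real TRangeTreap internals — a classic treap with an instrumented comparator:

#include <cstdint>
#include <cstdio>
#include <memory>
#include <random>

static uint64_t g_comparisons = 0;  // bumped by every key comparison

struct TNode {
    uint64_t Key = 0;
    uint32_t Prio = 0;
    std::unique_ptr<TNode> Left, Right;
};
using TNodePtr = std::unique_ptr<TNode>;

static bool Less(uint64_t a, uint64_t b) {
    ++g_comparisons;
    return a < b;
}

// Split t into l (keys < key) and r (keys >= key).
static void Split(TNodePtr t, uint64_t key, TNodePtr& l, TNodePtr& r) {
    if (!t) { l.reset(); r.reset(); return; }
    if (Less(t->Key, key)) {
        l = std::move(t);
        Split(std::move(l->Right), key, l->Right, r);
    } else {
        r = std::move(t);
        Split(std::move(r->Left), key, l, r->Left);
    }
}

// Classic treap insert: binary search tree on Key, max-heap on Prio.
static TNodePtr Insert(TNodePtr t, TNodePtr n) {
    if (!t) return n;
    if (n->Prio > t->Prio) {
        Split(std::move(t), n->Key, n->Left, n->Right);
        return n;
    }
    if (Less(n->Key, t->Key)) {
        t->Left = Insert(std::move(t->Left), std::move(n));
    } else {
        t->Right = Insert(std::move(t->Right), std::move(n));
    }
    return t;
}

int main() {
    std::mt19937_64 rng(42);
    TNodePtr root;
    const uint64_t n = 1000000;
    for (uint64_t i = 0; i < n; ++i) {
        auto node = std::make_unique<TNode>();
        node->Key = rng();
        node->Prio = static_cast<uint32_t>(rng());
        root = Insert(std::move(root), std::move(node));
    }
    // Comparable in spirit to the "(33.013387 per op)" figure in the log.
    std::printf("%llu inserts, %.2f comparisons per op\n",
                static_cast<unsigned long long>(n),
                static_cast<double>(g_comparisons) / static_cast<double>(n));
    return 0;
}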
2025-04-10T11:35:31.201708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:33.097099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646746305262528:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.097099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646746305262559:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.097197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.101706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:35:33.116370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646746305262566:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:35:33.207302Z node 1 :TX_PROXY ERROR: Actor# [1:7491646746305262617:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:33.924059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqqn5ydzvc3ph8bhea666g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMyOTkwMC1mMDU5NWFjMC04ZmNkMzBkLTQxYzg5YzM0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:33.973025Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646746305262655:2341], owner: [1:7491646746305262651:2339], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:33.979147Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646746305262655:2341], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:33.993548Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646746305262655:2341], row count: 1, finished: 1 2025-04-10T11:35:33.993614Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646746305262655:2341], owner: [1:7491646746305262651:2339], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:34.004424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284933921, txId: 281474976710660] shutting down 2025-04-10T11:35:34.914302Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646729125392717:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:34.914419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:35.183314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfqqrvb6fmtcyxefnxhr4d4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA2NTQxOS1mNWQyNDZhLWE1NWM2YWRmLTEzMTgzZDdh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:35:35.186030Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646754895197293:2356], owner: [1:7491646754895197290:2354], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:35.186768Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646754895197293:2356], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:35.186996Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646754895197293:2356], row count: 1, finished: 1 2025-04-10T11:35:35.187020Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646754895197293:2356], owner: [1:7491646754895197290:2354], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:35.188978Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284935179, txId: 281474976710662] shutting down 2025-04-10T11:35:36.371949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqqt0j4c6n50fb1y8bq0vf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZhZjkyZmMtNGY1YTFmOGUtNDc3N2NjMjgtY2U3OTM3ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.376298Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491646759190164633:2370], owner: [1:7491646759190164629:2368], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:36.378798Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491646759190164633:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:35:36.379077Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491646759190164633:2370], row count: 2, finished: 1 2025-04-10T11:35:36.379136Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491646759190164633:2370], owner: [1:7491646759190164629:2368], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-04-10T11:35:36.381360Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744284936370, txId: 281474976710664] shutting down 2025-04-10T11:35:37.332722Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646761169413695:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:37.332784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f50/r3tmp/tmphfuAx3/pdisk_1.dat 2025-04-10T11:35:37.444259Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:37.489237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:37.489332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:37.492377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2411, node 2 2025-04-10T11:35:37.699755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-10T11:35:37.699779Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:37.699786Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:37.699928Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:37.936209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:37.954445Z node 2 :FLAT_TX_SCHEMESHARD W ... SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:37:08.553291Z node 25 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:37:08.553323Z node 25 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:37:08.556227Z node 25 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-04-10T11:37:08.619379Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jrfqtm068cjgyg2eacv5yep5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=YTQwYWNkMy02M2QwNjU4OC05ZDc1YzA1Yy0zMDE4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:37:08.622164Z node 22 :SYSTEM_VIEWS INFO: Scan started, actor: [22:7491647152044836400:2447], owner: [22:7491647152044836397:2445], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-10T11:37:08.622891Z node 22 :SYSTEM_VIEWS INFO: Scan prepared, actor: [22:7491647152044836400:2447], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-10T11:37:08.623556Z node 25 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1744285028000000 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 1744285028000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-04-10T11:37:08.623844Z node 22 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [22:7491647152044836400:2447], row count: 1, finished: 1 2025-04-10T11:37:08.623865Z node 22 :SYSTEM_VIEWS INFO: Scan finished, actor: [22:7491647152044836400:2447], owner: [22:7491647152044836397:2445], scan id: 0, table id: [72075186224037888:1:0:top_partitions_by_tli_one_minute] 2025-04-10T11:37:08.627553Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285028618, txId: 281474976715682] shutting down 2025-04-10T11:37:08.638909Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 26 2025-04-10T11:37:08.639241Z node 26 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [26:7491647100358748038:2206], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-10T11:37:08.639126Z node 23 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [23:7491647101382838575:2197], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-10T11:37:08.639410Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:08.639408Z node 23 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [23:7491647101382838575:2197], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-10T11:37:08.639876Z node 26 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [26:7491647100358748038:2206], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-10T11:37:08.640913Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 25 2025-04-10T11:37:08.641149Z node 24 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [24:7491647099278567296:2206], processor id# 72075186224037899, database# /Root/Tenant2 2025-04-10T11:37:08.641853Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:08.642040Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 23 2025-04-10T11:37:08.642791Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:08.642947Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 24 2025-04-10T11:37:08.643426Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-10T11:37:08.644311Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[25:7491647100631391820:2106], Type=268959746 
2025-04-10T11:37:08.644460Z node 25 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [25:7491647100631391678:2156], processor id# 72075186224037893, database# /Root/Tenant1 2025-04-10T11:37:08.646312Z node 25 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [25:7491647100631391678:2156], database# /Root/Tenant1, processor id# 72075186224037893 2025-04-10T11:37:08.648972Z node 24 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [24:7491647099278567296:2206], database# /Root/Tenant2, processor id# 72075186224037899 2025-04-10T11:37:10.029713Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [25:7491647096336424218:2063] 2025-04-10T11:37:10.077304Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [25:7491647096336424218:2063] 2025-04-10T11:37:10.422926Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [23:7491647097087871075:2063] 2025-04-10T11:37:10.426971Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [23:7491647097087871075:2063] 2025-04-10T11:37:10.464400Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [23:7491647101382838575:2197] 2025-04-10T11:37:11.478731Z node 27 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7491647166917377831:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:11.478843Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f50/r3tmp/tmp7GsuoG/pdisk_1.dat 2025-04-10T11:37:11.635640Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:11.665744Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:11.665907Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:11.667876Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26941, node 27 2025-04-10T11:37:11.727751Z node 27 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:11.727783Z node 27 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:11.727800Z node 27 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:11.727983Z node 27 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:12.123803Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:12.135975Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:15.933411Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7491647184097247851:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:15.933546Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7491647184097247847:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:15.933814Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:15.938584Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:37:15.950451Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7491647184097247861:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:37:16.051776Z node 27 :TX_PROXY ERROR: Actor# [27:7491647188392215208:2457] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:16.186779Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqtqsv93mym6tqrkbj0td6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=ZWY2ZmFlMjktZDc5NjFjNy1hMGQzNDRmMC1hMGVhNjVlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:16.189428Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7491647188392215242:2351], owner: [27:7491647188392215238:2349], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:37:16.190328Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7491647188392215242:2351], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:37:16.191206Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7491647188392215242:2351], row count: 3, finished: 1 2025-04-10T11:37:16.191258Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7491647188392215242:2351], owner: [27:7491647188392215238:2349], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-10T11:37:16.193790Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285036185, txId: 281474976715661] shutting down >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::PDiskFaultyState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2025-04-10T11:37:09.703901Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:37:09.717105Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:37:09.720596Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:108:2140] 2025-04-10T11:37:09.721913Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:09.778311Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:37:09.785409Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:09.785562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:09.789854Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-10T11:37:09.789979Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-10T11:37:09.790050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-10T11:37:09.791436Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:09.791613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:09.791714Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:131:2140] in generation 2 
2025-04-10T11:37:09.816290Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:09.846000Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-10T11:37:09.847158Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:09.847329Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:136:2160] 2025-04-10T11:37:09.847374Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-10T11:37:09.847411Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-10T11:37:09.847455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-10T11:37:09.847687Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:108:2140], Recipient [1:108:2140]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:09.848403Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:09.849535Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-10T11:37:09.849650Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-10T11:37:09.849708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-10T11:37:09.849784Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:09.849837Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-10T11:37:09.849874Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-10T11:37:09.849906Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-10T11:37:09.849944Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-10T11:37:09.849990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-10T11:37:09.852109Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:99:2134], Recipient [1:108:2140]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 4294969430 } 2025-04-10T11:37:09.852191Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-10T11:37:13.090954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:13.091279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:37:13.091547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0007fc/r3tmp/tmpFUjRMs/pdisk_1.dat 2025-04-10T11:37:13.556945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:13.608867Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:13.655927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:13.656447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:13.669948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:13.763351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:13.803890Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-04-10T11:37:13.804206Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:13.857012Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:13.857212Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:13.859203Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:13.859297Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:13.859357Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:13.859761Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:13.859926Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:13.860034Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-04-10T11:37:13.870963Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:13.871091Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:37:13.871219Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:13.871329Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-04-10T11:37:13.871367Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:13.871407Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:37:13.871452Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:13.871928Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:13.872065Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:13.872149Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:13.872207Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:13.872247Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:13.872309Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:13.872444Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:673:2574], sessionId# [0:0:0] 2025-04-10T11:37:13.874397Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:13.875271Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:13.875936Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:37:13.878131Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:13.889058Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:13.889255Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:14.039994Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-04-10T11:37:14.048984Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:14.049084Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:14.050851Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:14.050920Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:14.050983Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:14.051240Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:37:14.051397Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:14.052273Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:14.052358Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:37:14.054890Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:14.055983Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
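The scheme transaction just logged creates the test table programmatically; the resolve result printed further down spells out its shape (path /Root/table-1, columns key and value, both Uint32, with key as the primary key). As a hedged reconstruction only — the test drives the schemeshard directly rather than running a query — the equivalent YQL DDL would be roughly:

CREATE TABLE `/Root/table-1` (
    key Uint32,
    value Uint32,
    PRIMARY KEY (key)
);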
2025-04-10T11:37:14.057460Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:37:14.057521Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:14.059275Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:37:14.059355Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:14.060366Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:14.060424Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:14.060485Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037 ... :17.460545Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:17.460589Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:17.460671Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-04-10T11:37:17.461160Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:17.461278Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:17.461340Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:37:17.462591Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:17.473333Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:17.473426Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:17.624627Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-04-10T11:37:17.625288Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:17.625337Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:17.626276Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:17.626331Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:17.626372Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:17.626609Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:37:17.626753Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:17.627532Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:17.627626Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-04-10T11:37:17.628044Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:17.628415Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:17.629877Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:37:17.629922Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:17.630524Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:37:17.630583Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:17.631405Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:17.631448Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:17.631504Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:37:17.631564Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:37:17.631624Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:37:17.631713Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:17.633076Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:17.635034Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:37:17.635093Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:37:17.635249Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:37:17.640505Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-10T11:37:17.641623Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-10T11:37:17.693730Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:742:2622] 2025-04-10T11:37:17.694038Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:17.702520Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:17.703155Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:17.705637Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:17.705724Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:17.705793Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:17.706287Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:17.707380Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:17.707476Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id 
[3:757:2622] in generation 2 2025-04-10T11:37:17.729254Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:17.729397Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-10T11:37:17.729506Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:37:17.729707Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:17.729800Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-04-10T11:37:17.729943Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:761:2632] 2025-04-10T11:37:17.729982Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:17.730033Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:37:17.730078Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:17.730453Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-10T11:37:17.730666Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-10T11:37:17.731713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 742 RawX2: 12884904510 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-10T11:37:17.732087Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:17.732180Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:17.732340Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-04-10T11:37:17.732384Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:17.732541Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:17.732891Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:17.732959Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:17.733002Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:17.733047Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:17.733316Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:37:17.770813Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-10T11:37:17.771120Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:37:17.771238Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:37:17.771466Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-04-10T11:37:17.773684Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:765:2636], serverId# [3:767:2637], sessionId# [0:0:0] 2025-04-10T11:37:17.787854Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888 |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> Login::AccountLockoutAndAutomaticallyUnlock [GOOD] >> Login::ResetFailedAttemptCount >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> AuthConfigValidation::AcceptValidPasswordComplexity [GOOD] >> AuthConfigValidation::CannotAcceptInvalidPasswordComplexity [GOOD] >> AuthConfigValidation::AcceptValidAccountLockoutConfig [GOOD] >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi |97.5%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/config/validation/auth_config_validator_ut/unittest >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |97.5%| [TS] {RESULT} ydb/core/config/validation/auth_config_validator_ut/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> ColumnShardConfigValidation::AcceptDefaultCompression >> DataCleanup::MultipleDataCleanups [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations >> ColumnShardConfigValidation::AcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::NotAcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::CorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::CorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::NotCorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::CorrectZSTDCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] |97.5%| [TS] {RESULT} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] |97.5%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> ConfigGRPCService::ReplaceConfig >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> MessageDelayStatsTest::BigTimeDiff [GOOD] >> 
MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_build_index/unittest >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] Test command err: 2025-04-10T11:37:04.279242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:04.279506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:04.279722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006c0/r3tmp/tmpd8dNV6/pdisk_1.dat 2025-04-10T11:37:04.789968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:04.863949Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:04.909620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:04.910037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:04.927477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:05.041151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:05.100370Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:37:05.100747Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:05.153144Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:05.153280Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:05.156460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:05.156569Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:05.156623Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:05.158386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:05.158545Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:05.158628Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:37:05.169488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:05.201404Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:37:05.203593Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:05.203785Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:37:05.203830Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:05.203884Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:37:05.203926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:05.205417Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:05.205546Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:05.205674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:05.205748Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:05.205923Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:05.205987Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:05.206138Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:37:05.207733Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:05.208197Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:05.208320Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:37:05.210343Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:05.221182Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:05.221306Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:05.371201Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:37:05.375876Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:05.375959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:05.376984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:05.377053Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:05.377137Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:05.377398Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-10T11:37:05.377565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:05.378306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:05.378390Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:37:05.381251Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:05.382742Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-10T11:37:05.384408Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-10T11:37:05.384458Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:05.385076Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-10T11:37:05.385143Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:05.385867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:05.385924Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:05.385969Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:37:05.386033Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:37:05.386108Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-10T11:37:05.386269Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:05.389655Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:05.391719Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-10T11:37:05.391788Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:37:05.392311Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-10T11:37:05.419242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.420484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.420615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.428255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:05.433855Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:05.583897Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:05.586859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:05.651238Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:06.505726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqth47frse2j83gc99h669, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY5OWZmYTMtNDkwNjc5YmUtZDdmMDRkNjYtNDQyMzY5NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:06.597912Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-04-10T11:37:06.604145Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:37:06.604400Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 881210Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:37:21.881241Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.881281Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-10T11:37:21.881323Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.881368Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:37:21.881409Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:21.881500Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-10T11:37:21.881542Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:21.881574Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:37:21.881601Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.881674Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-04-10T11:37:21.882044Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.882121Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-10T11:37:21.882271Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 
2025-04-10T11:37:21.882557Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037890:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-04-10T11:37:21.883109Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-04-10T11:37:21.883738Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 24 2025-04-10T11:37:21.883815Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 63000} 2025-04-10T11:37:21.883899Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:37:21.884058Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 8 for step 25 2025-04-10T11:37:21.884606Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:37:21.884744Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-04-10T11:37:21.884795Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-04-10T11:37:21.886572Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-10T11:37:21.886630Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-04-10T11:37:21.887103Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-10T11:37:21.887156Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:21.887198Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:37:21.887234Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.887283Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for ReadTableScan 2025-04-10T11:37:21.887555Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:21.887646Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-10T11:37:21.887701Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:21.898777Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 26 2025-04-10T11:37:21.898882Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:37:21.898947Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:37:21.899021Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1536:3309], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:37:21.899092Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:37:21.899491Z 
node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} commited cookie 1 for step 24 2025-04-10T11:37:21.899564Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 63000} 2025-04-10T11:37:21.899630Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:37:21.899665Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:37:21.899877Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-10T11:37:21.899945Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:21.899999Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:37:21.900040Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.900085Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-10T11:37:21.900396Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.900486Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-10T11:37:21.900567Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:21.900830Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037891:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-04-10T11:37:21.901120Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2025-04-10T11:37:21.901197Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:21.901374Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{81, redo 184b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-04-10T11:37:21.901441Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:21.901927Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 1 2025-04-10T11:37:21.902742Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 8 for step 25 2025-04-10T11:37:21.902949Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:88} commited cookie 1 for step 87 2025-04-10T11:37:21.903316Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037891, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-10T11:37:21.979031Z node 2 :TX_DATASHARD DEBUG: Got stream data ack 
ShardId: 72075186224037891, TxId: 281474976715666, PendingAcks: 0 2025-04-10T11:37:21.979128Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 0 2025-04-10T11:37:21.981169Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-10T11:37:21.981230Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037891 2025-04-10T11:37:21.981468Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-10T11:37:21.981525Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:37:21.981574Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:37:21.981614Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:21.981656Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for ReadTableScan 2025-04-10T11:37:21.981925Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:21.982024Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-10T11:37:21.982084Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:37:21.992995Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 1 for step 26 2025-04-10T11:37:21.993105Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:37:21.993155Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-10T11:37:21.993225Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [2:1536:3309], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:37:21.993278Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891
|97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_build_index/unittest
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData
>> test.py::TestSqsSplitMergeStdTables::test_std_merge_split
>> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD]
>> Login::ResetFailedAttemptCount [GOOD]
>> Login::ResetFailedAttemptCountWithAlterUserLogin
>> TSentinelTests::PDiskFaultyState [GOOD]
>> TSentinelTests::PDiskRackGuardHalfRack
>> Login::ResetFailedAttemptCountWithAlterUserLogin [GOOD]
>> Login::CreateAlterUserWithHash
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs
>> Login::CreateAlterUserWithHash [GOOD]
>> Login::CheckThatCacheDoesNotHoldOldPassword
>> Login::CheckThatCacheDoesNotHoldOldPassword [GOOD]
>> TMemoryController::Counters_HardLimit [GOOD]
>> TMemoryController::Counters_NoHardLimit
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob
>> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD]
|97.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/login/ut/unittest >> Login::CheckThatCacheDoesNotHoldOldPassword [GOOD]
|97.6%| [TS] {RESULT} ydb/library/login/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD]
Test command err:
2025-04-10T11:37:04.730177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:04.730440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:04.730633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a73/r3tmp/tmptVYotV/pdisk_1.dat 2025-04-10T11:37:05.232724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:05.279254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:05.326563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:05.331176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:05.343864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:05.442004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:05.889322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.889504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.889606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:05.898502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:06.066594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:06.141320Z node 1 :TX_PROXY ERROR: Actor# [1:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:07.179615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqthjvf7ergkxe9gex7c5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY5ZDVhNTYtNTVkMDliMzItM2IwOWFhNWYtNzY0NGJmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:07.375477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqtjy82esvwd314y0zyem9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY3ZDdlZi00ZjQ0MjAxNy1iMDRkZGRjOS1mZjM5NzM5Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:07.516511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqtk2t76xx9vfcvz4fjwpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI1MTFlNDUtOWNhYTlmMDktZTRmZjg1OTUtODcwZTkyYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:07.729843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqtk78dqn6h9ztchgqjg2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNlYTQ2ZjctYWIxMWIzY2YtOTNlOThhNjItOWMzYWY1ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-10T11:37:10.803802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:10.803974Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:37:10.804145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a73/r3tmp/tmpCkAv5i/pdisk_1.dat 2025-04-10T11:37:11.069683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:11.093387Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:11.129134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:11.129298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:11.141127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:11.223001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:11.550783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:821:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:11.550889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:11.550961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:11.554925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:11.712518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:11.747387Z node 2 :TX_PROXY ERROR: Actor# [2:913:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:12.288178Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqtq3x6cgexxpg8fw2wx11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTVjMDAzZjktMjE1ZDFhZmYtNzhhZGIzNDgtMjYwN2E1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.315992Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqtq3x6cgexxpg8fw2wx11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTVjMDAzZjktMjE1ZDFhZmYtNzhhZGIzNDgtMjYwN2E1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.331913Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqtq3x6cgexxpg8fw2wx11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTVjMDAzZjktMjE1ZDFhZmYtNzhhZGIzNDgtMjYwN2E1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.697502Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqtqxh2q4cmnwy18t24zyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQ0OWNhMGMtMjUyYWJhODktNWQ1NWVlN2MtNzk0MjhlYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.740970Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqtqxh2q4cmnwy18t24zyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQ0OWNhMGMtMjUyYWJhODktNWQ1NWVlN2MtNzk0MjhlYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.749014Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqtqxh2q4cmnwy18t24zyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQ0OWNhMGMtMjUyYWJhODktNWQ1NWVlN2MtNzk0MjhlYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:12.752899Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfqtqxh2q4cmnwy18t24zyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQ0OWNhMGMtMjUyYWJhODktNWQ1NWVlN2MtNzk0MjhlYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:13.209056Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfqtra8c60tndn2e0mrw7vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYxMGIxNTQtY2JhMTc4ZmItNzMwMjUwNGMtOTBlOGYyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:13.247974Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfqtra8c60tndn2e0mrw7vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYxMGIxNTQtY2JhMTc4ZmItNzMwMjUwNGMtOTBlOGYyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:13.254031Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfqtra8c60tndn2e0mrw7vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDYxMGIxNTQtY2JhMTc4ZmItNzMwMjUwNGMtOTBlOGYyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Ro ... w5gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njk2ODUxZjktZTliM2UzMjUtYzQ0YjUwMy04Yjg5MDk2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-10T11:37:13.541902Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfqtrxffqtgrvgdsv609dcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M5MjJlMGItYTc1YjUyMTMtYjY4YjhjOTQtZmU3ZDQ2Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-10T11:37:16.401198Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:16.401524Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:16.401603Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a73/r3tmp/tmpITR51T/pdisk_1.dat 2025-04-10T11:37:16.687756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:16.718550Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:16.757093Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:16.757244Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:16.769097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:16.854553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:17.174459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:17.174572Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:17.174653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:17.180509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:17.341051Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:17.376075Z node 3 :TX_PROXY ERROR: Actor# [3:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:17.474689Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqtwkm8hr4zqhtekdm8w4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmEzZWRmNTItOWJjMThkMWMtMjM0N2VhNDYtZDk2MDYyOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:17.676218Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqtwy67ehpbmjycgj5h5cq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWM3MDA1ZDAtNzU2MDJlZGQtZGU3NzljYTEtZTRiMGNiNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-04-10T11:37:17.836060Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqtx3n110vp3h2ndm5datt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWZjYTA3ZjAtOWE1ODE0NDAtMmRmYjhlOTgtYjg0MzIxODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:18.003924Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqtx9d0hvfww9jr60y5m18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODJmODE5ODktNDM3MTZmMjgtYWUwM2NhOGMtMjc5M2EzMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-04-10T11:37:18.201597Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfqtxdxec070gcw70z7js1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTU4MmUxMzAtYzNjN2U1OTMtNzZjYWEzMWMtZTVjOTg4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:18.370633Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfqtxmx6dhtdh5vxrp2bvge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDM4NGE0MWYtMjVkOThmNmItZDNmNDk1NWEtNzQ1YjQzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-04-10T11:37:22.470284Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:22.470506Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:22.470724Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a73/r3tmp/tmpk0TeB0/pdisk_1.dat 2025-04-10T11:37:22.821011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:22.854419Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:22.892617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:22.892789Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:22.904647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:22.988734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:23.294908Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:23.295033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:23.295111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:23.300634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:23.459595Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:23.494409Z node 4 :TX_PROXY ERROR: Actor# [4:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:23.569079Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:868:2702], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-04-10T11:37:23.571126Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2VjMDY2YTYtOWQxYjNhNmYtZTIxYzY2MS1iNjI1YWY0Yw==, ActorId: [4:767:2639], ActorState: ExecuteState, TraceId: 01jrfqv2jw537b927ywjespkpv, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-10T11:37:23.610701Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:890:2718], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-04-10T11:37:23.612741Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mjg0NWEzY2YtZTgwZjYyYTYtNDA4MTVlOWYtYjk3NDI5ZjE=, ActorId: [4:882:2710], ActorState: ExecuteState, TraceId: 01jrfqv2vqdfkas8b6n0cpfmfe, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
|97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection
>> GenericProviderLookupActor::Lookup
>> GenericProviderLookupActor::Lookup [GOOD]
>> GenericProviderLookupActor::LookupWithErrors
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount
>> GenericProviderLookupActor::LookupWithErrors [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi
>> SystemView::QueryStats [GOOD]
>> SystemView::QueryStatsFields
>> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD]
Test command err:
2025-04-10T11:37:04.310530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:04.310818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:04.311055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000999/r3tmp/tmpzQub2h/pdisk_1.dat 2025-04-10T11:37:04.857775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:04.918966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:04.967789Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:37:04.969874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:04.974036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:04.974249Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:37:04.986743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:05.071495Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:37:05.071582Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:37:05.072838Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:37:05.219451Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:37:05.219588Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:37:05.220336Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:37:05.220476Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:37:05.220889Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:37:05.221187Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:37:05.221324Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:37:05.226109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:05.227594Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:37:05.228384Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:37:05.228486Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:37:05.282941Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:37:05.284264Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:37:05.284740Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:37:05.285122Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:05.337985Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:37:05.338924Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:05.339063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:05.341154Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:05.341249Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:05.341363Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:05.343603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:05.343768Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:05.343867Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:37:05.354759Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:05.404257Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:37:05.405447Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:05.405627Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:37:05.405689Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:05.405732Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:37:05.405771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:05.406050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T11:37:05.406113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:37:05.407424Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:05.407545Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:05.407616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:05.407657Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:05.407775Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:37:05.407817Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:37:05.407856Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:37:05.407908Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:05.407963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:05.408091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:05.408131Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:05.408176Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:37:05.409568Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:37:05.409653Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:37:05.409788Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:05.410242Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:37:05.410312Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:05.410435Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:37:05.410517Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:37:05.410573Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:37:05.410611Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:37:05.410648Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:37:05.411058Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:37:05.411107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:37:05.411146Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:37:05.411181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:37:05.411255Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:37:05.411333Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:37:05.411420Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:37:05.411458Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:37:05.411493Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:37:05.413146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:37:05.413202Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:05.424142Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... :24.511544Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:37:24.511583Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:37:24.511626Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:37:24.511704Z node 5 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-10T11:37:24.511749Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:37:24.511783Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:37:24.511808Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:37:24.511833Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:37:24.511865Z node 5 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:37:24.511922Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-10T11:37:24.512075Z node 5 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-04-10T11:37:24.512126Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:37:24.512202Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:37:24.512234Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:37:24.512281Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:37:24.512324Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:37:24.512406Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 
2025-04-10T11:37:24.512442Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:37:24.512486Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:37:24.512524Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:37:24.512576Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:37:24.512599Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:37:24.512632Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:37:24.523587Z node 5 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:37:24.523699Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:37:24.523780Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:37:24.523904Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:24.526467Z node 5 :TX_PROXY DEBUG: actor# [5:59:2106] Handle TEvNavigate describe path /Root/table-1 2025-04-10T11:37:24.526617Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] HANDLE EvNavigateScheme /Root/table-1 2025-04-10T11:37:24.527173Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T11:37:24.527312Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-04-10T11:37:24.528532Z node 5 :TX_PROXY DEBUG: Actor# [5:860:2690] Handle TEvDescribeSchemeResult Forward to# [5:594:2519] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-10T11:37:24.529688Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:864:2694], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.529767Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.529824Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:863:2693], serverId# [5:864:2694], sessionId# [0:0:0] 2025-04-10T11:37:24.529976Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:862:2692], Recipient [5:667:2571]: NKikimrTxDataShard.TEvGetInfoRequest 
2025-04-10T11:37:24.530990Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:867:2697], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.531072Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.531129Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:866:2696], serverId# [5:867:2697], sessionId# [0:0:0] 2025-04-10T11:37:24.531313Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:865:2695], Recipient [5:667:2571]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-10T11:37:24.531481Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:865:2695], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-04-10T11:37:24.535947Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.543308Z 2025-04-10T11:37:24.536045Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-10T11:37:24.536109Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:865:2695]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:37:24.536954Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:658:2565], Recipient [5:667:2571]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:37:24.537486Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:874:2703], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.537549Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:37:24.537608Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:873:2702], serverId# [5:874:2703], sessionId# [0:0:0] 2025-04-10T11:37:24.537822Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:872:2701], Recipient [5:667:2571]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-10T11:37:24.537949Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:872:2701] is not needed |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-04-10 11:37:25.929 INFO ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F6902CDBB40) [generic] yql_generic_lookup_actor.cpp:151: New generic provider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-04-10 11:37:25.953 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F6902CDBB40) [generic] yql_generic_lookup_actor.cpp:288: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits.
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: 
"optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } i ... left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ 
left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2025-04-10 11:37:26.074 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:319: ActorId=[2:7491647229255847637:2051] Got TListSplitsStreamIterator 2025-04-10 11:37:26.074 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:196: ActorId=[2:7491647229255847637:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2025-04-10 11:37:26.075 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:229: ActorId=[2:7491647229255847637:2051] Got ReadSplitsStreamIterator from Connector 2025-04-10 11:37:26.075 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:341: ActorId=[2:7491647229255847637:2051] Got DataChunk 2025-04-10 11:37:26.076 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:352: ActorId=[2:7491647229255847637:2051] Got EOF 2025-04-10 11:37:26.076 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1815538, tid=0x00007F68FF684640) [generic] yql_generic_lookup_actor.cpp:402: Sending lookup results for 3 keys |97.6%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> DataCleanup::MultipleDataCleanupsWithOldGenerations [GOOD] >> DataCleanup::ForceDataCleanupWithRestart >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::FetchConfig >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> TIndexProcesorTests::TestCreateIndexProcessor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:46.845191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:46.845279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:46.845318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:46.845357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:46.845412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:46.845440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:46.845514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:46.845586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:46.845925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:46.933781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:46.933852Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:46.952555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:46.952830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:46.953040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:46.968294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:46.968627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:46.969272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:46.970265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:46.974437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:46.975816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:46.975876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:46.976039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:46.976081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:46.976126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:46.976275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:46.984651Z 
node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:47.097797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:47.098046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.098317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:47.098582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:47.098644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.103031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.103187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:47.103381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.103434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:47.103489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:47.103523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:47.106915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.106975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:47.107010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:47.115085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.115158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.115198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.115277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.119054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:47.122793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:47.122969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:47.123945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.124073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:47.124115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.124378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:47.124444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.124611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:47.124678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:47.127093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:47.127138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:47.127303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.127344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:47.127610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.127668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:47.127762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.127795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.127838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.127873Z no ... 
CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:26.126639Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:37:26.126845Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 241us result status StatusSuccess 2025-04-10T11:37:26.127476Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTest::SplitEmptyTwice [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection |97.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/public/ut/unittest |97.6%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsLocksFields >> Graph::CreateGraphShard >> Mvp::OpenIdConnectRequestWithIamTokenYandex >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery >> Mvp::OpenIdConnectProxyOnHttpsHost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] Test command err: 2025-04-10T11:32:34.813562Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:34.826043Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 
PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.826314Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:34.827421Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:45:2072] ControllerId# 72057594037932033 2025-04-10T11:32:34.827485Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:34.827677Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:34.827977Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:34.830822Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:34.830915Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:34.833215Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.833381Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.833514Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.833701Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.833835Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.833964Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.834124Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:57:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.834173Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:34.834460Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:45:2072] 2025-04-10T11:32:34.834499Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:45:2072] 2025-04-10T11:32:34.834560Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:34.834633Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:34.835197Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:34.837750Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.837923Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:32:34.838477Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 
2025-04-10T11:32:34.839405Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:32:34.839444Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:34.840030Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:68:2076] ControllerId# 72057594037932033 2025-04-10T11:32:34.840050Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:34.840108Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:34.840248Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:34.850609Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:34.850667Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:34.851768Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:76:2081] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.851907Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:77:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.851988Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:78:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.852072Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:79:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.852160Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:80:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.852263Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:81:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.852359Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:82:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:34.852381Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:34.852430Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:68:2076] 2025-04-10T11:32:34.852448Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:68:2076] 2025-04-10T11:32:34.852476Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:34.852499Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:34.853142Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:34.853270Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:45:2072] 2025-04-10T11:32:34.853308Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.853330Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:34.853370Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:34.853526Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:34.866027Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-04-10T11:32:34.866096Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} 
StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.866176Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:34.867973Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:34.868210Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-04-10T11:32:34.868282Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.868318Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:34.872110Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:34.872960Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:34.882644Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.882800Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:34.882941Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:34.883229Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:34.883456Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:34.883787Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:34.885629Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.886080Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:34.886803Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:34.886877Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:94:2093] 2025-04-10T11:32:34.886913Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:94:2093] 2025-04-10T11:32:34.886978Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:49:2064] 2025-04-10T11:32:34.886999Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:49:2064] 2025-04-10T11:32:34.887030Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false 
PendingEvents.size# 1 2025-04-10T11:32:34.887101Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-10T11:32:34.887166Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-10T11:32:34.887292Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-10T11:32:34.887357Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:34.887473Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-1 ... T11:37:20.453140Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:37:20.453162Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-10T11:37:20.453198Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-04-10T11:37:20.453225Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:409:2367] 2025-04-10T11:37:20.464200Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] bootstrap ActorId# [14:413:2369] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-10T11:37:20.464371Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Id# [72057594037927937:2:9:0:0:199:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:37:20.464452Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] restore Id# [72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:37:20.464526Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2025-04-10T11:37:20.464588Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2025-04-10T11:37:20.464812Z node 14 :BS_PROXY DEBUG: Send to queueActorId# [14:39:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:37:20.466362Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:37:20.466515Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:37:20.466602Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:37:20.466772Z node 14 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.858 sample PartId# [72057594037927937:2:9:0:0:199:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 14 } TEvVPutResult{ TimestampMs# 2.43 VDiskId# [0:1:0:0:0] NodeId# 14 Status# OK } ] } 2025-04-10T11:37:20.466956Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:37:20.467106Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2025-04-10T11:37:20.477545Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [14:143:2159] 2025-04-10T11:37:20.477629Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [14:143:2159] 2025-04-10T11:37:20.477751Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936131 entry.State: StInit ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:37:20.477886Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:37:20.477989Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-04-10T11:37:20.478035Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-04-10T11:37:20.478067Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-04-10T11:37:20.478102Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-10T11:37:20.478165Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-10T11:37:20.478205Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-10T11:37:20.478282Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936131 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:37:20.478316Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936131 followers: 0 2025-04-10T11:37:20.478388Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result error, check reconnect [14:143:2159] 2025-04-10T11:37:20.478421Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] schedule retry [14:143:2159] 2025-04-10T11:37:20.499233Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:409:2367] 2025-04-10T11:37:20.499314Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:409:2367] 2025-04-10T11:37:20.499411Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:37:20.499558Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:37:20.499672Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72075186224037888 Cookie: 0} 2025-04-10T11:37:20.499720Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:37:20.499746Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:37:20.499774Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.499807Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.499832Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.499911Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:37:20.499947Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-10T11:37:20.499993Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-04-10T11:37:20.500016Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:409:2367] 2025-04-10T11:37:20.520777Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:409:2367] 2025-04-10T11:37:20.520850Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:409:2367] 2025-04-10T11:37:20.520953Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:37:20.521090Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:37:20.521200Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:37:20.521276Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.521320Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:37:20.521355Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:37:20.521402Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.521434Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:37:20.521516Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:37:20.521553Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-10T11:37:20.521613Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-04-10T11:37:20.521658Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed, check aliveness 
[14:409:2367] 2025-04-10T11:37:20.563515Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [14:417:2370] 2025-04-10T11:37:20.563587Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [14:417:2370] 2025-04-10T11:37:20.563664Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:37:20.563733Z node 14 :TABLET_RESOLVER DEBUG: SelectForward node 14 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [14:271:2262] 2025-04-10T11:37:20.563821Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [14:417:2370] 2025-04-10T11:37:20.563880Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [14:417:2370] 2025-04-10T11:37:20.563947Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [14:417:2370] 2025-04-10T11:37:20.564073Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [14:417:2370] 2025-04-10T11:37:20.564226Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [14:417:2370] 2025-04-10T11:37:20.564294Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [14:417:2370] 2025-04-10T11:37:20.564347Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [14:417:2370] 2025-04-10T11:37:20.564425Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [14:409:2367] EventType# 268697616 2025-04-10T11:37:20.564582Z node 14 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-10T11:37:20.564702Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [14:417:2370] 2025-04-10T11:37:20.564772Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [14:417:2370] 2025-04-10T11:37:20.564834Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [14:417:2370]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles]
>> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD]
>> Mvp::OpenIdConnectFixLocationHeader
>> Mvp::OpenIdConnectFixLocationHeader [GOOD]
>> Mvp::OpenIdConnectExchangeNebius
>> Metering::BillingRecords [GOOD]
>> Metering::MockedNetClassifierOnly
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[char]
>> Mvp::OpenIdConnectExchangeNebius [GOOD]
>> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail
>> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD]
>> Mvp::OpenIdConnectFullAuthorizationFlow
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection
>> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD]
>> Mvp::OpenIdConnectFullAuthorizationFlowAjax
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1]
>> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD]
>> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD]
>> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax
>> GraphShard::NormalizeAndDownsample1 [GOOD]
>> GraphShard::NormalizeAndDownsample2 [GOOD]
>> GraphShard::NormalizeAndDownsample3 [GOOD]
>> GraphShard::NormalizeAndDownsample4 [GOOD]
>> GraphShard::NormalizeAndDownsample5 [GOOD]
>> GraphShard::NormalizeAndDownsample6 [GOOD]
>> GraphShard::CheckHistogramToPercentileConversions [GOOD]
>> GraphShard::CreateGraphShard
>> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD]
>> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric]
>> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed
>> TIndexProcesorTests::TestCreateIndexProcessor [GOOD]
>> TIndexProcesorTests::TestSingleCreateQueueEvent
>> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD]
>> Mvp::OpenIdConnectAllowedHostsList [GOOD]
>> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie
>> TMemoryController::Counters_NoHardLimit [GOOD]
>> TMemoryController::Config_ConsumerLimits
|97.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD]
|97.6%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest
>> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD]
>> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD]
>> Mvp::OidcImpersonationStartFlow
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount
>> Mvp::OidcImpersonationStartFlow [GOOD]
>> Mvp::OidcImpersonationStartNeedServiceAccountId [GOOD]
>> Mvp::OidcImpersonationStopFlow
>> Mvp::OidcImpersonationStopFlow [GOOD]
>> Mvp::OidcImpersonatedAccessToProtectedResource [GOOD]
>> Mvp::OidcImpersonatedAccessNotAuthorized
>> Graph::CreateGraphShard [GOOD]
>> Graph::UseGraphShard
>> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding
------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD]
Test command err: 2025-04-10T11:37:29.014684Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.016067Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:29.026024Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.026570Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:29.034795Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.035366Z :MVP DEBUG: Incoming response for protected resource: 204 2025-04-10T11:37:29.043248Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.043487Z :MVP DEBUG: Incoming response for protected resource: 204 2025-04-10T11:37:29.052151Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.052594Z :MVP DEBUG: Incoming response for protected resource: 204
2025-04-10T11:37:29.062122Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.062536Z :MVP DEBUG: Incoming response for protected resource: 204 2025-04-10T11:37:29.213017Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.213112Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.213502Z :MVP DEBUG: Incoming response for protected resource: 400 2025-04-10T11:37:29.213547Z :MVP DEBUG: Try to send request to HTTPS port 2025-04-10T11:37:29.213587Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.213769Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:29.221715Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.221811Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.222071Z :MVP DEBUG: Incoming response for protected resource: 400 2025-04-10T11:37:29.327503Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.327615Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.327997Z :MVP DEBUG: Incoming response for protected resource: 307 2025-04-10T11:37:29.339958Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.340071Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.340376Z :MVP DEBUG: Incoming response for protected resource: 302 2025-04-10T11:37:29.347722Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.347779Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.347994Z :MVP DEBUG: Incoming response for protected resource: 302 2025-04-10T11:37:29.354067Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.354135Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.354406Z :MVP DEBUG: Incoming response for protected resource: 302 2025-04-10T11:37:29.362281Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.362380Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.362692Z :MVP DEBUG: Incoming response for protected resource: 302 2025-04-10T11:37:29.531633Z :MVP DEBUG: Start OIDC process 2025-04-10T11:37:29.532207Z :MVP DEBUG: Using cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2025-04-10T11:37:29.532283Z :MVP DEBUG: Exchange session token 2025-04-10T11:37:29.538256Z :MVP DEBUG: Getting access token: 200 OK 2025-04-10T11:37:29.538390Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.538604Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:29.666664Z :MVP DEBUG: SessionService.Check(): 401 2025-04-10T11:37:29.790294Z :MVP DEBUG: SessionService.Check(): 400 2025-04-10T11:37:29.792270Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:29.793578Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:29.801870Z :MVP DEBUG: SessionService.Create(): OK 2025-04-10T11:37:29.809744Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.809849Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.810113Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:29.879872Z :MVP DEBUG: SessionService.Check(): 400 2025-04-10T11:37:29.880781Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:29.881297Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:29.892949Z :MVP DEBUG: SessionService.Create(): OK 2025-04-10T11:37:29.900751Z :MVP DEBUG: SessionService.Check(): OK 2025-04-10T11:37:29.900814Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:29.900998Z :MVP DEBUG: Incoming response for protected resource: 
200 2025-04-10T11:37:29.938488Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:29.938765Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-04-10T11:37:29.969364Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:29.969621Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-04-10T11:37:30.011237Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.011794Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:30.022500Z :MVP DEBUG: SessionService.Create(): 401 2025-04-10T11:37:30.068398Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.069180Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:30.081001Z :MVP DEBUG: SessionService.Create(): 400 2025-04-10T11:37:30.118880Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.119790Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:30.130984Z :MVP DEBUG: SessionService.Create(): 400 2025-04-10T11:37:30.180226Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.180822Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:30.194536Z :MVP DEBUG: SessionService.Create(): 412 2025-04-10T11:37:30.246117Z :MVP DEBUG: SessionService.Check(): 400 2025-04-10T11:37:30.256971Z :MVP DEBUG: SessionService.Check(): 400 2025-04-10T11:37:30.265011Z :MVP DEBUG: SessionService.Check(): 400 2025-04-10T11:37:30.287658Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:30.288162Z :MVP DEBUG: Can not process request to protected resource: GET /counters HTTP/1.1 Host: ydb.viewer.page Accept: */* Accept-Encoding: deflate Authorization: 2025-04-10T11:37:30.308065Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.308414Z :MVP DEBUG: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2025-04-10T11:37:30.342999Z :MVP DEBUG: Restore oidc session 2025-04-10T11:37:30.343313Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-04-10T11:37:30.385932Z :MVP DEBUG: Start impersonation process 2025-04-10T11:37:30.386045Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-04-10T11:37:30.386094Z :MVP DEBUG: Request impersonated token 2025-04-10T11:37:30.386492Z :MVP DEBUG: Incoming response from authorization server: 200 2025-04-10T11:37:30.386595Z :MVP DEBUG: Set impersonated cookie: (__Host_impersonated_cookie_636C69656E745F6964: aW1w****bg== (B126DD61)) 2025-04-10T11:37:30.441410Z :MVP DEBUG: Start impersonation process 2025-04-10T11:37:30.441577Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-04-10T11:37:30.513228Z :MVP DEBUG: Clear cookie: (__Host_impersonated_cookie_636C69656E745F6964) 2025-04-10T11:37:30.548843Z :MVP DEBUG: Start OIDC process 2025-04-10T11:37:30.548970Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-04-10T11:37:30.549043Z :MVP DEBUG: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-04-10T11:37:30.549083Z :MVP DEBUG: Exchange impersonated token 2025-04-10T11:37:30.549600Z :MVP DEBUG: Getting access token: 200 OK 2025-04-10T11:37:30.549673Z :MVP DEBUG: Forward user request bypass OIDC 2025-04-10T11:37:30.549828Z :MVP DEBUG: Incoming response for protected resource: 200 2025-04-10T11:37:30.594117Z :MVP DEBUG: Start OIDC process 2025-04-10T11:37:30.594232Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= 
(CE0CB168)) 2025-04-10T11:37:30.594278Z :MVP DEBUG: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-04-10T11:37:30.594305Z :MVP DEBUG: Exchange impersonated token 2025-04-10T11:37:30.594543Z :MVP DEBUG: Getting access token: 401 OK 2025-04-10T11:37:30.594566Z :MVP DEBUG: Getting access token: {"error": "bad_token"} 2025-04-10T11:37:30.594598Z :MVP DEBUG: Clear impersonated cookie (__Host_impersonated_cookie_636C69656E745F6964) and retry
|97.6%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest
>> ConfigGRPCService::FetchConfig [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings
>> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD]
>> TTxDataShardReshuffleKMeansScan::BuildToBuild
>> GraphShard::CreateGraphShard [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding
------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:37:30.909337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:37:30.909575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:37:30.909637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:37:30.909690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:37:30.910582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:37:30.910650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:37:30.910749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:37:30.910836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:37:30.912275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:31.000011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:37:31.000062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:31.016638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:31.016992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:37:31.017169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:37:31.029162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:37:31.029538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:37:31.035916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.036719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:37:31.046842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:31.054535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:37:31.054648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:31.055511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:37:31.055581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:37:31.055670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:37:31.055865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.067229Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:37:31.204810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:37:31.206068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.208558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:37:31.210087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:37:31.210204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.213676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.213835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:37:31.214063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.214227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:37:31.214274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-10T11:37:31.214345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:37:31.216902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.216981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:37:31.217021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:37:31.219260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.219313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.219378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:31.219434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:37:31.224686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:37:31.227229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:37:31.228178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:37:31.229554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.229716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:31.229787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:31.231682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:37:31.231763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:31.232031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:37:31.232168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:37:31.236584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:37:31.236655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:37:31.236873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:31.236947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:37:31.237239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:31.237292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:37:31.237435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:37:31.237476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:31.237521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:37:31.237555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:31.237598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:37:31.237644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:31.237680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:37:31.237714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:37:31.237807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:37:31.237871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:37:31.237943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:37:31.240618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:37:31.240766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:37:31.240822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeshard:72057594046678944 2025-04-10T11:37:31.485093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409547 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-10T11:37:31.490403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.493599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186234409548, partId: 1 2025-04-10T11:37:31.493836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-04-10T11:37:31.493901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:1 HandleReply TEvConfigureStatus operationId:102:1 at schemeshard:72057594046678944 2025-04-10T11:37:31.493951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-10T11:37:31.494002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 3 -> 128 2025-04-10T11:37:31.494531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.497812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.498011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.498061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.498171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:1, at tablet# 72057594046678944 2025-04-10T11:37:31.498234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 2/2 2025-04-10T11:37:31.498424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:37:31.500572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-10T11:37:31.500737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-10T11:37:31.501129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.501266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-10T11:37:31.501312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-04-10T11:37:31.501357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-10T11:37:31.501701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 128 -> 240 2025-04-10T11:37:31.501785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-10T11:37:31.501918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-10T11:37:31.502038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:406:2373], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-10T11:37:31.504554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:37:31.504604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-10T11:37:31.504763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:31.504799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-10T11:37:31.505112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.505185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-04-10T11:37:31.505232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 240 -> 240 2025-04-10T11:37:31.505967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:37:31.506055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-10T11:37:31.506102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-10T11:37:31.506139Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-10T11:37:31.506198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-10T11:37:31.506282Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-04-10T11:37:31.509229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-10T11:37:31.509286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:1 ProgressState 2025-04-10T11:37:31.509374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-10T11:37:31.509406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T11:37:31.509450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-10T11:37:31.509478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T11:37:31.509532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-10T11:37:31.509585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-10T11:37:31.509633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-10T11:37:31.509669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-10T11:37:31.509833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-10T11:37:31.509875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-10T11:37:31.509897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-10T11:37:31.509987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-10T11:37:31.511263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-10T11:37:31.512750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-10T11:37:31.512806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-10T11:37:31.513336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-10T11:37:31.513430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-10T11:37:31.513478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:566:2495] TestWaitNotification: OK eventTxId 102 2025-04-10T11:37:31.514935Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-10T11:37:31.515226Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 293us result status StatusSuccess 2025-04-10T11:37:31.516871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true 
CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|97.6%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::FetchConfig [GOOD]
Test command err: 2025-04-10T11:37:23.408895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647216273465710:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:23.409002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a6c/r3tmp/tmpH1d42A/pdisk_1.dat 2025-04-10T11:37:23.869060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:23.869206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:23.873228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:23.898875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21421, node 1 2025-04-10T11:37:23.927006Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-10T11:37:23.927570Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-10T11:37:23.927939Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-10T11:37:23.927975Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-10T11:37:23.928132Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-10T11:37:23.928268Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-10T11:37:23.928505Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-10T11:37:23.928537Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-10T11:37:23.929249Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-10T11:37:23.930949Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-10T11:37:23.933092Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:37:23.933126Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:37:23.948486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:37:23.955538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:37:23.959053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-10T11:37:23.962578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:37:23.968181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:37:23.973153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:37:23.973288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:37:23.973345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:37:23.973365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:37:23.973422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:37:23.973496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-10T11:37:23.981248Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:37:23.981314Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:37:23.981324Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:37:23.981333Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:37:24.012146Z node 1 :GRPC_SERVER DEBUG: [0x51a000026a80] created request Name# BlobStorageConfig 2025-04-10T11:37:24.013450Z node 1 :GRPC_SERVER DEBUG: [0x51a000027080] created request Name# HiveCreateTablet 2025-04-10T11:37:24.013792Z node 1 :GRPC_SERVER DEBUG: [0x51a000027680] created request Name# TabletStateRequest 2025-04-10T11:37:24.014320Z node 1 :GRPC_SERVER DEBUG: [0x51a000027c80] created request Name# SchemeOperationStatus 2025-04-10T11:37:24.015544Z node 1 :GRPC_SERVER DEBUG: [0x51a000028280] created request Name# ChooseProxy 2025-04-10T11:37:24.015929Z node 1 :GRPC_SERVER DEBUG: [0x51a000028880] created request Name# ResolveNode 2025-04-10T11:37:24.016982Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# FillNode 2025-04-10T11:37:24.017266Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# DrainNode 2025-04-10T11:37:24.017535Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# InterconnectDebug 2025-04-10T11:37:24.017814Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# TestShardControl 2025-04-10T11:37:24.018939Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# RegisterNode 2025-04-10T11:37:24.019219Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# CmsRequest 2025-04-10T11:37:24.020461Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# ConsoleRequest 2025-04-10T11:37:24.020743Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# SchemeInitRoot 2025-04-10T11:37:24.021978Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# PersQueueRequest 2025-04-10T11:37:24.022318Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# SchemeOperation 2025-04-10T11:37:24.023081Z node 1 
:GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# SchemeDescribe 2025-04-10T11:37:24.097266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:24.097291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:24.097298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:24.097448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:24.686280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:37:24.686944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.689064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:37:24.691240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:37:24.691326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.694200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:37:24.695414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:37:24.695628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.695743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:37:24.695771Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:37:24.695796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-10T11:37:24.698959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.699011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:37:24.699034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:37:24.699330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:37:24.699352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:37:24.699374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:37:24.700705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.700735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:24.700760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:37:24.700802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:37:24.721834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:37:24.724293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:37:24.725432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:37: ... 
ionPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:37:28.204899Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:37:28.205108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:28.205138Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:37:28.205152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-10T11:37:28.205168Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-10T11:37:28.209200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:28.209236Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:37:28.209256Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 2025-04-10T11:37:28.211374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:37:28.211395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-10T11:37:28.211412Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:37:28.212411Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:28.212439Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:28.212462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:37:28.212496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:37:28.212624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:37:28.214962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-10T11:37:28.215092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:37:28.217661Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744285048261, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:37:28.217750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 
281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285048261 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:37:28.217772Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:37:28.218024Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-10T11:37:28.218050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-10T11:37:28.218196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:37:28.218234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:37:28.220381Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:37:28.220406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T11:37:28.220558Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T11:37:28.220569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7491647233137239080:2369], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-10T11:37:28.220626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:28.220649Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-10T11:37:28.220715Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T11:37:28.220732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:37:28.220751Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-10T11:37:28.220760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:37:28.220785Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-10T11:37:28.220811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-10T11:37:28.220822Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-10T11:37:28.220831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-10T11:37:28.220871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-10T11:37:28.220884Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-10T11:37:28.220894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-10T11:37:28.223893Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 
2025-04-10T11:37:28.223969Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:37:28.223977Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:37:28.224004Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:37:28.225151Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-10T11:37:28.225237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-10T11:37:28.225249Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-10T11:37:28.225264Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-10T11:37:28.225278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:37:28.225360Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-10T11:37:28.225374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7491647237432206677:2316] 2025-04-10T11:37:28.230983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2025-04-10T11:37:28.295357Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchConfigRequest, traceId# 01jrfqv7f7dgnhrysfjysky1tf, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:50464, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:37:28.309048Z node 3 :GRPC_SERVER DEBUG: [0x51a000095a80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309070Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d2080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309316Z node 3 :GRPC_SERVER DEBUG: [0x51a000097280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309336Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d2680] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309541Z node 3 :GRPC_SERVER DEBUG: [0x51a000095480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309578Z node 3 :GRPC_SERVER DEBUG: [0x51a000096680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309769Z node 3 :GRPC_SERVER DEBUG: [0x51a00009c080] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309838Z node 3 :GRPC_SERVER DEBUG: [0x51a00009b480] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.309948Z node 3 :GRPC_SERVER DEBUG: [0x51a00009a880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:37:28.310026Z node 3 
:GRPC_SERVER DEBUG: [0x51a0000d0e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310125Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d1480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310201Z node 3 :GRPC_SERVER DEBUG: [0x51a000099080] received request Name# TestShardControl ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310391Z node 3 :GRPC_SERVER DEBUG: [0x51a00008ca80] received request Name# RegisterNode ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310551Z node 3 :GRPC_SERVER DEBUG: [0x51a000094280] received request Name# CmsRequest ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310555Z node 3 :GRPC_SERVER DEBUG: [0x51a000096080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310723Z node 3 :GRPC_SERVER DEBUG: [0x51a000099c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0
2025-04-10T11:37:28.310755Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d1a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
|97.6%| [TM] {RESULT} ydb/services/config/ut/unittest
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding
>> Graph::UseGraphShard [GOOD]
>> Graph::MemoryBackendFullCycle
>> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD]
>> TTxDataShardLocalKMeansScan::BadRequest
>> DataCleanup::ForceDataCleanupWithRestart [GOOD]
>> DataCleanup::OutReadSetsCleanedAfterCopyTable
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter
>> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD]
>> TIndexProcesorTests::TestReindexSingleQueue
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD]
Test command err: =========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142]
2025-04-10T11:36:51.650920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:36:51.651005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:51.651041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:51.651077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:51.651121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:51.651156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:51.651233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:51.651302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:51.651622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:51.734646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:51.734719Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:51.750792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:51.751053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:51.751223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:51.758419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:51.758784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:51.759464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:51.760106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:51.763094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:51.764398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:51.764457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:51.764619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:51.764659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:51.764719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-10T11:36:51.764869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:51.771407Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:51.895169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:51.895422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.895657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:51.895909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:51.895970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.898329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:51.898587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:51.898778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.898846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:51.898894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:51.898926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:51.900779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.900829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:51.900859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:51.902416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.902464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.902508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:51.902570Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:51.905922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:51.907697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:51.907878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:51.909023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:51.909174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:51.909224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:51.909520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:51.909591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:51.909773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:51.909875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:51.911998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:51.912046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:51.912231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:51.912272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:51.912509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:51.912566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:51.912668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:51.912703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:51.912758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#1:0 progress is 1/1 2025-04-10T11:36:51.912793Z no ... 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 
72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:32.509151Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:37:32.509461Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 364us result status StatusSuccess 2025-04-10T11:37:32.510472Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:32.521802Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:799:2617] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:37:32.521941Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:722:2617] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:37:32.522122Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:799:2617] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 
1744285052483070 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285052483070 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285052483070 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-04-10T11:37:32.525134Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:799:2617] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3
2025-04-10T11:37:32.525263Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:722:2617] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD]
Test command err: 2025-04-10T11:34:22.783291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646438156156897:2068];send_to=[0:7307199536658146131:7762515];
2025-04-10T11:34:22.783361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ff0/r3tmp/tmpTfK3rk/pdisk_1.dat
2025-04-10T11:34:23.244792Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T11:34:23.253092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T11:34:23.253206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T11:34:23.255406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:2158
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:34:23.545938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.563772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.593003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:23.797262Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-04-10T11:34:23.802031Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-04-10T11:34:23.828847Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-04-10T11:34:23.839397Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284863713 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1744284863713 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-04-10T11:34:26.155128Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.103, eph 1} end=0, 2 blobs 218r (max 218), put Spent{time=0.016s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63264 0 0)b }, ecr=1.000 2025-04-10T11:34:26.212613Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.119, eph 1} end=0, 2 blobs 771r (max 771), put Spent{time=0.029s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (50647 0 0)b }, ecr=1.000 2025-04-10T11:34:26.354494Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.514, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-10T11:34:26.372921Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.210, eph 2} end=0, 2 blobs 469r (max 470), put Spent{time=0.030s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (135751 0 0)b }, ecr=1.000 2025-04-10T11:34:26.432546Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.225, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.061s,wait=0.059s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-10T11:34:26.459221Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.226, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.053s,wait=0.045s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-04-10T11:34:26.466678Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.229, eph 1} end=0, 2 blobs 501r (max 501), put Spent{time=0.039s,wait=0.034s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2025-04-10T11:34:26.490674Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.230, eph 1} end=0, 2 blobs 1500r (max 1500), put Spent{time=0.044s,wait=0.023s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2025-04-10T11:34:26.494692Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.235, eph 2} end=0, 2 blobs 1524r (max 1527), put Spent{time=0.101s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (99916 0 0)b }, ecr=1.000 2025-04-10T11:34:26.498412Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.541, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.134s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-04-10T11:34:26.615527Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.336, eph 3} end=0, 2 blobs 720r (max 721), put Spent{time=0.017s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208238 0 0)b }, ecr=1.000 2025-04-10T11:34:26.652559Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.357, eph 3} end=0, 2 blobs 2289r (max 2292), put Spent{time=0.023s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (150019 0 0)b }, ecr=1.000 2025-04-10T11:34:26.781044Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1026, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-10T11:34:26.855813Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.465, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.061s,wait=0.058s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-04-10T11:34:26.858173Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.447, eph 4} end=0, 2 blobs 971r (max 972), put Spent{time=0.089s,wait=0.035s,interrupts=1} Part{ 1 pk, lobs 0 +0, (280770 0 0)b }, ecr=1.000 2025-04-10T11:34:26.869679Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.466, eph 2} end=0, 2 blobs 3000r (max 3000), put Spent{time=0.074s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (205952 0 0)b }, ecr=1.000 
2025-04-10T11:34:26.871370Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.464, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.077s,wait=0.074s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-10T11:34:26.875180Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.468, eph 2} end=0, 2 blobs 1002r (max 1002), put Spent{time=0.080s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63729 0 0)b }, ecr=1.000 2025-04-10T11:34:26.922386Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.478, eph 4} end=0, 2 blobs 3060r (max 3063), put Spent{time=0.108s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (200566 0 0)b }, ecr=1.000 2025-04-10T11:34:26.951232Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1049, eph 2} end=0, 2 blobs 10001r (max 10502), put Spent{time=0.155s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-04-10T11:34:27.055004Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.563, eph 5} end=0, 2 blobs 1222r (max 1223), put Spent{time=0.033s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (353257 0 0)b }, ecr=1.000 2025-04-10T11:34:27.084453Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.583, eph 5} end=0, 2 blobs 3813r (max 3816), put Spent{time=0.026s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (249835 0 0)b }, ecr=1.000 2025-04-10T11:34:27.171815Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1541, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-04-10T11:34:27.179871Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.673, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.007s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-04-10T11:34:27.193728Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.676, eph 3} end=0, 2 blobs 1503r (max 1503), put Spent{time=0.020s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95542 0 0)b }, ecr=1.000 2025-04-10T11:34:27.344232Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.662, eph 6} end=0, 2 blobs 1473r (max 1474), put Spent{time=0.185s,wait=0.145s,interrupts=1} Part{ 1 pk, lobs 0 +0, (425789 0 0)b }, ecr=1.000 2025-04-10T11:34:27.345793Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.671, eph 3} end=0, 2 blobs 4500r (max 4500), put Spent{time=0.175s,wait=0.128s,interrupts=1} Part{ 1 pk, lobs 0 +0, (308834 0 0)b }, ecr=1.000 2025-04-10T11:34:27.347152Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.674, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.175s,wait=0.1 ... 
ARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.157464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-10T11:37:23.157494Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715690:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:37:23.157905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-04-10T11:37:23.158050Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-04-10T11:37:23.158075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-10T11:37:23.158091Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-04-10T11:37:23.158111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-10T11:37:23.158135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-04-10T11:37:23.158207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7491647216740870155:2417] message: TxId: 281474976715690 2025-04-10T11:37:23.158233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-04-10T11:37:23.158263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-04-10T11:37:23.158282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-04-10T11:37:23.158422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-04-10T11:37:23.159079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T11:37:23.159338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-10T11:37:23.159579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:37:23.159625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:37:23.161475Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-10T11:37:23.169577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491647212445902139 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-04-10T11:37:23.169641Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.169888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7491647212445902639 RawX2: 4503608217307465 } TabletId: 72075186224037893 State: 4 2025-04-10T11:37:23.169924Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.170047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491647212445902642 RawX2: 4503608217307466 } TabletId: 72075186224037895 State: 4 2025-04-10T11:37:23.170077Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.170196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491647212445902649 RawX2: 4503608217307467 } TabletId: 72075186224037894 State: 4 2025-04-10T11:37:23.170222Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.170342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491647212445902450 RawX2: 4503608217307445 } TabletId: 72075186224037891 State: 4 2025-04-10T11:37:23.170396Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.170526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7491647212445902453 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4 2025-04-10T11:37:23.170552Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-10T11:37:23.170768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.170865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.170923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.170985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.171048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.171106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:37:23.172445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:37:23.172697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-10T11:37:23.172885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at 
schemeshard: 72057594046644480 2025-04-10T11:37:23.173060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-10T11:37:23.173192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-04-10T11:37:23.173311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:37:23.173413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-10T11:37:23.173534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:37:23.173634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-10T11:37:23.173751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:37:23.173848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:37:23.173970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:37:23.174172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:37:23.174199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:37:23.174248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:37:23.175346Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-10T11:37:23.175366Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-10T11:37:23.175383Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-10T11:37:23.175400Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-10T11:37:23.175416Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-04-10T11:37:23.175435Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-04-10T11:37:23.175674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:37:23.175693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:37:23.175739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046644480:6 2025-04-10T11:37:23.175751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-10T11:37:23.175777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-10T11:37:23.175786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-10T11:37:23.176090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-10T11:37:23.176100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-10T11:37:23.176125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-10T11:37:23.176137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-10T11:37:23.176160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:37:23.176180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:37:23.176229Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> FormatCSV::Instants >> FormatCSV::Instants [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> FormatCSV::Common [GOOD] >> FormatCSV::Strings [GOOD] >> FormatCSV::Nulls [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/io_formats/arrow/scheme/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ |97.7%| [TS] {RESULT} ydb/core/io_formats/arrow/scheme/ut/unittest |97.7%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] |97.7%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-10T11:37:14.894662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647176394593922:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:14.894722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a1e/r3tmp/tmpqUZq8z/pdisk_1.dat 2025-04-10T11:37:15.234077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:15.295897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:15.296360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:15.299297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:15.337889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:15.379955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:15.416051Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491647180689561798:2295] 2025-04-10T11:37:15.416452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:15.431347Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:15.431425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:15.434616Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:15.434713Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:15.434747Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:15.436732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:15.436805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:15.436850Z node 1 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [1:7491647180689561812:2295] in generation 1 2025-04-10T11:37:15.438094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:15.467658Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:37:15.468887Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:15.469001Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491647180689561816:2296] 2025-04-10T11:37:15.469013Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:15.469023Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:37:15.469039Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.470007Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647180689561793:2293], serverId# [1:7491647180689561815:2304], sessionId# [0:0:0] 2025-04-10T11:37:15.470211Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:15.470306Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:15.470356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:15.470448Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:15.470472Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:15.470492Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:15.470515Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:15.470846Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:15.471038Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T11:37:15.472266Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:15.472892Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:15.473752Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:15.475898Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647180689561830:2312], serverId# [1:7491647180689561831:2313], sessionId# [0:0:0] 2025-04-10T11:37:15.481064Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744285035521 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285035521 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:15.481128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.481256Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-10T11:37:15.481331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:15.481355Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:15.481405Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285035521:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:15.481675Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285035521:281474976710657 keys extracted: 0 2025-04-10T11:37:15.481834Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:15.481930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:15.481992Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:37:15.484923Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:15.486512Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:15.487761Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744285035520 2025-04-10T11:37:15.487790Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.487821Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744285035528 2025-04-10T11:37:15.487887Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285035521} 2025-04-10T11:37:15.487930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:15.487963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:15.488005Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:15.488026Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:37:15.488086Z node 1 :TX_DATASHARD DEBUG: Complete [1744285035521 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647180689561626:2185], exec latency: 4 ms, propose latency: 6 ms 2025-04-10T11:37:15.488123Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T11:37:15.488233Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.492081Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T11:37:15.492139Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:37:15.503204Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647180689561869:2341], serverId# [1:7491647180689561870:2342], sessionId# [0:0:0] 2025-04-10T11:37:15.504066Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:15.504215Z node 1 :TX_DATASHARD 
DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T11:37:15.505583Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:15.506748Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1744285035549 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285035549 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:15.506790Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.506859Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:15.506869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:15.506883Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285035549:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:15.507012Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285035549:281474976710658 keys extracted: 0 2025-04-10T11:37:15.507281Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:15.508211Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285035549} 2025-04-10T11:37:15.508254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:15.508307Z node 1 :TX_DATASHARD DEBUG: Complete [1744285035549 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647180689561864:2337], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:37:15.508331Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:15.511000Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647180689561880:2352], serverId# [1:7491647180689561881:2353], sessionId# [0:0:0] 2025-04-10T11:37:15.511734Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:15.511854Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-04-10T11:37:15.513249Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... 
-10T11:37:35.821098Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:35.821121Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1744285055863:281474976715688] in PlanQueue unit at 72075186224037895 2025-04-10T11:37:35.821340Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037895 loaded tx from db 1744285055863:281474976715688 keys extracted: 0 2025-04-10T11:37:35.821471Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:35.821566Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-04-10T11:37:35.821671Z node 5 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037895 2025-04-10T11:37:35.822116Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:35.823342Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:35.823503Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-04-10T11:37:35.824097Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037895 step# 1744285055863} 2025-04-10T11:37:35.824145Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-10T11:37:35.824190Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-10T11:37:35.824247Z node 5 :TX_DATASHARD DEBUG: Complete [1744285055863 : 281474976715688] from 72075186224037895 at tablet 72075186224037895 send result to client [5:7491647249185654183:2150], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:37:35.824293Z node 5 :TX_DATASHARD INFO: 72075186224037895 Sending notify to schemeshard 72057594046644480 txId 281474976715688 state PreOffline TxInFly 0 2025-04-10T11:37:35.824362Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-10T11:37:35.826395Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715688 datashard 72075186224037895 state PreOffline 2025-04-10T11:37:35.826457Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-10T11:37:35.828064Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-10T11:37:35.828150Z node 5 :TX_DATASHARD INFO: 72075186224037895 Initiating switch from PreOffline to Offline state 2025-04-10T11:37:35.829811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-10T11:37:35.830408Z node 5 :TX_DATASHARD INFO: 72075186224037895 Reporting state Offline to schemeshard 72057594046644480 2025-04-10T11:37:35.832259Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:35.832294Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 
2025-04-10T11:37:35.835722Z node 5 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037895 state Offline 2025-04-10T11:37:35.839610Z node 5 :TX_DATASHARD INFO: OnTabletStop: 72075186224037895 reason = ReasonStop 2025-04-10T11:37:35.840232Z node 5 :TX_DATASHARD INFO: OnTabletDead: 72075186224037895 2025-04-10T11:37:35.840331Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037895 2025-04-10T11:37:35.841016Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-04-10T11:37:35.842260Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037896 actor [5:7491647266365524878:2359] 2025-04-10T11:37:35.842494Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:35.853906Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:35.853991Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:35.856679Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037896 2025-04-10T11:37:35.856735Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037896 2025-04-10T11:37:35.856772Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037896 2025-04-10T11:37:35.857208Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:35.857271Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:35.857309Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037896 persisting started state actor id [5:7491647266365524904:2359] in generation 1 2025-04-10T11:37:35.858749Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:35.858785Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037896 2025-04-10T11:37:35.858864Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:35.858903Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037896, actorId: [5:7491647266365524906:2360] 2025-04-10T11:37:35.858915Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-10T11:37:35.858927Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037896, state: WaitScheme 2025-04-10T11:37:35.858939Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-10T11:37:35.859062Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037896 2025-04-10T11:37:35.859129Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037896 2025-04-10T11:37:35.859161Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7491647266365524886:3249], serverId# [5:7491647266365524896:3256], sessionId# [0:0:0] 2025-04-10T11:37:35.859184Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-10T11:37:35.859200Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:35.859216Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037896 TxInFly 0 2025-04-10T11:37:35.859233Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-10T11:37:35.859315Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 
72075186224037896 2025-04-10T11:37:35.859506Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037896 txId 281474976715689 ssId 72057594046644480 seqNo 2:16 2025-04-10T11:37:35.859565Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715689 at tablet 72075186224037896 2025-04-10T11:37:35.859933Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-10T11:37:35.861129Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037896 2025-04-10T11:37:35.861213Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:35.863707Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7491647266365524912:3267], serverId# [5:7491647266365524914:3269], sessionId# [0:0:0] 2025-04-10T11:37:35.864946Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715689 at step 1744285055912 at tablet 72075186224037896 { Transactions { TxId: 281474976715689 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285055912 MediatorID: 72057594046382081 TabletID: 72075186224037896 } 2025-04-10T11:37:35.864975Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-10T11:37:35.865067Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-10T11:37:35.865091Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:35.865136Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1744285055912:281474976715689] in PlanQueue unit at 72075186224037896 2025-04-10T11:37:35.865412Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037896 loaded tx from db 1744285055912:281474976715689 keys extracted: 0 2025-04-10T11:37:35.865509Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:35.865553Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:35.865653Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-10T11:37:35.865689Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-10T11:37:35.865732Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037896 tableId# [OwnerId: 72057594046644480, LocalPathId: 14] schema version# 1 2025-04-10T11:37:35.866203Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037896 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:35.866602Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:35.868142Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037896 time 1744285055911 2025-04-10T11:37:35.868166Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-10T11:37:35.868187Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037896 coordinator 72057594046316545 last step 0 next step 1744285055912 2025-04-10T11:37:35.868234Z node 5 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037896 step# 1744285055912} 2025-04-10T11:37:35.868268Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-10T11:37:35.868311Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-10T11:37:35.868334Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-10T11:37:35.868362Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037896 2025-04-10T11:37:35.868413Z node 5 :TX_DATASHARD DEBUG: Complete [1744285055912 : 281474976715689] from 72075186224037896 at tablet 72075186224037896 send result to client [5:7491647249185654183:2150], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:37:35.868459Z node 5 :TX_DATASHARD INFO: 72075186224037896 Sending notify to schemeshard 72057594046644480 txId 281474976715689 state Ready TxInFly 0 2025-04-10T11:37:35.868528Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-10T11:37:35.869242Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715689 datashard 72075186224037896 state Ready 2025-04-10T11:37:35.869288Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 Got TEvSchemaChangedResult from SS at 72075186224037896 |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] >> test_example.py::TestExample::test_example |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |97.7%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> test.py::test_order_conflict >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> 
test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> TCreateAndDropViewTest::CheckCreatedView >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] >> DataCleanup::BorrowerDataCleanedAfterCopyTable >> Metering::MockedNetClassifierOnly [GOOD] >> Metering::MockedNetClassifierLabelTransformation >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> TestFilterSet::FilterGroup >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:47.339199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:47.339285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:47.339319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:47.339352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:47.339394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:47.339421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:47.339491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:47.339560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:47.339844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:47.411781Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:47.411845Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:47.427812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:47.428048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:47.428213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:47.435268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:47.435612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:47.436242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.437020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:47.440840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.442737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:47.442818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.443047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:47.443108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:47.443165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:47.443367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:47.452204Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:47.589129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:47.589372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.589624Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:47.589884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:47.589949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.592254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.592393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:47.592609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.592663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:47.592718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:47.592757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:47.594974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.595046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:47.595111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:47.596998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.597052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.597092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.597153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.600460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:47.602415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:47.602599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:47.603651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-04-10T11:36:47.603807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:47.603869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.604152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:47.604239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.604404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:47.604479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:47.606553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:47.606614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:47.606771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.606810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:47.607024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.607081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:47.607182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.607213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.607251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.607296Z no ... 
2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:39.464031Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:37:39.464305Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 321us result status StatusSuccess 2025-04-10T11:37:39.465126Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:39.476182Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:850:2681] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:37:39.476299Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:37:39.476464Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:850:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285059450141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285059450141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285059450141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:37:39.479403Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:850:2681] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T11:37:39.479543Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> GroupStress::Test [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> test.py::test[solomon-BadDownsamplingAggregation-] >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] |97.7%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest >> SystemView::PartitionStatsLocksFields [GOOD] >> SystemView::PartitionStatsFields >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> TDqPqRdReadActorTests::TestReadFromTopic2 >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> ActionParsingTest::ActionsForQueueTest [GOOD] >> 
ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest [GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest [GOOD] >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest [GOOD] >> TDqPqRdReadActorTests::SessionError >> TestFilterSet::FilterGroup [GOOD] >> CoordinatorTests::Route >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> CoordinatorTests::Route [GOOD] >> TestFilterSet::DuplicationValidation >> TDqPqRdReadActorTests::SessionError [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] |97.7%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest >> TDqPqRdReadActorTests::ReadWithFreeSpace >> LeaderElectionTests::Test1 [GOOD] >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> LeaderElectionTests::TestLocalMode >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|97.7%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> XmlBuilderTest::WritesProperly [GOOD] >> XmlBuilderTest::MacroBuilder [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> Metering::MockedNetClassifierLabelTransformation [GOOD] >> SHA256Test::SHA256Test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] |97.7%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-04-10T11:37:23.740411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647216081527911:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:23.740506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a50/r3tmp/tmpntalkj/pdisk_1.dat 2025-04-10T11:37:24.053515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:24.141858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:24.141890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:24.141900Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:24.142015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:37:24.145502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:24.145637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:24.148335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:28.742510Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491647216081527911:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:28.742624Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:37:29.566926Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647241589605271:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:29.566968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a50/r3tmp/tmpupU2YI/pdisk_1.dat 2025-04-10T11:37:29.718790Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:29.741198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:29.741280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:29.743004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:29.763497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:29.763510Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:29.763516Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:29.763603Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:37:34.567378Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491647241589605271:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:34.567459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:37:40.482873Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491647290192146523:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:40.482966Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a50/r3tmp/tmpDlGd0s/pdisk_1.dat 2025-04-10T11:37:40.596274Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:40.610227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:40.610314Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:40.613598Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:40.634640Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:40.634660Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:40.634667Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:40.634786Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:37:45.483199Z node 3 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491647290192146523:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:45.483273Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |97.7%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery >> TestFilterSet::DuplicationValidation [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> TestFilterSet::CompilationValidation >> test.py::test[solomon-BadDownsamplingDisabled-] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> TDqPqRdReadActorTests::CoordinatorChanged >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> Mvp::TokenatorGetMetadataTokenGood >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_large/unittest |97.7%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_data_cleanup/unittest >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: 2025-04-10T11:37:07.097621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:07.097885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:07.098140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmpNEYAWx/pdisk_1.dat 2025-04-10T11:37:07.588362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:07.658059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:07.707979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:07.708481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:07.722215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:07.818979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:08.255743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:08.256334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:08.256435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:08.266322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:08.425894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:08.503322Z node 1 :TX_PROXY ERROR: Actor# [1:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:09.557937Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqtkwwapy18swn3c0yq3ym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzUwNTc3MzktMWVhOTA2NmUtYWU0NjRlNzEtZmY1ZDdjZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:13.571529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:13.571810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:13.571955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmphSnCa8/pdisk_1.dat 2025-04-10T11:37:13.873456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:13.906741Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:13.944731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:13.944885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:13.956770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:14.038188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:14.367910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:14.368029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:14.368114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:14.373748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:14.502569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:777:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:14.537939Z node 2 :TX_PROXY ERROR: Actor# [2:850:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:14.787433Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqtsvx7z1ps90xjnbgjbqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTY3YzdiODAtZmQzMjBkNWUtYzNkM2I5NWItZDdkYjkzYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:18.705678Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:18.706093Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:18.706219Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmpUvekee/pdisk_1.dat 2025-04-10T11:37:19.029431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:19.064986Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:19.104035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:19.104207Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:19.116129Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:19.199810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:19.546789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:769:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:19.546904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:19.546962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:19.551714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:19.714985Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:783:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:19.750820Z node 3 :TX_PROXY ERROR: Actor# [3:857:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:20.122192Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqtyxr8t0p9ezr1xg1kjhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2I0M2JmNjUtN2JiMmQyMzQtZDc0ZmU5Y2MtMjAyNWVhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:24.333292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:24.333548Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:24.333825Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya ... D_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:30.260978Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:37:30.261221Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmpkPOVgz/pdisk_1.dat 2025-04-10T11:37:30.585246Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:30.617926Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:30.656459Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:30.656608Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:30.671342Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:30.792214Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:31.190090Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:31.190246Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:781:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:31.190338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:31.196832Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:31.368516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:31.404622Z node 5 :TX_PROXY ERROR: Actor# [5:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:31.782994Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqva9kadwcyw649f2hscy1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTAyNmRjNmQtNzQ0NjNiNy01ZDJiMmIzLWE4NzE3OWU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:36.600110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:36.600514Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:36.600620Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmp45uiCf/pdisk_1.dat 2025-04-10T11:37:36.942975Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:36.978382Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:37.018420Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:37.018576Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:37.030419Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:37.117790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:37.863620Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:879:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:37.863746Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:889:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:37.863835Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:37.870007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:37:38.021877Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:893:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:37:38.061891Z node 6 :TX_PROXY ERROR: Actor# [6:954:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:38.464245Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqvgt5afs8yv2a9z59vg2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZTQwMGQ4OWItZDU1NDk2NGUtZGI5ZWEzZGEtZWFiYmM0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:38.846541Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqvhdt31s5qems5efqjtnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZDE1OTY1NWUtMWM3M2EwYjUtNTQwMzVhMzktMmNhMGVkZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:44.549013Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:44.549449Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:44.549597Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009e6/r3tmp/tmpFEPN4k/pdisk_1.dat 2025-04-10T11:37:44.932347Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:44.969619Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:45.009921Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:45.010165Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:45.021952Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:45.107630Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:45.869727Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:878:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:45.869875Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:888:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:45.869992Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:45.877912Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:37:46.035039Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:892:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:37:46.079474Z node 7 :TX_PROXY ERROR: Actor# [7:953:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:46.542792Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqvrmaaeswdvywge8kcq2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzEzY2Y0OGUtZTg1ZmE3YzgtMzM1YzVmNTktZjk0ZjU5ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:47.040956Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfqvsaafhxr3fy9bm0vs8y6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzQ1N2VkYjctODgwNDIwOTItZGFkYWIzNjktMzg1N2QzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:47.571404Z node 7 :TX_DATASHARD WARN: DataCleanup of tablet# 72075186224037888: has borrowed parts, requested from [7:594:2519] |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_data_cleanup/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> TestFilterSet::CompilationValidation [GOOD] >> TDqPqRdReadActorTests::Backpressure >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> TestFormatHandler::ManyJsonClients >> test.py::test[solomon-BadDownsamplingAggregation-] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> SystemView::PartitionStatsFields [FAIL] >> SystemView::PDisksFields >> Graph::LocalBordersOnGet [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:37:29.685058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:37:29.685233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:37:29.685283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:37:29.685322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:37:29.686216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:37:29.686285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:37:29.686386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:37:29.686465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:37:29.687625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:29.785859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:37:29.785931Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:29.803717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:29.804035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:37:29.804217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:37:29.813295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:37:29.813654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:37:29.817878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:29.818432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:37:29.823662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:29.835469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:37:29.835565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:29.836852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:37:29.836950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:37:29.837068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:37:29.837302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:37:29.844741Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:37:29.997355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:37:29.999160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.003053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:37:30.007376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:37:30.007501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.023667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:30.023870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:37:30.024097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.024246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:37:30.024329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:37:30.024407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:37:30.026801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.026874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:37:30.026926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:37:30.029361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.029435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.029479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:30.029555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:37:30.035685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:37:30.038527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:37:30.040162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:37:30.041456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:37:30.041628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:37:30.041690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:30.043467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:37:30.043551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:37:30.043792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:37:30.043877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:37:30.047552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:37:30.047604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:37:30.047827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:37:30.047877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:37:30.048130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:37:30.048197Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:37:30.048303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:37:30.048355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:30.048402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:37:30.048438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:30.048477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:37:30.048519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:37:30.048557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:37:30.048589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:37:30.048657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:37:30.048699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:37:30.048752Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:37:30.051260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:37:30.051376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:37:30.051413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... cs 2025-04-10T11:37:52.256443Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-04-10T11:37:52.256468Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.256495Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.256531Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.256609Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-04-10T11:37:52.256629Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.256677Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.256729Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.256822Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-04-10T11:37:52.256843Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.256862Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.256888Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.256955Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-04-10T11:37:52.256980Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257009Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257044Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257112Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-04-10T11:37:52.257129Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257147Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257170Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257220Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-04-10T11:37:52.257237Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257259Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257289Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257364Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-04-10T11:37:52.257390Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257414Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257442Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257529Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-04-10T11:37:52.257556Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257585Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257622Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257719Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 
2025-04-10T11:37:52.257746Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257779Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257816Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.257898Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-04-10T11:37:52.257918Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.257940Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.257973Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.258064Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2025-04-10T11:37:52.258093Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.258126Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.258295Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.258439Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-04-10T11:37:52.258488Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.258522Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.258599Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.258718Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-04-10T11:37:52.258750Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.258783Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.258820Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.258936Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-04-10T11:37:52.258968Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259002Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259039Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.259148Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-04-10T11:37:52.259178Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259210Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259245Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.259346Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-04-10T11:37:52.259376Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259404Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259437Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.259514Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-04-10T11:37:52.259531Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259551Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259573Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.259647Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-04-10T11:37:52.259674Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259703Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259739Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.259842Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-04-10T11:37:52.259870Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-10T11:37:52.259898Z node 6 
:GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-10T11:37:52.259937Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-10T11:37:52.260037Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:571:2500] 2025-04-10T11:37:52.260102Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-04-10T11:37:52.260168Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-04-10T11:37:52.273298Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273382Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273410Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273438Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273461Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273486Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273512Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273539Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273561Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273588Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273615Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273641Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273669Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273697Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273722Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273748Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273777Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273803Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273830Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273853Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273876Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273903Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273929Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273957Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.273981Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274007Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274032Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274059Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274086Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274112Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274138Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274184Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274212Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274238Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274262Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274289Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274312Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274337Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274364Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 
2025-04-10T11:37:52.274390Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274414Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274436Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274465Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274492Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274519Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274544Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274572Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274599Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274634Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274658Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274684Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274709Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274739Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274765Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274791Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274818Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274845Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274868Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274894Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274919Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-10T11:37:52.274956Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-04-10T11:37:52.275010Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-04-10T11:37:52.275169Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-04-10T11:37:52.275219Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:572:2501] |97.8%| [TS] {RESULT} ydb/core/graph/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 
2025-04-10T11:37:49.539678Z :MVP DEBUG: Refreshing token metadataTokenName 2025-04-10T11:37:49.540716Z :MVP DEBUG: Updating metadata token 2025-04-10T11:37:49.557597Z :MVP DEBUG: Refreshing token metadataTokenName 2025-04-10T11:37:49.559116Z :MVP DEBUG: Updating metadata token 2025-04-10T11:37:54.560020Z :MVP DEBUG: Refreshing token metadataTokenName 2025-04-10T11:37:54.560376Z :MVP DEBUG: Updating metadata token |97.8%| [TS] {RESULT} ydb/mvp/core/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> TestFormatHandler::ManyJsonClients [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13530, MsgBus: 61888 2025-04-10T11:33:45.917418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646279195709277:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:45.917524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176f/r3tmp/tmp7S3aPo/pdisk_1.dat 2025-04-10T11:33:46.313771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:46.334640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:46.334752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:46.336356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13530, node 1 2025-04-10T11:33:46.405661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:46.405690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:46.405701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:46.405839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61888 TClient is connected to server localhost:61888 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T11:33:47.017067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:47.041359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:33:47.054306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:49.345042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646296375579521:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:49.345094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646296375579533:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:49.345173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:49.349587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:33:49.366435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646296375579535:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:33:49.435297Z node 1 :TX_PROXY ERROR: Actor# [1:7491646296375579586:2606] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:33:49.891402Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2025-04-10T11:33:49.891450Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2025-04-10T11:33:49.904417Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646296375579628:2368], TxId: 281474976710661, task: 1. Ctx: { TraceId : 01jrfqmhms0hqedmxv4ykkh1tg. SessionId : ydb://session/3?node_id=1&id=NjZlMjNjOTEtNWFlNWI0MDItZmMwMGI1N2EtMmVkMjBmOTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-10T11:33:49.884408Z }, code: 2029 }. 2025-04-10T11:33:49.906655Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491646296375579629:2369], TxId: 281474976710661, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjZlMjNjOTEtNWFlNWI0MDItZmMwMGI1N2EtMmVkMjBmOTc=. CustomerSuppliedId : . TraceId : 01jrfqmhms0hqedmxv4ykkh1tg. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491646296375579616:2354], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-10T11:33:49.913993Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZlMjNjOTEtNWFlNWI0MDItZmMwMGI1N2EtMmVkMjBmOTc=, ActorId: [1:7491646296375579503:2354], ActorState: ExecuteState, TraceId: 01jrfqmhms0hqedmxv4ykkh1tg, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-x5vqckr7qy, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-10T11:33:49.884408Z } , code: 2029 Trying to start YDB, gRPC: 11993, MsgBus: 63796 2025-04-10T11:33:50.741209Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646303501759147:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:33:50.741261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00176f/r3tmp/tmp7fKEmK/pdisk_1.dat 2025-04-10T11:33:50.894461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:33:50.906622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:33:50.906739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:33:50.910263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11993, node 2 2025-04-10T11:33:50.990804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:33:50.990838Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:33:50.990845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:33:50.990988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63796 TClient is connected to server localhost:63796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:33:51.454109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:33:51.461932Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:33:51.472657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:33:54.304284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646320681629384:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:54.304368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:54.304767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491646320681629396:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:33:54.312952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose i ... ssionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqt4yaasbhp760rhc9f6sv, Create QueryResponse for error on request, msg: 2025-04-10T11:36:57.353809Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqt8x28d2trgghh0qpt0x1, Create QueryResponse for error on request, msg: 2025-04-10T11:36:58.473565Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqt9zxdpaadcq98vct4sgm, Create QueryResponse for error on request, msg: 2025-04-10T11:36:59.921021Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647115372862662:2493] TxId: 281474976710956. Ctx: { TraceId: 01jrfqtbd18a5a6dn6nyrxsc44, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 367ms } {
: Error: Cancelling after 366ms during execution } ] 2025-04-10T11:36:59.921184Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647115372862677:4889], TxId: 281474976710956, task: 9. Ctx: { TraceId : 01jrfqtbd18a5a6dn6nyrxsc44. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491647115372862662:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:00.004301Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647115372862672:4884], TxId: 281474976710956, task: 4. Ctx: { TraceId : 01jrfqtbd18a5a6dn6nyrxsc44. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491647115372862662:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:00.004824Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtbd18a5a6dn6nyrxsc44, Create QueryResponse for error on request, msg: 2025-04-10T11:37:01.106047Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647123962797505:2493] TxId: 281474976710963. Ctx: { TraceId: 01jrfqtchy6j67j9pjc17dvmfq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 370ms } {
: Error: Cancelling after 370ms during execution } ] 2025-04-10T11:37:01.106356Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtchy6j67j9pjc17dvmfq, Create QueryResponse for error on request, msg: 2025-04-10T11:37:03.883822Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtf8h5svw1zdk795qjck8, Create QueryResponse for error on request, msg: 2025-04-10T11:37:07.423769Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtjpt8zkppmnfzyp11k12, Create QueryResponse for error on request, msg: 2025-04-10T11:37:08.199975Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647154027570053:2493] TxId: 281474976711001. Ctx: { TraceId: 01jrfqtkf01r66ah4jgekvae65, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 391ms } {
: Error: Cancelling after 391ms during execution } ] 2025-04-10T11:37:08.200139Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtkf01r66ah4jgekvae65, Create QueryResponse for error on request, msg: 2025-04-10T11:37:10.097936Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647162617504984:2493] TxId: 281474976711011. Ctx: { TraceId: 01jrfqtna4c3wz2rnx7kv3am8d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 396ms } {
: Error: Cancelling after 396ms during execution } ] 2025-04-10T11:37:10.098395Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647162617504999:5284], TxId: 281474976711011, task: 9. Ctx: { TraceId : 01jrfqtna4c3wz2rnx7kv3am8d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491647162617504984:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:10.098402Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647162617504984:2493] TxId: 281474976711011. Ctx: { TraceId: 01jrfqtna4c3wz2rnx7kv3am8d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-10T11:37:10.098774Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtna4c3wz2rnx7kv3am8d, Create QueryResponse for error on request, msg: 2025-04-10T11:37:20.120885Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647205567180076:2493] TxId: 281474976711069. Ctx: { TraceId: 01jrfqtz2e4z8w8htz8268yfrc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 425ms } {
: Error: Cancelling after 425ms during execution } ] 2025-04-10T11:37:20.121227Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647205567180076:2493] TxId: 281474976711069. Ctx: { TraceId: 01jrfqtz2e4z8w8htz8268yfrc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-10T11:37:20.121242Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647205567180095:5710], TxId: 281474976711069, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. TraceId : 01jrfqtz2e4z8w8htz8268yfrc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7491647205567180076:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:20.121574Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqtz2e4z8w8htz8268yfrc, Create QueryResponse for error on request, msg: 2025-04-10T11:37:27.849192Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491647235631952619:2493] TxId: 281474976711107. Ctx: { TraceId: 01jrfqv6kbac46h6nwmeh3paf4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 444ms } {
: Error: Cancelling after 445ms during execution } ] 2025-04-10T11:37:27.849397Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647235631952634:5991], TxId: 281474976711107, task: 6. Ctx: { TraceId : 01jrfqv6kbac46h6nwmeh3paf4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491647235631952619:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:27.849629Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647235631952638:5993], TxId: 281474976711107, task: 8. Ctx: { SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. TraceId : 01jrfqv6kbac46h6nwmeh3paf4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491647235631952619:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:27.849706Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647235631952632:5990], TxId: 281474976711107, task: 5. Ctx: { TraceId : 01jrfqv6kbac46h6nwmeh3paf4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491647235631952619:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:27.850054Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647235631952636:5992], TxId: 281474976711107, task: 7. Ctx: { SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CustomerSuppliedId : . TraceId : 01jrfqv6kbac46h6nwmeh3paf4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491647235631952619:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:27.850306Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7491647235631952642:5994], TxId: 281474976711107, task: 9. Ctx: { TraceId : 01jrfqv6kbac46h6nwmeh3paf4. SessionId : ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7491647235631952619:2493], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-10T11:37:27.850791Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqv6kbac46h6nwmeh3paf4, Create QueryResponse for error on request, msg: 2025-04-10T11:37:42.458735Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqvmtq7f1w5kw1x7pm7q6r, Create QueryResponse for error on request, msg: 2025-04-10T11:37:43.077973Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzU3YzE2ZDYtYWUwMzhiNmEtNjEzNjQ4MzctZWIyMmI3MWE=, ActorId: [4:7491646853379846357:2493], ActorState: ExecuteState, TraceId: 01jrfqvne4f0drc9zrg3qgd0w5, Create QueryResponse for error on request, msg: >> TestFormatHandler::ManyRawClients >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery [GOOD] >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> TabletService_ChangeSchema::Basics >> DataShardStats::OneChannelStatsCorrect >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TestFormatHandler::ManyRawClients [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> TestFormatHandler::ClientValidation >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> 
test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> SystemView::PDisksFields [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> NodeWardenDsProxyConfigRetrieval::Disconnect >> KafkaProtocol::ProduceScenario [GOOD] >> KafkaProtocol::FetchScenario >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> TestFormatHandler::ClientValidation [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> TTxDataShardSampleKScan::RunScan >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> TestFormatHandler::ClientError >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> 
NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-04-10T11:38:03.715470Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:38:03.762625Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskGuid: 14492327582807374897 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 14492327582807374897 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 14492327582807374897 } } } } AvailabilityDomains: 0 } 2025-04-10T11:38:03.764238Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:38:03.767967Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T11:38:03.771955Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 14492327582807374897 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:38:03.773513Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 14492327582807374897 2025-04-10T11:38:03.773590Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:38:03.775223Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-10T11:38:03.775273Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:38:03.775991Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:38:03.776525Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:38:03.794231Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:38:03.798025Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:38:03.838809Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:38:03.838900Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:38:03.839109Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:38:03.839160Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:38:03.844287Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:38:03.855250Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} 
TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:38:03.855675Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskGuid: 14492327582807374897 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 14492327582807374897 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 14492327582807374897 } } } } AvailabilityDomains: 0 } 2025-04-10T11:38:03.860359Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:38:03.860422Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-10T11:38:03.900351Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-10T11:38:03.900414Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:38:03.901198Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\231\250\211\216\025EM\322\207\363\306\244~\177\320?1\371\r\347" } 2025-04-10T11:38:03.901390Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-10T11:38:03.901433Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:47:2089] SessionId# [0:0:0] Cookie# 0 2025-04-10T11:38:03.901492Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.062164s 2025-04-10T11:38:03.902254Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-10T11:38:03.902347Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-04-10T11:38:03.932416Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:03.946864Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:03.980087Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-10T11:38:04.006625Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-10T11:38:04.009200Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-10T11:38:04.012790Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:38:04.013067Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-10T11:38:04.015751Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event 
Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-10T11:38:04.015809Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-10T11:38:04.016040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-10T11:38:04.047875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-10T11:38:04.048170Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-10T11:38:04.049208Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-10T11:38:04.049496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T11:38:04.049636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-10T11:38:04.049758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T11:38:04.079293Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-10T11:38:04.079469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T11:38:04.094524Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-10T11:38:04.094663Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T11:38:04.094769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-10T11:38:04.094844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T11:38:04.094944Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-10T11:38:04.095012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T11:38:04.095070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-10T11:38:04.095140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T11:38:04.107897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-10T11:38:04.108056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 
2025-04-10T11:38:04.120522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-10T11:38:04.120682Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-10T11:38:04.124524Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-10T11:38:04.124616Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-10T11:38:04.144032Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-10T11:38:04.144135Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed Pipe connected clientId# [1:30:2075] 2025-04-10T11:38:04.144846Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:30:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:30:2075] 2025-04-10T11:38:04.145543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 14492327582807374897 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-10T11:38:04.147104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-10T11:38:04.148135Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat 2025-04-10T11:38:04.156772Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-10T11:38:04.157039Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-10T11:38:04.157200Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-04-10T11:38:04.157508Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 14492327582807374897 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-10T11:38:04.157706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 14492327582807374897 Status: READY OnlyPhantomsRemain: false } } 2025-04-10T11:38:04.159335Z node 1 
:BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:04.160534Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-10T11:38:04.160742Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:04.160899Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-10T11:38:04.160964Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:04.175947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:30:2075] 2025-04-10T11:38:04.176981Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:30:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:30:2075] 2025-04-10T11:38:04.177099Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2159] ControllerId# 72057594037932033 2025-04-10T11:38:04.177144Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:38:04.177621Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-04-10T11:38:04.177671Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-04-10T11:38:04.177712Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2147483648 2025-04-10T11:38:04.178032Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:139:2159] 2025-04-10T11:38:04.178374Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2159] ServerId# [1:142:2161] TabletId# 72057594037932033 PipeClientId# [1:139:2159] 2025-04-10T11:38:04.178586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 14492327582807374897 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-10T11:38:04.178900Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-10T11:38:04.179100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:142:2161] RecipientRewrite# [1:90:2122] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-04-10T11:38:04.179206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-04-10T11:38:04.179339Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-10T11:38:04.179471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2025-04-10T11:38:04.195206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-04-10T11:38:04.196068Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskGuid: 14492327582807374897 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "4c1f69c4-60559a92-93b9cac5-518dcbef" Comprehensive: true AvailDomain: 0 } 2025-04-10T11:38:04.196336Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2gt4/0008a7/r3tmp/tmpkamSEn/static.dat" PDiskGuid: 14492327582807374897 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-10T11:38:04.196566Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 14492327582807374897 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:38:04.197799Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 14492327582807374897 2025-04-10T11:38:04.203562Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-04-10T11:38:04.203706Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet 
IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 14492327582807374897 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-10T11:38:04.209175Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 2025-04-10T11:38:04.210850Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 14492327582807374897 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-04-10T11:38:04.213253Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-10T11:38:04.218103Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-04-10T11:38:04.224425Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:04.224942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 14492327582807374897 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-10T11:38:04.227017Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-04-10T11:38:04.228315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 14492327582807374897 Status: READY OnlyPhantomsRemain: false } } |97.8%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> THiveTest::TestCreateExternalTablet [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> test.py::test_local [GOOD] >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> TestFormatHandler::ClientError [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TestFormatHandler::ClientErrorWithEmptyFilter >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_example2 [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus |97.8%| [TM] {RESULT} ydb/tests/example/py3test >> KafkaProtocol::FetchScenario [GOOD] >> KafkaProtocol::BalanceScenario >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |97.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] >> QuoterWithKesusTest::GetsQuotaAfterPause >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-04-10T11:32:36.549931Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-04-10T11:32:36.554014Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 
1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:36.554342Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-10T11:32:36.555037Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-10T11:32:36.556432Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-10T11:32:36.556495Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-10T11:32:36.557510Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-04-10T11:32:36.557554Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-10T11:32:36.557721Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-04-10T11:32:36.558112Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-04-10T11:32:36.558286Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-04-10T11:32:36.558328Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-04-10T11:32:36.558382Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-10T11:32:36.558442Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-10T11:32:36.558686Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-10T11:32:36.558840Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:36.581396Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-10T11:32:36.581467Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-10T11:32:36.584138Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.584341Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.584570Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.584726Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.584879Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.585098Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.585378Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-04-10T11:32:36.585416Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-10T11:32:36.585659Z node 1 :PIPE_CLIENT DEBUG: 
TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:36.585724Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:36.585762Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:36.586772Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-04-10T11:32:36.586821Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-04-10T11:32:36.602300Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:36.602409Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:36.602455Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-10T11:32:36.606920Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-10T11:32:36.607600Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:36.607864Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:36.609715Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-10T11:32:36.609917Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:32:36.610333Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-10T11:32:36.610900Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-10T11:32:36.610974Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-10T11:32:36.611027Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-10T11:32:36.611112Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-10T11:32:36.611179Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-10T11:32:36.611260Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:36.611430Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-10T11:32:36.611462Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-10T11:32:36.611521Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 
2025-04-10T11:32:36.611590Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:36.611677Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:32:36.611731Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:51:2091] 2025-04-10T11:32:36.612305Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-10T11:32:36.612357Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-10T11:32:36.612453Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:36.612667Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-10T11:32:36.612774Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:32:36.612822Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-10T11:32:36.612996Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-04-10T11:32:36.613079Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-04-10T11:32:36.613136Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-04-10T11:32:36.616914Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:36.618958Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-04-10T11:32:36.619182Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:36.621915Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-10T11:32:36.622070Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:32:36.622126Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-10T11:32:36.622314Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:32:36.622486Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-04-10T11:32:36.622535Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-04-10T11:32:36.622814Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-10T11:32:36.622879Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-10T11:32:36.622989Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo 
Status: 1 TabletID: 72057594037927937} 2025-04-10T11:32:36.623182Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-10T11:32:36.623229Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-10T11:32:36.623388Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-10T11:32:36.623549Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-10T11:32:36.623614Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-10T11:32:36.624218Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [151:270:2261] CurrentLeaderTablet: [151:277:2265] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-10T11:38:05.430011Z node 151 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-04-10T11:38:05.430110Z node 151 :TABLET_RESOLVER DEBUG: SelectForward node 151 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [151:270:2261] 2025-04-10T11:38:05.430288Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [151:267:2260] 2025-04-10T11:38:05.430402Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [151:267:2260] 2025-04-10T11:38:05.430592Z node 151 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [151:267:2260] 2025-04-10T11:38:05.430803Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [151:267:2260] 2025-04-10T11:38:05.430901Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [151:267:2260] 2025-04-10T11:38:05.431002Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [151:267:2260] 2025-04-10T11:38:05.431103Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [151:267:2260] 2025-04-10T11:38:05.431176Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [151:267:2260] 2025-04-10T11:38:05.431401Z node 151 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [151:266:2259] EventType# 268697601 2025-04-10T11:38:05.431653Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-10T11:38:05.431775Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:38:05.432806Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-04-10T11:38:05.432950Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:38:05.433483Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [151:311:2288] 2025-04-10T11:38:05.433537Z 
node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [151:311:2288] 2025-04-10T11:38:05.433608Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [151:311:2288] 2025-04-10T11:38:05.433678Z node 151 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:38:05.433757Z node 151 :TABLET_RESOLVER DEBUG: SelectForward node 151 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [151:94:2122] 2025-04-10T11:38:05.433853Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [151:311:2288] 2025-04-10T11:38:05.433898Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [151:311:2288] 2025-04-10T11:38:05.434007Z node 151 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [151:311:2288] 2025-04-10T11:38:05.434189Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [151:311:2288] 2025-04-10T11:38:05.434236Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [151:311:2288] 2025-04-10T11:38:05.434269Z node 151 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [151:311:2288] 2025-04-10T11:38:05.434388Z node 151 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [151:277:2265] EventType# 268637702 2025-04-10T11:38:05.434559Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-10T11:38:05.434670Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:38:05.434928Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:38:05.435054Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:38:05.435463Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-10T11:38:05.435582Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:38:05.436134Z node 151 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003660064}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-10T11:38:05.436307Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-10T11:38:05.436425Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:38:05.447663Z node 151 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] bootstrap ActorId# [151:314:2291] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency 
RestartCounter# 0 Marker# BPP13 2025-04-10T11:38:05.447840Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-10T11:38:05.447897Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-10T11:38:05.447957Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-04-10T11:38:05.448002Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-04-10T11:38:05.448141Z node 151 :BS_PROXY DEBUG: Send to queueActorId# [151:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-10T11:38:05.451201Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-10T11:38:05.451354Z node 151 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-10T11:38:05.451418Z node 151 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-10T11:38:05.451558Z node 151 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.712 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 151 } TEvVPutResult{ TimestampMs# 3.809 VDiskId# [0:1:0:0:0] NodeId# 151 Status# OK } ] } 2025-04-10T11:38:05.451717Z node 151 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-10T11:38:05.451884Z node 151 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-10T11:38:05.452321Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:38:05.452450Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:38:05.452509Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:38:05.452559Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.452622Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:38:05.452675Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.452727Z node 151 :STATESTORAGE DEBUG: 
ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.453216Z node 151 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [151:318:2294] 2025-04-10T11:38:05.453299Z node 151 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [151:318:2294] 2025-04-10T11:38:05.453478Z node 151 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-10T11:38:05.453690Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-10T11:38:05.453875Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-10T11:38:05.453992Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-10T11:38:05.454042Z node 151 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-10T11:38:05.454108Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.454573Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.454625Z node 151 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-10T11:38:05.454787Z node 151 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-10T11:38:05.454878Z node 151 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-10T11:38:05.455066Z node 151 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [151:318:2294] 2025-04-10T11:38:05.455154Z node 151 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [151:318:2294] >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> TestJsonParser::Simple1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] 
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:45.769131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:45.769228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:45.769270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:45.769305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:45.769352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:45.769386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:45.769461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:45.769543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:45.769925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:45.858903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:45.858998Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:45.877823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:45.878139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:45.878382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:45.887015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:45.887406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:45.888182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:45.889257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:45.893062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:45.894616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-10T11:36:45.894683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:45.894864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:45.894961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:45.895017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:45.895199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:45.906881Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:46.072993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:46.073258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.073513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:46.073841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:46.073904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.076466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:46.076636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:46.076870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.076932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:46.077012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:46.077055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:46.079208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.079267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:46.079309Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:46.081193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.081258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.081305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.081415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.085609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:46.091123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:46.091365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:46.092538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:46.092697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:46.092754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.093092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:46.093178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:46.093397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:46.093504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:46.099500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:46.099556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:46.099760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:46.099810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-04-10T11:36:46.100081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:46.100151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:46.100268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:46.100305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:46.100348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:46.100403Z no ... BrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:38:12.105225Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:38:12.105459Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 271us result status StatusSuccess 2025-04-10T11:38:12.106124Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:38:12.117583Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1088:2875] 
Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:38:12.117724Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1046:2875] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:38:12.117911Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1088:2875] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285092075855 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285092075855 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744285092075855 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:38:12.121737Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1088:2875] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-10T11:38:12.121842Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1046:2875] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TestJsonParser::Simple1 [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TestJsonParser::Simple2 >> KeyValueGRPCService::SimpleAcquireLock >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> TestJsonParser::Simple2 [GOOD] >> PushdownTest::NoFilter >> TestJsonParser::Simple3 >> PushdownTest::NoFilter [GOOD] >> PushdownTest::Equal >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sample_k/unittest >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] Test command err: 2025-04-10T11:38:04.844524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647391750434337:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:38:04.844634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0006c5/r3tmp/tmpoXDM3N/pdisk_1.dat 2025-04-10T11:38:05.372391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-10T11:38:05.372481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:05.379129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:05.400786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:05.458422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:05.512734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:05.574133Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491647396045402206:2295] 2025-04-10T11:38:05.574482Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:05.599051Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:05.599131Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:05.606840Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:05.606902Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:05.606955Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:05.610064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:05.610173Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:05.610205Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491647396045402222:2295] in generation 1 2025-04-10T11:38:05.611333Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:05.649599Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:05.652614Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:05.652711Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491647396045402226:2296] 2025-04-10T11:38:05.652720Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:05.652731Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:05.652740Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:05.653811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647396045402203:2296], serverId# [1:7491647396045402221:2304], sessionId# [0:0:0] 2025-04-10T11:38:05.654057Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:05.654182Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:05.654207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:05.654227Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:05.654302Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 
TxInFly 0 2025-04-10T11:38:05.654328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:05.654348Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:05.654714Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:05.654846Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T11:38:05.656578Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:05.659557Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:38:05.659642Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:38:05.662416Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647396045402239:2314], serverId# [1:7491647396045402240:2315], sessionId# [0:0:0] 2025-04-10T11:38:05.667270Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744285085711 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285085711 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:38:05.667302Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:05.667470Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:05.667537Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:05.667555Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:38:05.667597Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285085711:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T11:38:05.667837Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285085711:281474976710657 keys extracted: 0 2025-04-10T11:38:05.667966Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:38:05.668055Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:05.668200Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:38:05.671823Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:38:05.673492Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:05.675836Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744285085710 2025-04-10T11:38:05.675871Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:05.675909Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744285085718 
2025-04-10T11:38:05.676126Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285085711} 2025-04-10T11:38:05.676167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:05.677724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:05.677761Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:05.677790Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:38:05.677864Z node 1 :TX_DATASHARD DEBUG: Complete [1744285085711 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647396045402043:2192], exec latency: 5 ms, propose latency: 9 ms 2025-04-10T11:38:05.677909Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T11:38:05.678051Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:05.683039Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T11:38:05.683123Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:38:07.915428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647404635336926:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:07.915614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647404635336934:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:07.916584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:07.929165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:38:07.934508Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:07.954308Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:07.955631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647404635336940:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:38:08.092307Z node 1 :TX_PROXY ERROR: Actor# [1:7491647408930304287:2393] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:38:09.092336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jrfqwe56d74c6aa9h6f741nr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFlY2I3YTQtOTA4Njc1ODgtNTBlZDI2ZjctOTFkYjRmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:09.138507Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647413225271619:2408], serverId# [1:7491647413225271620:2409], sessionId# [0:0:0] 2025-04-10T11:38:09.140820Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-04-10T11:38:09.141069Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row c ... 0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:09.913238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:09.915022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:09.927028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:09.941028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:09.954657Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7491647414264486466:2295] 2025-04-10T11:38:09.954895Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:09.966831Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:09.966898Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:09.968287Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:09.968322Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:09.968350Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:09.968632Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:09.968675Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:09.968700Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:7491647414264486482:2295] in generation 1 2025-04-10T11:38:09.970317Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:09.970350Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:09.970403Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:09.970441Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:7491647414264486484:2296] 
2025-04-10T11:38:09.970450Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:09.970458Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:09.970465Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:09.970554Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:09.970606Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:09.970644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:09.970655Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:09.970672Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:38:09.970688Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:09.971724Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647414264486463:2294], serverId# [2:7491647414264486473:2299], sessionId# [0:0:0] 2025-04-10T11:38:09.971806Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:09.972024Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:09.972078Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T11:38:09.973787Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:09.974120Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:38:09.974182Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:38:09.977275Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647414264486498:2312], serverId# [2:7491647414264486500:2314], sessionId# [0:0:0] 2025-04-10T11:38:09.978879Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:09.980062Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744285090023 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285090023 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:38:09.980085Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:09.980176Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:09.980196Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:38:09.980214Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744285090023:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T11:38:09.980421Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285090023:281474976710657 keys extracted: 0 2025-04-10T11:38:09.980524Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:38:09.980617Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:09.980655Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:38:09.981095Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:38:09.981430Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:09.983017Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744285090022 2025-04-10T11:38:09.983042Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:09.983071Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744285090030 2025-04-10T11:38:09.983110Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285090023} 2025-04-10T11:38:09.983133Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:09.983163Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:09.983179Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:09.983193Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:38:09.983231Z node 2 :TX_DATASHARD DEBUG: Complete [1744285090023 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7491647414264486225:2143], exec latency: 0 ms, propose latency: 2 ms 2025-04-10T11:38:09.983254Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T11:38:09.983306Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:09.984990Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T11:38:09.985021Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:38:09.989453Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647414264486537:2341], serverId# [2:7491647414264486538:2342], sessionId# [0:0:0] 2025-04-10T11:38:09.989541Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:09.989626Z node 2 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T11:38:09.991141Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:38:09.992576Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1744285090037 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285090037 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:38:09.992594Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-04-10T11:38:09.992692Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:09.992710Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:38:09.992743Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1744285090037:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-10T11:38:09.992864Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285090037:281474976710658 keys extracted: 0 2025-04-10T11:38:09.993164Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:09.994173Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285090037} 2025-04-10T11:38:09.994221Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:09.994261Z node 2 :TX_DATASHARD DEBUG: Complete [1744285090037 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7491647414264486532:2337], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:38:09.994281Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:09.997281Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647414264486548:2352], serverId# [2:7491647414264486549:2353], sessionId# [0:0:0] 2025-04-10T11:38:10.006327Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647418559453849:2357], serverId# [2:7491647418559453850:2358], sessionId# [0:0:0] 2025-04-10T11:38:10.014267Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647418559453854:2362], serverId# [2:7491647418559453855:2363], sessionId# [0:0:0] 2025-04-10T11:38:10.017839Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647418559453859:2367], serverId# [2:7491647418559453860:2368], sessionId# [0:0:0] 2025-04-10T11:38:10.021339Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647418559453864:2372], serverId# [2:7491647418559453865:2373], sessionId# [0:0:0] 2025-04-10T11:38:10.023385Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7491647418559453869:2377], serverId# [2:7491647418559453870:2378], sessionId# [0:0:0] |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_sample_k/unittest >> TestJsonParser::Simple3 [GOOD] >> PushdownTest::Equal [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> PushdownTest::NotEqualInt32Int64 [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> TestJsonParser::Simple4 >> PushdownTest::TrueCoalesce [GOOD] >> PushdownTest::CmpInt16AndInt32 [GOOD] >> PushdownTest::PartialAnd >> TestJsonParser::Simple4 [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> PushdownTest::PartialAnd [GOOD] >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] >> TestJsonParser::LargeStrings >> PushdownTest::NotNull [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> PushdownTest::NotNullForDatetime [GOOD] >> PushdownTest::IsNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-04-10T11:37:26.502230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647228855786713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:26.502336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008f1/r3tmp/tmpLou4UW/pdisk_1.dat 2025-04-10T11:37:26.966974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:26.973831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:26.973932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:26.987135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13679, node 1 2025-04-10T11:37:27.094494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:27.094515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:27.094521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:27.094629Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:27.576154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... TClient is connected to server localhost:12711 waiting... 
2025-04-10T11:37:27.876223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T11:37:29.943296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-10T11:37:29.946355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744285047638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1744285047666 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:30.349902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:30.360722Z node 1 :TX_PROXY ERROR: Actor# [1:7491647246035656803:2469] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:12711 waiting... 2025-04-10T11:37:30.602894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2025-04-10T11:37:30.604027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1744285050410, "myFolder", "{\"k1\": \"v1\"}"); 2025-04-10T11:37:30.749771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647246035656975:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:30.749779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647246035656983:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:30.749874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:30.756684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-04-10T11:37:30.765450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647246035656989:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2025-04-10T11:37:30.839374Z node 1 :TX_PROXY ERROR: Actor# [1:7491647246035657043:2627] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:31.486000Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491647228855786713:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:31.486078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:37:31.642008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jrfqv9v79ysjmfwvt0jsgymb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMzNjExZjgtZmVkZWJiNzktYzAyNzU1OGYtZmEwMWU0Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.068674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jrfqvat3dw1jvzycpcv018vt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU1NTk3OC1lNjNjODFkNy1kOGM2YTVhMC03MDA5NzhiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.216770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jrfqvb5s97mxq5zjcrcsdj95, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFjNzU3YmItNDRkZGU5ZjEtZWYyY2I2ZjQtYzhmMzNjYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.392527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jrfqvba041eppznj9m05k8am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhNWY0MmYtODZjZjRiYWUtMzAzNzBjZDAtNzdiZTg1Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:30.410000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:30.410000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:32.399529Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-10T11:37:32.507985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jrfqvbjtc4d8p3aj64qepy1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI1N2Y2NDEtMzUyYjVjYTgtOTNmNTk4MDYtZTc3NGY0NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.514729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. 
Ctx: { TraceId: 01jrfqvbk1fgactm70k1t5pekr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM0MWY5My02Y2IxYWZkNS1mNzliODYyYy1mMTM5ZmIxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.621919Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jrfqvbpc3fbgj4fw03xh01wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkyNGY4My00MmFiZDIxNC01MmE4ZjI2OS1hODE5ZGI1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.628088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jrfqvbpkd03331rgzakqbz6p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYyMmFlNGItOGYzMzk4MzUtZGVmNWUwZjAtYzg5Y2UxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:32.734718Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jrfqvbsxbehtk5rpgxk6khmv, Database: , DatabaseId: /Root, Sessio ... source_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:39.059000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-10T11:37:39.059000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:39.059000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:39.059000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:38.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:41.235984Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:38.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:41.236051Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-04-10T11:37:39.059000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:41.236081Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:38.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:41.236111Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-10T11:37:39.059000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-10T11:37:41.236140Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-10T11:37:41.341052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710748. Ctx: { TraceId: 01jrfqvm6v3xke2ahwbnxxh83j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY1ZGY5MTctY2FjZTU4ODAtMTJmYTk0YzItMjk2YzJjNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.355726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710749. Ctx: { TraceId: 01jrfqvm76a2vgdk02cks4fvj7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY1ZGY5MTctY2FjZTU4ODAtMTJmYTk0YzItMjk2YzJjNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.363638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710750. Ctx: { TraceId: 01jrfqvm7j4nj3gekj1sjz7xkq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZmNDgzNjctZDg5MjdkODctNDM4NmVkZDgtZTRmN2Q1ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.474739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jrfqvmaz3d8jrh9fg4e8enj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2MzM5N2ItNWY1OGZkNDAtNDE1ZDc3NjQtOGE0OTFjYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.483594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01jrfqvmb8byextccw5nr8qxr4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2MzM5N2ItNWY1OGZkNDAtNDE1ZDc3NjQtOGE0OTFjYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.492562Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710753. Ctx: { TraceId: 01jrfqvmbj916x1fms4nm6v103, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlmMTcwMzgtYjg0ZjBjYjEtNzRiNGFmMGMtZDE1MDIxMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.603097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710754. 
Ctx: { TraceId: 01jrfqvmf15zsag078pzdarpn1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAyNmMzZmUtZmVhYTVjZjQtNDdiYTMwMjItOGZmNDRkOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.611286Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710755. Ctx: { TraceId: 01jrfqvmf9eexhdb65wj6d7xxh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAyNmMzZmUtZmVhYTVjZjQtNDdiYTMwMjItOGZmNDRkOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.633685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710756. Ctx: { TraceId: 01jrfqvmg0fs1trt71bnqb4asv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmM3My01NmY1ZGMyNS0zNmE0ZjFmZi1lZWZlMzRjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.648015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710757. Ctx: { TraceId: 01jrfqvm45aw0jd9fqtng3f23e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMzNjExZjgtZmVkZWJiNzktYzAyNzU1OGYtZmEwMWU0Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:41.942043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:37:41.942072Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1744285047638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1744285050809 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" Pat... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:41.957606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710758:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:41.967868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710758, at schemeshard: 72057594046644480 Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-04-10T11:37:41.971168Z node 1 :TX_PROXY ERROR: Actor# [1:7491647293280299267:3616] txid# 281474976710759, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:12711 waiting... 
2025-04-10T11:37:42.267694Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710760, at schemeshard: 72057594046644480 2025-04-10T11:37:42.306040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710762:1, at schemeshard: 72057594046644480 2025-04-10T11:37:42.308323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710761:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-04-10T11:38:08.357799Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710763. Ctx: { TraceId: 01jrfqweac9nfqben6x197v0xh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMzNjExZjgtZmVkZWJiNzktYzAyNzU1OGYtZmEwMWU0Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:11.702740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710764. Ctx: { TraceId: 01jrfqwhcj9dn5x3t8v25baxn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWMzNjExZjgtZmVkZWJiNzktYzAyNzU1OGYtZmEwMWU0Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:12.969717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710765. Ctx: { TraceId: 01jrfqwjvn2fw0pk4emgmpm4nh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxNDMxOWYtYjhiMTk1NjEtNzhiOTdlNTEtZjdkMWYxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:13.037294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710766. Ctx: { TraceId: 01jrfqwk5b2eb81s2czax04qn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxNDMxOWYtYjhiMTk1NjEtNzhiOTdlNTEtZjdkMWYxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:13.108305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. Ctx: { TraceId: 01jrfqwk7j0tz018sa5aexa8f5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxNDMxOWYtYjhiMTk1NjEtNzhiOTdlNTEtZjdkMWYxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:13.119495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01jrfqwk7xepq6zmw7va0kr7zg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxNDMxOWYtYjhiMTk1NjEtNzhiOTdlNTEtZjdkMWYxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:13.344178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710769. Ctx: { TraceId: 01jrfqwk87b6dck50xk02jrj87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM4NTFhYzQtOTFkNzAwYjItM2MwNDQ3YmQtYzMxNTNjZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |97.8%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> TestJsonParser::LargeStrings [GOOD] >> PushdownTest::StringFieldsNotSupported >> PushdownTest::StringFieldsNotSupported [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2025-04-10T11:36:59.782973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647113040101038:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:36:59.783684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpkvEIot/pdisk_1.dat 2025-04-10T11:37:00.302278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:00.308647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:00.308746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:00.313726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20504, node 1 2025-04-10T11:37:00.535782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:00.535806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:00.535816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:00.535976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:01.056962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:37:01.150635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:37:03.473906Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491647129638174239:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:03.473985Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpKp6rYi/pdisk_1.dat 2025-04-10T11:37:03.574872Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:03.603284Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:03.603361Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:03.605671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14309, node 4 2025-04-10T11:37:03.647348Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:03.647373Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:03.647386Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:03.647528Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:03.830334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:37:03.915893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:37:07.849603Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7491647147310454214:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:07.849691Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpcwdNke/pdisk_1.dat 2025-04-10T11:37:07.945686Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:07.973754Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:07.973816Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:07.975914Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15528, node 7 2025-04-10T11:37:08.019855Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:08.019873Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:08.019879Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:08.019977Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:08.291627Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:37:08.347056Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T11:37:11.636173Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7491647166848919231:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:11.636247Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpS9V5cV/pdisk_1.dat 2025-04-10T11:37:11.750348Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:11.788277Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:11.788391Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:11.791223Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8183, node 10 2025-04-10T11:37:11.841310Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:11.841336Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:11.841346Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:11.841508Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37: ... 
tor=TTableExistsActor;event=undelivered;self_id=[28:7491647316494906750:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:46.284192Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpIAKD1d/pdisk_1.dat 2025-04-10T11:37:46.648905Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:46.679478Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:46.679589Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:46.684952Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65360, node 28 2025-04-10T11:37:46.775119Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:46.775149Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:46.775162Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:46.775340Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:47.232408Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:37:47.341450Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T11:37:53.260767Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7491647346831774378:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:53.260911Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmps5Z3m6/pdisk_1.dat 2025-04-10T11:37:53.452771Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:53.512953Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:53.513079Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:53.515966Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4956, node 31 2025-04-10T11:37:53.621113Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:53.621143Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:53.621158Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:53.621361Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:54.014822Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:37:54.104408Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmp5y6Ifs/pdisk_1.dat 2025-04-10T11:38:00.829835Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:00.895822Z node 34 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:00.970042Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:00.970496Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:00.976053Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23552, node 34 2025-04-10T11:38:01.119932Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:38:01.119975Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:38:01.119990Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:38:01.120203Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:38:01.330734Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:38:01.475131Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a35/r3tmp/tmpZqgZrx/pdisk_1.dat 2025-04-10T11:38:07.927226Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:08.032297Z node 37 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17068, node 37 2025-04-10T11:38:08.140006Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:08.140118Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:08.159170Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:38:08.159227Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:38:08.159249Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:38:08.159457Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:38:08.173491Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:38:08.356235Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:38:08.463315Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 |97.8%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest >> TestJsonParser::ManyValues >> PushdownTest::StringFieldsNotSupported2 [GOOD] >> TestJsonParser::ManyValues [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TestJsonParser::MissingFields ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::StringFieldsNotSupported2 [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2025-04-10 11:38:14.481 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.488 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.489 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2025-04-10 11:38:14.497 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_load_meta.cpp:91: Loading table meta for: `test_cluster`.`test_table` 2025-04-10 11:38:14.522 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.556 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! 
$1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.565 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.570 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! $4 $3)) ) 2025-04-10 11:38:14.614 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_co_simple1.cpp:986: OptionalIf over Bool 'true 2025-04-10 11:38:14.615 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.616 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.617 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.618 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_co_simple1.cpp:2040: FlatMap with Just 2025-04-10 11:38:14.619 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:14.620 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! 
$3 $2)) ) 2025-04-10 11:38:14.621 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2025-04-10 11:38:14.622 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) 2025-04-10 11:38:14.623 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) 2025-04-10 11:38:14.625 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [RESULT] yql_result_provider.cpp:773: ResPull 2025-04-10 11:38:14.625 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2025-04-10 11:38:14.626 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2025-04-10 11:38:14.627 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! 
$2 $1)) ) 2025-04-10 11:38:14.630 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_dq_integration.cpp:187: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2025-04-10 11:38:14.639 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2025-04-10 11:38:14.641 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($32) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Double)) (let $8 (DataType 'DyNumber)) (let $9 (DataType 'Float)) (let $10 (DataType 'Int16)) (let $11 (DataType 'Int32)) (let $12 (DataType 'Int64)) (let $13 (DataType 'Int8)) (let $14 (DataType 'Interval)) (let $15 (DataType 'Json)) (let $16 (DataType 'JsonDocument)) (let $17 (DataType 'String)) (let $18 (DataType 'Timestamp)) (let $19 (DataType 'TzDate)) (let $20 (DataType 'TzDatetime)) (let $21 (DataType 'TzTimestamp)) (let $22 (DataType 'Uint16)) (let $23 (DataType 'Uint32)) (let $24 (DataType 'Uint64)) (let $25 (DataType 'Uint8)) (let $26 (DataType 'Utf8)) (let $27 (DataType 'Uuid)) (let $28 (DataType 'Yson)) (let $29 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_double" $7) '('"col_dynumber" $8) '('"col_float" $9) '('"col_int16" $10) '('"col_int32" $11) '('"col_int64" $12) '('"col_int8" $13) '('"col_interval" $14) '('"col_json" $15) '('"col_json_document" $16) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (Optio ... 
tional_utf8" } } } 2025-04-10 11:38:16.044 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_settings.cpp:38: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (!= (Member $row '"col_string") (String '"value") ) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2025-04-10 11:38:16.046 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.049 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.049 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2025-04-10 11:38:16.050 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_load_meta.cpp:91: Loading table meta for: `test_cluster`.`test_table` 2025-04-10 11:38:16.053 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.055 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.055 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.056 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-04-10 11:38:16.058 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2025-04-10 11:38:16.059 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [default] physical_opt.cpp:76: Push filter lambda: ( (return (lambda '($1) (!= (Member $1 '"col_string") (String '"value")))) ) 2025-04-10 11:38:16.059 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_optimize.cpp:135: PhysicalOptimizer-PushFilterToReadTable 2025-04-10 11:38:16.059 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-04-10 11:38:16.060 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-04-10 11:38:16.061 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-04-10 11:38:16.063 TRACE yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_physical_opt.cpp:136: Push filter. Lambda is already not empty 2025-04-10 11:38:16.063 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-04-10 11:38:16.064 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_generic_dq_integration.cpp:187: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2025-04-10 11:38:16.073 INFO yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2025-04-10 11:38:16.076 DEBUG yql-providers-generic-provider-ut-pushdown(pid=1836448, tid=0x00007FF8B460BF00) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (String '"value")) (let $4 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($33) (!= (Member $33 '"col_string") $3)))) (let $5 (DataType 'Bool)) (let $6 (DataType 'Date)) (let $7 (DataType 'Datetime)) (let $8 (DataType 'Double)) (let $9 (DataType 'DyNumber)) (let $10 (DataType 'Float)) (let $11 (DataType 'Int16)) (let $12 (DataType 'Int32)) (let $13 (DataType 'Int64)) (let $14 (DataType 'Int8)) (let $15 (DataType 'Interval)) (let $16 (DataType 'Json)) (let $17 (DataType 'JsonDocument)) (let $18 (DataType 'String)) (let $19 (DataType 'Timestamp)) (let $20 (DataType 'TzDate)) (let $21 (DataType 'TzDatetime)) (let $22 (DataType 'TzTimestamp)) (let $23 (DataType 'Uint16)) (let $24 (DataType 'Uint32)) (let $25 (DataType 'Uint64)) (let $26 (DataType 'Uint8)) (let $27 (DataType 'Utf8)) (let $28 (DataType 'Uuid)) (let $29 (DataType 'Yson)) (let $30 (StructType '('"col_bool" $5) '('"col_date" $6) '('"col_datetime" $7) '('"col_double" $8) '('"col_dynumber" $9) '('"col_float" $10) '('"col_int16" $11) '('"col_int32" $12) '('"col_int64" $13) '('"col_int8" $14) '('"col_interval" $15) '('"col_json" $16) '('"col_json_document" $17) '('"col_optional_bool" (OptionalType $5)) '('"col_optional_date" (OptionalType $6)) '('"col_optional_datetime" (OptionalType $7)) '('"col_optional_double" (OptionalType $8)) '('"col_optional_dynumber" (OptionalType $9)) '('"col_optional_float" (OptionalType $10)) '('"col_optional_int16" (OptionalType $11)) '('"col_optional_int32" (OptionalType $12)) '('"col_optional_int64" (OptionalType $13)) '('"col_optional_int8" (OptionalType $14)) '('"col_optional_interval" (OptionalType $15)) '('"col_optional_json" (OptionalType $16)) 
'('"col_optional_json_document" (OptionalType $17)) '('"col_optional_string" (OptionalType $18)) '('"col_optional_timestamp" (OptionalType $19)) '('"col_optional_tz_date" (OptionalType $20)) '('"col_optional_tz_datetime" (OptionalType $21)) '('"col_optional_tz_timestamp" (OptionalType $22)) '('"col_optional_uint16" (OptionalType $23)) '('"col_optional_uint32" (OptionalType $24)) '('"col_optional_uint64" (OptionalType $25)) '('"col_optional_uint8" (OptionalType $26)) '('"col_optional_utf8" (OptionalType $27)) '('"col_optional_uuid" (OptionalType $28)) '('"col_optional_yson" (OptionalType $29)) '('"col_string" $18) '('"col_timestamp" $19) '('"col_tz_date" $20) '('"col_tz_datetime" $21) '('"col_tz_timestamp" $22) '('"col_uint16" $23) '('"col_uint32" $24) '('"col_uint64" $25) '('"col_uint8" $26) '('"col_utf8" $27) '('"col_uuid" $28) '('"col_yson" $29))) (let $31 (DqSourceWrap $4 (DataSource '"generic" '"test_cluster") $30)) (let $32 (ResWrite! world $1 (Key) (FlatMap $31 (lambda '($34) (OptionalIf (!= (Member $34 '"col_string") $3) $34))) '('('type)))) (return (Commit! $32 $1)) ) Dq source filter settings: filter_typed { comparison { operation: NE left_value { column: "col_string" } right_value { typed_value { type { type_id: STRING } value { bytes_value: "value" } } } } } |97.8%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> TestJsonParser::MissingFields [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] >> TestJsonParser::NestedTypes >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TestJsonParser::NestedTypes [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TestJsonParser::SimpleBooleans >> TCreateAndDropViewTest::CreateViewIfNotExists >> TPGTest::TestLogin [GOOD] >> TestJsonParser::SimpleBooleans [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2025-04-10T11:38:17.447718Z :PGWIRE INFO: Listening on [::]:1756 2025-04-10T11:38:17.448995Z :PGWIRE DEBUG: (#13,[::1]:32784) incoming connection opened 2025-04-10T11:38:17.449900Z :PGWIRE DEBUG: (#13,[::1]:32784) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2025-04-10T11:38:17.450293Z :PGWIRE DEBUG: (#13,[::1]:32784) <- [1] 'R' "Auth" Size(4) OK |97.9%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest >> TestJsonParser::ManyBatches >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PDisksFields [GOOD] Test command err: 2025-04-10T11:35:30.270141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646731636207411:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:30.270377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f81/r3tmp/tmpRFxlOs/pdisk_1.dat 
2025-04-10T11:35:30.716702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.748266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.748373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.753431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28758, node 1 2025-04-10T11:35:30.851416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.851439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.851446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.851562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.236740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.290846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.310611Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7491646736510631496:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.310688Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:31.322772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:31.348341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.348408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.348571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.348604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.350491Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-10T11:35:31.350528Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:35:31.371642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.374067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.536006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.616498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T11:35:31.625593Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646734762952059:2221];send_to=[0:7307199536658146131:7762515]; waiting... 2025-04-10T11:35:31.635452Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646734510577358:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.678915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.679008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.694993Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:35:31.696031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.703059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.704148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.721760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:35:31.726105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.738580Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:31.838336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T11:35:35.134317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:35:35.278245Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646731636207411:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:35.278313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:35.388521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646753111045262:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:35.388622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646753111045257:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:35.388762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:35.393060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-10T11:35:35.419538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646753111045271:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-10T11:35:35.531643Z node 1 :TX_PROXY ERROR: Actor# [1:7491646753111045348:3010] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:36.008255Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.232384s 2025-04-10T11:35:36.008296Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.232447s 2025-04-10T11:35:36.093074Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqqs6s4trff7jsq3smmyqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODViNDQ1ZmItNWI5MzdjNGUtYzUzZWVhMDUtMmJkOTU5ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.120590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:35:36.314399Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7491646736510631496:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:36.314474Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:36.358680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfqqt1g8mpp989c5aphsjdw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODViNDQ1ZmItNWI5MzdjNGUtYzUzZWVhMDUtMmJkOTU5ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:35:36.405474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T11:35:36.575646Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7491646734762952059:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:36.575723Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:35:36.579210Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491646 ... 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7491647329384384833:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:49.625894Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:37:49.673195Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7491647329384384835:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:37:49.768752Z node 26 :TX_PROXY ERROR: Actor# [26:7491647329384384912:2722] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:49.899073Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqvw9hcjht316kyvebm814, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=YWRmY2VkMjYtYzI0NWZkMzgtMjU4ODI1MWYtYWU2Mjc4NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:50.063390Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfqvwk1b8j4092k4zxqj3ca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NGYxZmRkMjEtZjFjOWFhNjctODI3ZjE0NGQtMmZmZTM3NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:50.066692Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7491647333679352284:2365], owner: [26:7491647333679352281:2363], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:37:50.068847Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7491647333679352284:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:37:50.069239Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7491647333679352284:2365], row count: 1, finished: 1 2025-04-10T11:37:50.069287Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7491647333679352284:2365], owner: [26:7491647333679352281:2363], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:37:50.072482Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285070058, txId: 281474976710662] shutting down 2025-04-10T11:37:50.412613Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfqvwr80qx0hn585m2k7jga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=YzQwNDI4MTQtNGJhMWQzODUtN2E5YWM1YzgtNjBmNWY0NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:37:50.420526Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7491647333679352324:2376], owner: [26:7491647333679352320:2374], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:37:50.421803Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7491647333679352324:2376], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:37:50.422174Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7491647333679352324:2376], row count: 1, finished: 1 2025-04-10T11:37:50.422228Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7491647333679352324:2376], owner: [26:7491647333679352320:2374], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T11:37:50.426815Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285070411, txId: 281474976710664] shutting down greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:537, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected TBackTrace::Capture()+28 (0x1B8CA60C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1BD87F10) ??+0 (0x1B349BFC) NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&)+8701 (0x1B35FBFD) std::__y1::__function::__func, void ()>::operator()()+280 (0x1B48D6F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1BDBEF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1BD8EA89) NKikimr::NSysView::NTestSuiteSystemView::TCurrentTest::Execute()+1204 (0x1B48C5A4) NUnitTest::TTestFactory::Execute()+2438 (0x1BD90356) NUnitTest::RunMain(int, char**)+5213 (0x1BDB94AD) ??+0 (0x7F0D85657D90) __libc_start_main+128 (0x7F0D85657E40) _start+41 (0x18AC0029) 2025-04-10T11:37:52.834402Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7491647341538329426:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:52.834514Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001f81/r3tmp/tmpALLwrn/pdisk_1.dat 2025-04-10T11:37:53.107432Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:53.117151Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:53.117302Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:53.123728Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28604, node 31 2025-04-10T11:37:53.223093Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:53.223126Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:53.223146Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:53.223330Z node 31 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:8203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-10T11:37:53.691769Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:57.830858Z node 31 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[31:7491647341538329426:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:57.830984Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:37:58.881583Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7491647367308133869:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:58.881682Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7491647367308133875:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:58.881767Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:58.888678Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:37:58.913043Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7491647367308133883:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:37:59.001876Z node 31 :TX_PROXY ERROR: Actor# [31:7491647367308133934:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:59.384548Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqw0a213yz39ge3qqradmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=N2I1YTNlMzItN2E3YTg0NjQtNWMzYzhmOTktODkyMzRhMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:37:59.389153Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7491647371603101265:2346], owner: [31:7491647371603101261:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-10T11:37:59.395346Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7491647371603101265:2346], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:37:59.443668Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7491647371603101265:2346], row count: 1, finished: 1 2025-04-10T11:37:59.443778Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7491647371603101265:2346], owner: [31:7491647371603101261:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-10T11:37:59.447609Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285079381, txId: 281474976715660] shutting down >> TestJsonParser::ManyBatches [GOOD] >> TestJsonParser::LittleBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] Test command err: 2025-04-10T11:37:33.556240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647261385442912:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:33.556296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00077b/r3tmp/tmpY3bUaB/pdisk_1.dat 2025-04-10T11:37:34.007050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:34.042640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:34.043359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:34.047507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:34.097268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:37:34.133068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:37:34.176134Z node 1 :TX_DATASHARD INFO: 
TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491647265680410781:2295] 2025-04-10T11:37:34.176374Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:37:34.189847Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:37:34.189933Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:37:34.192910Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:37:34.192996Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:37:34.193039Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:37:34.194812Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:37:34.194904Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:37:34.194934Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491647265680410795:2295] in generation 1 2025-04-10T11:37:34.196132Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:37:34.243215Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:37:34.244066Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:37:34.244150Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491647265680410799:2296] 2025-04-10T11:37:34.244160Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:34.244170Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:37:34.244178Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.244958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647265680410775:2294], serverId# [1:7491647265680410798:2305], sessionId# [0:0:0] 2025-04-10T11:37:34.245198Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:37:34.245279Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:37:34.245305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:34.245329Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:34.245412Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:37:34.245469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:34.245508Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:34.245854Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:37:34.245929Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T11:37:34.247355Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:34.247425Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:34.248008Z node 1 
:TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:37:34.250571Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647265680410813:2313], serverId# [1:7491647265680410815:2315], sessionId# [0:0:0] 2025-04-10T11:37:34.256038Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744285054295 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285054295 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:34.256097Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.256237Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:37:34.256311Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:34.256343Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:34.256388Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285054295:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:34.256694Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285054295:281474976710657 keys extracted: 0 2025-04-10T11:37:34.256876Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:37:34.256967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:34.257017Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:37:34.259399Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:37:34.261024Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:34.262275Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744285054294 2025-04-10T11:37:34.262320Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.262355Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744285054302 2025-04-10T11:37:34.262411Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285054295} 2025-04-10T11:37:34.262457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:34.262489Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:34.262502Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:37:34.262514Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:37:34.262579Z node 1 :TX_DATASHARD DEBUG: Complete [1744285054295 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647261385443320:2186], exec latency: 3 ms, propose latency: 5 ms 
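The datashard trace above walks a scheme transaction through its full lifecycle: propose (TTxProposeTransactionBase), planning at a coordinator-assigned step, LoadTxDetails, execution, and completion with a notify back to the schemeshard. The "Found ready operation [1744285054295:281474976710657] in PlanQueue" entries show that planned operations are keyed by a (step, txId) pair. A minimal illustrative sketch of that ordering follows; the names are hypothetical, not YDB's actual types:

// Sketch only: models the (Step, TxId) ordering visible in the PlanQueue
// log entries above. Every shard draining its plan queue in this order
// executes planned transactions in the same deterministic global sequence.
#include <cstdint>
#include <functional>
#include <iostream>
#include <queue>
#include <vector>

struct TPlannedTx {
    uint64_t Step;  // coordinator-assigned plan step, e.g. 1744285054295
    uint64_t TxId;  // transaction id, e.g. 281474976710657
    bool operator>(const TPlannedTx& rhs) const {
        if (Step != rhs.Step) {
            return Step > rhs.Step;
        }
        return TxId > rhs.TxId;  // ties on the same step break by txId
    }
};

int main() {
    // min-heap ordered by (Step, TxId), like the PlanQueue in the trace
    std::priority_queue<TPlannedTx, std::vector<TPlannedTx>, std::greater<>> planQueue;
    planQueue.push({1744285054330, 281474976710658});
    planQueue.push({1744285054295, 281474976710657});
    while (!planQueue.empty()) {
        const auto tx = planQueue.top();
        planQueue.pop();
        std::cout << "execute [" << tx.Step << ":" << tx.TxId << "]\n";
    }
    return 0;
}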
2025-04-10T11:37:34.262622Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T11:37:34.262735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.267872Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T11:37:34.267927Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:37:34.278621Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647265680410852:2342], serverId# [1:7491647265680410853:2343], sessionId# [0:0:0] 2025-04-10T11:37:34.279711Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:37:34.279868Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-10T11:37:34.280828Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:37:34.281888Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1744285054330 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285054330 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:37:34.281909Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.282020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:37:34.282042Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:37:34.282060Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285054330:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-10T11:37:34.282219Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285054330:281474976710658 keys extracted: 0 2025-04-10T11:37:34.282556Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:37:34.283089Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285054330} 2025-04-10T11:37:34.283152Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:37:34.283189Z node 1 :TX_DATASHARD DEBUG: Complete [1744285054330 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647265680410847:2338], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:37:34.283230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:37:34.285966Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647265680410863:2353], serverId# [1:7491647265680410864:2354], sessionId# [0:0:0] 2025-04-10T11:37:34.296365Z node 1 :BUILD_INDEX NOTICE: Starting TLocalKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SnapshotTxId: 281474976710658 SnapshotStep: 1744285054330 SeqNoGeneration: 1 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 K ... 
n [1744285096358:281474976711039] in PlanQueue unit at 72075186224037997 2025-04-10T11:38:16.313471Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037997 loaded tx from db 1744285096358:281474976711039 keys extracted: 0 2025-04-10T11:38:16.313599Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037997 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:38:16.313689Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037997 2025-04-10T11:38:16.313733Z node 6 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037997 tableId# [OwnerId: 72057594046644480, LocalPathId: 115] schema version# 1 2025-04-10T11:38:16.314221Z node 6 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037997 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:38:16.314625Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037997 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:16.316468Z node 6 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037997 time 1744285096357 2025-04-10T11:38:16.316499Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037997 2025-04-10T11:38:16.316525Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037997 2025-04-10T11:38:16.316578Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:16.316934Z node 6 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037997 coordinator 72057594046316545 last step 0 next step 1744285096365 2025-04-10T11:38:16.316991Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037997 step# 1744285096358} 2025-04-10T11:38:16.317029Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037997 2025-04-10T11:38:16.317071Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037997 2025-04-10T11:38:16.317088Z node 6 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037997 2025-04-10T11:38:16.317114Z node 6 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037997 2025-04-10T11:38:16.317162Z node 6 :TX_DATASHARD DEBUG: Complete [1744285096358 : 281474976711039] from 72075186224037997 at tablet 72075186224037997 send result to client [6:7491647378486743260:2147], exec latency: 0 ms, propose latency: 3 ms 2025-04-10T11:38:16.317199Z node 6 :TX_DATASHARD INFO: 72075186224037997 Sending notify to schemeshard 72057594046644480 txId 281474976711039 state Ready TxInFly 0 2025-04-10T11:38:16.317237Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037997 2025-04-10T11:38:16.319044Z node 6 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976711039 datashard 72075186224037997 state Ready 2025-04-10T11:38:16.319096Z node 6 :TX_DATASHARD DEBUG: 72075186224037997 Got TEvSchemaChangedResult from SS at 72075186224037997 2025-04-10T11:38:16.329897Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976711040:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.337592Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] 
state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:16.337721Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037997 2025-04-10T11:38:16.340972Z node 6 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037998 actor [6:7491647442911268547:2901] 2025-04-10T11:38:16.341304Z node 6 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:16.353826Z node 6 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:16.353908Z node 6 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:16.356008Z node 6 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037998 2025-04-10T11:38:16.356077Z node 6 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037998 2025-04-10T11:38:16.356126Z node 6 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037998 2025-04-10T11:38:16.356531Z node 6 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:16.356594Z node 6 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:16.356628Z node 6 :TX_DATASHARD DEBUG: DataShard 72075186224037998 persisting started state actor id [6:7491647442911268565:2901] in generation 1 2025-04-10T11:38:16.360887Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:38:16.360940Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:16.364078Z node 6 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:16.364137Z node 6 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037998 2025-04-10T11:38:16.364240Z node 6 :TX_DATASHARD DEBUG: 72075186224037998 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:16.364283Z node 6 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037998, actorId: [6:7491647442911268567:2902] 2025-04-10T11:38:16.364318Z node 6 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037998 2025-04-10T11:38:16.364330Z node 6 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037998, state: WaitScheme 2025-04-10T11:38:16.364346Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037998 2025-04-10T11:38:16.364497Z node 6 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037998 2025-04-10T11:38:16.364587Z node 6 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037998 2025-04-10T11:38:16.364612Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037998 2025-04-10T11:38:16.364631Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037998 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:16.364648Z node 6 :TX_DATASHARD INFO: No tx to execute at 72075186224037998 TxInFly 0 2025-04-10T11:38:16.364669Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037998 2025-04-10T11:38:16.366108Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037998, clientId# [6:7491647442911268555:9600], serverId# [6:7491647442911268561:9592], sessionId# [0:0:0] 2025-04-10T11:38:16.366448Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037998 2025-04-10T11:38:16.366713Z node 6 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037998 txId 281474976711040 ssId 72057594046644480 seqNo 2:219 
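The "Propose scheme transaction ... seqNo 2:219" entry above carries a generation:round sequence number with the proposal. Since the schemeshard may retry a proposal it considers undelivered, a receiver can use such a pair to drop stale duplicates. A hedged sketch of that dedup rule, with hypothetical names rather than the actual YDB implementation:

// Sketch: dedup of retried schema proposals by a "generation:round" pair
// like the "seqNo 2:219" above. Assumes newer proposals always carry a
// strictly larger (Generation, Round); names are illustrative.
#include <cstdint>
#include <tuple>

struct TSeqNo {
    uint64_t Generation = 0;  // "2" in "seqNo 2:219"
    uint64_t Round = 0;       // "219" in "seqNo 2:219"
};

class TProposalFilter {
    TSeqNo LastSeen;

public:
    // Returns true if the proposal is new and should be processed,
    // false if it is a stale or duplicate retry.
    bool Accept(const TSeqNo& incoming) {
        const auto in = std::tie(incoming.Generation, incoming.Round);
        const auto seen = std::tie(LastSeen.Generation, LastSeen.Round);
        if (in <= seen) {
            return false;  // already handled an equal-or-newer proposal
        }
        LastSeen = incoming;
        return true;
    }
};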
2025-04-10T11:38:16.366787Z node 6 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976711040 at tablet 72075186224037998 2025-04-10T11:38:16.367272Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037998 2025-04-10T11:38:16.370494Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037998 2025-04-10T11:38:16.370598Z node 6 :TX_DATASHARD DEBUG: 72075186224037998 not sending time cast registration request in state WaitScheme 2025-04-10T11:38:16.374493Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037998, clientId# [6:7491647442911268573:9583], serverId# [6:7491647442911268574:9582], sessionId# [0:0:0] 2025-04-10T11:38:16.374797Z node 6 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711040 at step 1744285096421 at tablet 72075186224037998 { Transactions { TxId: 281474976711040 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285096421 MediatorID: 72057594046382081 TabletID: 72075186224037998 } 2025-04-10T11:38:16.374814Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037998 2025-04-10T11:38:16.374924Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037998 2025-04-10T11:38:16.374943Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037998 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:38:16.374968Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1744285096421:281474976711040] in PlanQueue unit at 72075186224037998 2025-04-10T11:38:16.375274Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037998 loaded tx from db 1744285096421:281474976711040 keys extracted: 0 2025-04-10T11:38:16.375401Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037998 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:38:16.375497Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037998 2025-04-10T11:38:16.375539Z node 6 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037998 tableId# [OwnerId: 72057594046644480, LocalPathId: 116] schema version# 1 2025-04-10T11:38:16.376015Z node 6 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037998 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:38:16.376424Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037998 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:16.378131Z node 6 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037998 time 1744285096420 2025-04-10T11:38:16.378594Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:16.378696Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037997 2025-04-10T11:38:16.381902Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037998 2025-04-10T11:38:16.381979Z node 6 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037998 2025-04-10T11:38:16.382078Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037998 step# 1744285096421} 2025-04-10T11:38:16.382124Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037998 2025-04-10T11:38:16.382195Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037998 2025-04-10T11:38:16.382216Z node 6 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037998 2025-04-10T11:38:16.382244Z node 6 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037998 2025-04-10T11:38:16.382308Z node 6 :TX_DATASHARD DEBUG: Complete [1744285096421 : 281474976711040] from 72075186224037998 at tablet 72075186224037998 send result to client [6:7491647378486743260:2147], exec latency: 0 ms, propose latency: 6 ms 2025-04-10T11:38:16.382351Z node 6 :TX_DATASHARD INFO: 72075186224037998 Sending notify to schemeshard 72057594046644480 txId 281474976711040 state Ready TxInFly 0 2025-04-10T11:38:16.382397Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037998 2025-04-10T11:38:16.382473Z node 6 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037998 coordinator 72057594046316545 last step 0 next step 1744285096421 2025-04-10T11:38:16.383849Z node 6 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976711040 datashard 72075186224037998 state Ready 2025-04-10T11:38:16.383902Z node 6 :TX_DATASHARD DEBUG: 72075186224037998 Got TEvSchemaChangedResult from SS at 72075186224037998 |97.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_local_kmeans/unittest >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> TestJsonParser::LittleBatches [GOOD] >> KeyValueGRPCService::SimpleAcquireLock [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> TestJsonParser::MissingFieldsValidation >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation >> SdkCredProvider::PingFromProviderSyncDiscovery >> TestJsonParser::StringsValidation [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> TestJsonParser::NestedJsonValidation >> DataShardReplication::SimpleApplyChanges >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> 
TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TestJsonParser::JsonStructureValidation [GOOD] >> BlobDepot::BasicPutAndGet >> TestPurecalcFilter::Simple1 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> TopicSessionTests::TwoSessionsWithOffsets >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> test.py::test[solomon-HistResponse-default.txt] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> MediatorTest::BasicTimecastUpdates >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TMemoryController::MemTable [GOOD] Test command err: 2025-04-10T11:36:57.184977Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-10T11:36:57.185065Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-10T11:36:57.185157Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-10T11:36:57.185209Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-10T11:36:57.185260Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-10T11:36:57.185385Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-10T11:36:57.187642Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { 
Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-10T11:36:57.193088Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: 
"/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-10T11:38:20.197171Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-280.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-281.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-282.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-283.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-10T11:38:20.197354Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-10T11:38:20.197520Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860025 2025-04-10T11:38:20.197584Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T11:38:20.198057Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:272, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:38:20.198124Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:274, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:38:20.198189Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 70:281, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:38:20.198241Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:38:20.208780Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-10T11:38:20.208861Z node 65 :CMS DEBUG: 
[Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:38:20.219414Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-10T11:38:20.219492Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T11:38:20.219627Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-04-10T11:38:20.219667Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:38:20.219850Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637] 2025-04-10T11:38:20.219932Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637] 2025-04-10T11:38:20.219986Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637] 2025-04-10T11:38:20.220037Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637] 2025-04-10T11:38:20.220077Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637] 2025-04-10T11:38:20.220113Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637] 2025-04-10T11:38:20.220150Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637] 2025-04-10T11:38:20.220180Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637] 2025-04-10T11:38:20.220485Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2025-04-10T11:38:20.220537Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-10T11:38:20.221278Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-260.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-261.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-262.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-263.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.221720Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.221999Z node 65 :CMS DEBUG: 
[Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.222389Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.222573Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.222720Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-272.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-273.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-274.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-275.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.222876Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-276.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: 
"/69/pdisk-277.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-278.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-279.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.223034Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-280.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-281.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-282.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-283.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880025 2025-04-10T11:38:20.223105Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TMemoryController::ResourceBroker >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding |97.9%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> SequenceShardTests::Basics >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> BlobDepot::BasicRange >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |97.9%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest >> KqpTpch::Query01 >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding |97.9%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> TDataShardRSTest::TestCleanupInRS+UseSink >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> QuoterWithKesusTest::KesusRecreation >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> SequenceShardTests::NegativeIncrement >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> TestPurecalcFilter::Simple1 [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> MediatorTest::BasicTimecastUpdates [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey >> TestPurecalcFilter::Simple2 >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> SequenceShardTests::NegativeIncrement [GOOD] >> BlobDepot::BasicDiscover [GOOD] >> MediatorTest::MultipleTablets >> TCreateAndDropViewTest::DropView [GOOD] >> test.py::test[solomon-InvalidProject-] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> BlobDepot::BasicBlock >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> test.py::test[solomon-HistResponse-default.txt] >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-04-10T11:38:25.729574Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-10T11:38:25.729788Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-10T11:38:25.758560Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-10T11:38:25.771767Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-10T11:38:25.771838Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-10T11:38:25.781018Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-10T11:38:25.782368Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-10T11:38:25.805833Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-10T11:38:25.806326Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-10T11:38:25.806385Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.806478Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-10T11:38:25.806748Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-04-10T11:38:25.806966Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-04-10T11:38:25.822827Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-10T11:38:25.823245Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:25.823335Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-04-10T11:38:25.838566Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.838913Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-10T11:38:25.839015Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-04-10T11:38:25.850917Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.851325Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-10T11:38:25.851418Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-04-10T11:38:25.863573Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.864009Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-04-10T11:38:25.864105Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 
AllocationCount# 50 AllocationIncrement# 1 2025-04-10T11:38:25.876275Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.876618Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-04-10T11:38:25.876665Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-04-10T11:38:25.876754Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.876987Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-10T11:38:25.877062Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-04-10T11:38:25.889297Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.889702Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-10T11:38:25.889773Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.889840Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.890136Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.890253Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.902483Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-10T11:38:25.902901Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.902961Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.903027Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-10T11:38:25.918266Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-10T11:38:25.918430Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-10T11:38:25.918968Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-10T11:38:25.919513Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-10T11:38:25.920751Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-10T11:38:25.925103Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:25.925162Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:25.925213Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 
2025-04-10T11:38:25.925488Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-10T11:38:25.925571Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-04-10T11:38:25.937507Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.938012Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-04-10T11:38:25.938106Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-10T11:38:25.950251Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-10T11:38:25.950681Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-10T11:38:25.950770Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-04-10T11:38:25.962803Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.963295Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-04-10T11:38:25.963399Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-10T11:38:25.975521Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-10T11:38:25.975917Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-10T11:38:25.975999Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-04-10T11:38:25.998779Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:25.999213Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-10T11:38:25.999271Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-10T11:38:25.999341Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-04-10T11:38:26.481131Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-10T11:38:26.481257Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-10T11:38:26.494293Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-10T11:38:26.497267Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-10T11:38:26.497348Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxInit.Complete 2025-04-10T11:38:26.499815Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-04-10T11:38:26.500168Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-10T11:38:26.500274Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-04-10T11:38:26.523380Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-10T11:38:27.050823Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:27.050923Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:27.066811Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-10T11:38:27.067256Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-10T11:38:27.067317Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:27.067390Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.067798Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-10T11:38:27.067919Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-10T11:38:27.080468Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-10T11:38:27.080932Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-04-10T11:38:27.081038Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-04-10T11:38:27.095152Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.095783Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-10T11:38:27.095848Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-10T11:38:27.095937Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-10T11:38:27.096259Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 
123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.096361Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.111484Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-10T11:38:27.111955Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.112070Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.124293Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-10T11:38:27.124664Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.124767Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-10T11:38:27.138396Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-10T11:38:27.138835Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.138897Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-04-10T11:38:27.138975Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.139295Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-10T11:38:27.139380Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-10T11:38:27.151705Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-10T11:38:27.152215Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-10T11:38:27.152374Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-10T11:38:27.164991Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-10T11:38:27.165435Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-10T11:38:27.165543Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-10T11:38:27.178227Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-10T11:38:27.178799Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute 
PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-10T11:38:27.178866Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-04-10T11:38:27.178950Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-10T11:38:27.179347Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.179463Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2025-04-10T11:38:27.192016Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.638433Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-10T11:38:27.638554Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-10T11:38:27.650528Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-10T11:38:27.655328Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-10T11:38:27.655411Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-10T11:38:27.670000Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-04-10T11:38:27.670139Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-04-10T11:38:27.702814Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-10T11:38:27.703153Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.703250Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-10T11:38:27.722800Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.723169Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.723265Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-10T11:38:27.735376Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.735725Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-10T11:38:27.735818Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-04-10T11:38:27.747896Z node 4 
:SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.748251Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-10T11:38:27.748302Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:27.748364Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.748666Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2025-04-10T11:38:27.748757Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-10T11:38:27.762873Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-10T11:38:27.763286Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.763403Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-10T11:38:27.776412Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-10T11:38:27.776803Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-10T11:38:27.776922Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-10T11:38:27.792435Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |97.9%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> TopicSessionTests::BadDataSessionError >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] |97.9%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom >> test.py::test_counter [GOOD] >> TMemoryController::ResourceBroker [GOOD] >> test.py::test_viewer_nodes >> MediatorTest::MultipleTablets [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> test.py::test_viewer_sysinfo [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo >> MediatorTest::TabletAckBeforePlanComplete |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> test.py::test_viewer_describe [GOOD] 
>> TestPurecalcFilter::Simple2 [GOOD] >> ReadUpdateWrite::Load >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> test.py::test_viewer_cluster [GOOD] >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> test.py::test_viewer_tenantinfo >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> TestPurecalcFilter::ManyValues >> BlobDepotWithTestShard::PlainGroup [GOOD] >> test.py::test_viewer_tenantinfo [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> KqpTpch::Query01 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey >> test.py::test_viewer_tenantinfo_db >> KqpTpch::Query02 >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD] |97.9%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest |97.9%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test >> test.py::test_pqrb_tablet [GOOD] >> test.py::test_viewer_nodes_issue_14992 >> test.py::test_viewer_nodes_issue_14992 [GOOD] >> test.py::test_operations_list [GOOD] >> test.py::test_operations_list_page >> test.py::test_operations_list_page [GOOD] >> test.py::test_operations_list_page_bad [GOOD] >> test.py::test_scheme_directory >> test.py::test_scheme_directory [GOOD] >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> test.py::test_topic_data >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> MediatorTest::TabletAckWhenDead >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::Escape >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::BinaryData [GOOD] |97.9%| [TS] {RESULT} ydb/core/log_backend/ut/unittest >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> BlobDepot::VerifiedRandom [GOOD] >> BlobDepot::LoadPutAndRead >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> Mirror3of4::ReplicationSmall >> TopicSessionTests::BadDataSessionError [GOOD] >> TestPurecalcFilter::ManyValues [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> 
TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] >> EncryptedFileSerializerTest::SerializeWholeFileAtATime [GOOD] >> EncryptedFileSerializerTest::WrongParametersForSerializer [GOOD] >> EncryptedFileSerializerTest::WrongParametersForDeserializer [GOOD] >> EncryptedFileSerializerTest::SplitOnBlocks >> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey >> TopicSessionTests::WrongFieldType >> EncryptedFileSerializerTest::SplitOnBlocks [GOOD] >> EncryptedFileSerializerTest::EmptyFile [GOOD] >> EncryptedFileSerializerTest::ReadPartial [GOOD] >> EncryptedFileSerializerTest::DeleteLastByte [GOOD] >> EncryptedFileSerializerTest::AddByte [GOOD] >> EncryptedFileSerializerTest::RemoveLastBlock [GOOD] >> EncryptedFileSerializerTest::ChangeAnyByte [GOOD] >> EncryptedFileSerializerTest::BigHeaderSize [GOOD] >> EncryptedFileSerializerTest::BigBlockSize [GOOD] >> EncryptedFileSerializerTest::RestoreFromState [GOOD] >> EncryptedFileSerializerTest::IVSerialization [GOOD] >> TestPurecalcFilter::NullValues |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/backup/common/ut/unittest >> EncryptedFileSerializerTest::IVSerialization [GOOD] |97.9%| [TS] {RESULT} ydb/core/backup/common/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:50:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:50:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2 ... 2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:54:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:57:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:74:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:127:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:130:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:131:2057] recipient: [106:129:2148] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:133:2057] recipient: [106:129:2148] !Reboot 72057594037927937 (actor [106:56:2097]) rebooted! !Reboot 72057594037927937 (actor [106:56:2097]) tablet resolver refreshed! new actor is[106:132:2149] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:152:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:51:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:57:2057] recipient: [107:51:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:74:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:130:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:132:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:134:2057] recipient: [107:133:2151] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:136:2057] recipient: [107:133:2151] !Reboot 72057594037927937 (actor [107:56:2097]) rebooted! !Reboot 72057594037927937 (actor [107:56:2097]) tablet resolver refreshed! new actor is[107:135:2152] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:189:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:50:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:57:2057] recipient: [108:50:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:74:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:130:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:133:2057] recipient: [108:132:2151] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:134:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:136:2057] recipient: [108:132:2151] !Reboot 72057594037927937 (actor [108:56:2097]) rebooted! !Reboot 72057594037927937 (actor [108:56:2097]) tablet resolver refreshed! new actor is[108:135:2152] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:189:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:57:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:74:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:131:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:134:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:135:2057] recipient: [109:133:2151] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:137:2057] recipient: [109:133:2151] !Reboot 72057594037927937 (actor [109:56:2097]) rebooted! !Reboot 72057594037927937 (actor [109:56:2097]) tablet resolver refreshed! new actor is[109:136:2152] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:154:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:50:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:57:2057] recipient: [110:50:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:74:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:133:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:136:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:137:2057] recipient: [110:135:2153] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:139:2057] recipient: [110:135:2153] !Reboot 72057594037927937 (actor [110:56:2097]) rebooted! !Reboot 72057594037927937 (actor [110:56:2097]) tablet resolver refreshed! new actor is[110:138:2154] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:192:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:57:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:74:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:133:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:136:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:137:2057] recipient: [111:135:2153] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:139:2057] recipient: [111:135:2153] !Reboot 72057594037927937 (actor [111:56:2097]) rebooted! !Reboot 72057594037927937 (actor [111:56:2097]) tablet resolver refreshed! new actor is[111:138:2154] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:192:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:57:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:74:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:134:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:136:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:138:2057] recipient: [112:137:2153] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:140:2057] recipient: [112:137:2153] !Reboot 72057594037927937 (actor [112:56:2097]) rebooted! !Reboot 72057594037927937 (actor [112:56:2097]) tablet resolver refreshed! new actor is[112:139:2154] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:193:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:57:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:74:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:137:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:140:2057] recipient: [113:139:2156] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:141:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:143:2057] recipient: [113:139:2156] !Reboot 72057594037927937 (actor [113:56:2097]) rebooted! !Reboot 72057594037927937 (actor [113:56:2097]) tablet resolver refreshed! new actor is[113:142:2157] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:196:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:57:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:74:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:137:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:140:2057] recipient: [114:139:2156] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:141:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:143:2057] recipient: [114:139:2156] !Reboot 72057594037927937 (actor [114:56:2097]) rebooted! !Reboot 72057594037927937 (actor [114:56:2097]) tablet resolver refreshed! new actor is[114:142:2157] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:196:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:57:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:74:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:138:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:141:2057] recipient: [115:140:2156] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:142:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:143:2157] sender: [115:144:2057] recipient: [115:140:2156] !Reboot 72057594037927937 (actor [115:56:2097]) rebooted! !Reboot 72057594037927937 (actor [115:56:2097]) tablet resolver refreshed! new actor is[115:143:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:57:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:74:2057] recipient: [116:14:2061] >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0 2025-04-10T11:37:14.765942Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2025-04-10T11:37:14.767458Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2025-04-10T11:37:14.768167Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2025-04-10T11:37:14.770274Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 209715200 2025-04-10T11:37:14.781930Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-10T11:37:14.785062Z node 1 :TABLET_SAUSAGECACHE NOTICE: Bootstrap with config MemoryLimit: 33554432 2025-04-10T11:37:17.387452Z node 1 :MEMORY_CONTROLLER INFO: Consumer 
SharedCache [1:20:2067] registered 2025-04-10T11:37:17.388527Z node 1 :RESOURCE_BROKER INFO: New config diff: Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-04-10T11:37:17.389308Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: 
"scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 } 2025-04-10T11:37:17.390532Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2025-04-10T11:37:17.390841Z node 1 :TABLET_SAUSAGECACHE NOTICE: Register memory consumer 2025-04-10T11:37:17.396267Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true 2025-04-10T11:37:17.443897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:37:17.444285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:37:17.444425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:37:17.568443Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:438:2399] 1 registered 2025-04-10T11:37:17.583046Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 0 registered 2025-04-10T11:37:17.590125Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 2 registered 2025-04-10T11:37:17.590486Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 4 registered 2025-04-10T11:37:17.590612Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 5 registered 2025-04-10T11:37:17.593168Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:461:2403] 1 registered 2025-04-10T11:37:17.597382Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:461:2403] 2 registered 2025-04-10T11:37:17.643859Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 1 registered 2025-04-10T11:37:17.685772Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 2 registered 2025-04-10T11:37:17.687258Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 3 registered 2025-04-10T11:37:17.687604Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 4 registered 2025-04-10T11:37:17.687912Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 5 registered 2025-04-10T11:37:17.688861Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 6 registered 2025-04-10T11:37:17.689069Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 7 registered 2025-04-10T11:37:17.689393Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 8 registered 2025-04-10T11:37:17.689803Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 9 registered 2025-04-10T11:37:17.690025Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 10 registered 2025-04-10T11:37:17.690583Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 11 registered 2025-04-10T11:37:17.690766Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 12 registered 2025-04-10T11:37:17.690907Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 13 registered 2025-04-10T11:37:17.691832Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 14 registered 2025-04-10T11:37:17.692818Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 15 registered 2025-04-10T11:37:17.693240Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 16 registered 2025-04-10T11:37:17.693499Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 17 registered 2025-04-10T11:37:17.693620Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 18 registered 2025-04-10T11:37:17.701341Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 19 registered 2025-04-10T11:37:17.726959Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 20 registered 2025-04-10T11:37:17.728451Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 22 registered 2025-04-10T11:37:17.729728Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 23 registered 2025-04-10T11:37:17.754474Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 24 registered 2025-04-10T11:37:17.754663Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 25 registered 
2025-04-10T11:37:17.755224Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 26 registered 2025-04-10T11:37:17.755283Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 27 registered 2025-04-10T11:37:17.755553Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 28 registered 2025-04-10T11:37:17.756125Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 29 registered 2025-04-10T11:37:17.756202Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 30 registered 2025-04-10T11:37:17.760035Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 31 registered 2025-04-10T11:37:17.764144Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 32 registered 2025-04-10T11:37:17.766367Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 33 registered 2025-04-10T11:37:17.767513Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 34 registered 2025-04-10T11:37:17.768798Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 35 registered 2025-04-10T11:37:17.769169Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 36 registered 2025-04-10T11:37:17.769390Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 37 registered 2025-04-10T11:37:17.769494Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 38 registered 2025-04-10T11:37:17.769650Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 39 registered 2025-04-10T11:37:17.775550Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 40 registered 2025-04-10T11:37:17.775705Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 41 registered 2025-04-10T11:37:17.782423Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 42 registered 2025-04-10T11:37:17.789161Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 43 registered 2025-04-10T11:37:17.789355Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 44 registered 2025-04-10T11:37:17.789611Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 45 registered 2025-04-10T11:37:17.789805Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 46 registered 2025-04-10T11:37:17.790227Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 47 registered 2025-04-10T11:37:17.790492Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 48 registered 2025-04-10T11:37:17.790828Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 49 registered 2025-04-10T11:37:17.791318Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:467:2405] 50 registered 2025-04-10T11:37:17.791439Z node 1 :MEMORY_CONTROLLER TRACE: ... 
499204Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 105 registered 2025-04-10T11:38:36.499240Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 106 registered 2025-04-10T11:38:36.499268Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 107 registered 2025-04-10T11:38:36.499293Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 108 registered 2025-04-10T11:38:36.499320Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 109 registered 2025-04-10T11:38:36.499353Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 110 registered 2025-04-10T11:38:36.499387Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 111 registered 2025-04-10T11:38:36.499430Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 113 registered 2025-04-10T11:38:36.499475Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 114 registered 2025-04-10T11:38:36.499513Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 115 registered 2025-04-10T11:38:36.499542Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 116 registered 2025-04-10T11:38:36.499565Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 117 registered 2025-04-10T11:38:36.499954Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2405] 118 registered 2025-04-10T11:38:36.500073Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 17 registered 2025-04-10T11:38:36.500234Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 0 registered 2025-04-10T11:38:36.501044Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 18 registered 2025-04-10T11:38:36.501350Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 1 registered 2025-04-10T11:38:36.501636Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 19 registered 2025-04-10T11:38:36.501795Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 2 registered 2025-04-10T11:38:36.501964Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 20 registered 2025-04-10T11:38:36.502215Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 3 registered 2025-04-10T11:38:36.502536Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 21 registered 2025-04-10T11:38:36.502658Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 4 registered 2025-04-10T11:38:36.502730Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 6 registered 2025-04-10T11:38:36.502824Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 9 registered 2025-04-10T11:38:36.502888Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 10 registered 2025-04-10T11:38:36.502942Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 13 registered 2025-04-10T11:38:36.502991Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 14 registered 2025-04-10T11:38:36.503040Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2408] 16 registered 2025-04-10T11:38:36.503144Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 7 registered 2025-04-10T11:38:36.503201Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 1 registered 2025-04-10T11:38:36.503332Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 2 registered 2025-04-10T11:38:36.503420Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 3 registered 2025-04-10T11:38:36.503463Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 4 registered 2025-04-10T11:38:36.503511Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 5 registered 2025-04-10T11:38:36.503563Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:514:2411] 6 registered 2025-04-10T11:38:36.503612Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 
102 registered 2025-04-10T11:38:36.503662Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 103 registered 2025-04-10T11:38:36.503711Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 1 registered 2025-04-10T11:38:36.503752Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 104 registered 2025-04-10T11:38:36.503799Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 2 registered 2025-04-10T11:38:36.503844Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 3 registered 2025-04-10T11:38:36.503901Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 4 registered 2025-04-10T11:38:36.503940Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 5 registered 2025-04-10T11:38:36.503998Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 6 registered 2025-04-10T11:38:36.504041Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 7 registered 2025-04-10T11:38:36.504095Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 100 registered 2025-04-10T11:38:36.504148Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2413] 101 registered 2025-04-10T11:38:36.504199Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:536:2415] 1 registered 2025-04-10T11:38:36.504255Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:536:2415] 2 registered 2025-04-10T11:38:36.504304Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:536:2415] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000751/r3tmp/tmpvcesEU/pdisk_1.dat 2025-04-10T11:38:36.538691Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 1 registered 2025-04-10T11:38:36.538998Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 2 registered 2025-04-10T11:38:36.539056Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 3 registered 2025-04-10T11:38:36.539251Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 4 registered 2025-04-10T11:38:36.539373Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 120 registered 2025-04-10T11:38:36.539543Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 5 registered 2025-04-10T11:38:36.539623Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 121 registered 2025-04-10T11:38:36.539662Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 6 registered 2025-04-10T11:38:36.539862Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 122 registered 2025-04-10T11:38:36.539912Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 7 registered 2025-04-10T11:38:36.539958Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 123 registered 2025-04-10T11:38:36.539996Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 8 registered 2025-04-10T11:38:36.540130Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 125 registered 2025-04-10T11:38:36.540176Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 126 registered 2025-04-10T11:38:36.540225Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 127 registered 2025-04-10T11:38:36.540272Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 128 registered 2025-04-10T11:38:36.540312Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 129 registered 2025-04-10T11:38:36.540545Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 100 registered 2025-04-10T11:38:36.540608Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 101 registered 2025-04-10T11:38:36.540659Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 131 registered 2025-04-10T11:38:36.540721Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 102 registered 2025-04-10T11:38:36.540765Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 103 registered 2025-04-10T11:38:36.540804Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:508:2409] 105 registered 2025-04-10T11:38:36.611010Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:36.654882Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:36.656844Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-04-10T11:38:36.657595Z node 9 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-10T11:38:36.695741Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:36.695889Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:36.707325Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:36.942032Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.5KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-10T11:38:36.942940Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-10T11:38:36.943181Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.5KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-10T11:38:36.943239Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-10T11:38:36.943829Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-10T11:38:37.140108Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.6KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-10T11:38:37.140817Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-10T11:38:37.140938Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.6KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-10T11:38:37.140986Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-10T11:38:37.141121Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-10T11:38:37.310257Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-10T11:38:37.311117Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB 
Min: 40MiB Max: 100MiB 2025-04-10T11:38:37.311205Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-10T11:38:37.311260Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-10T11:38:37.311468Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB 2025-04-10T11:38:37.451219Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33.1KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-10T11:38:37.451867Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-04-10T11:38:37.451919Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33.1KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-10T11:38:37.451958Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-10T11:38:37.452062Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB |98.0%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest >> test.py::test_topic_data [GOOD] >> test.py::test_transfer_describe >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> test.py::test_transfer_describe [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TDqSolomonWriteActorTest::TestWriteFormat >> TArrowPushDown::SimplePushDown >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> TArrowPushDown::SimplePushDown [GOOD] >> TArrowPushDown::FilterEverything [GOOD] >> TArrowPushDown::MatchSeveralRowGroups [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> test.py::test[solomon-UnknownSetting-] >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::MatchSeveralRowGroups [GOOD] |98.0%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.0%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.0%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD] |98.0%| [TM] {RESULT} ydb/core/viewer/tests/py3test |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.0%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> MediatorTest::TabletAckWhenDead [GOOD] >> ParseStats::ParseWithSources >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> ParseStats::ParseWithSources [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> MediatorTest::PlanStepAckToReconnectedMediator |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> ParseStats::ParseMultipleGraphsV2 [GOOD] |98.0%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> BasicExample::BasicExample >> TestPurecalcFilter::NullValues [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> TestUrlBuilder::UriOnly [GOOD] >> TestUrlBuilder::Basic [GOOD] >> TestUrlBuilder::BasicWithEncoding [GOOD] >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> TestPurecalcFilter::PartialPush |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/common/ut/unittest >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] |98.0%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest >> BlobDepot::LoadPutAndRead [GOOD] >> BlobDepot::DecommitPutAndRead >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> ArrowTest::BatchBuilder >> ArrowTest::BatchBuilder [GOOD] >> ArrowTest::ArrowToYdbConverter >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::SortWithCompositeKey >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> ColumnFilter::FilterSlice [GOOD] >> ColumnFilter::FilterCheckSlice [GOOD] >> ColumnFilter::FilterSlice1 [GOOD] >> ColumnFilter::CutFilter1 [GOOD] >> ColumnFilter::CutFilter2 [GOOD] >> Dictionary::Simple >> Backpressure::MonteCarlo >> ReadUpdateWrite::Load [GOOD] >> TopicSessionTests::WrongFieldType [GOOD] >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> 
DataShardReplication::ReplicatedTable-UseSink >> test_ctas.py::TestYtCtas::test_simple_ctast >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> MediatorTest::WatcherReconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-04-10T11:36:23.092465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:23.092657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:23.092709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:23.092743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:23.093596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:23.093635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:23.093716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:23.093801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:23.095176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:23.193460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:36:23.193527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:36:23.218389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:23.218707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:23.218867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:23.230267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:23.230649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:23.235356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:23.238677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:23.250971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:23.258405Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:23.258589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:23.259389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:23.259460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:23.259540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:23.259721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.266572Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:239:2058] recipient: [1:15:2062] 2025-04-10T11:36:23.401487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:23.403332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.404343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:23.405522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:23.405613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.408866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:23.409019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:23.409215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.409356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:23.409405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:23.409473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:23.411600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.411655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:23.411694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:23.415718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
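The schemeshard trace in this dump walks a single operation through its internal state machine via "Change state for txid" entries (for txid 1:0: 2 -> 3, 3 -> 128, and later 128 -> 240). A hypothetical helper, not YDB code, for pulling those transitions out of such a log:

    import re
    from collections import defaultdict

    # Collect transitions logged as "Change state for txid <op> <from> -> <to>".
    TRANSITION = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def state_transitions(log_text):
        chains = defaultdict(list)
        for txid, src, dst in TRANSITION.findall(log_text):
            chains[txid].append((int(src), int(dst)))
        return chains

    # Against this fragment: state_transitions(dump)["1:0"]
    # would yield [(2, 3), (3, 128), (128, 240)].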
2025-04-10T11:36:23.415791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.415874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:23.415936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:23.420517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:23.423082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:23.423887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:23.424962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:23.425096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:23.425139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:23.426259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:23.426319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:23.426489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:23.426574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:23.429288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:23.429342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:23.429513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:23.429546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:23.429804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:23.429850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:23.429940Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:23.429984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:23.430023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:23.430049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:23.430080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-10T11:36:23.430119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:23.430183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-10T11:36:23.430229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-10T11:36:23.430284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-10T11:36:23.430317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-10T11:36:23.430345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-10T11:36:23.432551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:23.432661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-10T11:36:23.432692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.716470Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.716515Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:38:46.717402Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-10T11:38:46.717511Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:38:46.718044Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-04-10T11:38:46.718394Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-04-10T11:38:46.718481Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-04-10T11:38:46.718569Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-04-10T11:38:46.718657Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-04-10T11:38:46.718752Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2025-04-10T11:38:46.721768Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.721907Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.721949Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:38:46.722822Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.722936Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-10T11:38:46.722972Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-10T11:38:46.723015Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-10T11:38:46.723062Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-10T11:38:46.723211Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-04-10T11:38:46.724124Z node 19 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-10T11:38:46.724191Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:38:46.724596Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-10T11:38:46.724769Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-04-10T11:38:46.724816Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-10T11:38:46.724878Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-04-10T11:38:46.724933Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-10T11:38:46.724981Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-04-10T11:38:46.725084Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:376:2344] message: TxId: 103 2025-04-10T11:38:46.725200Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-10T11:38:46.725303Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-10T11:38:46.725376Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-10T11:38:46.725559Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-10T11:38:46.726558Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-10T11:38:46.726632Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-10T11:38:46.726703Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-10T11:38:46.726751Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-10T11:38:46.726783Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-10T11:38:46.726850Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-10T11:38:46.726887Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-10T11:38:46.726913Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-10T11:38:46.726951Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-10T11:38:46.726985Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2025-04-10T11:38:46.727013Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-04-10T11:38:46.727125Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-04-10T11:38:46.729135Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-10T11:38:46.729280Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for 
PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-04-10T11:38:46.729440Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-10T11:38:46.729524Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-10T11:38:46.729568Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-10T11:38:46.733354Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.733558Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.733672Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.733717Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.737523Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.737655Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-10T11:38:46.737930Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-10T11:38:46.738052Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [19:754:2657] 2025-04-10T11:38:46.738697Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-10T11:38:46.739610Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:38:46.740321Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 767us result status StatusPathDoesNotExist 2025-04-10T11:38:46.740754Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-10T11:38:46.741679Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:38:46.742110Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 476us result status StatusPathDoesNotExist 2025-04-10T11:38:46.742484Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 |98.0%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.0%| [TM] {RESULT} ydb/tests/fq/solomon/py3test |98.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> TTimeGridTest::TimeGrid [GOOD] >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> BasicExample::BasicExample [GOOD] |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/metering/ut/unittest >> TTimeGridTest::TimeGrid [GOOD] |98.1%| [TS] {RESULT} ydb/core/metering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% 0.652800s 30% 0.652800s 50% 0.652800s 90% 0.652800s 99% 0.652800s Write: 10% 0.791807s 30% 0.791807s 50% 0.791807s 90% 0.791807s 99% 0.791807s Write: 10% 0.840810s 30% 0.840810s 50% 0.840810s 90% 0.840810s 99% 0.840810s Write: 10% 0.920938s 30% 0.920938s 50% 0.920938s 90% 0.920938s 99% 0.920938s Write: 10% 0.954986s 30% 0.954986s 50% 0.954986s 90% 0.954986s 99% 0.954986s Write: 10% 0.826767s 30% 0.826767s 50% Write: 10% 0.948275s 30% 0.948275s 50% 0.826767s 90% 0.948275s 90% Write: 10% 0.948275s 99% 0.886126s 30% 0.886126s 50% 0.948275s 0.886126s 90% 0.886126s 99% 0.886126s 0.826767s 99% 0.826767s Write: 10% 0.944276s 30% 0.944276s 50% 0.944276s 90% Write: 10% 0.944276s 99% 0.944276s 0.650914s 30% 0.650914s 50% 0.650914s 90% 0.650914s 99% 0.650914s Write: 10% 1.166059s 30% 1.166059s 50% 1.166059s 90% 1.166059s 99% 1.166059s Write: 10% 1.106572s 30% 1.106572s 50% 1.106572s 90% 1.106572s 99% 1.106572s Write: 10% 0.910980s 30% 0.910980s 50% 0.910980s 90% 0.910980s 99% 0.910980s Write: 10% 1.176088s 30% 1.176088s 50% 1.176088s 90% 1.176088s 99% 1.176088s Write: 10% 1.170263s 30% 1.170263s 50% 1.170263s 90% 1.170263s 99% 1.170263s Write: 10% 1.023361s 30% 1.023361s 50% 1.023361s 90% 1.023361s 99% 1.023361s Write: 10% 1.029402s 30% 1.029402s 50% 1.029402s 90% 1.029402s 99% 1.029402s Write: 10% 1.197823s 30% 1.197823s 50% 1.197823s 90% 1.197823s 99% 1.197823s Write: 10% 1.007706s 30% 1.007706s 50% 1.007706s 90% 1.007706s 99% 1.007706s Write: 10% 1.026812s 30% 1.026812s 50% 1.026812s 90% 1.026812s 99% 1.026812s Write: 10% 1.282393s 30% 1.282393s 50% 1.282393s 90% 1.282393s 99% 1.282393s Write: 10% 1.204021s 30% 1.204021s 50% 1.204021s 90% 1.204021s 99% 1.204021s Write: 10% 1.002074s 30% 1.002074s 50% 1.002074s 90% 1.002074s 99% 1.002074s Write: 10% 1.215664s 30% 1.215664s 50% 1.215664s 90% 1.215664s 99% 1.215664s Write: 10% 1.275429s 30% 1.275429s 50% 1.275429s 90% 1.275429s 99% 1.275429s Write: 10% 1.104328s 30% 1.104328s 50% 1.104328s 90% 1.104328s 99% 1.104328s Write: 10% 1.327722s 30% 1.327722s 50% 1.327722s 90% 1.327722s 99% 1.327722s Write: 10% 1.274969s 30% 1.274969s 50% 1.274969s 90% 1.274969s 99% 1.274969s Write: 10% 1.083595s 30% 1.083595s 50% 1.083595s 90% 1.083595s 99% 1.083595s Write: 10% 1.153700s 30% 1.153700s 50% 1.153700s 90% 1.153700s 99% 1.153700s Write: 10% 1.082923s 30% 1.082923s 50% 1.082923s 90% 1.082923s 99% 1.082923s Write: 10% 1.323844s 30% 1.323844s 50% 1.323844s 90% Write: 10% 1.364387s 30% 1.364387s 50% 1.364387s 90% 1.364387s 99% 1.364387s 1.323844s 99% 1.323844s Write: 10% 1.378705s 30% 1.378705s 50% 1.378705s 90% 1.378705s 99% 1.378705s Write: 10% 1.225224s 30% 1.225224s 50% 1.225224s 90% 1.225224s 99% 1.225224s Write: 10% 1.404851s 30% 1.404851s 50% 1.404851s 90% 1.404851s 99% 1.404851s Write: 10% 1.514546s 30% 1.514546s 50% 1.514546s 90% Write: 10% 1.578933s 30% 1.578933s 50% 1.578933s 90% 1.578933s 99% 1.578933s Write: 10% 1.485933s 30% 1.485933s 50% 1.514546s 99% 1.514546s Write: 10% 1.591238s 30% 1.591238s 50% 1.591238s 90% 1.591238s 99% 1.591238s 1.485933s 90% Write: 10% 1.265967s 30% 1.265967s 50% 1.485933s 99% 1.485933s 1.265967s 90% 1.265967s 99% 1.265967s Write: 10% 1.376427s 30% 1.376427s 50% 1.376427s 90% 1.376427s 99% 1.376427s Write: 10% 1.432944s 30% 1.432944s 50% 1.432944s 90% 1.432944s 99% 1.432944s Write: 10% 1.578159s 30% 1.578159s 50% 1.578159s 90% 1.578159s 99% 1.578159s Write: 10% 1.428489s 30% 1.428489s 50% 1.428489s 90% 1.428489s 99% 1.428489s Write: 10% 1.382702s 30% 1.382702s 50% 
1.382702s 90% 1.382702s 99% 1.382702s Write: 10% 1.457265s 30% 1.457265s 50% 1.457265s 90% 1.457265s 99% 1.457265s Write: 10% 1.588996s 30% 1.588996s 50% 1.588996s 90% 1.588996s 99% 1.588996s Write: 10% 1.590934s 30% 1.590934s 50% 1.590934s 90% 1.590934s 99% 1.590934s Write: 10% 1.460744s 30% 1.460744s 50% 1.460744s 90% Write: 10% 1.584515s 30% 1.584515s 50% 1.584515s 90% 1.584515s 99% 1.584515s 1.460744s 99% 1.460744s Write: 10% 1.385177s 30% 1.385177s 50% 1.385177s 90% Write: 10% 1.440908s 30% 1.440908s 50% 1.440908s 90% 1.440908s 99% 1.440908s Write: 10% 1.621393s 30% 1.621393s 50% 1.621393s 90% 1.621393s 99% 1.621393s 1.385177s 99% Write: 10% 1.618070s 30% 1.618070s 50% 1.385177s 1.618070s 90% 1.618070s 99% 1.618070s Write: 10% 1.679262s 30% 1.679262s 50% 1.679262s 90% 1.679262s 99% 1.679262s Write: 10% 1.655241s 30% 1.655241s 50% 1.655241s 90% 1.655241s 99% 1.655241s Write: 10% 1.691157s 30% 1.691157s 50% 1.691157s 90% 1.691157s 99% 1.691157s Write: 10% 1.664508s 30% 1.664508s 50% 1.664508s 90% 1.664508s 99% 1.664508s Write: 10% 1.704981s 30% 1.704981s 50% 1.704981s 90% 1.704981s 99% 1.704981s Write: 10% 1.470140s 30% 1.470140s 50% 1.470140s 90% 1.470140s 99% 1.470140s Write: 10% 1.544290s 30% 1.544290s 50% 1.544290s 90% 1.544290s 99% 1.544290s Write: 10% 1.765367s 30% 1.765367s 50% 1.765367s 90% 1.765367s 99% 1.765367s Write: 10% 1.737234s 30% 1.737234s 50% 1.737234s 90% 1.737234s 99% 1.737234s Step 2. read write Write: 10% 0.820074s 30% 0.820074s 50% 0.820074s 90% 0.820074s 99% 0.820074s Write: 10% 0.438614s 30% 0.438614s 50% 0.438614s 90% 0.438614s 99% 0.438614s Write: 10% 0.569089s 30% 0.569089s 50% 0.569089s 90% 0.569089s 99% 0.569089s Write: 10% 0.585477s 30% 0.585477s 50% 0.585477s 90% 0.585477s 99% 0.585477s Write: 10% 0.568689s 30% 0.568689s 50% 0.568689s 90% 0.568689s 99% 0.568689s Write: 10% 0.734155s 30% 0.734155s 50% 0.734155s 90% 0.734155s 99% 0.734155s Write: 10% 0.608538s 30% 0.608538s 50% 0.608538s 90% 0.608538s 99% 0.608538s Write: 10% 0.724731s 30% 0.724731s 50% Write: 10% 0.887401s 30% 0.887401s 50% 0.724731s 90% 0.724731s 99% 0.724731s 0.887401s 90% Write: 10% 0.611542s 30% 0.611542s 50% Write: 10% 0.520259s 30% 0.520259s 50% 0.887401s 99% 0.887401s 0.611542s 90% 0.520259s 90% 0.520259s 99% 0.520259s 0.611542s 99% 0.611542s Write: 10% 0.871875s 30% 0.871875s 50% 0.871875s 90% 0.871875s 99% 0.871875s Write: 10% 0.661063s 30% 0.661063s 50% 0.661063s 90% 0.661063s 99% 0.661063s Write: 10% 0.569411s 30% 0.569411s 50% 0.569411s 90% 0.569411s 99% 0.569411s Write: 10% 0.659851s 30% 0.659851s 50% Write: 10% 0.773639s 30% 0.773639s 50% 0.773639s 90% 0.773639s 99% 0.773639s 0.659851s 90% 0.659851s 99% 0.659851s Write: 10% 0.645103s 30% 0.645103s 50% 0.645103s 90% 0.645103s 99% 0.645103s Write: 10% 0.656518s 30% 0.656518s 50% 0.656518s 90% 0.656518s 99% 0.656518s Write: 10% 0.824563s 30% 0.824563s 50% 0.824563s 90% 0.824563s 99% 0.824563s Write: 10% 0.771758s 30% 0.771758s 50% 0.771758s 90% 0.771758s 99% 0.771758s Write: 10% 0.605976s 30% 0.605976s 50% 0.605976s 90% 0.605976s 99% 0.605976s Write: 10% 0.947814s 30% 0.947814s 50% 0.947814s 90% 0.947814s 99% 0.947814s Write: 10% 0.881215s 30% 0.881215s 50% 0.881215s 90% 0.881215s 99% 0.881215s Write: 10% 0.668705s 30% 0.668705s 50% 0.668705s 90% 0.668705s 99% 0.668705s Write: 10% 0.736650s 30% 0.736650s 50% 0.736650s 90% 0.736650s 99% 0.736650s Write: 10% 0.652141s 30% 0.652141s 50% 0.652141s 90% 0.652141s 99% 0.652141s Write: 10% 0.909395s 30% 0.909395s 50% 0.909395s 90% 0.909395s 99% 0.909395s Write: 10% 0.797859s 30% 
0.797859s 50% 0.797859s 90% 0.797859s 99% 0.797859s Write: 10% 0.645185s 30% 0.645185s 50% 0.645185s 90% 0.645185s 99% 0.645185s Write: 10% 0.624043s 30% 0.624043s 50% 0.624043s 90% 0.624043s 99% 0.624043s Write: 10% 0.929466s 30% 0.929466s 50% 0.929466s 90% 0.929466s 99% 0.929466s Write: 10% 0.751074s 30% 0.751074s 50% 0.751074s 90% 0.751074s 99% 0.751074s Write: 10% 0.783287s 30% 0.783287s 50% 0.783287s 90% 0.783287s 99% 0.783287s Write: 10% 0.995693s 30% 0.995693s 50% 0.995693s 90% 0.995693s 99% 0.995693s Write: 10% 0.491168s 30% 0.491168s 50% 0.491168s 90% 0.491168s 99% 0.491168s Write: 10% 0.673823s 30% 0.673823s 50% 0.673823s 90% 0.673823s 99% 0.673823s Write: 10% 0.553129s 30% 0.553129s 50% 0.553129s 90% 0.553129s 99% 0.553129s Write: 10% 1.000856s 30% 1.000856s 50% 1.000856s 90% 1.000856s 99% 1.000856s Write: 10% 0.758586s 30% 0.758586s 50% 0.758586s 90% 0.758586s 99% 0.758586s Write: 10% 0.754330s 30% 0.754330s 50% 0.754330s 90% 0.754330s 99% 0.754330s Write: 10% 0.793082s 30% 0.793082s 50% 0.793082s 90% 0.793082s 99% 0.793082s Write: 10% 0.651857s 30% 0.651857s 50% 0.651857s 90% 0.651857s 99% 0.651857s Write: 10% 0.577086s 30% 0.577086s 50% 0.577086s 90% 0.577086s 99% 0.577086s Write: 10% 0.843822s 30% 0.843822s 50% 0.843822s 90% 0.843822s 99% 0.843822s Write: 10% 0.796005s 30% 0.796005s 50% 0.796005s 90% 0.796005s 99% 0.796005s Write: 10% 0.899100s 30% 0.899100s 50% 0.899100s 90% 0.899100s 99% 0.899100s Write: 10% 0.530895s 30% 0.530895s 50% 0.530895s 90% 0.530895s 99% 0.530895s Write: 10% 0.954494s 30% 0.954494s 50% 0.954494s 90% 0.954494s 99% 0.954494s Write: 10% 0.889596s 30% 0.889596s 50% 0.889596s 90% 0.889596s 99% 0.889596s Write: 10% 0.782699s 30% 0.782699s 50% 0.782699s 90% 0.782699s 99% 0.782699s Write: 10% 0.770644s 30% 0.770644s 50% 0.770644s 90% 0.770644s 99% 0.770644s Write: 10% 0.851980s 30% Write: 10% 0.691526s 30% 0.691526s 50% 0.691526s 90% 0.691526s 99% 0.691526s Write: 10% 0.574051s 30% 0.574051s 50% 0.574051s 90% 0.574051s 99% 0.574051s Write: 10% 0.742448s 30% 0.742448s 50% 0.742448s 90% 0.742448s 99% 0.742448s Write: 10% 0.858797s 30% 0.858797s 50% 0.851980s 50% 0.851980s 90% 0.851980s 99% 0.851980s Write: 10% Write: 10% 0.730321s 30% 0.730321s 50% 0.730321s 90% 0.858797s 90% 0.730321s 99% 0.730321s 0.858797s 99% 0.858797s Write: 10% 0.904468s 30% 0.904468s 50% 0.904468s 90% 0.904468s 99% 0.904468s 0.922468s 30% 0.922468s 50% 0.922468s 90% 0.922468s 99% 0.922468s Write: 10% 0.836450s 30% 0.836450s 50% 0.836450s 90% 0.836450s 99% 0.836450s Write: 10% 0.682600s 30% 0.682600s 50% 0.682600s 90% 0.682600s 99% 0.682600s Write: 10% 0.773530s 30% 0.773530s 50% Write: 10% 0.765963s 30% 0.765963s 50% 0.765963s 90% 0.765963s 99% 0.765963s 0.773530s 90% 0.773530s 99% 0.773530s Write: 10% 0.881772s 30% 0.881772s 50% 0.881772s 90% 0.881772s 99% 0.881772s Read: 10% 2.096056s 30% 2.096056s 50% 2.096056s 90% 2.096056s 99% 2.096056s Step 3. 
write modify Write: 10% 0.436276s 30% 0.436276s 50% 0.436276s 90% 0.436276s 99% 0.436276s Write: 10% 0.465707s 30% 0.465707s 50% 0.465707s 90% 0.465707s 99% 0.465707s Write: 10% 0.490875s 30% 0.490875s 50% 0.490875s 90% 0.490875s 99% 0.490875s Write: 10% 0.433187s 30% 0.433187s 50% 0.433187s 90% 0.433187s 99% 0.433187s Write: 10% 0.515797s 30% 0.515797s 50% 0.515797s 90% 0.515797s 99% 0.515797s Write: 10% 0.618838s 30% 0.618838s 50% Write: 10% 0.264248s 30% 0.264248s 50% 0.264248s 90% 0.264248s 99% 0.264248s 0.618838s 90% 0.618838s 99% 0.618838s Write: 10% 0.619582s 30% 0.619582s 50% 0.619582s 90% 0.619582s 99% 0.619582s Write: 10% 0.529821s 30% 0.529821s 50% 0.529821s 90% 0.529821s 99% 0.529821s Write: 10% 0.936811s 30% 0.936811s 50% 0.936811s 90% 0.936811s 99% 0.936811s Write: 10% 0.577437s 30% 0.577437s 50% 0.577437s 90% 0.577437s 99% 0.577437s Write: 10% 0.781638s 30% 0.781638s 50% 0.781638s 90% 0.781638s 99% 0.781638s Write: 10% 0.404105s 30% 0.404105s 50% 0.404105s 90% 0.404105s 99% 0.404105sWrite: 10% 0.803266s 30% 0.803266s 50% 0.803266s 90% 0.803266s 99% 0.803266s Write: 10% 0.877221s 30% 0.877221s 50% 0.877221s 90% 0.877221s 99% 0.877221s Write: 10% 0.615688s 30% 0.615688s 50% 0.615688s 90% 0.615688s 99% 0.615688s Write: 10% 0.599463s 30% 0.599463s 50% 0.599463s 90% 0.599463s 99% 0.599463s Write: 10% 0.603283s 30% 0.603283s 50% 0.603283s 90% 0.603283s 99% 0.603283s Write: 10% 0.990087s 30% 0.990087s 50% 0.990087s 90% 0.990087s 99% 0.990087s Write: 10% 0.847792s 30% 0.847792s 50% 0.847792s 90% 0.847792s 99% 0.847792s Write: 10% 0.836049s 30% 0.836049s 50% 0.836049s 90% 0.836049s 99% 0.836049s Write: 10% 0.845576s 30% 0.845576s 50% 0.845576s 90% 0.845576s 99% 0.845576s Write: 10% 0.485528s 30% 0.485528s 50% 0.485528s 90% 0.485528s 99% 0.485528s Write: 10% 0.762997s 30% 0.762997s 50% 0.762997s 90% 0.762997s 99% 0.762997s Write: 10% 0.753463s 30% 0.753463s 50% 0.753463s 90% 0.753463s 99% 0.753463s Write: 10% 0.270368s 30% 0.270368s 50% 0.270368s 90% 0.270368s 99% 0.270368s Write: 10% 0.882749s 30% 0.882749s 50% 0.882749s 90% 0.882749s 99% 0.882749s Write: 10% 0.559852s 30% 0.559852s 50% 0.559852s 90% 0.559852s 99% 0.559852s Write: 10% 0.316321s 30% 0.316321s 50% 0.316321s 90% 0.316321s 99% 0.316321s Write: 10% 1.087801s 30% 1.087801s 50% 1.087801s 90% 1.087801s 99% 1.087801s Write: 10% 0.915735s 30% 0.915735s 50% 0.915735s 90% 0.915735s 99% 0.915735s Write: 10% 0.790562s 30% 0.790562s 50% 0.790562s 90% 0.790562s 99% 0.790562s Write: 10% 1.019806s 30% 1.019806s 50% 1.019806s 90% 1.019806s 99% 1.019806s Write: 10% 0.349651s 30% 0.349651s 50% 0.349651s 90% 0.349651s 99% 0.349651s Write: 10% 0.375519s 30% 0.375519s 50% 0.375519s 90% 0.375519s 99% 0.375519s Write: 10% 1.010739s 30% 1.010739s 50% 1.010739s 90% 1.010739s 99% 1.010739s Write: 10% 0.756494s 30% 0.756494s 50% 0.756494s 90% 0.756494s 99% 0.756494s Write: 10% 0.403751s 30% 0.403751s 50% 0.403751s 90% 0.403751s 99% 0.403751s Write: 10% 0.395364s 30% 0.395364s 50% 0.395364s 90% 0.395364s 99% 0.395364s Write: 10% 1.140364s 30% 1.140364s 50% 1.140364s 90% 1.140364s 99% 1.140364s Write: 10% 1.076583s 30% 1.076583s 50% 1.076583s 90% 1.076583s 99% 1.076583s Write: 10% 0.627788s 30% 0.627788s 50% 0.627788s 90% 0.627788s 99% 0.627788s Write: 10% 1.017902s 30% 1.017902s 50% 1.017902s 90% 1.017902s 99% 1.017902s Write: 10% 0.767915s 30% 0.767915s 50% 0.767915s 90% 0.767915s 99% 0.767915s Write: 10% 0.359748s 30% 0.359748s 50% 0.359748s 90% 0.359748s 99% 0.359748s Write: 10% 0.364977s 30% 0.364977s 50% 0.364977s 90% 0.364977s 99% 0.364977s 
Write: 10% 0.952064s 30% 0.952064s 50% 0.952064s 90% 0.952064s 99% 0.952064s Write: 10% 0.395715s 30% 0.395715s 50% 0.395715s 90% 0.395715s 99% 0.395715s Write: 10% 0.370711s 30% 0.370711s 50% 0.370711s 90% 0.370711s 99% 0.370711s Write: 10% 1.029215s 30% 1.029215s 50% 1.029215s 90% 1.029215s 99% 1.029215s Write: 10% 0.474865s 30% 0.474865s 50% 0.474865s 90% 0.474865s 99% 0.474865s Write: 10% 1.158035s 30% 1.158035s 50% 1.158035s 90% 1.158035s 99% 1.158035s Write: 10% 0.400357s 30% 0.400357s 50% 0.400357s 90% 0.400357s 99% 0.400357s Write: 10% 1.086523s 30% 1.086523s 50% 1.086523s 90% 1.086523s 99% 1.086523s Write: 10% 0.399463s 30% 0.399463s 50% 0.399463s 90% 0.399463s 99% 0.399463s Write: 10% 0.375379s 30% 0.375379s 50% 0.375379s 90% 0.375379s 99% 0.375379s Write: 10% 0.367943s 30% 0.367943s 50% 0.367943s 90% 0.367943s 99% 0.367943s Write: 10% 0.992178s 30% 0.992178s 50% 0.992178s 90% 0.992178s 99% 0.992178s Write: 10% 0.376041s 30% 0.376041s 50% 0.376041s 90% 0.376041s 99% 0.376041s Write: 10% 0.357458s 30% 0.357458s 50% 0.357458s 90% 0.357458s 99% 0.357458s Write: 10% 0.401369s 30% 0.401369s 50% 0.401369s 90% 0.401369s 99% 0.401369s Write: 10% 0.406572s 30% 0.406572s 50% 0.406572s 90% 0.406572s 99% 0.406572s Write: 10% 0.416455s 30% 0.416455s 50% 0.416455s 90% 0.416455s 99% 0.416455s Write: 10% 0.418794s 30% 0.418794s 50% 0.418794s 90% 0.418794s 99% 0.418794s Update: 10% 0.353537s 30% 0.353537s 50% 0.353537s 90% 0.353537s 99% 0.353537s Step 4. read modify write Write: 10% 0.180196s 30% 0.180196s 50% 0.180196s 90% 0.180196s 99% 0.180196s Write: 10% 0.200126s 30% 0.200126s 50% 0.200126s 90% 0.200126s 99% 0.200126s Write: 10% 0.356781s 30% 0.356781s 50% 0.356781s 90% 0.356781s 99% 0.356781s Write: 10% 0.379307s 30% 0.379307s 50% 0.379307s 90% 0.379307s 99% 0.379307s Write: 10% 0.266857s 30% 0.266857s 50% 0.266857s 90% 0.266857s 99% 0.266857s Write: 10% 0.541863s 30% 0.541863s 50% 0.541863s 90% 0.541863s 99% 0.541863s Write: 10% 0.293795s 30% 0.293795s 50% 0.293795s 90% 0.293795s 99% 0.293795s Write: 10% 0.287036s 30% 0.287036s 50% 0.287036s 90% 0.287036s 99% 0.287036s Write: 10% 0.449089s 30% 0.449089s 50% 0.449089s 90% 0.449089s 99% 0.449089s Write: 10% 0.359752s 30% 0.359752s 50% 0.359752s 90% 0.359752s 99% 0.359752s Write: 10% 0.512031s 30% 0.512031s 50% 0.512031s 90% 0.512031s 99% 0.512031s Write: 10% 0.318392s 30% 0.318392s 50% 0.318392s 90% 0.318392s 99% 0.318392s Write: 10% 0.372942s 30% 0.372942s 50% 0.372942s 90% 0.372942s 99% 0.372942s Write: 10% 0.400743s 30% 0.400743s 50% 0.400743s 90% 0.400743s 99% 0.400743s Write: 10% 0.394044s 30% 0.394044s 50% 0.394044s 90% 0.394044s 99% 0.394044s Write: 10% 0.475167s 30% 0.475167s 50% 0.475167s 90% 0.475167s 99% 0.475167s Write: 10% 0.410799s 30% 0.410799s 50% 0.410799s 90% 0.410799s 99% 0.410799s Write: 10% 0.424683s 30% 0.424683s 50% 0.424683s 90% 0.424683s 99% 0.424683s Write: 10% 0.501848s 30% 0.501848s 50% 0.501848s 90% 0.501848s 99% 0.501848s Write: 10% 0.558237s 30% 0.558237s 50% 0.558237s 90% 0.558237s 99% 0.558237s Write: 10% 0.588289s 30% 0.588289s 50% 0.588289s 90% 0.588289s 99% 0.588289s Write: 10% 0.533794s 30% 0.533794s 50% 0.533794s 90% 0.533794s 99% 0.533794s Write: 10% 0.576731s 30% 0.576731s 50% 0.576731s 90% 0.576731s 99% 0.576731s Write: 10% 0.733932s 30% 0.733932s 50% 0.733932s 90% 0.733932s 99% 0.733932s Write: 10% 0.801607s 30% 0.801607s 50% 0.801607s 90% 0.801607s 99% 0.801607s Write: 10% 0.785366s 30% 0.785366s 50% 0.785366s 90% 0.785366s 99% 0.785366s Write: 10% 0.842897s 30% 0.842897s 50% 0.842897s 90% 0.842897s 
99% 0.842897s Write: 10% 0.833384s 30% 0.833384s 50% 0.833384s 90% 0.833384s 99% 0.833384s Write: 10% 0.858525s 30% 0.858525s 50% 0.858525s 90% 0.858525s 99% 0.858525s Write: 10% 0.862284s 30% 0.862284s 50% 0.862284s 90% 0.862284s 99% 0.862284s Write: 10% 0.867478s 30% 0.867478s 50% 0.867478s 90% 0.867478s 99% 0.867478s Write: 10% 0.876312s 30% 0.876312s 50% 0.876312s 90% 0.876312s 99% 0.876312s Write: 10% 0.788428s 30% 0.788428s 50% 0.788428s 90% 0.788428s 99% 0.788428s Write: 10% 0.882091s 30% 0.882091s 50% 0.882091s 90% 0.882091s 99% 0.882091s Write: 10% 0.862882s 30% 0.862882s 50% 0.862882s 90% 0.862882s 99% 0.862882s Write: 10% 0.899217s 30% 0.899217s 50% 0.899217s 90% 0.899217s 99% 0.899217s Write: 10% 0.939414s 30% 0.939414s 50% 0.939414s 90% 0.939414s 99% 0.939414s Write: 10% 0.918130s 30% 0.918130s 50% 0.918130s 90% 0.918130s 99% 0.918130s Write: 10% 0.951028s 30% 0.951028s 50% 0.951028s 90% 0.951028s 99% 0.951028s Write: 10% 0.921744s 30% 0.921744s 50% 0.921744s 90% 0.921744s 99% 0.921744s Write: 10% 0.755917s 30% 0.755917s 50% 0.755917s 90% 0.755917s 99% 0.755917s Write: 10% 0.881854s 30% 0.881854s 50% 0.881854s 90% 0.881854s 99% 0.881854s Write: 10% 0.917291s 30% 0.917291s 50% 0.917291s 90% 0.917291s 99% 0.917291s Write: 10% 0.844591s 30% 0.844591s 50% 0.844591s 90% 0.844591s 99% 0.844591s Write: 10% 1.325893s 30% 1.325893s 50% 1.325893s 90% 1.325893s 99% 1.325893s Write: 10% 1.389737s 30% 1.389737s 50% 1.389737s 90% 1.389737s 99% 1.389737s Write: 10% 1.203952s 30% 1.203952s 50% 1.203952s 90% 1.203952s 99% 1.203952s Write: 10% 1.333905s 30% 1.333905s 50% 1.333905s 90% 1.333905s 99% 1.333905s Write: 10% 1.245735s 30% 1.245735s 50% 1.245735s 90% 1.245735s 99% 1.245735s Write: 10% 1.237641s 30% 1.237641s 50% 1.237641s 90% 1.237641s 99% 1.237641s Write: 10% 1.232728s 30% 1.232728s 50% 1.232728s 90% 1.232728s 99% 1.232728s Write: 10% 1.408516s 30% 1.408516s 50% 1.408516s 90% 1.408516s 99% 1.408516s Write: 10% 1.269747s 30% 1.269747s 50% 1.269747s 90% 1.269747s 99% 1.269747s Write: 10% 1.334491s 30% 1.334491s 50% 1.334491s 90% 1.334491s 99% 1.334491s Write: 10% 1.438758s 30% 1.438758s 50% 1.438758s 90% 1.438758s 99% 1.438758s Write: 10% 1.314058s 30% 1.314058s 50% 1.314058s 90% 1.314058s 99% 1.314058s Write: 10% 1.429802s 30% 1.429802s 50% 1.429802s 90% 1.429802s 99% 1.429802s Write: 10% 1.308296s 30% 1.308296s 50% 1.308296s 90% 1.308296s 99% 1.308296s Write: 10% 1.349853s 30% 1.349853s 50% 1.349853s 90% 1.349853s 99% 1.349853s Write: 10% 1.362539s 30% 1.362539s 50% 1.362539s 90% 1.362539s 99% 1.362539s Write: 10% 1.322381s 30% 1.322381s 50% 1.322381s 90% 1.322381s 99% 1.322381s Write: 10% 1.330481s 30% 1.330481s 50% 1.330481s 90% 1.330481s 99% 1.330481s Write: 10% 1.337462s 30% 1.337462s 50% 1.337462s 90% 1.337462s 99% 1.337462s Write: 10% 1.467444s 30% 1.467444s 50% 1.467444s 90% 1.467444s 99% 1.467444s Update: 10% 0.850776s 30% 0.850776s 50% 0.927805s 90% 0.927805s 99% 0.927805s Read: 10% 3.187687s 30% 3.187687s 50% 3.187687s 90% 3.187687s 99% 3.187687s |98.1%| [TM] {RESULT} ydb/tests/olap/high_load/unittest >> DataShardCompaction::CompactBorrowed >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> TCreateAndDropViewTest::CallDropViewOnTable [FAIL] >> TCreateAndDropViewTest::DropSameViewTwice >> BlobDepot::DecommitPutAndRead [GOOD] >> BlobDepot::DecommitVerifiedRandom >> TestPurecalcFilter::PartialPush [GOOD] >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: 2025-04-10T11:38:03.209157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:03.209414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:03.209646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpNeroot/pdisk_1.dat 2025-04-10T11:38:03.800286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:03.869829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:03.919001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:03.919582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:03.932618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-04-10T11:38:08.886797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:08.887002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:38:08.887212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmp29X2pn/pdisk_1.dat 2025-04-10T11:38:09.158801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:09.187740Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:09.231817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:09.231957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:09.244303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-04-10T11:38:13.073596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:13.073978Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:13.074119Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpKNNM8D/pdisk_1.dat 2025-04-10T11:38:13.392997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:13.423797Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:13.461767Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:13.461914Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:13.473626Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:17.080041Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:17.080224Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:17.080444Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpnKvl70/pdisk_1.dat 2025-04-10T11:38:17.375939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:17.406687Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:17.444065Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:17.444215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:17.455735Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:21.310083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:21.310463Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:21.310529Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpnhDRvl/pdisk_1.dat 2025-04-10T11:38:21.628051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:21.659304Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:21.697927Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:21.698099Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:21.710042Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:26.003389Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:26.003820Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:26.003944Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpyynCri/pdisk_1.dat 2025-04-10T11:38:26.316192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:26.351184Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:26.390262Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:26.390426Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:26.403906Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:31.114031Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:31.114479Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:31.114677Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpXp0UoC/pdisk_1.dat 2025-04-10T11:38:31.471395Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:31.507294Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:31.604165Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:31.604338Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:31.616300Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:36.549283Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:36.549768Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:38:36.549959Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpImD56W/pdisk_1.dat 2025-04-10T11:38:36.956710Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:36.993898Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:37.037827Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:37.038000Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:37.049700Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:42.065073Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:42.065461Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:42.065637Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpkEFHqn/pdisk_1.dat 2025-04-10T11:38:42.451396Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:42.487329Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:42.527248Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:42.527425Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:42.539273Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-04-10T11:38:42.703364Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:47.766909Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:47.767203Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:47.767288Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00088a/r3tmp/tmpHkXJJE/pdisk_1.dat 2025-04-10T11:38:48.128010Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:48.169429Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:48.209931Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:48.210128Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:48.222345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-04-10T11:38:48.635356Z node 10 :IMPORT WARN: Table profiles were not loaded |98.1%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> TestPurecalcFilter::CompilationValidation >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> MediatorTest::WatcherReconnect [GOOD] >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> MediatorTest::MultipleSteps >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.1%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 
72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:29.174402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:29.174516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:29.174565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:29.174607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:29.174658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:29.174694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:29.174794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:29.174890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:29.175272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:29.271381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:29.271467Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:29.291374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:29.291667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:29.291847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:29.313198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:29.313543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:29.314262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.317415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:29.323389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.324858Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:29.324926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.325123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:29.325198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:29.325250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:29.325424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:29.333416Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:29.473188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:29.473436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.473674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:29.473954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:29.474016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.476440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.476586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:29.476808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.476874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:29.476929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:29.476980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:29.479089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.479157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:29.479193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:29.481157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.481216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.481259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.481324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.485042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:29.487659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:29.487862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:29.488882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:29.489039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:29.489082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.489357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:29.489432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:29.489597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:29.489679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:29.491783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:29.491834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:29.491985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:29.492046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:29.492278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:29.492337Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:29.492433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:29.492464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:29.492501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:29.492547Z no ... Size: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:38:54.537199Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:38:54.537502Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 322us result status StatusSuccess 2025-04-10T11:38:54.538524Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:38:54.549753Z node 54 
:CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1134:2898] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:38:54.549876Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1080:2898] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-10T11:38:54.550047Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1134:2898] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285134522789 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285134522789 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285134522789 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:38:54.560108Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1134:2898] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T11:38:54.560273Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1080:2898] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> MediatorTest::MultipleSteps [GOOD] >> Dictionary::Simple [GOOD] >> Dictionary::ComparePayloadAndFull >> TSentinelUnstableTests::BSControllerCantChangeStatus >> BlobDepot::DecommitVerifiedRandom [GOOD] >> MediatorTest::WatchesBeforeFirstStep >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> TestPurecalcFilter::CompilationValidation [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL] >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutToken >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> TestRawParser::Simple ------- [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::DecommitVerifiedRandom [GOOD] Test command err: Mersenne random seed 857874631 RandomSeed# 9866369642401955316 Mersenne random seed 3063076836 Mersenne random seed 817660413 Mersenne random seed 973134476 Mersenne random seed 40726866 2025-04-10T11:38:28.898458Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 
VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.898652Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.898722Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.898791Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.898860Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.898922Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.899006Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.899081Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.899473Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [235472c006cd4393] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-10T11:38:28.901001Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901184Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901236Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901292Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901340Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901423Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901475Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# 
{Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.901522Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.920732Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921020Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921080Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921138Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921181Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921227Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921273Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921314Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-10T11:38:28.921545Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [64a832cdc325f18a] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 1936390131 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2025-04-10T11:38:30.304536Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305001Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305111Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305207Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305299Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 
1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305395Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305504Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-10T11:38:30.305597Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2025-04-10T11:38:30.360579Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361019Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361132Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361222Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361332Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361438Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361552Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-10T11:38:30.361648Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] 
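The BS_HULLRECS CRIT lines in this test are VDisks rejecting deliberately out-of-order garbage-collection commands: for a given tablet/channel the recorded collect barrier (generation:step) must never move backwards. A simplified model of that monotonicity rule, with the key and value shapes reduced for illustration (real barrier keys also carry a record generation and per-generation counter):

    # Simplified model of the ValidateGCCmd "decreasing barrier" check.
    class BarrierStore:
        def __init__(self):
            self.last = {}  # (tablet, channel, hard) -> (gen, step)

        def validate_collect(self, tablet, channel, hard, gen, step):
            key = (tablet, channel, hard)
            prev = self.last.get(key)
            if prev is not None and (gen, step) < prev:
                raise ValueError(f"decreasing barrier: existing {prev} new {(gen, step)}")
            self.last[key] = (gen, step)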
TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 476483068 Read over the barrier, blob id# [100:1:1:2:8602999:410:0] Read over the barrier, blob id# [100:1:3:1:8552762:351:0] Read over the barrier, blob id# [100:1:3:1:8552762:351:0] Read over the barrier, blob id# [100:1:1:2:8602999:410:0] Read over the barrier, blob id# [100:1:1:2:8602999:410 ... VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472116Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472303Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472469Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472640Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472807Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.472998Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 2025-04-10T11:38:55.473146Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 17 7 hard] barrier# 1:2 new key# [15 1 19 2 hard] barrier# 0:2 Read over the barrier, blob id# [17:6:14:0:4341129:814:0] Read over the barrier, blob id# [17:6:14:0:4006279:857:0] Read over the barrier, blob id# [17:1:2:1:5846452:190:0] Read over the barrier, blob id# [17:5:7:0:13063235:478:0] Read over the barrier, blob id# [17:6:14:0:4341129:814:0] Read over the barrier, blob id# [17:4:5:0:5550520:764:0] Read over the barrier, blob id# [17:1:2:1:5846452:190:0] Read over the barrier, blob id# [16:1:4:1:6782852:173:0] Read over the barrier, blob id# [16:1:4:1:12103244:716:0] Read over the barrier, blob id# 
[16:1:4:1:6782852:173:0] Read over the barrier, blob id# [16:1:4:2:12075823:505:0] Read over the barrier, blob id# [16:1:4:2:15253013:400:0] Read over the barrier, blob id# [16:1:4:1:7575283:705:0] Read over the barrier, blob id# [16:1:4:2:15253013:400:0] Read over the barrier, blob id# [16:1:4:1:12103244:716:0] TEvRange returned collected blob with id# [17:6:14:0:4341129:814:0] TEvRange returned collected blob with id# [17:6:14:0:8305288:518:0] TEvRange returned collected blob with id# [17:6:15:0:15689592:351:0] TEvRange returned collected blob with id# [17:6:16:0:10695715:377:0] TEvRange returned collected blob with id# [17:6:17:0:16627050:835:0] TEvRange returned collected blob with id# [17:3:4:1:13861638:568:0] Read over the barrier, blob id# [17:4:5:0:10349626:778:0] Read over the barrier, blob id# [17:5:7:0:6320068:71:0] Read over the barrier, blob id# [17:1:2:2:12591082:300:0] Read over the barrier, blob id# [17:3:4:1:13861638:568:0] Read over the barrier, blob id# [17:4:5:0:10349626:778:0] Read over the barrier, blob id# [17:6:17:0:16627050:835:0] Read over the barrier, blob id# [17:6:16:0:10695715:377:0] Read over the barrier, blob id# [17:5:7:0:592767:597:0] Read over the barrier, blob id# [17:5:7:0:592767:597:0] Read over the barrier, blob id# [17:5:7:0:592767:597:0] Read over the barrier, blob id# [17:4:5:0:5550520:764:0] Read over the barrier, blob id# [17:5:7:0:13063235:478:0] TEvRange returned collected blob with id# [17:3:4:1:13861638:568:0] Read over the barrier, blob id# [17:6:14:0:8305288:518:0] Read over the barrier, blob id# [17:4:5:0:16275546:938:0] Read over the barrier, blob id# [17:6:14:0:8305288:518:0] Read over the barrier, blob id# [17:6:14:0:4006279:857:0] Read over the barrier, blob id# [17:6:16:0:10695715:377:0] 2025-04-10T11:38:56.452350Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.453076Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.453305Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.453561Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.453782Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.454010Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.454486Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 2025-04-10T11:38:56.454744Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 
VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 1 27 0 hard] existing barrier# 3:0 new barrier# 3:4 Read over the barrier, blob id# [17:5:7:0:6320068:71:0] Read over the barrier, blob id# [17:5:9:0:6655547:195:0] Read over the barrier, blob id# [17:6:14:0:4006279:857:0] Read over the barrier, blob id# [17:4:5:0:16275546:938:0] TEvRange returned collected blob with id# [17:3:4:1:13861638:568:0] TEvRange returned collected blob with id# [17:1:2:2:1269226:402:0] TEvRange returned collected blob with id# [17:1:2:2:12591082:300:0] Read over the barrier, blob id# [17:6:15:0:15689592:351:0] Read over the barrier, blob id# [17:6:17:0:16627050:835:0] Read over the barrier, blob id# [17:6:16:0:10695715:377:0] Read over the barrier, blob id# [17:3:4:1:13861638:568:0] Read over the barrier, blob id# [17:6:16:0:10695715:377:0] Read over the barrier, blob id# [17:5:7:0:13063235:478:0] Read over the barrier, blob id# [17:4:5:0:10349626:778:0] Read over the barrier, blob id# [17:5:9:0:6655547:195:0] Read over the barrier, blob id# [17:4:5:0:10349626:778:0] TEvRange returned collected blob with id# [17:3:4:1:13861638:568:0] TEvRange returned collected blob with id# [17:1:2:2:1269226:402:0] TEvRange returned collected blob with id# [17:1:2:2:12591082:300:0] Read over the barrier, blob id# [16:1:4:1:7575283:705:0] Read over the barrier, blob id# [16:3:15:2:5108928:994:0] Read over the barrier, blob id# [16:1:4:2:14447395:841:0] Read over the barrier, blob id# [16:3:15:2:5108928:994:0] Read over the barrier, blob id# [16:1:4:0:14018040:572:0] Read over the barrier, blob id# [16:1:4:2:14447395:841:0] 2025-04-10T11:38:57.152526Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.153735Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154017Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154233Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154431Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154576Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154732Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 2025-04-10T11:38:57.154918Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 16 
key# [16 2 18 1 soft] existing barrier# 4:0 new barrier# 3:1 TEvRange returned collected blob with id# [17:1:2:2:1269226:402:0] TEvRange returned collected blob with id# [17:1:2:2:12591082:300:0] Read over the barrier, blob id# [16:1:4:2:14447395:841:0] Read over the barrier, blob id# [16:1:4:0:14018040:572:0] Read over the barrier, blob id# [16:1:4:0:14018040:572:0] 2025-04-10T11:38:57.236559Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.237864Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238076Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238266Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238456Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238645Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238829Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 2025-04-10T11:38:57.238996Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 35 4 soft] barrier# 7:2 new key# [17 0 36 3 soft] barrier# 6:3 TEvRange returned collected blob with id# [17:3:4:1:13861638:568:0] |98.1%| [TS] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> TopicSessionTests::ReadNonExistentTopic >> TestRawParser::Simple [GOOD] >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> TestRawParser::ManyValues >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> TTxDataShardPrefixKMeansScan::BadRequest >> TestRawParser::ManyValues [GOOD] >> Dictionary::ComparePayloadAndFull [GOOD] >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add [GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> 
ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::TestValueFromNull [GOOD] >> ProgramStep::MergeFilterSimple >> TestRawParser::TypeKindsValidation >> ProgramStep::MergeFilterSimple [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] >> SortableBatchPosition::FindPosition [GOOD] >> ClosedIntervalSet::Union >> BulkUpsert::BulkUpsert >> SystemView::QueryStatsAllTables [GOOD] >> SystemView::QueryStatsRetries >> TestRawParser::TypeKindsValidation [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/formats/arrow/ut/unittest >> SortableBatchPosition::FindPosition [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 
lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007324840764 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04506846012 0.3776978417 zstd(poolsize=1024;keylen=16) 0.0655640205 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1110720087 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1914108287 0.7447345433 zstd(poolsize=128;keylen=1) 0.003769847609 0.04002713704 zstd(poolsize=128;keylen=10) 0.007456731695 0.07809798271 zstd(poolsize=128;keylen=16) 0.0102539786 0.1029455519 zstd(poolsize=128;keylen=32) 0.01677217062 0.1578947368 zstd(poolsize=128;keylen=64) 0.03005940945 0.2517949988 zstd(poolsize=16;keylen=1) 0.002620896858 0.02794819359 zstd(poolsize=16;keylen=10) 0.002816201441 0.03048416019 zstd(poolsize=16;keylen=16) 0.003368308096 0.03570300158 zstd(poolsize=16;keylen=32) 0.004159808469 0.0434375 zstd(poolsize=16;keylen=64) 0.005779996974 0.05875115349 zstd(poolsize=1;keylen=1) 0.002461243407 0.02626193724 zstd(poolsize=1;keylen=10) 0.002154636612 0.0234375 zstd(poolsize=1;keylen=16) 0.002356872222 0.02519132653 zstd(poolsize=1;keylen=32) 0.002427911996 0.02573879886 zstd(poolsize=1;keylen=64) 0.00258021431 0.02699269609 zstd(poolsize=512;keylen=1) 0.005583027596 0.05848930481 zstd(poolsize=512;keylen=10) 0.0236929438 0.2237078941 zstd(poolsize=512;keylen=16) 0.03443366072 0.2936507937 zstd(poolsize=512;keylen=32) 0.05917328099 0.4212765957 zstd(poolsize=512;keylen=64) 0.1058929843 0.5749553837 zstd(poolsize=64;keylen=1) 0.00319560285 0.03401360544 zstd(poolsize=64;keylen=10) 0.004852093844 0.05176470588 zstd(poolsize=64;keylen=16) 0.00633344236 0.06557881773 zstd(poolsize=64;keylen=32) 0.009647738439 0.09619952494 zstd(poolsize=64;keylen=64) 0.01626771323 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; 
--------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----512 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----1024 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% --10000 ---- ... _arrays_cache.h:65;event=slice_from_cache;key=int16;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\n"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(36):{\"i\":\"1,2,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N4 -> N5[label="2"]; N0 -> N5[label="3"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=int16;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=int16;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=int64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=int16;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; 
N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; |98.1%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> Mirror3of4::ReplicationSmall [GOOD] >> Mirror3of4::ReplicationHuge >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> MediatorTest::RebootTargetTablets >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth [GOOD] >> KafkaProtocol::CreatePartitionsScenario ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-04-10T11:37:40.664315Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-10T11:37:40.664350Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-04-10T11:37:40.664372Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-04-10T11:37:40.664412Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-10T11:37:40.664566Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7491647288133852448:2051] 2025-04-10T11:37:43.509239Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7491647288133852448:2051] [id 1]: Started compile request 2025-04-10T11:37:44.398296Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7491647288133852448:2051] [id 1]: 
Compilation completed for request 2025-04-10T11:37:44.398616Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7491647288133852448:2051] 2025-04-10T11:37:44.399840Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-10T11:37:44.399875Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-10T11:37:44.399950Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-10T11:37:44.399962Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-04-10T11:37:44.399977Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-04-10T11:37:44.400004Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-10T11:37:44.401354Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7491647288133852448:2051] 2025-04-10T11:37:44.401436Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7491647288133852448:2051] [id 2]: Started compile request 2025-04-10T11:37:44.430332Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7491647288133852448:2051] [id 2]: Compilation completed for request 2025-04-10T11:37:44.430430Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7491647288133852448:2051] 2025-04-10T11:37:44.431687Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-10T11:37:44.431707Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-10T11:37:44.431734Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-04-10T11:37:44.431784Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-04-10T11:37:44.431794Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-04-10T11:37:44.431801Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-10T11:37:44.434882Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-04-10T11:37:44.434937Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-04-10T11:37:44.434947Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-10T11:37:44.435031Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-04-10T11:37:44.435107Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-04-10T11:37:44.435121Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 2025-04-10T11:37:44.435130Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:37:44.435163Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-04-10T11:37:44.435168Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:37:44.887387Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-10T11:37:44.887447Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: 
Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-04-10T11:37:44.887484Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-04-10T11:37:44.887517Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-10T11:37:44.887955Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7491647308167288858:2051] 2025-04-10T11:37:48.056582Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7491647308167288858:2051] [id 1]: Started compile request 2025-04-10T11:37:48.107766Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7491647308167288858:2051] [id 1]: Compilation completed for request 2025-04-10T11:37:48.107914Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7491647308167288858:2051] 2025-04-10T11:37:48.108044Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-10T11:37:48.108075Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-10T11:37:48.108106Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-10T11:37:48.590708Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-10T11:37:48.590740Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-04-10T11:37:48.590763Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; 2025-04-10T11:37:48.590787Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-10T11:37:48.590901Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7491647324551215685:2051] 2025-04-10T11:37:51.188473Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7491647324551215685:2051] [id 1]: Started compile request 2025-04-10T11:37:51.246990Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7491647324551215685:2051] [id 1]: Compilation failed for request 2025-04-10T11:37:51.247148Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7491647324551215685:2051] 2025-04-10T11:37:51.248780Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-10T11:37:51.248908Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: mismatched input '.' expecting {'$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED, DIGITS} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-04-10T11:37:54.841498Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-04-10T11:37:54.842728Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-04-10T11:37:54.926041Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:37:54.926303Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-04-10T11:37:54.927198Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-10T11:37:54.927233Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-04-10T11:37:54.927260Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-04-10T11:37:54.927321Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-10T11:37:54.960840Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-10T11:37:54.960908Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-10T11:37:54.961148Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-04-10T11:37:55.078346Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-04-10T11:37:55.186162Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:37:55.186452Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-04-10T11:37:55.186489Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-1 ... 
10Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-04-10T11:38:19.810228Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:19.977301Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:20.512459Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:20.512868Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:20.512933Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-04-10T11:38:20.513670Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:20.513719Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-04-10T11:38:20.514061Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:20.514093Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-04-10T11:38:20.514472Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:20.514505Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-04-10T11:38:20.984183Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:20.984478Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:20.984517Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-04-10T11:38:21.455042Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:21.455410Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:21.455455Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-04-10T11:38:21.455924Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:21.455959Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-04-10T11:38:21.456218Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:21.456253Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 2025-04-10T11:38:21.986514Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:21.986833Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:21.986889Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-04-10T11:38:21.987319Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:21.987368Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-04-10T11:38:22.496535Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-10T11:38:22.496825Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:22.496873Z node 26 :FQ_ROW_DISPATCHER TRACE: 
TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-04-10T11:38:22.497328Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:22.497378Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-04-10T11:38:22.497567Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:22.497608Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-04-10T11:38:22.497802Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-10T11:38:22.497834Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-04-10T11:38:22.915027Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-10T11:38:22.915220Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7491647471743803887:2051] 2025-04-10T11:38:27.418671Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7491647471743803887:2051] [id 0]: Started compile request 2025-04-10T11:38:27.477496Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7491647471743803887:2051] [id 0]: Compilation completed for request 2025-04-10T11:38:27.477673Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7491647471743803887:2051] 2025-04-10T11:38:27.478073Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:27.479144Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:28.045595Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-10T11:38:28.055334Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7491647493794566831:2051] 2025-04-10T11:38:32.547788Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7491647493794566831:2051] [id 0]: Started compile request 2025-04-10T11:38:32.592127Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7491647493794566831:2051] [id 0]: Compilation completed for request 2025-04-10T11:38:32.592254Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7491647493794566831:2051] 2025-04-10T11:38:32.592456Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:32.592589Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:33.248892Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-10T11:38:33.249292Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7491647519176527563:2051] 2025-04-10T11:38:38.110307Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7491647519176527563:2051] [id 0]: Started compile request 2025-04-10T11:38:38.201560Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7491647519176527563:2051] [id 0]: Compilation completed for request 2025-04-10T11:38:38.201688Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7491647519176527563:2051] 
2025-04-10T11:38:38.201940Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-10T11:38:38.202094Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-10T11:38:38.202166Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-10T11:38:38.202213Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-10T11:38:38.202251Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-10T11:38:39.027996Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-04-10T11:38:39.028184Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7491647543763328088:2051] 2025-04-10T11:38:44.156370Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7491647543763328088:2051] [id 0]: Started compile request 2025-04-10T11:38:44.232001Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7491647543763328088:2051] [id 0]: Compilation completed for request 2025-04-10T11:38:44.232194Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7491647543763328088:2051] 2025-04-10T11:38:44.233971Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:44.988632Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-04-10T11:38:44.988848Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7491647565098539365:2051] 2025-04-10T11:38:50.988674Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7491647565098539365:2051] [id 0]: Started compile request 2025-04-10T11:38:51.049370Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7491647565098539365:2051] [id 0]: Compilation completed for request 2025-04-10T11:38:51.049531Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7491647565098539365:2051] 2025-04-10T11:38:51.049930Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-10T11:38:51.935348Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-04-10T11:38:51.935542Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7491647594357087714:2051] 2025-04-10T11:38:58.030355Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7491647594357087714:2051] [id 0]: Started compile request 2025-04-10T11:38:58.047614Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7491647594357087714:2051] [id 0]: Compilation failed for request 2025-04-10T11:38:58.047808Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7491647594357087714:2051] 2025-04-10T11:38:59.218124Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-04-10T11:38:59.218214Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-04-10T11:38:59.815578Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-04-10T11:38:59.815665Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-04-10T11:38:59.815841Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-04-10T11:38:59.815882Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} |98.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> StatisticsScan::RunScanOnShard >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:35:49.401491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:49.401589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:49.401628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:49.401670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:35:49.401716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:49.401747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:49.401838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:49.401920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:49.403406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:49.475268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:35:49.475332Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:35:49.492256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:49.492521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:49.492686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:49.500685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:49.501012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:49.501708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:49.502396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:49.505485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:49.506896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:49.506959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:49.507118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:49.507163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:49.507200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-04-10T11:35:49.507346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:35:49.518016Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:35:49.633756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:49.634034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.634326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:49.634632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:49.634693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.637522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:49.637687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:49.637935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.637996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:49.638052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:49.638086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:49.640494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.640551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:49.640585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:49.642592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.642669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.642714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:49.642786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:49.646131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:35:49.648128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:49.648299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:49.649384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:49.649531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:49.649590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:49.649886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:49.649951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:49.650128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:49.650285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:49.652693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:49.652732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:49.652971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:49.653020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:49.653255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:49.653324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:49.653423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:49.653459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:49.653496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 
1/1 2025-04-10T11:35:49.653548Z no ... 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { 
EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:39:01.709208Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:39:01.709496Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 328us result status StatusSuccess 2025-04-10T11:39:01.710403Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { 
GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:39:01.721507Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:808:2624] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:39:01.721621Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:729:2624] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:39:01.721777Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:808:2624] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285141690842 Step: 5000003 TxId: 
18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285141690842 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285141690842 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:39:01.724186Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:808:2624] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T11:39:01.724287Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:729:2624] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> ServerRestartTest::RestartOnGetSession >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:35:47.712327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:35:47.712433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:47.712478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:35:47.712513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2025-04-10T11:35:47.712567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:35:47.712615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:35:47.712705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:35:47.712792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:35:47.713177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:35:47.803997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:35:47.804074Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:35:47.822834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:35:47.823147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:35:47.823331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:35:47.832195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:35:47.832613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:35:47.833343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:47.834029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:35:47.837969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:47.839498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:47.839566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:47.839744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:35:47.839802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:47.839840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:35:47.839999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for 
TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:35:47.850189Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:35:47.976921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:35:47.977214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.977457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:35:47.977748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:35:47.977805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.980728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:47.980888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:35:47.981115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.981171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:35:47.981225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:35:47.981265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:35:47.983436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.983500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:35:47.983537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:35:47.985460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.985520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.985573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:47.985645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:35:47.989629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-04-10T11:35:47.991857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:35:47.992060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:35:47.993264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:35:47.993460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:35:47.993524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:47.993852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:35:47.993937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:35:47.994173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:35:47.994256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:35:47.997259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:35:47.997303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:35:47.997481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:35:47.997527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:35:47.997796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:35:47.997865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:35:47.997973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:47.998012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:35:47.998053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:35:47.998113Z no ... 
CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:39:03.344006Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:39:03.344318Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 387us result status StatusSuccess 2025-04-10T11:39:03.345303Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> MediatorTest::RebootTargetTablets [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> TopicSessionTests::SlowSession >> QueryActorTest::SimpleQuery >> Init::TWithDefaultParser [GOOD] >> MediatorTest::ResendSubset >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/init/ut/unittest >> Init::TWithDefaultParser [GOOD] |98.1%| [TS] {RESULT} ydb/core/config/init/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.1%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |98.1%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> KafkaProtocol::CreatePartitionsScenario [GOOD] >> KafkaProtocol::AlterConfigsScenario >> Splitter::Simple >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall |98.1%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/public/ut/unittest >> 
LongTxServicePublicTypes::LongTxId [GOOD] |98.2%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] |98.2%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> LongTxService::BasicTransactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: 2025-04-10T11:38:25.010452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:25.010693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:25.010888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000706/r3tmp/tmpxzkRLI/pdisk_1.dat 2025-04-10T11:38:25.606093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:25.654544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:25.703201Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:38:25.705655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:25.706199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:25.706373Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:38:25.718768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:25.807066Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:38:25.807149Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:38:25.808437Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:38:25.926320Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:38:25.926432Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:38:25.927010Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:38:25.927106Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:38:25.927435Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:38:25.927688Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-04-10T11:38:25.927800Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:38:25.930695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:25.931699Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:38:25.932365Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:38:25.932453Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:38:25.983716Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:38:25.985132Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:38:25.986355Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:38:25.986632Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:26.030429Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:38:26.031231Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:26.031336Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:26.033159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:26.033243Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:26.033305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:26.034546Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:26.034705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:26.034797Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:38:26.045627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:26.069435Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:26.071296Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:26.071505Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:38:26.071580Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:26.071621Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:26.071656Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:26.071912Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient 
[1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:26.071979Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:26.073081Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:26.073202Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:26.073288Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:26.073335Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:26.073431Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:38:26.073487Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:38:26.073529Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:38:26.073576Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:38:26.073621Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:26.073765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:26.073803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:26.073848Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:38:26.075281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:38:26.075346Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:38:26.075463Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:26.075834Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:38:26.075886Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:26.075988Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:38:26.076115Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:38:26.076166Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:38:26.076219Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:38:26.076264Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:38:26.076642Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:38:26.076686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:38:26.076728Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:38:26.076759Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:38:26.076818Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:38:26.076850Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:38:26.076882Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:38:26.076926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:38:26.076949Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:38:26.078608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:38:26.078680Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720751862 ... 1:39:09.005229Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:870:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:39:09.005481Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:39:09.005592Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-04-10T11:39:09.005721Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:39:09.005783Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:39:09.005849Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:39:09.005912Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:39:09.005985Z node 8 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-10T11:39:09.006057Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:39:09.006093Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:39:09.006128Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:39:09.008060Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:39:09.008317Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:39:09.008709Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, 
counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:39:09.008777Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-10T11:39:09.008839Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:870:2702], 0} after executionsCount# 1 2025-04-10T11:39:09.008928Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:870:2702], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:39:09.009063Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:870:2702], 0} finished in read 2025-04-10T11:39:09.009178Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:39:09.009215Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:39:09.009250Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:39:09.009278Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:39:09.009330Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-10T11:39:09.009356Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:39:09.009384Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-10T11:39:09.009431Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:39:09.009581Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:39:09.009854Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_SUBSCRIBED 2025-04-10T11:39:09.011787Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:870:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:39:09.011873Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2025-04-10T11:39:09.018915Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:874:2706], Recipient [8:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:39:09.019014Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:39:09.019084Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:873:2705], serverId# [8:874:2706], sessionId# [0:0:0] 2025-04-10T11:39:09.019391Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [8:872:2704], Recipient [8:665:2570]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-04-10T11:39:09.019606Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# 
v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-10T11:39:09.019785Z node 8 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-04-10T11:39:09.031075Z node 8 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-10T11:39:09.031878Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [8:24:2071], Recipient [8:665:2570]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-04-10T11:39:09.031952Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-10T11:39:09.032009Z node 8 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-10T11:39:09.032063Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:09.180015Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqy9v8fx7gyebgxeaaqtqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=N2VlOTdiZTItM2IzMDA3N2YtNWIxNjVhMWYtZWU0YjY3Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:39:09.182421Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:897:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-10T11:39:09.182725Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-10T11:39:09.182851Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-10T11:39:09.183014Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:39:09.183096Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-10T11:39:09.183160Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:39:09.183213Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:39:09.183277Z node 8 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-10T11:39:09.183334Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:39:09.183364Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:39:09.183390Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-10T11:39:09.183416Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-10T11:39:09.183597Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-10T11:39:09.183959Z 
node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:39:09.184036Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-10T11:39:09.184100Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:897:2723], 0} after executionsCount# 1 2025-04-10T11:39:09.184168Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2723], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:39:09.184280Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2723], 0} finished in read 2025-04-10T11:39:09.184397Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:39:09.184427Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-10T11:39:09.184472Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:39:09.184504Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:39:09.184555Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-10T11:39:09.184579Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:39:09.184614Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-10T11:39:09.184668Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-10T11:39:09.184819Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-10T11:39:09.185837Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:897:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:39:09.185920Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-10T11:39:09.189317Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_NOT_FOUND { items { uint32_value: 1 } items { uint32_value: 11 } } |98.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest >> StatisticsScan::RunScanOnShard [GOOD] >> MediatorTest::ResendSubset [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> SystemView::QueryStatsRetries [GOOD] >> MediatorTest::ResendNotSubset >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot |98.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |98.2%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-04-10T11:39:08.229227Z node 1 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:08.229422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:08.229575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a5a/r3tmp/tmpNMSxLD/pdisk_1.dat 2025-04-10T11:39:08.803585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:39:08.859293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:08.907910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:08.908404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:08.923245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:09.017711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:39:09.400657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:09.401431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:09.401575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:09.410508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T11:39:09.582900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T11:39:09.649638Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:39:10.631505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfqya6nb760w4c3vp8kj5sx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZjY2Y2MzUtYTgwM2I4MTctOTBhNTczMzQtMjAwODRkYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |98.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> ConsoleDumper::Basic >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> DatabaseConfigValidation::AllowedFields [GOOD] >> DatabaseConfigValidation::NotAllowedFields [GOOD] >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> MetaCache::BasicForwarding [GOOD] >> 
MetaCache::TimeoutFallback >> MetaCache::TimeoutFallback [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut/unittest >> YamlConfigProto2Yaml::StorageConfig [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 |98.2%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |98.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/ut/unittest >> DatabaseConfigValidation::NotAllowedFields [GOOD] |98.2%| [TS] {RESULT} ydb/tests/library/ut/py3test |98.2%| [TS] {RESULT} ydb/core/config/validation/ut/unittest >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] Test command err: 2025-04-10T11:39:14.074178Z :HTTP INFO: Listening on http://[::]:20221 2025-04-10T11:39:14.074994Z :HTTP INFO: Listening on http://[::]:29950 2025-04-10T11:39:14.075584Z :HTTP DEBUG: Connection created [1:14:2061] 2025-04-10T11:39:14.075693Z :HTTP DEBUG: resolving 127.0.0.1:20221 2025-04-10T11:39:14.075865Z :HTTP DEBUG: connecting 2025-04-10T11:39:14.076220Z :HTTP DEBUG: (#11,127.0.0.1:20221) outgoing connection opened 2025-04-10T11:39:14.076436Z :HTTP DEBUG: (#11,127.0.0.1:20221) <- (GET /server) 2025-04-10T11:39:14.076880Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:47720) incoming connection opened 2025-04-10T11:39:14.077151Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:47720) -> (GET /server) 2025-04-10T11:39:14.077407Z :HTTP DEBUG: Updating ownership http://127.0.0.1:29950 with deadline 2025-04-10T11:40:14.077284Z 2025-04-10T11:39:14.077460Z :HTTP DEBUG: SetRefreshTime "/server" to 2025-04-10T11:40:14.077284Z (+1744285214.077284s) 2025-04-10T11:39:14.077559Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:29950 timeout 30.000000s 2025-04-10T11:39:14.077844Z :HTTP DEBUG: Connection created [1:16:2063] 2025-04-10T11:39:14.077898Z :HTTP DEBUG: 
resolving 127.0.0.1:29950 2025-04-10T11:39:14.077981Z :HTTP DEBUG: connecting 2025-04-10T11:39:14.078220Z :HTTP DEBUG: (#13,127.0.0.1:29950) outgoing connection opened 2025-04-10T11:39:14.078272Z :HTTP DEBUG: (#13,127.0.0.1:29950) <- (GET /server) 2025-04-10T11:39:14.079817Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:57874) incoming connection opened 2025-04-10T11:39:14.079982Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:57874) -> (GET /server) 2025-04-10T11:39:14.081169Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:57874) <- (200 Found) 2025-04-10T11:39:14.081348Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:57874) connection closed 2025-04-10T11:39:14.081591Z :HTTP DEBUG: (#13,127.0.0.1:29950) -> (200 Found) 2025-04-10T11:39:14.081659Z :HTTP DEBUG: (#13,127.0.0.1:29950) connection closed 2025-04-10T11:39:14.082873Z :HTTP DEBUG: Cache received successfull (200) response for /server 2025-04-10T11:39:14.083256Z :HTTP DEBUG: Connection closed [1:16:2063] 2025-04-10T11:39:14.083320Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:47720) <- (200 Found) 2025-04-10T11:39:14.083423Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:47720) connection closed 2025-04-10T11:39:14.083642Z :HTTP DEBUG: (#11,127.0.0.1:20221) -> (200 Found) 2025-04-10T11:39:14.083677Z :HTTP DEBUG: (#11,127.0.0.1:20221) connection closed 2025-04-10T11:39:14.083971Z :HTTP DEBUG: Connection closed [1:14:2061] 2025-04-10T11:39:14.094883Z :HTTP INFO: Listening on http://[::]:27880 2025-04-10T11:39:14.095426Z :HTTP INFO: Listening on http://[::]:29733 2025-04-10T11:39:14.095891Z :HTTP DEBUG: Connection created [2:14:2061] 2025-04-10T11:39:14.095958Z :HTTP DEBUG: resolving 127.0.0.1:27880 2025-04-10T11:39:14.096039Z :HTTP DEBUG: connecting 2025-04-10T11:39:14.096337Z :HTTP DEBUG: (#11,127.0.0.1:27880) outgoing connection opened 2025-04-10T11:39:14.096381Z :HTTP DEBUG: (#11,127.0.0.1:27880) <- (GET /server) 2025-04-10T11:39:14.096776Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:40896) incoming connection opened 2025-04-10T11:39:14.096927Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:40896) -> (GET /server) 2025-04-10T11:39:14.097203Z :HTTP DEBUG: Updating ownership http://127.0.0.1:29733 with deadline 2025-04-10T11:49:14.097156Z 2025-04-10T11:39:14.097256Z :HTTP DEBUG: SetRefreshTime "/server" to 2025-04-10T11:49:14.097156Z (+1744285754.097156s) 2025-04-10T11:39:14.097320Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:29733 timeout 30.000000s 2025-04-10T11:39:14.097464Z :HTTP DEBUG: Connection created [2:16:2063] 2025-04-10T11:39:14.097526Z :HTTP DEBUG: resolving 127.0.0.1:29733 2025-04-10T11:39:14.097594Z :HTTP DEBUG: connecting 2025-04-10T11:39:14.097757Z :HTTP DEBUG: (#13,127.0.0.1:29733) outgoing connection opened 2025-04-10T11:39:14.097795Z :HTTP DEBUG: (#13,127.0.0.1:29733) <- (GET /server) 2025-04-10T11:39:14.097987Z :HTTP ERROR: (#13,127.0.0.1:29733) connection closed with error: Connection timed out 2025-04-10T11:39:14.098409Z :HTTP WARN: Cache received failed response with error "Connection timed out" for /server - retrying locally 2025-04-10T11:39:14.098655Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:39160) incoming connection opened 2025-04-10T11:39:14.098733Z :HTTP DEBUG: Connection closed [2:16:2063] 2025-04-10T11:39:14.098867Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:39160) -> (GET /server) 2025-04-10T11:39:14.098995Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:39160) connection closed 2025-04-10T11:39:14.109800Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:40896) <- (200 Found) 2025-04-10T11:39:14.109981Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:40896) connection closed 2025-04-10T11:39:14.114049Z 
:HTTP DEBUG: (#11,127.0.0.1:27880) -> (200 Found) 2025-04-10T11:39:14.114190Z :HTTP DEBUG: (#11,127.0.0.1:27880) connection closed 2025-04-10T11:39:14.114529Z :HTTP DEBUG: Connection closed [2:14:2061] |98.2%| [TS] {RESULT} ydb/mvp/meta/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::QueryStatsRetries [GOOD] Test command err: 2025-04-10T11:35:30.390724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646732801961954:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:30.391236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ed5/r3tmp/tmpDsVRsD/pdisk_1.dat 2025-04-10T11:35:30.789943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:30.815513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.815613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3738, node 1 2025-04-10T11:35:30.821878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:30.884113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.884143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.884155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.884286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26375 TClient is connected to server localhost:26375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:35:31.445470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:35:33.451268Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T11:35:33.451300Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T11:35:33.484974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646745686864843:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.484976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646745686864835:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.485085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:35:33.488998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T11:35:33.513587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646745686864849:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T11:35:33.583090Z node 1 :TX_PROXY ERROR: Actor# [1:7491646745686864924:2753] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:35:33.584560Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T11:35:33.586203Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000000AE8 2025-04-10T11:35:33.586266Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491646745686864831:2339], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfqqqbafemx382qr7pr0sda, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FkY2QyMDUtN2Y1ODZmMDMtYjIxYTI4NDMtMjU3ZmE0YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T11:35:33.586410Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T11:35:33.586474Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491646745686864831:2339], queueSize: 1 2025-04-10T11:35:33.587036Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491646745686864831:2339], compileActor: [1:7491646745686864943:2350] 
2025-04-10T11:35:33.902408Z node 1 :KQP_YQL INFO: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.901 INFO ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T11:35:33.903486Z node 1 :KQP_YQL TRACE: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.902 TRACE ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T11:35:33.903933Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.903 DEBUG ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 361us 2025-04-10T11:35:33.904693Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.904 DEBUG ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T11:35:33.905940Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.905 DEBUG ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-10T11:35:33.906840Z node 1 :KQP_YQL TRACE: TraceId: 01jrfqqqbafemx382qr7pr0sda, SessionId: CompileActor 2025-04-10 11:35:33.906 TRACE ydb-core-sys_view-ut(pid=1775016, tid=0x00007FBDE4EE3640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId ... 
0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:38:50.733750Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:38:50.801074Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:38:54.227580Z node 61 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[61:7491647584003811608:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:38:54.227699Z node 61 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:38:57.664467Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7491647618363551037:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:57.664647Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:57.665002Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7491647618363551049:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:38:57.672557Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:38:57.707631Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [61:7491647618363551051:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:38:57.798028Z node 61 :TX_PROXY ERROR: Actor# [61:7491647618363551124:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:38:58.055586Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfqxyqx0a5keqc1shm7mck2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=NDlkN2FkNzYtYzBiOGQ4MTctNjQyZDFjYTUtM2E2YjE0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:58.283849Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfqxz5gc0kka8vhh95gmwhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=NmU1ZTM3MDEtOGVlYWYxYmEtMjYyODJhOTQtYmEwN2QwZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:38:58.287236Z node 61 :SYSTEM_VIEWS INFO: Scan started, actor: [61:7491647622658518506:2372], owner: [61:7491647622658518503:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-10T11:38:58.302910Z node 61 :SYSTEM_VIEWS INFO: Scan prepared, actor: [61:7491647622658518506:2372], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:38:58.303621Z node 61 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [61:7491647622658518506:2372], row count: 1, finished: 1 2025-04-10T11:38:58.303666Z node 61 :SYSTEM_VIEWS INFO: Scan finished, actor: [61:7491647622658518506:2372], owner: [61:7491647622658518503:2370], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-04-10T11:38:58.309597Z node 61 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285138280, txId: 281474976715662] shutting down 2025-04-10T11:39:01.234316Z node 66 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[66:7491647636749210367:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:01.236171Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ed5/r3tmp/tmpGScKtJ/pdisk_1.dat 2025-04-10T11:39:01.494323Z node 66 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:01.557821Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:01.557968Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:01.562660Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7363, node 66 2025-04-10T11:39:01.665568Z node 66 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:39:01.665602Z node 66 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:39:01.665613Z node 66 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-10T11:39:01.665827Z node 66 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:39:02.220091Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:02.259560Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:06.235448Z node 66 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[66:7491647636749210367:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:06.235584Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:39:08.972340Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7491647666813982508:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:08.972340Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7491647666813982503:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:08.972484Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:08.979628Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:39:09.026957Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [66:7491647666813982517:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:39:09.103127Z node 66 :TX_PROXY ERROR: Actor# [66:7491647671108949884:2732] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:39:09.364555Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfqy9s9cmy3edjkhbs7ehys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=M2E3Mjc4OWUtMTljMjE1ZTMtYzgzN2QxYjAtYTIwYzZiOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:39:09.585511Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfqya6e3da4zprn5zt9aa3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=YmMzYTFmODItMzdjYmQzMzUtYWNlNjNjNGEtMTY4M2ZkNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T11:39:09.589916Z node 66 :SYSTEM_VIEWS INFO: Scan started, actor: [66:7491647671108949964:2370], owner: [66:7491647671108949961:2368], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:39:09.591366Z node 66 :SYSTEM_VIEWS INFO: Scan prepared, actor: [66:7491647671108949964:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T11:39:09.592246Z node 66 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [66:7491647671108949964:2370], row count: 1, finished: 1 2025-04-10T11:39:09.592312Z node 66 :SYSTEM_VIEWS INFO: Scan finished, actor: [66:7491647671108949964:2370], owner: [66:7491647671108949961:2368], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-10T11:39:09.595984Z node 66 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285149583, txId: 281474976710662] shutting down >> SequenceProxy::Basics >> LongTxService::LockSubscribe [GOOD] >> MediatorTest::ResendNotSubset [GOOD] >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> ServerRestartTest::RestartOnGetSession [GOOD] >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-04-10T11:39:11.433771Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:39:11.434381Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpz2f5Yp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:39:11.435102Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpz2f5Yp/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpz2f5Yp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13228490149522999125 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:39:11.485249Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:429:2317] 2025-04-10T11:39:11.485459Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.496418Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:430:2099] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.496555Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:97:2048] 2025-04-10T11:39:11.496808Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:147:2088] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.497088Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:430:2099] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.497230Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:147:2088] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.498062Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 without side-effects 2025-04-10T11:39:11.498371Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:430:2099] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.498610Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.500290Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:430:2099] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.500449Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService 
[Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=1 2025-04-10T11:39:11.501545Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-10T11:39:11.501642Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-10T11:39:11.502398Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:97:2048] 2025-04-10T11:39:11.502601Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:351:2269] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-04-10T11:39:11.507012Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:430:2099] LongTxId# ydb://long-tx/000000001fawm4shf7jjq5ry4r?node_id=3 2025-04-10T11:39:11.507302Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:468:2101] 2025-04-10T11:39:12.273566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:39:12.273658Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:12.358621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:39:13.133272Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:39:13.133967Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpFcawME/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:39:13.134338Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpFcawME/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpFcawME/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14528443831523618314 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:39:13.508759Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:509:2382] for database /dc-1 2025-04-10T11:39:13.508866Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.519516Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.519775Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending navigate request for /dc-1 2025-04-10T11:39:13.526718Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received navigate response status Ok 2025-04-10T11:39:13.526811Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending acquire step to coordinator 72057594046316545 2025-04-10T11:39:13.531553Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received read step 1000 2025-04-10T11:39:13.531779Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-04-10T11:39:13.532675Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-04-10T11:39:13.532726Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.543559Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.543835Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending navigate request for /dc-1 2025-04-10T11:39:13.544100Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Received navigate response status Ok 2025-04-10T11:39:13.544154Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending acquire step to coordinator 72057594046316545 2025-04-10T11:39:13.544448Z node 3 :LONG_TX_SERVICE 
DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Received read step 1500 2025-04-10T11:39:13.544542Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-04-10T11:39:13.544626Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-04-10T11:39:13.544863Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-04-10T11:39:13.544923Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.556429Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-10T11:39:13.556710Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending navigate request for /dc-1 2025-04-10T11:39:13.556982Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received navigate response status Ok 2025-04-10T11:39:13.557037Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending acquire step to coordinator 72057594046316545 2025-04-10T11:39:13.557258Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received read step 1500 2025-04-10T11:39:13.557345Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-04-10T11:39:13.557454Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ea4jd1egx6be6fzn97?node_id=3&snapshot=1500%3Amax 2025-04-10T11:39:14.456931Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:39:14.457382Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpeR3v9h/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:39:14.457642Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpeR3v9h/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000830/r3tmp/tmpeR3v9h/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5815479446084710945 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:39:14.507799Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-04-10T11:39:14.507939Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:431:2319] for LockId# 987 LockNode# 5 2025-04-10T11:39:14.522954Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:432:2099] for LockId# 987 LockNode# 5 2025-04-10T11:39:14.523957Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:100:2048] 2025-04-10T11:39:14.524836Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 987 LockNode# 5 2025-04-10T11:39:14.526397Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-10T11:39:14.526639Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:431:2319] for LockId# 123 LockNode# 5 2025-04-10T11:39:14.526793Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:432:2099] for LockId# 123 LockNode# 5 2025-04-10T11:39:14.527761Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 123 LockNode# 5 2025-04-10T11:39:14.527993Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-04-10T11:39:14.528170Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-04-10T11:39:14.528343Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-10T11:39:14.528538Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from 
[6:432:2099] for LockId# 234 LockNode# 5 2025-04-10T11:39:14.528783Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.529214Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.529472Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:100:2048] 2025-04-10T11:39:14.529654Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:83:2074] ServerId# [5:353:2271] TabletId# 72057594037932033 PipeClientId# [6:83:2074] 2025-04-10T11:39:14.760680Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:461:2048] 2025-04-10T11:39:14.760991Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.761225Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:461:2048] 2025-04-10T11:39:14.761358Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.761399Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.761787Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:462:2100] ServerId# [5:467:2338] TabletId# 72057594037932033 PipeClientId# [6:462:2100] 2025-04-10T11:39:14.762096Z node 6 :TX_PROXY WARN: actor# [6:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-10T11:39:14.985822Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:492:2048] 2025-04-10T11:39:14.986070Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.986132Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-10T11:39:14.986771Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:492:2048] 2025-04-10T11:39:14.987043Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:493:2101] ServerId# [5:497:2359] TabletId# 72057594037932033 PipeClientId# [6:493:2101] 2025-04-10T11:39:15.226941Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:513:2048] 2025-04-10T11:39:15.227232Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-10T11:39:15.227300Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-04-10T11:39:15.227457Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:513:2048] 2025-04-10T11:39:15.227892Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:514:2103] ServerId# [5:518:2373] TabletId# 72057594037932033 PipeClientId# [6:514:2103] |98.2%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost >> TopicSessionTests::SlowSession [GOOD] >> Coordinator::ReadStepSubscribe >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> Splitter::Crit [GOOD] >> Splitter::CritSimple |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |98.2%| [TM] {RESULT} 
ydb/tests/fq/common/py3test >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-04-10T11:38:58.880343Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-10T11:38:58.880405Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-10T11:38:58.880469Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-10T11:38:58.880493Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-10T11:38:58.880548Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-10T11:38:58.880621Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-10T11:38:58.882278Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 
InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-10T11:38:58.895804Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: 
"/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } 
GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... 
64800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-10T11:39:16.835935Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-10T11:39:16.836089Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-04-10T11:39:16.836168Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-10T11:39:16.836647Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 6:25, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:39:16.836723Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 6:27, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:39:16.836758Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 3:13, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-10T11:39:16.836801Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:39:16.837071Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 132 2025-04-10T11:39:16.837106Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-10T11:39:16.850419Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-10T11:39:16.850496Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:39:16.850758Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 133 2025-04-10T11:39:16.850797Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-10T11:39:16.861674Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 
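The sentinel exchange just above shows the complete control loop: a PDisk whose state counter reaches its limit (StateCounter# 60 StateLimit# 60) is scheduled for a status change, the first two TEvControllerConfigResponse replies come back without a per-request success and are logged as unsuccessful, and the change is considered applied only on the retry where every status entry reports Success: true ("PDisk status has been changed"). A minimal C++ sketch of that bounded retry is below; ChangeResponse, ChangePDiskStatus, and the attempt limit are illustrative assumptions, not the actual CMS sentinel API.

#include <functional>
#include <iostream>
#include <string>

// Illustrative stand-ins: the real sentinel talks to the BS controller via
// actor events (TEvControllerConfigRequest/Response), not plain structs.
struct ChangeResponse {
    bool Success = false;    // mirrors "Success: true" in the log above
    std::string Error;       // empty in the unsuccessful-response case above
};

// Re-send a PDisk status change until the controller confirms it, giving up
// after maxAttempts -- the bounded retry seen as "Retrying: attempt# 1 / 2".
bool ChangePDiskStatus(const std::function<ChangeResponse()>& send,
                       int maxAttempts = 3) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        ChangeResponse r = send();
        if (r.Success) {
            return true;     // "PDisk status has been changed"
        }
        std::cerr << "unsuccessful response from BSC, retrying: attempt# "
                  << attempt << "\n";
    }
    return false;
}

int main() {
    int calls = 0;
    auto send = [&calls] {  // simulated controller: succeeds on the 3rd call
        ChangeResponse r;
        r.Success = (++calls == 3);
        return r;
    };
    std::cout << (ChangePDiskStatus(send) ? "changed" : "gave up") << "\n";
}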
2025-04-10T11:39:16.861751Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-10T11:39:16.861852Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-04-10T11:39:16.861880Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-10T11:39:16.862021Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-04-10T11:39:16.862053Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-04-10T11:39:16.862074Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-04-10T11:39:16.862091Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-04-10T11:39:16.862107Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-04-10T11:39:16.862161Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-04-10T11:39:16.862197Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-04-10T11:39:16.862225Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-04-10T11:39:16.862439Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 134 2025-04-10T11:39:16.862486Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 3:13 2025-04-10T11:39:16.862523Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 6:25 2025-04-10T11:39:16.862549Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 6:27 2025-04-10T11:39:16.862871Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863282Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863502Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863671Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863766Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863864Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.863950Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 
ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.864021Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880027 2025-04-10T11:39:16.864071Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s |98.3%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.3%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> SequenceProxy::DropRecreate [GOOD] >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-04-10T11:39:16.457221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:39:16.457287Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:16.547803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:39:17.411708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-10T11:39:17.673073Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:39:17.673606Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file 
"/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmpKAApYs/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:39:17.674589Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmpKAApYs/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmpKAApYs/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8095544002461453149 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:39:18.623528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-10T11:39:18.623605Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:18.677198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-10T11:39:19.224980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-10T11:39:19.447811Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-10T11:39:19.448341Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2754} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmppCgL51/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-10T11:39:19.448532Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmppCgL51/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2gt4/000a3d/r3tmp/tmppCgL51/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10088704293959602429 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-10T11:39:19.589191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-04-10T11:39:19.856806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 |98.3%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest >> Splitter::CritSimple [GOOD] >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:51.907119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
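The TPDiskConfig dumps repeated throughout this run, like the schemeshard queue settings in the trace that follows, use a flat Name# value token format (ChunkSize# 134217728, Timeout# 600.000000s, and so on). When sifting through logs like these it can help to pull fields out programmatically; the naive C++ reader sketched below does that under stated assumptions — it is a hypothetical helper, not a YDB utility, and it flattens nested groups such as PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} rather than parsing them.

#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Naive reader for the "Name# value" dump format seen above. Every
// whitespace-separated token ending in '#' starts a new key; everything up
// to the next such token is joined as its value. Nested braced groups are
// flattened, which is good enough for grepping individual scalar fields.
std::map<std::string, std::string> ParseHashDump(const std::string& dump) {
    std::map<std::string, std::string> out;
    std::istringstream in(dump);
    std::string tok, key;
    while (in >> tok) {
        if (tok.size() > 1 && tok.back() == '#') {
            key = tok.substr(0, tok.size() - 1);
            out[key];  // create the entry even if the value is empty
        } else if (!key.empty()) {
            std::string& v = out[key];
            if (!v.empty()) v += ' ';
            v += tok;
        }
    }
    return out;
}

int main() {
    auto cfg = ParseHashDump(
        "ChunkSize# 134217728 SectorSize# 4096 SectorMap# true");
    std::cout << "ChunkSize=" << cfg["ChunkSize"] << "\n";  // 134217728
}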
2025-04-10T11:36:51.907215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:51.907259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:51.907297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:51.907343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:51.907373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:51.907475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:51.907554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:51.907904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:51.993139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:51.993204Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:52.007803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:52.008084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:52.008244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:52.021562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:52.021935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:52.022710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:52.023478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:52.027080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:52.028576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:52.028642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:52.028882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:52.028968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T11:36:52.029016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:52.029178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:52.037162Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:52.160146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:52.160386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.160621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:52.160841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:52.160886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.163263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:52.163372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:52.163549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.163598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:52.163648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:52.163682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:52.165654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.165716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:52.165756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:52.167720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.167769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.167806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 
1:0, at tablet# 72057594046678944 2025-04-10T11:36:52.167872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:52.170875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:52.172813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:52.173034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:52.173906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:52.174033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:52.174072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:52.174309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:52.174370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:52.174510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:52.174583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:52.176431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:52.176467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:52.176631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:52.176663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:52.176835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:52.176907Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:52.177016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:52.177049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
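[editor note] The ALTER DATABASE trace above walks one sub-operation through the schemeshard state machine: TCreateParts reports "no shards to create" and hops 2 -> 3, NSubDomainState::TConfigureParts hops 3 -> 128, and TPropose, on TEvOperationPlan from the fake coordinator at step 5000001, hops 128 -> 240, after which TDone reports progress 1/1. A small sketch for recovering those hops from a trace (illustrative only; the numeric codes are schemeshard-internal, and the phase names are read off the surrounding ProgressState messages, not from any public API):

    import re

    STATE_CHANGE = re.compile(r'Change state for txid (\d+:\d+) (\d+) -> (\d+)')

    def transitions(log_text: str) -> dict[str, list[tuple[int, int]]]:
        # Group the numeric state hops by operation part ("txId:partId").
        hops: dict[str, list[tuple[int, int]]] = {}
        for op, src, dst in STATE_CHANGE.findall(log_text):
            hops.setdefault(op, []).append((int(src), int(dst)))
        return hops

For the trace above, transitions(text) yields {'1:0': [(2, 3), (3, 128), (128, 240)]}, matching the CreateParts -> ConfigureParts -> Propose -> Done progression named in the log.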
2025-04-10T11:36:52.177087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:52.177127Z no ... nd_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false 
AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:39:20.610465Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:39:20.611006Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 564us result status StatusSuccess 2025-04-10T11:39:20.611996Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:39:20.634087Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1168:2954] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:39:20.634243Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1140:2954] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:39:20.634441Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1168:2954] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285160569125 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285160569125 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744285160569125 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:39:20.637382Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1168:2954] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-10T11:39:20.637486Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1140:2954] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; |98.3%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: 2025-04-10T11:38:27.268199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:27.268507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:27.268723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000675/r3tmp/tmpz7f8aZ/pdisk_1.dat 2025-04-10T11:38:27.834244Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-10T11:38:27.834963Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-10T11:38:27.835685Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:617:2532] connected 2025-04-10T11:38:27.835840Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-04-10T11:38:27.836385Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-10T11:38:27.836585Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-10T11:38:27.837262Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:623:2537] connected ... waiting for watcher to connect (done) 2025-04-10T11:38:27.837581Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-10T11:38:27.837732Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:621:2536] bucket# 0 2025-04-10T11:38:27.838521Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-10T11:38:27.838584Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-10T11:38:27.838655Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:620:2535] bucket.ActiveActor 2025-04-10T11:38:27.838729Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:621:2536]} 2025-04-10T11:38:27.838863Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-04-10T11:38:27.849821Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:627:2541] connected 2025-04-10T11:38:27.849966Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-10T11:38:27.850022Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:625:2539] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 4 Coordinator# 72057594046316545 2025-04-10T11:38:27.850485Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-04-10T11:38:27.850587Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 
1000 2025-04-10T11:38:27.850683Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-10T11:38:27.850950Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-04-10T11:38:27.871509Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-10T11:38:27.871580Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-10T11:38:27.871711Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:619:2534] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-04-10T11:38:27.871837Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-04-10T11:38:27.871885Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 marker# M2 2025-04-10T11:38:27.871935Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND Ev to# [1:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-04-10T11:38:27.871998Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1010 2025-04-10T11:38:27.872092Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}} marker# M4 2025-04-10T11:38:27.872602Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-10T11:38:27.873929Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:649:2553] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:38:27.873994Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-10T11:38:27.874056Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... 
waiting for no pending commands 2025-04-10T11:38:27.874776Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-04-10T11:38:27.874835Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:621:2536] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-04-10T11:38:27.875461Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-04-10T11:38:27.875530Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:625:2539] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-04-10T11:38:27.875631Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-04-10T11:38:31.388104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:31.388289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:38:31.388522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000675/r3tmp/tmpldR5r5/pdisk_1.dat 2025-04-10T11:38:31.731781Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-10T11:38:31.732279Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-10T11:38:31.732770Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:617:2532] connected 2025-04-10T11:38:31.732891Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-04-10T11:38:31.733338Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-10T11:38:31.733467Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-10T11:38:31.733895Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:623:2537] connected 2025-04-10T11:38:31.733971Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-10T11:38:31.734033Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:621:2536] bucket# 0 ... waiting for watcher to connect (done) 2025-04-10T11:38:31.734622Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-10T11:38:31.734681Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-10T11:38:31.734739Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:620:2535] bucket.ActiveActor 2025-04-10T11:38:31.734816Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:621:2536]} 2025-04-10T11:38:31.734907Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 SEND to# [2:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-04-10T11:38:31.750461Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-10T11:38:31.750535Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-04-10T11:38:31.750628Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-10T11:38:31.750660Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-04-10T11:38:31.750714Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-04-10T11:38:31.750741Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 ... waiti ... ket# 0 ... waiting for no pending commands (done) 2025-04-10T11:39:20.596219Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:664:2560] connected 2025-04-10T11:39:20.596327Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-10T11:39:20.596380Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:662:2558] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-04-10T11:39:20.596769Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:667:2563] connected 2025-04-10T11:39:20.596873Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-10T11:39:20.596971Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:665:2561] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316546 2025-04-10T11:39:20.597344Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-10T11:39:20.597395Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-10T11:39:20.597476Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-10T11:39:20.597509Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-10T11:39:20.597637Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [12:619:2534] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-04-10T11:39:20.597772Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 
0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-04-10T11:39:20.597845Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-10T11:39:20.597922Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-04-10T11:39:20.597982Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-10T11:39:20.598018Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-04-10T11:39:20.598070Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:620:2535] bucket.ActiveActor step# 1010 2025-04-10T11:39:20.598217Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-04-10T11:39:20.598448Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-04-10T11:39:20.598530Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-10T11:39:20.599326Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:673:2567] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:39:20.599411Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-10T11:39:20.599453Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-10T11:39:20.599499Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-10T11:39:20.599906Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:674:2568] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:39:20.599948Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-10T11:39:20.599980Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-10T11:39:20.600027Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-10T11:39:20.612118Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:677:2571] connected 2025-04-10T11:39:20.612291Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-10T11:39:20.612360Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:675:2569] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 4 Coordinator# 72057594046316546 2025-04-10T11:39:20.612652Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-10T11:39:20.612707Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-10T11:39:20.612800Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:619:2534] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-04-10T11:39:20.612928Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:675:2569] 2025-04-10T11:39:20.612980Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-04-10T11:39:20.613037Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-04-10T11:39:20.613105Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-04-10T11:39:20.613163Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-04-10T11:39:20.613262Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-04-10T11:39:20.613355Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-04-10T11:39:20.626833Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:669:2565] ServerId: [12:673:2567] } 2025-04-10T11:39:20.666625Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:20.691068Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:702:2584] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-10T11:39:20.691173Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-10T11:39:20.691225Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-10T11:39:20.691278Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-10T11:39:20.703557Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:670:2566] ServerId: [12:674:2568] } 2025-04-10T11:39:20.735282Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:738:2597] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-10T11:39:20.735415Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-10T11:39:20.735464Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-10T11:39:20.735519Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-10T11:39:20.748178Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:20.748341Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:20.760211Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected |98.3%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> Mirror3of4::ReplicationHuge [GOOD] >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrun ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-04-10T11:38:41.640573Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-10T11:38:41.644827Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:38:41.646021Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-10T11:38:41.647189Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-10T11:38:41.647219Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:38:41.662049Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 10 Apr 2025 11:38:41 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-10T11:38:41.668627Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:38:51.972416Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-10T11:38:51.979029Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:38:51.998689Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-10T11:38:52.015116Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:38:52.031227Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:38:52.045997Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:38:52.062214Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:38:52.078113Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Push 109013 bytes of data to buffer 2025-04-10T11:38:52.094713Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:38:52.103207Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-04-10T11:38:52.103595Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-10T11:38:52.103879Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.104402Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.104437Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:38:52.214915Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.215378Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.215398Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:38:52.270749Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.271199Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.271221Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:38:52.339743Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.340182Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.340202Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:38:52.449821Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.450279Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:38:52.450299Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:38:52.505319Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.505586Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 500 metrics with size of 54513 bytes to solomon 2025-04-10T11:38:52.505602Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-04-10T11:38:52.565451Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.565556Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:38:52.592685Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-04-10T11:38:52.592805Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:38:52.666685Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:38:52 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:38:52.666833Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:03.322814Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-10T11:39:03.323173Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:39:03.323843Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-04-10T11:39:03.324099Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-04-10T11:39:03.324143Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:03.344875Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Thu, 10 Apr 2025 11:39:03 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-04-10T11:39:03.345062Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:13.609999Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-10T11:39:13.613255Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:39:13.630716Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-10T11:39:13.645963Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-10T11:39:13.651564Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-04-10T11:39:13.651960Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-10T11:39:13.652299Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-10T11:39:13.652670Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 400 metrics with size of 43613 bytes to solomon 2025-04-10T11:39:13.652694Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-10T11:39:13.687107Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Thu, 10 Apr 2025 11:39:13 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-04-10T11:39:13.687249Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-04-10T11:39:13.808138Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:39:13 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:39:13.808347Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-04-10T11:39:13.870740Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Thu, 10 Apr 2025 11:39:13 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-10T11:39:13.870925Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:14.434144Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-10T11:39:14.434401Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:39:14.434536Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-10T11:39:14.434712Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-10T11:39:14.434736Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-04-10T11:39:14.438785Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 10 Apr 2025 11:39:14 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-10T11:39:14.438941Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:14.439032Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-10T11:39:14.439179Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-10T11:39:14.439317Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-10T11:39:14.439334Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-10T11:39:14.442951Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Thu, 10 Apr 2025 11:39:14 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-10T11:39:14.443104Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer |98.3%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_mirror3of4/unittest >> Mirror3of4::ReplicationHuge [GOOD] Test command err: 2025-04-10T11:38:38.065987Z 1 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.068234Z 2 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:1:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.068431Z 3 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:2:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.068620Z 4 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:3:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.068757Z 5 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:4:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.068974Z 6 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:5:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.069133Z 7 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:6:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.069256Z 8 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:7:0]: (0) SKELETON START Marker# BSVS37 2025-04-10T11:38:38.074502Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery START 2025-04-10T11:38:38.074640Z 1 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Sending TEvYardInit: pdiskGuid# 8735711000343649006 skeletonid# [1:139:13] selfid# [1:155:22] delay 0.000000 sec 2025-04-10T11:38:38.074710Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) LocalRecovery START 2025-04-10T11:38:38.074746Z 2 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) Sending TEvYardInit: pdiskGuid# 11658902862930046666 skeletonid# [2:140:11] selfid# [2:156:12] delay 0.000000 sec 2025-04-10T11:38:38.074781Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) LocalRecovery START 2025-04-10T11:38:38.074811Z 3 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) Sending TEvYardInit: pdiskGuid# 12168463831355364741 skeletonid# [3:141:11] selfid# [3:157:12] delay 0.000000 sec 2025-04-10T11:38:38.074848Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:3:0]: (0) LocalRecovery START 2025-04-10T11:38:38.074902Z 4 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) Sending TEvYardInit: pdiskGuid# 8572655527864327127 skeletonid# [4:142:11] selfid# [4:158:12] delay 0.000000 sec 2025-04-10T11:38:38.074938Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:4:0]: (0) LocalRecovery START 2025-04-10T11:38:38.074967Z 5 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) Sending TEvYardInit: pdiskGuid# 10857373175763973976 skeletonid# [5:143:11] selfid# [5:159:12] delay 0.000000 sec 2025-04-10T11:38:38.074999Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:5:0]: (0) LocalRecovery START 2025-04-10T11:38:38.075029Z 6 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) Sending TEvYardInit: pdiskGuid# 14649659976135664429 skeletonid# [6:144:11] selfid# [6:160:12] delay 0.000000 sec 2025-04-10T11:38:38.075061Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:6:0]: (0) LocalRecovery START 2025-04-10T11:38:38.075094Z 7 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 
1 VDISK[0:_:0:6:0]: (0) Sending TEvYardInit: pdiskGuid# 10656299421357032638 skeletonid# [7:145:11] selfid# [7:161:12] delay 0.000000 sec 2025-04-10T11:38:38.075127Z 8 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:7:0]: (0) LocalRecovery START 2025-04-10T11:38:38.075169Z 8 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) Sending TEvYardInit: pdiskGuid# 14838741021832313845 skeletonid# [8:146:11] selfid# [8:162:12] delay 0.000000 sec 2025-04-10T11:38:38.077164Z 1 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[1:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:0:0] PDiskGuid# 8735711000343649006 CutLogID# [1:139:13] WhiteboardProxyId# [1:122:10]} 2025-04-10T11:38:38.078684Z 1 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[1:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.078802Z 2 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[2:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:1:0] PDiskGuid# 11658902862930046666 CutLogID# [2:140:11] WhiteboardProxyId# [2:124:10]} 2025-04-10T11:38:38.078882Z 2 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[2:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.078944Z 3 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[3:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:2:0] PDiskGuid# 12168463831355364741 CutLogID# [3:141:11] WhiteboardProxyId# [3:126:10]} 2025-04-10T11:38:38.078986Z 3 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[3:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.079029Z 4 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[4:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:3:0] PDiskGuid# 8572655527864327127 CutLogID# [4:142:11] WhiteboardProxyId# [4:128:10]} 2025-04-10T11:38:38.079092Z 4 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[4:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 
ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.079131Z 5 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[5:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:4:0] PDiskGuid# 10857373175763973976 CutLogID# [5:143:11] WhiteboardProxyId# [5:130:10]} 2025-04-10T11:38:38.079182Z 5 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[5:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.079231Z 6 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[6:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:5:0] PDiskGuid# 14649659976135664429 CutLogID# [6:144:11] WhiteboardProxyId# [6:132:10]} 2025-04-10T11:38:38.079277Z 6 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[6:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.079323Z 7 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[7:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:6:0] PDiskGuid# 10656299421357032638 CutLogID# [7:145:11] WhiteboardProxyId# [7:134:10]} 2025-04-10T11:38:38.079365Z 7 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[7:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.079407Z 8 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:408} PDiskMock[8:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:7:0] PDiskGuid# 14838741021832313845 CutLogID# [8:146:11] WhiteboardProxyId# [8:136:10]} 2025-04-10T11:38:38.079467Z 8 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:443} PDiskMock[8:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-04-10T11:38:38.083347Z 1 00h00m00.000000s :BS_LOCALRECOVERY 
NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-04-10T11:38:38.085477Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-04-10T11:38:38.086091Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-04-10T11:38:38.086802Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:3:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-04-10T11:38:38.087447Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:4:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-04-10T11:38:38.088114Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:5:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# tr ... PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [7:345:29] 2025-04-10T11:39:25.353798Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:540} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-04-10T11:39:25.353852Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [8:355:29] 2025-04-10T11:39:25.357128Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:540} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-04-10T11:39:25.357202Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [7:345:29] 2025-04-10T11:39:25.357313Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:540} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-04-10T11:39:25.357366Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[8:1] sending 
TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [8:355:29] 2025-04-10T11:39:25.357700Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.358085Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:540} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-04-10T11:39:25.358219Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [7:345:29] 2025-04-10T11:39:25.358295Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD(0x511000957bc0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319839296} 2025-04-10T11:39:25.358379Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:540} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-04-10T11:39:25.358430Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:602} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [8:355:29] 2025-04-10T11:39:25.358523Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:690} PDiskMock[2:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319839296} VDiskId# [0:4294967295:0:1:0] 2025-04-10T11:39:25.359552Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:730} PDiskMock[2:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335319839296 StatusFlags# None} 2025-04-10T11:39:25.359720Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD FINISHED(0x511000957bc0): actualReadN# 1 origReadN# 1 2025-04-10T11:39:25.360113Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1369701526376808448} BlockedGeneration# 0} 2025-04-10T11:39:25.366622Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 
WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.367702Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD(0x511000c6f9c0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335320031808} 2025-04-10T11:39:25.368152Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:690} PDiskMock[3:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335320031808} VDiskId# [0:4294967295:0:2:0] 2025-04-10T11:39:25.369137Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:730} PDiskMock[3:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335320031808 StatusFlags# None} 2025-04-10T11:39:25.369286Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD FINISHED(0x511000c6f9c0): actualReadN# 1 origReadN# 1 2025-04-10T11:39:25.369395Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:1] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 2522623030983655424} BlockedGeneration# 0} 2025-04-10T11:39:25.372213Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.372501Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-04-10T11:39:25.373389Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.373596Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-04-10T11:39:25.374406Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 
ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.374647Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD(0x511000943080): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319966272} 2025-04-10T11:39:25.374734Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:690} PDiskMock[6:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319966272} VDiskId# [0:4294967295:0:5:0] 2025-04-10T11:39:25.375674Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:730} PDiskMock[6:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335319966272 StatusFlags# None} 2025-04-10T11:39:25.375748Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD FINISHED(0x511000943080): actualReadN# 1 origReadN# 1 2025-04-10T11:39:25.375873Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1946162278680231936} {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 1946162278680231936} BlockedGeneration# 0} 2025-04-10T11:39:25.378607Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.378823Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} 2025-04-10T11:39:25.379618Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-04-10T11:39:25.379810Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes |98.3%| [TM] {RESULT} ydb/core/blobstorage/ut_mirror3of4/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> DataShardCompaction::CompactBorrowedTxStatus 
[GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-04-10T11:38:54.684991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:54.685232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:54.685450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a8b/r3tmp/tmpjBQSpi/pdisk_1.dat 2025-04-10T11:38:55.241060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:55.320806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:55.376089Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:38:55.379127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:55.387172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:55.387507Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:38:55.400706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:55.499978Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:38:55.500076Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:38:55.500853Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:38:55.678310Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:38:55.678433Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:38:55.679155Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:38:55.679269Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:38:55.679647Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:38:55.679957Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T11:38:55.680110Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:38:55.683477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:55.690346Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:38:55.691229Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:38:55.691309Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:38:55.782281Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:38:55.783732Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:38:55.784262Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:38:55.784541Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:55.863000Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:38:55.863911Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:55.864066Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:55.866100Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:55.866225Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:55.866332Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:55.868685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:55.868858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:55.868991Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:38:55.880094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:55.922755Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:55.923864Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:55.924042Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:38:55.924091Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:55.924122Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:55.924162Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:55.924394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-10T11:38:55.924442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:55.926439Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:55.926606Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:55.927629Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:55.927687Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:55.927830Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:38:55.927878Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:38:55.927922Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:38:55.927958Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:38:55.928033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:55.928191Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:55.928230Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:55.928275Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:38:55.931331Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:38:55.931394Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:38:55.931569Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:55.932010Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:38:55.932077Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:55.932211Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:38:55.932335Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:38:55.932394Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:38:55.932438Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:38:55.932470Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:38:55.932778Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:38:55.932829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:38:55.932866Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:38:55.932922Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-10T11:38:55.933001Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:38:55.933042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:38:55.933076Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:38:55.933110Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:38:55.933135Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:38:55.935413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:38:55.935496Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:55.947046Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 4-10T11:39:27.699706Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-10T11:39:27.699739Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-04-10T11:39:27.700103Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:932:2762], Recipient [2:932:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.700133Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.700185Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-10T11:39:27.700211Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:27.700241Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-04-10T11:39:27.700263Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-04-10T11:39:27.700288Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-04-10T11:39:27.700319Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-10T11:39:27.700339Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-04-10T11:39:27.700359Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-04-10T11:39:27.700379Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-10T11:39:27.700543Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-04-10T11:39:27.700560Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-04-10T11:39:27.700581Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-04-10T11:39:27.700601Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-04-10T11:39:27.700622Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-10T11:39:27.700639Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-04-10T11:39:27.700657Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-04-10T11:39:27.700678Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:27.700695Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-04-10T11:39:27.700716Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-10T11:39:27.700738Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-10T11:39:27.712419Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:39:27.712511Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-10T11:39:27.712553Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-10T11:39:27.712614Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1476:3277], exec latency: 0 ms, propose latency: 1 ms 2025-04-10T11:39:27.712658Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-10T11:39:27.712834Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-10T11:39:27.712881Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-04-10T11:39:27.713035Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1476:3277], Recipient [2:768:2643]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-10T11:39:27.713077Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-10T11:39:27.713169Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:768:2643], Recipient [2:768:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.713203Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.713274Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:39:27.713309Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:27.713346Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-04-10T11:39:27.713380Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-04-10T11:39:27.713415Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-04-10T11:39:27.713449Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-04-10T11:39:27.713481Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-10T11:39:27.713509Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-04-10T11:39:27.713538Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-10T11:39:27.713777Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-04-10T11:39:27.713805Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:27.713831Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-10T11:39:27.713858Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T11:39:27.713886Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T11:39:27.714365Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1508:3306], Recipient [2:768:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-10T11:39:27.714407Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-10T11:39:27.714668Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:39:27.714797Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-10T11:39:27.770214Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-10T11:39:27.770284Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-04-10T11:39:27.770572Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:768:2643], Recipient [2:768:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.770612Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:27.770679Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-10T11:39:27.770714Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:27.770749Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-04-10T11:39:27.770775Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-10T11:39:27.770808Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-04-10T11:39:27.770847Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-10T11:39:27.770876Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-04-10T11:39:27.770905Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-04-10T11:39:27.770931Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-10T11:39:27.771147Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-04-10T11:39:27.771178Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-04-10T11:39:27.771204Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T11:39:27.771247Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-04-10T11:39:27.771278Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-10T11:39:27.771301Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T11:39:27.771324Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-04-10T11:39:27.771354Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:27.771378Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-10T11:39:27.771404Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-10T11:39:27.771432Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-10T11:39:27.782782Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:39:27.782849Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-10T11:39:27.782884Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-10T11:39:27.782948Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1476:3277], exec latency: 1 ms, propose latency: 1 ms 2025-04-10T11:39:27.782993Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:39:27.783173Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-04-10T11:39:27.783255Z node 2 :TX_PROXY INFO: Actor# [2:1476:3277] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> QueryActorTest::StreamQuery [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD] >> KafkaProtocol::MetadataScenario >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-04-10T11:39:07.316172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647662333772487:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:07.316229Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009fb/r3tmp/tmp8lyuiz/pdisk_1.dat 2025-04-10T11:39:07.868487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:07.882039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:07.882919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:07.886172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6105 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:39:08.253368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:08.291594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:08.454668Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-10T11:39:10.427744Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:10.430514Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:10.435353Z node 1 :KQP_PROXY DEBUG: Request has 18444999788559.116291s seconds to be completed 2025-04-10T11:39:10.447911Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=MjM4NmQzYjctOTVjZjE3NTMtODhmMmM1Y2EtNmFlNmNiMjg=, workerId: [1:7491647675218674941:2315], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:39:10.447971Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:10.448146Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:39:10.448243Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:39:10.448285Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-10T11:39:10.448320Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:10.448365Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:10.448489Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:10.448541Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:10.448589Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:10.448617Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:10.448641Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:10.450682Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-04-10T11:39:10.455426Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MjM4NmQzYjctOTVjZjE3NTMtODhmMmM1Y2EtNmFlNmNiMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7491647675218674941:2315] 2025-04-10T11:39:10.455507Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7491647675218674943:2359] 2025-04-10T11:39:10.459057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647675218674956:2320], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:10.459080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647675218674944:2317], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:10.459218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:10.465626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:39:10.475349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647675218674958:2321], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:39:10.534765Z node 1 :TX_PROXY ERROR: Actor# [1:7491647675218675009:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:39:11.260366Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7491647675218674942:2316], selfId: [1:7491647662333772593:2278], source: [1:7491647675218674941:2315] 2025-04-10T11:39:11.260679Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjM4NmQzYjctOTVjZjE3NTMtODhmMmM1Y2EtNmFlNmNiMjg=, TxId: 2025-04-10T11:39:11.261475Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MjM4NmQzYjctOTVjZjE3NTMtODhmMmM1Y2EtNmFlNmNiMjg=, TxId: 2025-04-10T11:39:11.261966Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=MjM4NmQzYjctOTVjZjE3NTMtODhmMmM1Y2EtNmFlNmNiMjg=, workerId: [1:7491647675218674941:2315], local sessions count: 0 2025-04-10T11:39:11.825979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647678879161671:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:11.826036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009fb/r3tmp/tmpH3g7lq/pdisk_1.dat 2025-04-10T11:39:11.968609Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:12.003591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:12.003703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:12.007131Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21137 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:39:12.163331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:39:12.175457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:12.266362Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-10T11:39:14.966040Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:14.967182Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:14.968076Z node 2 :KQP_PROXY DEBUG: Request has 18444999788554.583557s seconds to be completed 2025-04-10T11:39:14.969865Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NzI4YmIxZDUtZmVhODNlLWU1ZDZjODIxLWFkMDM2MDAz, workerId: [2:7491647691764064251:2315], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:39:14.969911Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:14.970030Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:39:14.970087Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:39:14.970110Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:39:14.970126Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:14.970179Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:14.970237Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:14.970301Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool i ... Wuh/pdisk_1.dat 2025-04-10T11:39:20.373319Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:20.393184Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:20.393280Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:20.394928Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4616 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T11:39:20.685917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T11:39:20.694440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:39:20.767551Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-10T11:39:23.651143Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:23.652331Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:23.653429Z node 4 :KQP_PROXY DEBUG: Request has 18444999788545.898210s seconds to be completed 2025-04-10T11:39:23.656130Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=NzAwZGVlZDItOTQ1NTEwOWQtNTIxYjQ3ZGEtNjdjYWU0OGU=, workerId: [4:7491647732411820486:2315], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-10T11:39:23.656156Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:23.656350Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:39:23.656405Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-10T11:39:23.656428Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-10T11:39:23.656452Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-10T11:39:23.656483Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-10T11:39:23.662011Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-10T11:39:23.664829Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-10T11:39:23.666078Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:23.666219Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:23.666246Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:23.666335Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:23.666362Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:23.672008Z node 4 :KQP_PROXY DEBUG: TraceId: "01jrfqyr4g3q5nwta7y4k4rd1c", Created new session, sessionId: ydb://session/3?node_id=4&id=NzQzNWQyZmUtMzFhMWI1NmItYmEzNzY4NTgtYTQyMGQyZTg=, workerId: [4:7491647732411820512:2316], database: /dc-1, longSession: 0, local sessions count: 2 2025-04-10T11:39:23.672363Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfqyr4g3q5nwta7y4k4rd1c, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NzQzNWQyZmUtMzFhMWI1NmItYmEzNzY4NTgtYTQyMGQyZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [4:7491647732411820512:2316] 2025-04-10T11:39:23.672398Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7491647732411820513:2360] 2025-04-10T11:39:23.675706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491647732411820514:2317], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:23.675838Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:23.676244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491647732411820526:2320], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:39:23.681279Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-10T11:39:23.710780Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-10T11:39:23.711199Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491647732411820528:2321], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-10T11:39:23.817312Z node 4 :TX_PROXY ERROR: Actor# [4:7491647732411820579:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:39:24.962832Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:25.254380Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491647719526917919:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:25.254494Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:39:29.715898Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-10T11:39:29.728590Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-04-10T11:39:29.728716Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NzAwZGVlZDItOTQ1NTEwOWQtNTIxYjQ3ZGEtNjdjYWU0OGU=, TxId: 2025-04-10T11:39:29.731621Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-10T11:39:30.000093Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-10T11:39:30.036418Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NzAwZGVlZDItOTQ1NTEwOWQtNTIxYjQ3ZGEtNjdjYWU0OGU=, workerId: [4:7491647732411820486:2315], local sessions count: 1 2025-04-10T11:39:30.249847Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-10T11:39:30.249968Z node 4 :KQP_PROXY DEBUG: Request has 18444999788539.301671s seconds to be completed 2025-04-10T11:39:30.252953Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=NjZjZmY2ZjMtMTg3Y2FjOTItY2U5YzBhNi04NWRhOTgyOQ==, workerId: [4:7491647762476591728:2345], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-10T11:39:30.253195Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-10T11:39:30.254354Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-10T11:39:30.254473Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-10T11:39:30.257065Z node 4 :KQP_PROXY DEBUG: TraceId: "01jrfqyyje02zat3t9ectp1h4q", Created new session, sessionId: ydb://session/3?node_id=4&id=Mzg2MWMyMjEtZDkyZTNmYmYtYzYxOTRkNDAtOWU2NTMzNjU=, workerId: [4:7491647762476591731:2346], database: /dc-1, longSession: 0, local sessions count: 3 2025-04-10T11:39:30.257334Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jrfqyyje02zat3t9ectp1h4q, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=Mzg2MWMyMjEtZDkyZTNmYmYtYzYxOTRkNDAtOWU2NTMzNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7491647762476591731:2346] 2025-04-10T11:39:30.257375Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7491647762476591732:2436] 2025-04-10T11:39:30.372157Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-10T11:39:30.375629Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285170354, txId: 281474976715663] shutting down 2025-04-10T11:39:30.379152Z node 4 :KQP_PROXY DEBUG: TraceId: "01jrfqyyje02zat3t9ectp1h4q", Forwarded response to sender actor, requestId: 5, sender: [4:7491647762476591729:2432], selfId: [4:7491647719526918020:2145], source: [4:7491647762476591731:2346] 2025-04-10T11:39:30.379772Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=Mzg2MWMyMjEtZDkyZTNmYmYtYzYxOTRkNDAtOWU2NTMzNjU=, workerId: [4:7491647762476591731:2346], local sessions count: 2 2025-04-10T11:39:30.383646Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-10T11:39:30.383933Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-10T11:39:30.384035Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NjZjZmY2ZjMtMTg3Y2FjOTItY2U5YzBhNi04NWRhOTgyOQ==, TxId: 2025-04-10T11:39:30.384824Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NjZjZmY2ZjMtMTg3Y2FjOTItY2U5YzBhNi04NWRhOTgyOQ==, workerId: [4:7491647762476591728:2345], local sessions count: 1 |98.3%| [TS] {RESULT} ydb/library/query_actor/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD] >> KeyValueGRPCService::SimpleWriteReadRange >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2025-04-10T11:37:03.866481Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-10T11:37:03.866686Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.871425Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-04-10T11:37:03.871494Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-10T11:37:03.893505Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-10T11:37:03.893605Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.893903Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.893979Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-10T11:37:03.894007Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-10T11:37:03.894385Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.912843Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-10T11:37:03.912985Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.913245Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-04-10T11:37:03.913303Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2025-04-10T11:37:03.913353Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-04-10T11:37:03.913452Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.913554Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-04-10T11:37:03.913603Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-04-10T11:37:03.913634Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-04-10T11:37:03.922243Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-10T11:37:03.922365Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.922488Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-10T11:37:03.940833Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.958865Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-10T11:37:03.958939Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-10T11:37:03.958991Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-10T11:37:03.959090Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-10T11:37:03.966737Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-10T11:37:03.966847Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.967116Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-04-10T11:37:03.967300Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.967566Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-10T11:37:03.967599Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-10T11:37:03.967636Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-04-10T11:37:03.967688Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-04-10T11:37:03.967782Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-04-10T11:37:03.967881Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-04-10T11:37:03.968060Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-10T11:37:03.968102Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-10T11:37:03.968137Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-04-10T11:37:03.968185Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-10T11:37:03.968274Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res2" 2025-04-10T11:37:03.968362Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-04-10T11:37:03.968560Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-10T11:37:03.968594Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-10T11:37:03.968623Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-04-10T11:37:03.968662Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-04-10T11:37:03.968836Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-04-10T11:37:03.968874Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-04-10T11:37:03.968926Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-04-10T11:37:03.969674Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 21474838532 } }) 2025-04-10T11:37:03.969741Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-10T11:37:03.970006Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-10T11:37:03.970039Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-10T11:37:03.970131Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. 
Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-04-10T11:37:03.970190Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-04-10T11:37:03.970323Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.970545Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-10T11:37:03.970608Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-10T11:37:03.970641Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-10T11:37:03.970680Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-10T11:37:03.970735Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-04-10T11:37:03.978514Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-10T11:37:03.978638Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.978935Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-10T11:37:03.979108Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.979405Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-10T11:37:03.979440Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-10T11:37:03.979481Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-10T11:37:03.979536Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-10T11:37:03.987450Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-10T11:37:03.987544Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:03.987822Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-10T11:37:03.988004Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:03.988275Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-10T11:37:03.988303Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-10T11:37:03.988341Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-10T11:37:03.988405Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-10T11:37:03.988628Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-04-10T11:37:03.988675Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-04-10T11:37:03.988707Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-04-10T11:37:03.988791Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-10T11:37:03.988831Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-10T11:37:03.988998Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-04-10T11:37:03.989042Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-04-10T11:37:03.989130Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-10T11:37:04.007898Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-10T11:37:04.008038Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-10T11:37:04.008171Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-10T11:37:04.008314Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-10T11:37:04.008662Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... ACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:07.603379Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.603440Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.638722Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:07.638775Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-10T11:39:07.737000Z 2025-04-10T11:39:07.638802Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:07.639177Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-10T11:39:07.639212Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.00010076, QueueWeight: 5 } 2025-04-10T11:39:07.639263Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.640547Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.701041Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:07.701549Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:07.701614Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:07.701669Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.702284Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.737377Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:07.737430Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-10T11:39:07.837000Z 2025-04-10T11:39:07.737474Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:07.737698Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-10T11:39:07.737740Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.00010076, QueueWeight: 5 } 2025-04-10T11:39:07.737790Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.737870Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.800752Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:07.801416Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:07.801473Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:07.801526Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.801791Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.838273Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:07.838331Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-10T11:39:07.937000Z 2025-04-10T11:39:07.838359Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:07.838557Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-10T11:39:07.838594Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.00010076, QueueWeight: 5 } 2025-04-10T11:39:07.838650Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.838809Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.901200Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:07.902623Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:07.902686Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:07.902743Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.902809Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:07.939909Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:07.939966Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-10T11:39:08.037000Z 2025-04-10T11:39:07.939993Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:07.942072Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-10T11:39:07.942132Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.00010076, QueueWeight: 5 } 2025-04-10T11:39:07.942203Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:07.942268Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.001857Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:08.002437Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:08.002500Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:08.002556Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:08.002741Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.038483Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:08.038538Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-10T11:39:08.137000Z 2025-04-10T11:39:08.038574Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:08.038762Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-10T11:39:08.038797Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001007603144, QueueWeight: 5 } 2025-04-10T11:39:08.038855Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:08.039107Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.102755Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:08.104052Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:08.104094Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:08.104141Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998992397, 2)} }]) 2025-04-10T11:39:08.104217Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.138264Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.9998992397. FreeBalance: 0.9998992397 2025-04-10T11:39:08.138321Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-10T11:39:08.237000Z 2025-04-10T11:39:08.138348Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-10T11:39:08.138415Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.9998992397. FreeBalance: 0.9998992397. TicksToFullfill: 5.000503852. DurationToFullfillInUs: 500050.3852. TimeToFullfill: 2025-04-10T11:39:07.634245Z. Now: 2025-04-10T11:39:08.138158Z. LastAllocated: 2025-04-10T11:39:07.134195Z 2025-04-10T11:39:08.138866Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-04-10T11:39:08.138898Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.00010076, QueueWeight: 0 } 2025-04-10T11:39:08.138952Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:08.139034Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.207521Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:08.210414Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:08.210475Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:08.210533Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:08.210816Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.238038Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2025-04-10T11:39:08.302913Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7491647663484600372:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-10T11:39:08.310437Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-10T11:39:08.310499Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-10T11:39:08.310555Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T11:39:08.312595Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T11:39:08.312656Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-10T11:39:10.995783Z node 49 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[49:7491647654894664925:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:10.995880Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> TopicSessionTests::RestartSessionIfQueryStopped >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-10T11:39:00.140486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647635237489592:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:39:00.140581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000877/r3tmp/tmpiwJN5Z/pdisk_1.dat 2025-04-10T11:39:00.659569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:00.660681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
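A note on the QUOTER_SERVICE/QUOTER_PROXY trace above: the numbers are consistent with a token bucket that refills roughly one unit per 0.1 s tick ("Tick size: 0.100000s", with Kesus allocating "Amount: 1" per round trip, and Available climbing from -2.0001 to -1.0001 to -0.0001 one unit at a time). The sketch below reproduces the arithmetic of the "Charge" record at 2025-04-10T11:39:08.138415Z. It is a reading of the logged numbers, not YDB's implementation: it assumes TicksToFullfill = charge / per-tick refill, and every identifier in it is hypothetical.

# Minimal sketch (assumptions above, not YDB code): recompute the values
# printed in the QUOTER_SERVICE "Charge" record from its inputs.
from datetime import datetime, timedelta

TICK_US = 100_000                # "Tick size: 0.100000s"
charge = 5                       # 'Charge "Resource" for 5'
per_tick_refill = 0.9998992397   # "Balance: 0.9998992397" accrued over one tick

ticks_to_fulfill = charge / per_tick_refill   # assumed relation
duration_us = ticks_to_fulfill * TICK_US

last_allocated = datetime.fromisoformat("2025-04-10T11:39:07.134195")
time_to_fulfill = last_allocated + timedelta(microseconds=duration_us)

print(f"TicksToFullfill: {ticks_to_fulfill:.9f}")        # 5.000503852
print(f"DurationToFullfillInUs: {duration_us:.4f}")      # 500050.3852
print(f"TimeToFullfill: {time_to_fulfill.isoformat()}Z") # ...T11:39:07.634245Z

Run as-is, it prints the three values from the log record (5.000503852, 500050.3852, 2025-04-10T11:39:07.634245Z), which supports the token-bucket reading: draining 5 units at ~1 unit per 100 ms tick takes just over 500 ms from LastAllocated.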
2025-04-10T11:39:00.663277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:00.696978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:00.745689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:39:00.788145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:39:00.845681Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7491647635237490167:2295] 2025-04-10T11:39:00.846000Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:39:00.861205Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:39:00.861272Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:39:00.867967Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:39:00.868048Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:39:00.868092Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:39:00.871585Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:39:00.871677Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:39:00.871707Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7491647635237490183:2295] in generation 1 2025-04-10T11:39:00.874591Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:39:00.915760Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:39:00.916701Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:39:00.916766Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7491647635237490185:2296] 2025-04-10T11:39:00.916776Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:39:00.916790Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:39:00.916800Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:00.917669Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490164:2296], serverId# [1:7491647635237490181:2303], sessionId# [0:0:0] 2025-04-10T11:39:00.917845Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:39:00.917924Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:39:00.917964Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:39:00.918037Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:00.918058Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:39:00.918079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:39:00.918099Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:39:00.918395Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:39:00.918576Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-10T11:39:00.921094Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:39:00.921709Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:39:00.926289Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:39:00.929992Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490199:2313], serverId# [1:7491647635237490201:2315], sessionId# [0:0:0] 2025-04-10T11:39:00.936237Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1744285140976 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285140976 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:39:00.936454Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:00.936686Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:39:00.936751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:39:00.936780Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:39:00.936846Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1744285140976:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-10T11:39:00.937155Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1744285140976:281474976710657 keys extracted: 0 2025-04-10T11:39:00.937313Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:00.937541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:39:00.937634Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-10T11:39:00.940631Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:39:00.941999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:00.943143Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1744285140976} 2025-04-10T11:39:00.943198Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:39:00.943244Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1744285140975 2025-04-10T11:39:00.943264Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:00.943288Z node 1 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1744285140983 2025-04-10T11:39:00.943335Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:39:00.943379Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:39:00.943413Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-10T11:39:00.943472Z node 1 :TX_DATASHARD DEBUG: Complete [1744285140976 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7491647635237490005:2194], exec latency: 4 ms, propose latency: 6 ms 2025-04-10T11:39:00.943504Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-10T11:39:00.943572Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:00.955687Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-10T11:39:00.955803Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-10T11:39:00.962932Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490235:2340], serverId# [1:7491647635237490236:2341], sessionId# [0:0:0] 2025-04-10T11:39:00.973422Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 1 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 0 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1744285141000/18446744073709551615 2025-04-10T11:39:00.975590Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490240:2345], serverId# [1:7491647635237490241:2346], sessionId# [0:0:0] 2025-04-10T11:39:00.975805Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 2 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 1 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1744285141000/18446744073709551615 2025-04-10T11:39:00.977735Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490245:2350], serverId# [1:7491647635237490246:2351], sessionId# [0:0:0] 2025-04-10T11:39:00.977900Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 3 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" row version v1744285141000/18446744073709551615 2025-04-10T11:39:00.979378Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7491647635237490250:2355], serverId# 
[1:7491647635237490251:2356], sessionId# [0:0:0] 2025-04-10T11:39:00.979726Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 0 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 4 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1744285141000/18446744073709551615 2025-04-10T11:39:00.981439Z node 1 :TX_DATASHARD DEBUG: Server connected ... 1 immediate 0 planned 1 2025-04-10T11:39:37.548547Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224038016 2025-04-10T11:39:37.548592Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224038016 tableId# [OwnerId: 72057594046644480, LocalPathId: 134] schema version# 1 2025-04-10T11:39:37.549097Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224038016 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:39:37.549521Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224038016 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:37.550860Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:39:37.550924Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038015 2025-04-10T11:39:37.551437Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224038016 time 1744285177592 2025-04-10T11:39:37.551457Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038016 2025-04-10T11:39:37.551486Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038016 2025-04-10T11:39:37.551542Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224038016 coordinator 72057594046316545 last step 0 next step 1744285177600 2025-04-10T11:39:37.551829Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224038016 step# 1744285177593} 2025-04-10T11:39:37.551875Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224038016 2025-04-10T11:39:37.551911Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224038016 2025-04-10T11:39:37.551936Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224038016 2025-04-10T11:39:37.551961Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224038016 2025-04-10T11:39:37.552010Z node 3 :TX_DATASHARD DEBUG: Complete [1744285177593 : 281474976716040] from 72075186224038016 at tablet 72075186224038016 send result to client [3:7491647715488441488:2166], exec latency: 0 ms, propose latency: 3 ms 2025-04-10T11:39:37.552048Z node 3 :TX_DATASHARD INFO: 72075186224038016 Sending notify to schemeshard 72057594046644480 txId 281474976716040 state Ready TxInFly 0 2025-04-10T11:39:37.552100Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038016 2025-04-10T11:39:37.555785Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976716040 datashard 72075186224038016 state Ready 2025-04-10T11:39:37.555842Z node 3 
:TX_DATASHARD DEBUG: 72075186224038016 Got TEvSchemaChangedResult from SS at 72075186224038016 2025-04-10T11:39:37.562253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976716041:0, at schemeshard: 72057594046644480 2025-04-10T11:39:37.565901Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:39:37.565913Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038015 2025-04-10T11:39:37.566016Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038016 2025-04-10T11:39:37.573407Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224038017 actor [3:7491647792797870772:3004] 2025-04-10T11:39:37.573627Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:39:37.585098Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:39:37.585170Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:39:37.586652Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224038017 2025-04-10T11:39:37.586684Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224038017 2025-04-10T11:39:37.586712Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224038017 2025-04-10T11:39:37.586978Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:39:37.587011Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:39:37.587031Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224038017 persisting started state actor id [3:7491647792797870789:3004] in generation 1 2025-04-10T11:39:37.588176Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:39:37.588205Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224038017 2025-04-10T11:39:37.588254Z node 3 :TX_DATASHARD DEBUG: 72075186224038017 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:39:37.588285Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224038017, actorId: [3:7491647792797870791:3005] 2025-04-10T11:39:37.588295Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224038017 2025-04-10T11:39:37.588305Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224038017, state: WaitScheme 2025-04-10T11:39:37.588315Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038017 2025-04-10T11:39:37.588378Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224038017 2025-04-10T11:39:37.588421Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224038017 2025-04-10T11:39:37.588440Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224038017 2025-04-10T11:39:37.588449Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224038017 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:37.588460Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224038017 TxInFly 0 2025-04-10T11:39:37.588471Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224038017 2025-04-10T11:39:37.589060Z node 3 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224038017, clientId# [3:7491647792797870775:6304], serverId# [3:7491647792797870780:6573], sessionId# [0:0:0] 2025-04-10T11:39:37.589140Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224038017 2025-04-10T11:39:37.589328Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224038017 txId 281474976716041 ssId 72057594046644480 seqNo 2:256 2025-04-10T11:39:37.589389Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976716041 at tablet 72075186224038017 2025-04-10T11:39:37.590139Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038017 2025-04-10T11:39:37.598294Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224038017 2025-04-10T11:39:37.598406Z node 3 :TX_DATASHARD DEBUG: 72075186224038017 not sending time cast registration request in state WaitScheme 2025-04-10T11:39:37.602337Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224038017, clientId# [3:7491647792797870797:6552], serverId# [3:7491647792797870799:6548], sessionId# [0:0:0] 2025-04-10T11:39:37.602714Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716041 at step 1744285177649 at tablet 72075186224038017 { Transactions { TxId: 281474976716041 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285177649 MediatorID: 72057594046382081 TabletID: 72075186224038017 } 2025-04-10T11:39:37.602736Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038017 2025-04-10T11:39:37.602896Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224038017 2025-04-10T11:39:37.602915Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224038017 active 0 active planned 0 immediate 0 planned 1 2025-04-10T11:39:37.602941Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1744285177649:281474976716041] in PlanQueue unit at 72075186224038017 2025-04-10T11:39:37.603245Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224038017 loaded tx from db 1744285177649:281474976716041 keys extracted: 0 2025-04-10T11:39:37.603387Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224038017 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-10T11:39:37.603486Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:39:37.603497Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038017 2025-04-10T11:39:37.603538Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224038017 2025-04-10T11:39:37.603569Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038015 2025-04-10T11:39:37.603582Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224038017 tableId# [OwnerId: 72057594046644480, LocalPathId: 135] schema version# 1 2025-04-10T11:39:37.603617Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224038016 2025-04-10T11:39:37.604105Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224038017 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-10T11:39:37.604613Z 
node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224038017 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:37.610873Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224038017 time 1744285177648 2025-04-10T11:39:37.610911Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038017 2025-04-10T11:39:37.610937Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224038017 coordinator 72057594046316545 last step 0 next step 1744285177649 2025-04-10T11:39:37.611003Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224038017 step# 1744285177649} 2025-04-10T11:39:37.611048Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224038017 2025-04-10T11:39:37.611098Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224038017 2025-04-10T11:39:37.611119Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224038017 2025-04-10T11:39:37.611150Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224038017 2025-04-10T11:39:37.611207Z node 3 :TX_DATASHARD DEBUG: Complete [1744285177649 : 281474976716041] from 72075186224038017 at tablet 72075186224038017 send result to client [3:7491647715488441488:2166], exec latency: 0 ms, propose latency: 7 ms 2025-04-10T11:39:37.611242Z node 3 :TX_DATASHARD INFO: 72075186224038017 Sending notify to schemeshard 72057594046644480 txId 281474976716041 state Ready TxInFly 0 2025-04-10T11:39:37.611286Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224038017 2025-04-10T11:39:37.617622Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976716041 datashard 72075186224038017 state Ready 2025-04-10T11:39:37.617674Z node 3 :TX_DATASHARD DEBUG: 72075186224038017 Got TEvSchemaChangedResult from SS at 72075186224038017 |98.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> KafkaProtocol::MetadataScenario [GOOD] >> KafkaProtocol::MetadataInServerlessScenario |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KeyValueGRPCService::SimpleWriteReadRange [GOOD] >> KeyValueGRPCService::SimpleWriteListRange >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points >> GivenIdRange::Points 
[GOOD] >> GivenIdRange::Runs |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate >> GivenIdRange::Allocate [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] |98.4%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> RowDispatcherTests::TwoClientOneSession [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] >> KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::MetadataInServerlessScenario [GOOD] >> KafkaProtocol::NativeKafkaBalanceScenario >> RowDispatcherTests::ProcessNoSession >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 |98.4%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> DataShardStats::NoData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-04-10T11:37:44.734690Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-04-10T11:37:44.734891Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-04-10T11:37:44.735035Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-04-10T11:37:44.735106Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-04-10T11:37:44.735140Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-04-10T11:37:44.735183Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-04-10T11:37:44.735232Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-04-10T11:37:44.735356Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-10T11:37:44.735546Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-10T11:37:44.735754Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-04-10T11:37:44.735827Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-10T11:37:44.745046Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-10T11:37:44.745172Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-10T11:37:44.745333Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-04-10T11:37:44.745367Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-04-10T11:37:44.745394Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-04-10T11:37:44.745460Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-04-10T11:37:44.745492Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-04-10T11:37:44.745522Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-04-10T11:37:44.745596Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-10T11:37:44.745654Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-10T11:37:44.745753Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: 
TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-10T11:37:44.745795Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-10T11:37:44.869814Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-04-10T11:37:44.874201Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-04-10T11:37:44.874303Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-04-10T11:37:44.874355Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-04-10T11:37:44.874381Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-04-10T11:37:44.874418Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-04-10T11:37:44.874440Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-04-10T11:37:44.874526Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-04-10T11:37:44.874686Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-04-10T11:37:44.874840Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-04-10T11:37:44.874929Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-04-10T11:37:45.051912Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-04-10T11:37:45.053323Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-04-10T11:37:45.053399Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-04-10T11:37:45.053452Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-10T11:37:45.053593Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.053641Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.060846Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-10T11:37:45.060959Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.061003Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.061114Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-10T11:37:45.061157Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.061205Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.194794Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.194972Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.195117Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.195341Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.206644Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.206738Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.221972Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.222123Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.222174Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.227990Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.228164Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.228196Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.232435Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.232626Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.232665Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.239176Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.239328Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.239361Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.246278Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.246448Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.246488Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.247337Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.256349Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.256550Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.256598Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.268500Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.268595Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.269632Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.277081Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-10T11:37:45.277227Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.277272Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.283722Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-10T11:37:45.283802Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-04-10T11:37:45.28 ... 2025-04-10T11:39:53.402394Z node 38 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-10T11:39:53.402431Z node 38 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.402464Z node 38 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.403107Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-04-10T11:39:53.403339Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-10T11:39:53.403590Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-10T11:39:53.403837Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-04-10T11:39:53.404291Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-10T11:39:53.404775Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-10T11:39:53.404911Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-04-10T11:39:53.405024Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-10T11:39:53.405117Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-10T11:39:53.405221Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-04-10T11:39:53.405319Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-10T11:39:53.405442Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-10T11:39:53.405552Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-04-10T11:39:53.405665Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-10T11:39:53.405756Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-10T11:39:53.405863Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received 
TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-04-10T11:39:53.405952Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-10T11:39:53.406048Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-04-10T11:39:53.406111Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-04-10T11:39:53.410507Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 2025-04-10T11:39:53.410602Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-04-10T11:39:53.410709Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-04-10T11:39:53.410782Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-04-10T11:39:53.546246Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-04-10T11:39:53.558850Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-04-10T11:39:53.558934Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-04-10T11:39:53.558997Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-10T11:39:53.559030Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.559057Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.559316Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-04-10T11:39:53.559508Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-10T11:39:53.559714Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-10T11:39:53.560121Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-04-10T11:39:53.560615Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-04-10T11:39:53.562008Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-10T11:39:53.562213Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-04-10T11:39:53.562687Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-10T11:39:53.562924Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-04-10T11:39:53.563103Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-04-10T11:39:53.563204Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id 
QueryId 2025-04-10T11:39:53.563319Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-04-10T11:39:53.563375Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-04-10T11:39:53.563483Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-04-10T11:39:53.563706Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-04-10T11:39:53.563759Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-04-10T11:39:53.563843Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-04-10T11:39:53.697656Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-04-10T11:39:53.706828Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-04-10T11:39:53.706931Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-04-10T11:39:53.706984Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-10T11:39:53.707007Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.707033Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.707382Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-04-10T11:39:53.707587Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-04-10T11:39:53.707890Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-10T11:39:53.738456Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-04-10T11:39:53.738995Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-04-10T11:39:53.739604Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-10T11:39:53.739929Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-10T11:39:53.740045Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-10T11:39:53.740328Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-04-10T11:39:53.740398Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-10T11:39:53.740656Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-10T11:39:53.740748Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-10T11:39:53.741045Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-04-10T11:39:53.741104Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-04-10T11:39:53.741193Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-04-10T11:39:53.861137Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-04-10T11:39:53.875454Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-04-10T11:39:53.875562Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-04-10T11:39:53.875628Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-10T11:39:53.875659Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.875694Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-10T11:39:53.876052Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-04-10T11:39:53.876925Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-10T11:39:53.877176Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-10T11:39:53.877615Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-04-10T11:39:53.877708Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-04-10T11:39:53.877765Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-04-10T11:39:53.877858Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [44:22:2063]
|98.5%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD]
>> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0]
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> KqpTpch::Query14 [GOOD]
>> KqpTpch::Query15
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD]
>> KeyValueGRPCService::SimpleCreateAlterDropVolume
>> TSelectFromViewTest::OneTable [GOOD]
>> TSelectFromViewTest::OneTableUsingRelativeName
>> TDqPqRdReadActorTests::Backpressure [GOOD]
>> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD]
>> TDqPqRdReadActorTests::RowDispatcherIsRestarted2
>> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD]
>> TControlPlaneProxyTest::ShouldSendCreateQuery
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD]
>> test_simple.py::TestSimple::test[tablestores] [GOOD]
>> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD]
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD]
>> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD]
>> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated
>> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD]
>> TControlPlaneProxyTest::ShouldSendListQueries
>> KqpTpch::Query15 [GOOD]
>> KqpTpch::Query16
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD]
>> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD]
>> DataShardFollowers::FollowerReadDuringSplit
>> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted
>> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD]
>> TControlPlaneProxyTest::ShouldSendListQueries [GOOD]
>> TControlPlaneProxyTest::ShouldSendDescribeQuery
>> TDqPqRdReadActorTests::MetadataFields
>> TDqPqRdReadActorTests::MetadataFields [GOOD]
>> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState
>> test_drain.py::TestHive::test_drain_tablets
>> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD]
>> TControlPlaneProxyTest::ShouldSendGetQueryStatus
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test
>> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD]
|98.5%| [TM] {RESULT} ydb/tests/fq/http_api/py3test
>> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD]
>> TControlPlaneProxyTest::ShouldSendModifyQuery
>> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD]
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD]
>> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD]
>> KeyValueGRPCService::SimpleListPartitions [GOOD]
>> TDqPqReadActorTest::TestReadFromTopic
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD]
>> KqpTpch::Query16 [GOOD]
>> KqpTpch::Query17
>> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyTest::ShouldSendDeleteQuery
------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD]
Test command err:
2025-04-10T11:38:14.606617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647433648108425:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:38:14.609428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008d1/r3tmp/tmpRBGLga/pdisk_1.dat 2025-04-10T11:38:15.210763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:15.210889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:15.217279Z node 1
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:15.218814Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28577, node 1 2025-04-10T11:38:15.318633Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-10T11:38:15.319033Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-10T11:38:15.319083Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-10T11:38:15.319473Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-10T11:38:15.319500Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-10T11:38:15.319986Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-10T11:38:15.320118Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-10T11:38:15.320179Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-10T11:38:15.323223Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-10T11:38:15.323245Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-10T11:38:15.329967Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:38:15.330569Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:38:15.330864Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-10T11:38:15.330883Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-10T11:38:15.395298Z node 1 :GRPC_SERVER DEBUG: [0x51a000057680] created request Name# BlobStorageConfig 2025-04-10T11:38:15.396863Z node 1 :GRPC_SERVER DEBUG: [0x51a000057c80] created request Name# HiveCreateTablet 2025-04-10T11:38:15.397298Z node 1 :GRPC_SERVER DEBUG: [0x51a000058280] created request Name# TabletStateRequest 2025-04-10T11:38:15.402309Z node 1 :GRPC_SERVER DEBUG: [0x51a000058880] created request Name# SchemeOperationStatus 2025-04-10T11:38:15.402862Z node 1 :GRPC_SERVER DEBUG: [0x51a000058e80] created request Name# ChooseProxy 2025-04-10T11:38:15.403214Z node 1 :GRPC_SERVER DEBUG: [0x51a000059480] created request Name# ResolveNode 2025-04-10T11:38:15.403523Z node 1 :GRPC_SERVER DEBUG: [0x51a000059a80] created request Name# FillNode 2025-04-10T11:38:15.403897Z node 1 :GRPC_SERVER DEBUG: [0x51a00005a080] created request Name# DrainNode 2025-04-10T11:38:15.404191Z node 1 :GRPC_SERVER DEBUG: [0x51a00005a680] created request Name# InterconnectDebug 2025-04-10T11:38:15.404501Z node 1 :GRPC_SERVER DEBUG: [0x51a00005ac80] created request Name# TestShardControl 2025-04-10T11:38:15.404783Z node 1 :GRPC_SERVER DEBUG: [0x51a00005b280] created request Name# RegisterNode 2025-04-10T11:38:15.407968Z node 1 :GRPC_SERVER DEBUG: [0x51a00005b880] created request Name# CmsRequest 2025-04-10T11:38:15.410805Z node 1 :GRPC_SERVER DEBUG: [0x51a00005be80] created request Name# ConsoleRequest 2025-04-10T11:38:15.418272Z node 1 :GRPC_SERVER DEBUG: [0x51a00005c480] created request Name# SchemeInitRoot 2025-04-10T11:38:15.418762Z node 1 :GRPC_SERVER DEBUG: [0x51a00005ca80] created request Name# PersQueueRequest 2025-04-10T11:38:15.419095Z node 1 :GRPC_SERVER DEBUG: [0x51a00005d080] created request Name# SchemeOperation 2025-04-10T11:38:15.419430Z node 1 :GRPC_SERVER DEBUG: [0x51a00005d680] created request Name# SchemeDescribe 2025-04-10T11:38:15.515915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:38:15.515939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-10T11:38:15.515944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:38:15.516093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:38:16.229858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2-pool" Kind: "hdd2" } StoragePools { Name: "hdd-pool" Kind: "hdd" } StoragePools { Name: "hdd1-pool" Kind: "hdd1" } StoragePools { Name: "ssd-pool" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T11:38:16.231583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.233674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T11:38:16.236118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T11:38:16.236231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.243203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T11:38:16.244323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T11:38:16.244503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.244557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T11:38:16.244659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-10T11:38:16.244677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-10T11:38:16.251231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.251310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T11:38:16.251329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-10T11:38:16.254488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:38:16.254516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-10T11:38:16.254537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:38:16.255266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.255300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:16.255346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:38:16.255386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-10T11:38:16.261039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:38:16.267256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-10T11:38:16.267431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-10T11:38:16.276739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1744285096316, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T11:38:16.276980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1744285096316 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T11:38:16.277028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:38:16.279081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-10T11:38:16.279132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-10T11:38:16.279404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-10T11:38:16.279449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T11:38:16.282414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T11:38:16.282436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-1 ... 0T11:40:04.461850Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7491647903901010296:2372], at schemeshard: 72057594046644480, txId: 281474976715662, path id: 3 2025-04-10T11:40:04.461910Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T11:40:04.461953Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715662:0 ProgressState 2025-04-10T11:40:04.462019Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-10T11:40:04.462040Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T11:40:04.462079Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-10T11:40:04.462101Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T11:40:04.462127Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: false 2025-04-10T11:40:04.462168Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-10T11:40:04.462196Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-10T11:40:04.462213Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-10T11:40:04.462396Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-10T11:40:04.462422Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-04-10T11:40:04.462440Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-04-10T11:40:04.462456Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-04-10T11:40:04.463324Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.463422Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.463444Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-10T11:40:04.463473Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-10T11:40:04.463500Z node 33 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-10T11:40:04.465704Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.465810Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.468651Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-10T11:40:04.468702Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-10T11:40:04.468743Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-10T11:40:04.468845Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-04-10T11:40:04.468872Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7491647908195978273:2345] 2025-04-10T11:40:04.469306Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:40:04.469330Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:40:04.469343Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-10T11:40:04.469473Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.472831Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-10T11:40:04.475523Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-10T11:40:04.475963Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-10T11:40:04.476095Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01jrfqzzzv9qj5nygdqx271ehe, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:41262, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-10T11:40:04.476293Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-10T11:40:04.476518Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-10T11:40:04.476664Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-10T11:40:04.476787Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-10T11:40:04.476956Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-10T11:40:04.478279Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-10T11:40:04.478963Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-10T11:40:04.479061Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-10T11:40:04.479245Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037890 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-10T11:40:04.479667Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-10T11:40:04.480354Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-04-10T11:40:04.480400Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037890 not found 2025-04-10T11:40:04.480453Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037888 not found 2025-04-10T11:40:04.485138Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-10T11:40:04.485193Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-10T11:40:04.485272Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-10T11:40:04.485284Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-10T11:40:04.485304Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-10T11:40:04.485326Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-10T11:40:04.485376Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-10T11:40:04.497066Z node 33 :GRPC_SERVER DEBUG: [0x51a00005f480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.497489Z node 33 :GRPC_SERVER DEBUG: [0x51a000121880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.497845Z node 33 :GRPC_SERVER DEBUG: [0x51a00003ea80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.498218Z node 33 :GRPC_SERVER DEBUG: [0x51a0000dc880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.498599Z node 33 :GRPC_SERVER DEBUG: [0x51a000097280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.498960Z node 33 :GRPC_SERVER DEBUG: [0x51a00000ba80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.499300Z node 33 :GRPC_SERVER DEBUG: [0x51a00005ee80] received request 
Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.499643Z node 33 :GRPC_SERVER DEBUG: [0x51a000117080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.500002Z node 33 :GRPC_SERVER DEBUG: [0x51a000005a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.500325Z node 33 :GRPC_SERVER DEBUG: [0x51a000133280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.500669Z node 33 :GRPC_SERVER DEBUG: [0x51a0000b5e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.501018Z node 33 :GRPC_SERVER DEBUG: [0x51a0000e6480] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.501344Z node 33 :GRPC_SERVER DEBUG: [0x51a000059480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.501694Z node 33 :GRPC_SERVER DEBUG: [0x51a000123080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.502032Z node 33 :GRPC_SERVER DEBUG: [0x51a000106e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.502907Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f7880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-10T11:40:04.503314Z node 33 :GRPC_SERVER DEBUG: [0x51a00003fc80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
|98.5%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest
>> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD]
>> TControlPlaneProxyTest::ShouldSendControlQuery
>> TSelectFromViewTest::OneTableUsingRelativeName [GOOD]
>> TSelectFromViewTest::DisabledFeatureFlag
>> DataShardFollowers::FollowerReadDuringSplit [GOOD]
>> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD]
>> TControlPlaneProxyTest::ShouldSendGetResultData
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD]
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyTest::ShouldSendListJobs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD]
Test command err:
2025-04-10T11:39:15.897871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:15.898207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:15.898463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a20/r3tmp/tmpx3vDi8/pdisk_1.dat 2025-04-10T11:39:16.437015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:39:16.478958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:16.520975Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-10T11:39:16.523147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:16.525406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:16.525629Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-10T11:39:16.538127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:16.624086Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-10T11:39:16.624166Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T11:39:16.625109Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-10T11:39:16.749355Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-10T11:39:16.749466Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T11:39:16.750210Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T11:39:16.750345Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T11:39:16.750668Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T11:39:16.750866Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-10T11:39:16.751010Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T11:39:16.753523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:39:16.754510Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T11:39:16.755732Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T11:39:16.755806Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-10T11:39:16.804350Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:39:16.805670Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:39:16.806131Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:39:16.806535Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:39:16.857883Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:39:16.858696Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:39:16.858815Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:39:16.862513Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:39:16.862631Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:39:16.862695Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:39:16.863844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:39:16.864007Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:39:16.864095Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:39:16.874889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:39:16.934078Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:39:16.935295Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:39:16.935442Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:39:16.935487Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:39:16.935518Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:39:16.935555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:39:16.935798Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:16.936449Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:39:16.937486Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:39:16.937599Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:39:16.937654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:39:16.937689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:39:16.937729Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:39:16.937766Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:39:16.937813Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:39:16.937847Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:39:16.937885Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:39:16.938004Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:39:16.938035Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:39:16.938079Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:39:16.945721Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:39:16.945789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:39:16.945993Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:39:16.946364Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:39:16.946436Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:39:16.947147Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:39:16.947219Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:39:16.947249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:39:16.947281Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:39:16.947309Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:39:16.947570Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:39:16.947623Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:39:16.947656Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:39:16.947693Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:39:16.947751Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:39:16.947775Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:39:16.947811Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:39:16.947842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:39:16.947881Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:39:16.949375Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:39:16.949436Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025- ... cessing event TEvDataShard::TEvReadAck 2025-04-10T11:40:14.692853Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1097:2865] on missing iterator: { ReadId: 0 SeqNo: 1 MaxRows: 999 MaxBytes: 5242880 } 2025-04-10T11:40:14.693097Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553218, Sender [8:1097:2865], Recipient [8:1087:2861]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 2025-04-10T11:40:14.693139Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadAck 2025-04-10T11:40:14.693192Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1097:2865] on missing iterator: { ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 } 2025-04-10T11:40:14.693689Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1097:2865], Recipient [8:1087:2861]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:40:14.693751Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-10T11:40:14.693843Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-04-10T11:40:14.941816Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] Handle TEvExecuteKqpTransaction 2025-04-10T11:40:14.941914Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-10T11:40:14.943963Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1116:2880], Recipient [8:1089:2862]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:40:14.944059Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:40:14.944148Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1114:2879], serverId# [8:1116:2880], sessionId# [0:0:0] 2025-04-10T11:40:14.944322Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfr09zg4wpbhece6gmzj5a2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=OTBhZGQ2ZWQtMzQ5YTQzYTMtZWJmNzBjNGEtZmZmMzdlODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T11:40:14.946792Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1120:2881], Recipient [8:1089:2862]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-10T11:40:14.946862Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-04-10T11:40:14.947038Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-10T11:40:14.947135Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:40:14.947342Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-10T11:40:14.947466Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2025-04-10T11:40:14.948338Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2025-04-10T11:40:14.948811Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-04-10T11:40:14.948941Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1500/18446744073709551615 2025-04-10T11:40:14.949048Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-10T11:40:14.949161Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-10T11:40:14.949222Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-10T11:40:14.949288Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-10T11:40:14.949342Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-10T11:40:14.949395Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-10T11:40:14.949451Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-10T11:40:14.949485Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-10T11:40:14.949512Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-10T11:40:14.949540Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-10T11:40:14.949708Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:40:14.950027Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-04-10T11:40:14.950074Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-04-10T11:40:14.950234Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:40:14.950355Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-04-10T11:40:14.950465Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-04-10T11:40:14.950562Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} requests PageCollection [72075186224037888:1:28:1:12288:190:0] 65 bytes, 1 pages: [0 4] 2025-04-10T11:40:14.950688Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 1 pages 2025-04-10T11:40:14.950947Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:28:1:12288:190:0] ok OK}, category 1 2025-04-10T11:40:14.951134Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-10T11:40:14.951178Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-10T11:40:14.951311Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-10T11:40:14.951562Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1120:2881], 0} after executionsCount# 2 2025-04-10T11:40:14.951650Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1120:2881], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-10T11:40:14.951772Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-10T11:40:14.951809Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-10T11:40:14.951844Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-10T11:40:14.951877Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-10T11:40:14.951931Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-10T11:40:14.951955Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-10T11:40:14.951996Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-10T11:40:14.952044Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-10T11:40:14.952156Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:40:14.952281Z node 8 
:TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-04-10T11:40:14.952356Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-10T11:40:14.952575Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1089:2862], Recipient [8:1089:2862]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-10T11:40:14.952621Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-04-10T11:40:14.952746Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-04-10T11:40:14.952833Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T11:40:14.952941Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1120:2881], 0}, firstUnprocessedQuery# 0 2025-04-10T11:40:14.953033Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1120:2881], 0}, FirstUnprocessedQuery# 0 2025-04-10T11:40:14.953175Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1120:2881], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-10T11:40:14.953257Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1120:2881], 0} finished in ReadContinue 2025-04-10T11:40:14.953404Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T11:40:14.953498Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T11:40:14.954861Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1120:2881], Recipient [8:1089:2862]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-10T11:40:14.954921Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-10T11:40:14.955007Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } }
|98.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest
>> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD]
>> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> KqpTpch::Query17 [GOOD]
>> KqpTpch::Query18
>> TControlPlaneProxyTest::ShouldSendListJobs [GOOD]
>> TControlPlaneProxyTest::ShouldSendDescribeJob
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TDqPqReadActorTest::TestReadFromTopic [GOOD]
>> TDqPqReadActorTest::TestReadFromTopicFromNow
>> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD]
>> TControlPlaneProxyTest::ShouldSendCreateConnection
>> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
>> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD]
>> TControlPlaneProxyTest::ShouldSendListConnections
>> test_drain.py::TestHive::test_drain_on_stop
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD]
>> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true
>> KqpTpch::Query18 [GOOD]
>> KqpTpch::Query19
>> TControlPlaneProxyTest::ShouldSendListConnections [GOOD]
>> TControlPlaneProxyTest::ShouldSendDescribeConnection
>> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD]
>> TDqPqReadActorTest::ReadWithFreeSpace
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> DataShardStats::NoData [GOOD]
>> DataShardStats::Follower
>> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD]
>> TControlPlaneProxyTest::ShouldSendModifyConnection
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD]
>> TSelectFromViewTest::DisabledFeatureFlag [GOOD]
>> TSelectFromViewTest::ReadTestCasesFromFiles
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD]
>> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD]
>> TControlPlaneProxyTest::ShouldSendDeleteConnection
>> TDqPqReadActorTest::ReadWithFreeSpace [GOOD]
>> TDqPqReadActorTest::ReadNonExistentTopic
>> TDqPqReadActorTest::ReadNonExistentTopic [GOOD]
>> TDqPqReadActorTest::TestSaveLoadPqRead
>> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD]
>> TControlPlaneProxyTest::ShouldSendTestConnection
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD]
>> TControlPlaneProxyTest::ShouldSendCreateBinding
>> KqpTpch::Query19 [GOOD]
>> KqpTpch::Query20
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD]
>> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD]
>> TControlPlaneProxyTest::ShouldSendListBindings
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD]
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TControlPlaneProxyTest::ShouldSendListBindings [GOOD]
>> TControlPlaneProxyTest::ShouldSendDescribeBinding
>> DataShardStats::Follower [GOOD]
>> DataShardStats::Tli
>> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD]
>> TControlPlaneProxyTest::ShouldSendModifyBinding
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD]
Test command err:
2025-04-10T11:31:55.651086Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:55.749286Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:55.773723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:55.773975Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:55.782620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:55.782854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:55.783071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:55.783194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:55.783330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:55.783447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:55.783569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:55.783668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:55.783769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:55.783893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:55.784010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:55.784117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T11:31:55.814894Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T11:31:55.815102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T11:31:55.815154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T11:31:55.815377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:31:55.815595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T11:31:55.815686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T11:31:55.815736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T11:31:55.815995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T11:31:55.816063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T11:31:55.816104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T11:31:55.816136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T11:31:55.816348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T11:31:55.816429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T11:31:55.816485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T11:31:55.816517Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T11:31:55.816604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T11:31:55.816658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T11:31:55.816730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T11:31:55.816765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T11:31:55.816827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T11:31:55.816857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T11:31:55.816877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T11:31:55.816913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T11:31:55.816940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T11:31:55.816958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T11:31:55.817264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36;
2025-04-10T11:31:55.817319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24;
2025-04-10T11:31:55.817380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29;
2025-04-10T11:31:55.817456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32;
2025-04-10T11:31:55.817607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T11:31:55.817644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T11:31:55.817665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T11:31:55.817808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T11:31:55.817854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T11:31:55.817873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T11:31:55.817976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T11:31:55.818006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T11:31:55.818025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T11:31:55.818234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T11:31:55.818280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T11:31:55.818311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T11:31:55.818419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-10T11:31:55.818453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-10T11:31:55.818488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish;
...
T11:40:30.273646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14812:16773];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
2025-04-10T11:40:31.508021Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-04-10T11:40:31.508177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18;
2025-04-10T11:40:31.508916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=635;
2025-04-10T11:40:31.508987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=737;
2025-04-10T11:40:31.521789Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-04-10T11:40:31.521917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15;
2025-04-10T11:40:31.552821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=30760;
2025-04-10T11:40:31.585109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=29915;
2025-04-10T11:40:31.585262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=32290;
2025-04-10T11:40:31.585505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=153;
2025-04-10T11:40:31.585718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=128;
2025-04-10T11:40:31.585971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=186;
2025-04-10T11:40:31.586730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=677;
2025-04-10T11:40:31.587048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=230;
2025-04-10T11:40:31.587102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=65116;
2025-04-10T11:40:31.600409Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-04-10T11:40:31.600520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18;
2025-04-10T11:40:31.605680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5036;
2025-04-10T11:40:31.648243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=42436;
2025-04-10T11:40:31.648412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=50;
2025-04-10T11:40:31.648540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=45;
2025-04-10T11:40:31.648601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11;
2025-04-10T11:40:31.648679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14;
2025-04-10T11:40:31.648759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=12;
2025-04-10T11:40:31.648861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52;
2025-04-10T11:40:31.648975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=19;
2025-04-10T11:40:31.649107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=82;
2025-04-10T11:40:31.649181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9;
2025-04-10T11:40:31.649283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40;
2025-04-10T11:40:31.649410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=76;
2025-04-10T11:40:31.649533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=71;
2025-04-10T11:40:31.649588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=48991;
2025-04-10T11:40:31.649838Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113965260;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=169434716;raw_bytes=262645956;count=79;records=2743332} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-04-10T11:40:31.652437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14812:16773];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-04-10T11:40:31.652559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14812:16773];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-04-10T11:40:31.652691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL;
2025-04-10T11:40:31.652791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0;
2025-04-10T11:40:31.653130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-10T11:40:31.653229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:40:31.653524Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20;
2025-04-10T11:40:31.653634Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-04-10T11:40:31.653704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T11:40:31.653780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T11:40:31.653856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:40:31.654046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T11:40:31.660102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:40:31.670010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184;
2025-04-10T11:40:31.672418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-04-10T11:40:31.672504Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
2025-04-10T11:40:31.672545Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-04-10T11:40:31.672628Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-10T11:40:31.672746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T11:40:31.673103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20;
2025-04-10T11:40:31.673231Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-04-10T11:40:31.673323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T11:40:31.673435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T11:40:31.673509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T11:40:31.673645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s;
2025-04-10T11:40:31.673728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14812:16773];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
>> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding
>> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD]
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD]
Test command err:
2025-04-10T11:37:10.021164Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-10T11:37:10.308421Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:10.696592Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:11.103161Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:11.502889Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:11.945360Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:12.407474Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:12.820452Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:13.245172Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-10T11:37:13.707031Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:14.120978Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:14.584113Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:15.041757Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:15.481284Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:15.926483Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:16.351193Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:16.783847Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:17.214246Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:17.688581Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:18.167746Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:18.672253Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:19.197913Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:19.679108Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:20.149354Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:49.878745Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-10T11:37:50.637927Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:51.357653Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:52.067753Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:52.745908Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:53.450419Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:54.217169Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:54.927266Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:55.668092Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-10T11:37:56.434547Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:57.225833Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:58.012433Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:58.729794Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:37:59.594932Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:00.475650Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:01.379588Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:02.129818Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:02.953412Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:03.729141Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:04.528346Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:05.241969Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:06.021611Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:06.805678Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:38:07.563332Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:08.897049Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-10T11:39:11.788274Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:12.771025Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:13.807365Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:14.769238Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:15.713348Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:18.756141Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:21.802333Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:22.866495Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:23.877019Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:24.916006Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:28.851925Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:30.364758Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:33.407901Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:34.950901Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:37.976160Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:39.226074Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:40.735361Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:44.464355Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-10T11:39:46.181524Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:47.830936Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:49.226697Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:50.701567Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:52.135774Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:53.369978Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:54.917501Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:56.454833Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:57.930054Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:39:59.559494Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:40:01.130304Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-10T11:40:02.462240Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |98.6%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-04-10T11:32:11.095852Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:11.207407Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:11.233682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:11.234071Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:11.243595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:11.243853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:11.244106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:11.244218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:11.244334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:11.244486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:11.244596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:11.244717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:11.244816Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:11.244918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.245039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:11.245167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:11.277010Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:11.277301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:11.277366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:11.277570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:11.277762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:11.277867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:11.277929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:11.278028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:11.278110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:11.278194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:11.278233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:11.278436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:11.278530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:11.278593Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:11.278629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:11.278722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:11.278786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:11.278836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:11.278886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:11.278977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:11.279021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:11.279052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:11.279113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:11.279152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:11.279182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:11.279668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-10T11:32:11.279772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-10T11:32:11.279868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-10T11:32:11.279984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-10T11:32:11.280200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:11.280263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:11.280321Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:11.280533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:11.280586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.280620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:11.280820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:11.280897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:11.280938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:11.281168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:11.281215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:11.281253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:11.281379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:11.281444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:11.281501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
T11:40:39.349736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10456:12417];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T11:40:40.198824Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:40.198964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-10T11:40:40.199737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=664; 2025-04-10T11:40:40.199828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=795; 2025-04-10T11:40:40.209806Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:40.209949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-10T11:40:40.230328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=20232; 2025-04-10T11:40:40.249378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=17326; 2025-04-10T11:40:40.249537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=19071; 2025-04-10T11:40:40.249799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=153; 2025-04-10T11:40:40.249992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=117; 2025-04-10T11:40:40.250255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=191; 2025-04-10T11:40:40.250497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=158; 2025-04-10T11:40:40.250773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=213; 2025-04-10T11:40:40.250827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=40809; 2025-04-10T11:40:40.256558Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:40.256699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=20; 2025-04-10T11:40:40.261408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4567; 2025-04-10T11:40:40.295467Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=33914; 2025-04-10T11:40:40.295655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=61; 2025-04-10T11:40:40.295770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=36; 2025-04-10T11:40:40.295848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-10T11:40:40.295909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-10T11:40:40.295988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-10T11:40:40.296087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-04-10T11:40:40.296147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:40:40.296283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=77; 2025-04-10T11:40:40.296360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-04-10T11:40:40.296454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=46; 2025-04-10T11:40:40.296581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=77; 2025-04-10T11:40:40.296703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=72; 2025-04-10T11:40:40.296781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=39983; 2025-04-10T11:40:40.297047Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:40:40.298527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:40:40.298635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:40:40.298766Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:40:40.298849Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:40:40.299154Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:40:40.299264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:40.299544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-10T11:40:40.299642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:40:40.299712Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:40:40.299773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:40.299829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:40.299966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:40:40.305792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:40.312190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:40:40.315825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:40:40.315905Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
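The StartCleanup/StartCleanupStop records above show the background cleanup pass computing a snapshot barrier (plan_step/tx_id) over 20 portions and then bailing out with skip_reason=no_changes because nothing was eligible. A rough sketch of that gating decision, under simplified assumed types (TSnapshot and TPortion here are illustrative, not YDB's real structures):

    // Illustrative sketch: cleanup proceeds only when some portion is both
    // dropped and invisible to every snapshot at or below the barrier;
    // otherwise the pass is skipped, which the log reports as
    // "background=cleanup;skip_reason=no_changes".
    #include <cstdint>
    #include <optional>
    #include <vector>

    struct TSnapshot {
        uint64_t PlanStep = 0;
        uint64_t TxId = 0;
    };

    struct TPortion {
        bool Dropped = false;
        TSnapshot RemoveSnapshot;  // when the portion stopped being visible
    };

    bool LessOrEqual(const TSnapshot& a, const TSnapshot& b) {
        return a.PlanStep < b.PlanStep ||
               (a.PlanStep == b.PlanStep && a.TxId <= b.TxId);
    }

    // Returns the portions ready for physical deletion, or nullopt when the
    // whole pass should be skipped (the "no_changes" case above).
    std::optional<std::vector<const TPortion*>> StartCleanup(
            const std::vector<TPortion>& portions, const TSnapshot& barrier) {
        std::vector<const TPortion*> ready;
        for (const auto& p : portions) {
            if (p.Dropped && LessOrEqual(p.RemoveSnapshot, barrier)) {
                ready.push_back(&p);
            }
        }
        if (ready.empty()) {
            return std::nullopt;  // nothing to clean: skip_reason=no_changes
        }
        return ready;
    }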
2025-04-10T11:40:40.315946Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:40:40.316041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:40:40.316155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:40.316505Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-10T11:40:40.316626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:40:40.316709Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:40:40.316787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:40.316864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:40.317010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-10T11:40:40.317107Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10456:12417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD]
>> TDqPqReadActorTest::LoadCorruptedState
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD]
>> TDqPqReadActorTest::LoadCorruptedState [GOOD]
>> TDqPqReadActorTest::TestLoadFromSeveralStates
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD]
Test command err:
2025-04-10T11:32:06.631233Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:06.753632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:06.779906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:06.780224Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:06.788449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:06.788632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:06.788874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:06.788999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:06.789144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:06.789295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:06.789410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:06.789537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:06.789645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:06.789756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.789883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:06.789992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:06.815133Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:06.815309Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:06.815348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:06.815473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.815577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:06.815663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:06.815702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:06.815886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:06.815950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:06.815990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:06.816019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:06.816158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:06.816204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:06.816238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:06.816261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:06.816321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:06.816354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:06.816419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:06.816436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:06.816492Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:06.816524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:06.816546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:06.816579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:06.816604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:06.816619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:06.816901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-04-10T11:32:06.816961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-04-10T11:32:06.817020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-10T11:32:06.817112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-10T11:32:06.817247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:06.817283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:06.817308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:06.817449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:06.817480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.817499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:06.817611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:06.817647Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:06.817682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:06.817819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:06.817845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:06.817864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:06.817951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:06.817982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:06.818023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 0:44.421268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14813:16774];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T11:40:46.133525Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:46.133671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=20; 2025-04-10T11:40:46.134652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=879; 2025-04-10T11:40:46.134714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=980; 2025-04-10T11:40:46.144476Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:46.144597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2025-04-10T11:40:46.246398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=101680; 2025-04-10T11:40:46.356178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=108040; 2025-04-10T11:40:46.356323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=109781; 2025-04-10T11:40:46.356537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=135; 2025-04-10T11:40:46.356698Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=103; 2025-04-10T11:40:46.356980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=218; 2025-04-10T11:40:46.357214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=172; 2025-04-10T11:40:46.358212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=925; 2025-04-10T11:40:46.358299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=213641; 2025-04-10T11:40:46.408151Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:40:46.408272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-04-10T11:40:46.429250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=20857; 2025-04-10T11:40:46.491261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=61871; 2025-04-10T11:40:46.491462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=71; 2025-04-10T11:40:46.491586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=47; 2025-04-10T11:40:46.491650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-10T11:40:46.491725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T11:40:46.491786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-04-10T11:40:46.491904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2025-04-10T11:40:46.491967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T11:40:46.492115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=97; 2025-04-10T11:40:46.492180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2025-04-10T11:40:46.492287Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=49; 2025-04-10T11:40:46.492422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=71; 2025-04-10T11:40:46.492572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=70; 2025-04-10T11:40:46.492627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=84286; 2025-04-10T11:40:46.492886Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113965260;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=169434716;raw_bytes=262645956;count=79;records=2743332} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:40:46.494111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14813:16774];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:40:46.495132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14813:16774];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:40:46.495251Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:40:46.495327Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:40:46.495648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:40:46.495744Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:46.496015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-10T11:40:46.496113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:40:46.496176Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:40:46.496244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:46.496312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:46.496446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:40:46.503098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:46.519776Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:14813:16774];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:40:46.526837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:40:46.526935Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:40:46.526985Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:40:46.527091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:40:46.527245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:40:46.527637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-04-10T11:40:46.527786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:40:46.527874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:40:46.527972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:46.528047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:40:46.528190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-10T11:40:46.528279Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14813:16774];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD]
>> Coordinator::LastEmptyStepResent
>> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TDqPqReadActorTest::TestReadFromTopicFirstWatermark
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD]
>> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD]
>> TDqPqReadActorTest::TestReadFromTopicWatermarks1
>> KqpTpch::Query21 [GOOD]
>> KqpTpch::Query22
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD]
>> DataShardStats::HasSchemaChanges_ByKeyFilter
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> Backpressure::MonteCarlo [GOOD]
>> Coordinator::LastEmptyStepResent [GOOD]
>> CoordinatorVolatile::PlanResentOnReboots
>> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut_client/unittest
>> Backpressure::MonteCarlo [GOOD]
Test command err:
Clock# 1970-01-01T00:00:00.000000Z elapsed# 0.000036s EventsProcessed# 0 clients.size# 0 Clock# 1970-01-01T00:00:10.507605Z elapsed# 0.000195s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:28.085036Z elapsed# 0.000220s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:40.429085Z elapsed# 0.000242s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:56.176666Z elapsed# 0.000264s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:12.089789Z elapsed# 0.000290s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:29.982856Z elapsed# 0.000312s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:42.534828Z elapsed# 0.000331s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:52.551538Z elapsed# 0.001419s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:09.501664Z elapsed# 0.001454s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:28.334042Z elapsed# 0.001477s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:48.241019Z elapsed# 0.001501s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:58.819793Z elapsed# 0.001537s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:18.747329Z elapsed# 0.001559s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:37.752578Z elapsed# 0.001580s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:54.391824Z elapsed# 0.001602s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:13.358858Z elapsed# 0.001640s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:30.415878Z elapsed# 0.001667s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:45.665247Z elapsed# 0.001688s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:56.435060Z elapsed# 0.001710s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:08.956029Z elapsed# 0.001728s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:20.166326Z elapsed# 0.036004s EventsProcessed# 1299 clients.size# 1 Clock# 1970-01-01T00:05:34.148485Z elapsed# 0.054781s EventsProcessed# 2993 clients.size# 1 Clock# 1970-01-01T00:05:46.019029Z elapsed# 0.067087s EventsProcessed# 4377 clients.size# 1 Clock# 1970-01-01T00:06:04.214474Z elapsed# 0.090952s EventsProcessed# 6572 clients.size# 1 Clock#
1970-01-01T00:06:15.835588Z elapsed# 0.104478s EventsProcessed# 7879 clients.size# 1 Clock# 1970-01-01T00:06:28.146534Z elapsed# 0.121397s EventsProcessed# 9292 clients.size# 1 Clock# 1970-01-01T00:06:44.733010Z elapsed# 0.143920s EventsProcessed# 11198 clients.size# 1 Clock# 1970-01-01T00:07:02.578345Z elapsed# 0.168584s EventsProcessed# 13365 clients.size# 1 Clock# 1970-01-01T00:07:13.093126Z elapsed# 0.168951s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:07:23.385381Z elapsed# 0.168972s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:07:41.940020Z elapsed# 0.168989s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:07:58.177983Z elapsed# 0.169007s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:08:10.747825Z elapsed# 0.169027s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:08:29.906449Z elapsed# 0.169043s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:08:43.930934Z elapsed# 0.169061s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:08:57.897583Z elapsed# 0.169081s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:09:10.184517Z elapsed# 0.169100s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:09:27.762897Z elapsed# 0.169137s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:09:46.558076Z elapsed# 0.169155s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:09:56.938756Z elapsed# 0.169185s EventsProcessed# 13367 clients.size# 0 Clock# 1970-01-01T00:10:15.003370Z elapsed# 0.192223s EventsProcessed# 15504 clients.size# 1 Clock# 1970-01-01T00:10:25.679967Z elapsed# 0.192360s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:10:40.411329Z elapsed# 0.192378s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:10:55.896115Z elapsed# 0.192397s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:11:06.048718Z elapsed# 0.192418s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:11:22.787725Z elapsed# 0.192435s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:11:39.374440Z elapsed# 0.192451s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:11:51.544374Z elapsed# 0.192467s EventsProcessed# 15506 clients.size# 0 Clock# 1970-01-01T00:12:05.930861Z elapsed# 0.209539s EventsProcessed# 17156 clients.size# 1 Clock# 1970-01-01T00:12:18.328331Z elapsed# 0.243163s EventsProcessed# 20125 clients.size# 2 Clock# 1970-01-01T00:12:28.769161Z elapsed# 0.271886s EventsProcessed# 22582 clients.size# 2 Clock# 1970-01-01T00:12:43.191717Z elapsed# 0.310962s EventsProcessed# 25985 clients.size# 2 Clock# 1970-01-01T00:12:57.089373Z elapsed# 0.346814s EventsProcessed# 29290 clients.size# 2 Clock# 1970-01-01T00:13:11.973568Z elapsed# 0.383541s EventsProcessed# 32855 clients.size# 2 Clock# 1970-01-01T00:13:30.097511Z elapsed# 0.427059s EventsProcessed# 37235 clients.size# 2 Clock# 1970-01-01T00:13:41.709885Z elapsed# 0.453729s EventsProcessed# 40006 clients.size# 2 Clock# 1970-01-01T00:13:59.051162Z elapsed# 0.492071s EventsProcessed# 44053 clients.size# 2 Clock# 1970-01-01T00:14:18.292133Z elapsed# 0.532711s EventsProcessed# 48677 clients.size# 2 Clock# 1970-01-01T00:14:31.352872Z elapsed# 0.577839s EventsProcessed# 53235 clients.size# 3 Clock# 1970-01-01T00:14:43.939535Z elapsed# 0.620548s EventsProcessed# 57711 clients.size# 3 Clock# 1970-01-01T00:14:55.521592Z elapsed# 0.650761s EventsProcessed# 61800 clients.size# 3 Clock# 1970-01-01T00:15:08.548421Z elapsed# 0.687225s EventsProcessed# 66579 clients.size# 3 Clock# 
1970-01-01T00:15:19.304973Z elapsed# 0.708254s EventsProcessed# 69102 clients.size# 2 Clock# 1970-01-01T00:15:31.045599Z elapsed# 0.734355s EventsProcessed# 71838 clients.size# 2 Clock# 1970-01-01T00:15:42.692811Z elapsed# 0.762561s EventsProcessed# 74595 clients.size# 2 Clock# 1970-01-01T00:16:00.631191Z elapsed# 0.805488s EventsProcessed# 78762 clients.size# 2 Clock# 1970-01-01T00:16:20.371868Z elapsed# 0.849111s EventsProcessed# 83352 clients.size# 2 Clock# 1970-01-01T00:16:38.193026Z elapsed# 0.892812s EventsProcessed# 87595 clients.size# 2 Clock# 1970-01-01T00:16:56.804021Z elapsed# 0.940581s EventsProcessed# 92218 clients.size# 2 Clock# 1970-01-01T00:17:09.642524Z elapsed# 0.980713s EventsProcessed# 96698 clients.size# 3 Clock# 1970-01-01T00:17:21.605275Z elapsed# 1.022071s EventsProcessed# 100938 clients.size# 3 Clock# 1970-01-01T00:17:36.944743Z elapsed# 1.071257s EventsProcessed# 106465 clients.size# 3 Clock# 1970-01-01T00:17:56.856102Z elapsed# 1.143744s EventsProcessed# 113609 clients.size# 3 Clock# 1970-01-01T00:18:13.825613Z elapsed# 1.204632s EventsProcessed# 119692 clients.size# 3 Clock# 1970-01-01T00:18:27.821797Z elapsed# 1.256947s EventsProcessed# 124736 clients.size# 3 Clock# 1970-01-01T00:18:38.917170Z elapsed# 1.281484s EventsProcessed# 127295 clients.size# 2 Clock# 1970-01-01T00:18:51.795200Z elapsed# 1.315851s EventsProcessed# 130421 clients.size# 2 Clock# 1970-01-01T00:19:11.085309Z elapsed# 1.364079s EventsProcessed# 134964 clients.size# 2 Clock# 1970-01-01T00:19:29.009192Z elapsed# 1.405248s EventsProcessed# 139137 clients.size# 2 Clock# 1970-01-01T00:19:42.956325Z elapsed# 1.458371s EventsProcessed# 144049 clients.size# 3 Clock# 1970-01-01T00:19:54.935329Z elapsed# 1.490874s EventsProcessed# 146882 clients.size# 2 Clock# 1970-01-01T00:20:09.000697Z elapsed# 1.525950s EventsProcessed# 150040 clients.size# 2 Clock# 1970-01-01T00:20:19.392695Z elapsed# 1.552538s EventsProcessed# 152539 clients.size# 2 Clock# 1970-01-01T00:20:32.785840Z elapsed# 1.587383s EventsProcessed# 155793 clients.size# 2 Clock# 1970-01-01T00:20:45.617965Z elapsed# 1.622511s EventsProcessed# 158916 clients.size# 2 Clock# 1970-01-01T00:20:58.355918Z elapsed# 1.654459s EventsProcessed# 161971 clients.size# 2 Clock# 1970-01-01T00:21:12.221326Z elapsed# 1.689484s EventsProcessed# 165317 clients.size# 2 Clock# 1970-01-01T00:21:24.050778Z elapsed# 1.720838s EventsProcessed# 168134 clients.size# 2 Clock# 1970-01-01T00:21:39.205941Z elapsed# 1.758398s EventsProcessed# 171707 clients.size# 2 Clock# 1970-01-01T00:21:50.904250Z elapsed# 1.791502s EventsProcessed# 174341 clients.size# 2 Clock# 1970-01-01T00:22:04.150185Z elapsed# 1.829516s EventsProcessed# 177610 clients.size# 2 Clock# 1970-01-01T00:22:23.479580Z elapsed# 1.877491s EventsProcessed# 182226 clients.size# 2 Clock# 1970-01-01T00:22:41.015617Z elapsed# 1.924366s EventsProcessed# 186434 clients.size# 2 Clock# 1970-01-01T00:22:57.718926Z elapsed# 1.964755s EventsProcessed# 190276 clients.size# 2 Clock# 1970-01-01T00:23:12.490010Z elapsed# 1.983529s EventsProcessed# 192085 clients.size# 1 Clock# 1970-01-01T00:23:27.165948Z elapsed# 2.000926s EventsProcessed# 193766 clients.size# 1 Clock# 1970-01-01T00:23:39.015604Z elapsed# 2.016014s EventsProcessed# 195157 clients.size# 1 Clock# 1970-01-01T00:23:55.588406Z elapsed# 2.037582s EventsProcessed# 197190 clients.size# 1 Clock# 1970-01-01T00:24:11.471637Z elapsed# 2.057668s EventsProcessed# 199082 clients.size# 1 Clock# 1970-01-01T00:24:23.535552Z elapsed# 2.072421s EventsProcessed# 200508 clients.size# 
1 Clock# 1970-01-01T00:24:41.648418Z elapsed# 2.095155s EventsProcessed# 202641 clients.size# 1 Clock# 1970-01-01T00:24:54.661045Z elapsed# 2.112697s EventsProcessed# 204285 clients.size# 1 Clock# 1970-01-01T00:25:10.842061Z elapsed# 2.133647s EventsProcessed# 206219 clients.size# 1 Clock# 1970-01-01T00:25:30.726659Z elapsed# 2.158774s EventsProcessed# 208578 clients.size# 1 Clock# 1970-01-01T00:25:48.158648Z elapsed# 2.180868s EventsProcessed# 210696 clients.size# 1 Clock# 1970-01-01T00:26:03.919379Z elapsed# 2.199932s EventsProcessed# 212608 clients.size# 1 Clock# 1970-01-01T00:26:15.326680Z elapsed# 2.211037s EventsProcessed# 213922 clients.size# 1 Clock# 1970-01-01T00:26:26.023848Z elapsed# 2.220387s EventsProcessed# 215122 clients.size# 1 Clock# 1970-01-01T00:26:40.905077Z elapsed# 2.254725s EventsProcessed# 218693 clients.size# 2 Clock# 1970-01-01T00:26:55.091372Z elapsed# 2.286316s EventsProcessed# 222153 clients.size# 2 Clock# 1970-01-01T00:27:13.313805Z elapsed# 2.323746s EventsProcessed# 226403 clients.size# 2 Clock# 1970-01-01T00:27:24.570937Z elapsed# 2.350353s EventsProcessed# 229099 clients.size# 2 Clock# 1970-01-01T00:27:41.530553Z elapsed# 2.393739s EventsProcessed# 233118 clients.size# 2 Clock# 1970-01-01T00:27:54.036059Z elapsed# 2.426588s EventsProcessed# 236201 clients.size# 2 Clock# 1970-01-01T00:28:08.231916Z elapsed# 2.462170s EventsProcessed# 239640 clients.size# 2 Clock# 1970-01-01T00:28:27.484498Z elapsed# 2.486406s EventsProcessed# 241908 clients.size# 1 Clock# 1970-01-01T00:28:46.002066Z elapsed# 2.538983s EventsProcessed# 246354 clients.size# 2 Clock# 1970-01-01T00:29:01.087516Z elapsed# 2.573874s EventsProcessed# 249955 clients.size# 2 Clock# 1970-01-01T00:29:13.058890Z elapsed# 2.588087s EventsProcessed# 251383 clients.size# 1 Clock# 1970-01-01T00:29:28.938633Z elapsed# 2.588231s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:29:46.624866Z elapsed# 2.588254s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:30:01.712657Z elapsed# 2.588271s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:30:19.292040Z elapsed# 2.588287s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:30:33.677287Z elapsed# 2.588304s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:30:52.694259Z elapsed# 2.588321s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:31:03.233764Z elapsed# 2.588338s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:31:13.593384Z elapsed# 2.588355s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:31:24.728626Z elapsed# 2.588381s EventsProcessed# 251385 clients.size# 0 Clock# 1970-01-01T00:31:42.730641Z ... 
ts.size# 3 Clock# 1970-01-01T05:29:02.863540Z elapsed# 127.713286s EventsProcessed# 9684952 clients.size# 3 Clock# 1970-01-01T05:29:16.029834Z elapsed# 127.843368s EventsProcessed# 9689647 clients.size# 3 Clock# 1970-01-01T05:29:30.391707Z elapsed# 127.963400s EventsProcessed# 9694893 clients.size# 3 Clock# 1970-01-01T05:29:49.888198Z elapsed# 128.116797s EventsProcessed# 9701804 clients.size# 3 Clock# 1970-01-01T05:30:09.368033Z elapsed# 128.249102s EventsProcessed# 9708844 clients.size# 3 Clock# 1970-01-01T05:30:21.085484Z elapsed# 128.329940s EventsProcessed# 9712961 clients.size# 3 Clock# 1970-01-01T05:30:31.673562Z elapsed# 128.448621s EventsProcessed# 9716710 clients.size# 3 Clock# 1970-01-01T05:30:48.061134Z elapsed# 128.595019s EventsProcessed# 9722578 clients.size# 3 Clock# 1970-01-01T05:31:04.795511Z elapsed# 128.766114s EventsProcessed# 9728584 clients.size# 3 Clock# 1970-01-01T05:31:16.782751Z elapsed# 128.832423s EventsProcessed# 9732961 clients.size# 3 Clock# 1970-01-01T05:31:33.308030Z elapsed# 128.912074s EventsProcessed# 9738843 clients.size# 3 Clock# 1970-01-01T05:31:43.730245Z elapsed# 129.094170s EventsProcessed# 9742552 clients.size# 3 Clock# 1970-01-01T05:31:58.595725Z elapsed# 129.193087s EventsProcessed# 9746051 clients.size# 2 Clock# 1970-01-01T05:32:17.793580Z elapsed# 129.228202s EventsProcessed# 9748228 clients.size# 1 Clock# 1970-01-01T05:32:27.895892Z elapsed# 129.242114s EventsProcessed# 9749429 clients.size# 1 Clock# 1970-01-01T05:32:41.528703Z elapsed# 129.284575s EventsProcessed# 9752688 clients.size# 2 Clock# 1970-01-01T05:33:01.331926Z elapsed# 129.356680s EventsProcessed# 9757453 clients.size# 2 Clock# 1970-01-01T05:33:13.485686Z elapsed# 129.379490s EventsProcessed# 9758909 clients.size# 1 Clock# 1970-01-01T05:33:23.692188Z elapsed# 129.396175s EventsProcessed# 9760116 clients.size# 1 Clock# 1970-01-01T05:33:42.802577Z elapsed# 129.470335s EventsProcessed# 9762390 clients.size# 1 Clock# 1970-01-01T05:33:57.806637Z elapsed# 129.494098s EventsProcessed# 9764190 clients.size# 1 Clock# 1970-01-01T05:34:16.163790Z elapsed# 129.523541s EventsProcessed# 9766281 clients.size# 1 Clock# 1970-01-01T05:34:28.275240Z elapsed# 129.542664s EventsProcessed# 9767701 clients.size# 1 Clock# 1970-01-01T05:34:40.634845Z elapsed# 129.558973s EventsProcessed# 9769204 clients.size# 1 Clock# 1970-01-01T05:34:53.801450Z elapsed# 129.580042s EventsProcessed# 9770771 clients.size# 1 Clock# 1970-01-01T05:35:08.292735Z elapsed# 129.602305s EventsProcessed# 9772473 clients.size# 1 Clock# 1970-01-01T05:35:19.167610Z elapsed# 129.636644s EventsProcessed# 9775034 clients.size# 2 Clock# 1970-01-01T05:35:33.878698Z elapsed# 129.692599s EventsProcessed# 9778627 clients.size# 2 Clock# 1970-01-01T05:35:52.008167Z elapsed# 129.799754s EventsProcessed# 9785199 clients.size# 3 Clock# 1970-01-01T05:36:11.904818Z elapsed# 129.958393s EventsProcessed# 9792111 clients.size# 3 Clock# 1970-01-01T05:36:30.774692Z elapsed# 130.231932s EventsProcessed# 9798938 clients.size# 3 Clock# 1970-01-01T05:36:40.891537Z elapsed# 130.296647s EventsProcessed# 9802602 clients.size# 3 Clock# 1970-01-01T05:37:00.782079Z elapsed# 130.438250s EventsProcessed# 9809893 clients.size# 3 Clock# 1970-01-01T05:37:11.171279Z elapsed# 130.497135s EventsProcessed# 9813531 clients.size# 3 Clock# 1970-01-01T05:37:29.619832Z elapsed# 130.598174s EventsProcessed# 9820118 clients.size# 3 Clock# 1970-01-01T05:37:48.618953Z elapsed# 130.787589s EventsProcessed# 9826777 clients.size# 3 Clock# 1970-01-01T05:38:07.939269Z elapsed# 
131.130635s EventsProcessed# 9833610 clients.size# 3 Clock# 1970-01-01T05:38:25.760456Z elapsed# 131.283084s EventsProcessed# 9839972 clients.size# 3 Clock# 1970-01-01T05:38:41.258397Z elapsed# 131.362060s EventsProcessed# 9845376 clients.size# 3 Clock# 1970-01-01T05:38:55.377981Z elapsed# 131.449370s EventsProcessed# 9850241 clients.size# 3 Clock# 1970-01-01T05:39:12.782579Z elapsed# 131.568262s EventsProcessed# 9858572 clients.size# 4 Clock# 1970-01-01T05:39:22.899884Z elapsed# 131.659942s EventsProcessed# 9863491 clients.size# 4 Clock# 1970-01-01T05:39:42.040825Z elapsed# 131.871839s EventsProcessed# 9872447 clients.size# 4 Clock# 1970-01-01T05:39:52.267915Z elapsed# 132.045036s EventsProcessed# 9877261 clients.size# 4 Clock# 1970-01-01T05:40:11.053734Z elapsed# 132.223971s EventsProcessed# 9886264 clients.size# 4 Clock# 1970-01-01T05:40:24.920931Z elapsed# 132.387719s EventsProcessed# 9892709 clients.size# 4 Clock# 1970-01-01T05:40:37.225948Z elapsed# 132.575642s EventsProcessed# 9898543 clients.size# 4 Clock# 1970-01-01T05:40:56.996273Z elapsed# 132.950419s EventsProcessed# 9907864 clients.size# 4 Clock# 1970-01-01T05:41:16.243295Z elapsed# 133.112351s EventsProcessed# 9914762 clients.size# 3 Clock# 1970-01-01T05:41:27.260142Z elapsed# 133.174198s EventsProcessed# 9917369 clients.size# 2 Clock# 1970-01-01T05:41:44.021636Z elapsed# 133.255959s EventsProcessed# 9921450 clients.size# 2 Clock# 1970-01-01T05:41:57.498545Z elapsed# 133.323583s EventsProcessed# 9924670 clients.size# 2 Clock# 1970-01-01T05:42:12.916663Z elapsed# 133.459266s EventsProcessed# 9928560 clients.size# 2 Clock# 1970-01-01T05:42:30.814544Z elapsed# 133.610657s EventsProcessed# 9934931 clients.size# 3 Clock# 1970-01-01T05:42:44.006754Z elapsed# 133.720093s EventsProcessed# 9939649 clients.size# 3 Clock# 1970-01-01T05:43:01.932098Z elapsed# 133.909921s EventsProcessed# 9945841 clients.size# 3 Clock# 1970-01-01T05:43:19.372685Z elapsed# 134.035228s EventsProcessed# 9951985 clients.size# 3 Clock# 1970-01-01T05:43:38.256457Z elapsed# 134.288869s EventsProcessed# 9958826 clients.size# 3 Clock# 1970-01-01T05:43:57.050429Z elapsed# 134.446659s EventsProcessed# 9965653 clients.size# 3 Clock# 1970-01-01T05:44:07.739437Z elapsed# 134.543622s EventsProcessed# 9969436 clients.size# 3 Clock# 1970-01-01T05:44:19.444779Z elapsed# 134.656703s EventsProcessed# 9975015 clients.size# 4 Clock# 1970-01-01T05:44:38.635917Z elapsed# 134.885163s EventsProcessed# 9981738 clients.size# 3 Clock# 1970-01-01T05:44:58.480934Z elapsed# 135.050021s EventsProcessed# 9988638 clients.size# 3 Clock# 1970-01-01T05:45:15.076037Z elapsed# 135.214202s EventsProcessed# 9994556 clients.size# 3 Clock# 1970-01-01T05:45:29.742720Z elapsed# 135.293677s EventsProcessed# 9999709 clients.size# 3 Clock# 1970-01-01T05:45:40.621759Z elapsed# 135.345608s EventsProcessed# 10003613 clients.size# 3 Clock# 1970-01-01T05:45:54.963268Z elapsed# 135.416628s EventsProcessed# 10008465 clients.size# 3 Clock# 1970-01-01T05:46:13.508957Z elapsed# 135.513846s EventsProcessed# 10015258 clients.size# 3 Clock# 1970-01-01T05:46:25.325357Z elapsed# 135.644636s EventsProcessed# 10019409 clients.size# 3 Clock# 1970-01-01T05:46:40.038072Z elapsed# 135.733804s EventsProcessed# 10024599 clients.size# 3 Clock# 1970-01-01T05:46:55.312503Z elapsed# 135.815204s EventsProcessed# 10030174 clients.size# 3 Clock# 1970-01-01T05:47:06.216894Z elapsed# 135.934950s EventsProcessed# 10033897 clients.size# 3 Clock# 1970-01-01T05:47:20.740175Z elapsed# 136.067139s EventsProcessed# 10037422 clients.size# 2 
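Each record in this trace pairs a virtual clock with a wall-clock elapsed time, a cumulative event counter, and the current number of simulated clients; the trace resumes below. A driver producing output of this shape could be as small as the following sketch (purely illustrative; it is not the actual harness from ydb/core/blobstorage/backpressure):

    // Illustrative sketch: a Monte Carlo loop over virtual time that randomly
    // churns a set of simulated clients and reports progress in the same
    // Clock#/elapsed#/EventsProcessed#/clients.size# shape as the trace above.
    #include <chrono>
    #include <cstdint>
    #include <iostream>
    #include <random>
    #include <vector>

    int main() {
        std::mt19937_64 rng(20250410);
        std::uniform_int_distribution<uint64_t> stepUs(10'000'000, 20'000'000);

        uint64_t virtualUs = 0;        // simulated clock, microseconds
        uint64_t eventsProcessed = 0;  // cumulative events handled
        std::vector<int> clients;
        const auto start = std::chrono::steady_clock::now();

        for (int iter = 0; iter < 100; ++iter) {
            virtualUs += stepUs(rng);  // advance virtual time by 10..20 seconds

            // Randomly add or drop a client, keeping the set small.
            const uint64_t roll = rng() % 4;
            if (roll == 0 && clients.size() < 4) clients.push_back(iter);
            if (roll == 1 && !clients.empty()) clients.pop_back();

            // Each live client pushes a random batch of events per step.
            for (size_t i = 0; i < clients.size(); ++i) {
                eventsProcessed += rng() % 2500;
            }

            const std::chrono::duration<double> elapsed =
                std::chrono::steady_clock::now() - start;
            std::cout << "Clock# " << virtualUs << "us"
                      << " elapsed# " << elapsed.count() << "s"
                      << " EventsProcessed# " << eventsProcessed
                      << " clients.size# " << clients.size() << "\n";
        }
    }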
Clock# 1970-01-01T05:47:37.884473Z elapsed# 136.160218s EventsProcessed# 10041565 clients.size# 2 Clock# 1970-01-01T05:47:51.803464Z elapsed# 136.188876s EventsProcessed# 10043226 clients.size# 1 Clock# 1970-01-01T05:48:02.906266Z elapsed# 136.216355s EventsProcessed# 10044581 clients.size# 1 Clock# 1970-01-01T05:48:17.648962Z elapsed# 136.265578s EventsProcessed# 10046371 clients.size# 1 Clock# 1970-01-01T05:48:28.431364Z elapsed# 136.284729s EventsProcessed# 10047681 clients.size# 1 Clock# 1970-01-01T05:48:42.923205Z elapsed# 136.308046s EventsProcessed# 10049431 clients.size# 1 Clock# 1970-01-01T05:48:56.070059Z elapsed# 136.354070s EventsProcessed# 10051034 clients.size# 1 Clock# 1970-01-01T05:49:07.556867Z elapsed# 136.400997s EventsProcessed# 10052375 clients.size# 1 Clock# 1970-01-01T05:49:21.139011Z elapsed# 136.489086s EventsProcessed# 10053930 clients.size# 1 Clock# 1970-01-01T05:49:37.962907Z elapsed# 136.518215s EventsProcessed# 10055906 clients.size# 1 Clock# 1970-01-01T05:49:57.450478Z elapsed# 136.552915s EventsProcessed# 10058194 clients.size# 1 Clock# 1970-01-01T05:50:17.241135Z elapsed# 136.588702s EventsProcessed# 10060455 clients.size# 1 Clock# 1970-01-01T05:50:28.428276Z elapsed# 136.588914s EventsProcessed# 10060457 clients.size# 0 Clock# 1970-01-01T05:50:38.597180Z elapsed# 136.607128s EventsProcessed# 10061599 clients.size# 1 Clock# 1970-01-01T05:50:53.563611Z elapsed# 136.631847s EventsProcessed# 10063392 clients.size# 1 Clock# 1970-01-01T05:51:03.962283Z elapsed# 136.648242s EventsProcessed# 10064634 clients.size# 1 Clock# 1970-01-01T05:51:21.440843Z elapsed# 136.675139s EventsProcessed# 10066688 clients.size# 1 Clock# 1970-01-01T05:51:35.472589Z elapsed# 136.696562s EventsProcessed# 10068278 clients.size# 1 Clock# 1970-01-01T05:51:54.494199Z elapsed# 136.740486s EventsProcessed# 10070481 clients.size# 1 Clock# 1970-01-01T05:52:08.468819Z elapsed# 136.786194s EventsProcessed# 10072098 clients.size# 1 Clock# 1970-01-01T05:52:24.217230Z elapsed# 136.821510s EventsProcessed# 10073927 clients.size# 1 Clock# 1970-01-01T05:52:43.569831Z elapsed# 136.856813s EventsProcessed# 10076156 clients.size# 1 Clock# 1970-01-01T05:52:55.493560Z elapsed# 136.857084s EventsProcessed# 10076159 clients.size# 0 Clock# 1970-01-01T05:53:12.187411Z elapsed# 136.886359s EventsProcessed# 10078168 clients.size# 1 Clock# 1970-01-01T05:53:30.760203Z elapsed# 136.919092s EventsProcessed# 10080392 clients.size# 1 Clock# 1970-01-01T05:53:42.856364Z elapsed# 136.940067s EventsProcessed# 10081860 clients.size# 1 Clock# 1970-01-01T05:54:01.618798Z elapsed# 136.976090s EventsProcessed# 10084071 clients.size# 1 Clock# 1970-01-01T05:54:20.964445Z elapsed# 137.033459s EventsProcessed# 10086316 clients.size# 1 Clock# 1970-01-01T05:54:35.654261Z elapsed# 137.063079s EventsProcessed# 10088186 clients.size# 1 Clock# 1970-01-01T05:54:48.700056Z elapsed# 137.156979s EventsProcessed# 10089655 clients.size# 1 Clock# 1970-01-01T05:55:07.860725Z elapsed# 137.201517s EventsProcessed# 10091859 clients.size# 1 Clock# 1970-01-01T05:55:19.477162Z elapsed# 137.220407s EventsProcessed# 10093256 clients.size# 1 Clock# 1970-01-01T05:55:31.036945Z elapsed# 137.240974s EventsProcessed# 10094626 clients.size# 1 Clock# 1970-01-01T05:55:47.395790Z elapsed# 137.265084s EventsProcessed# 10096609 clients.size# 1 Clock# 1970-01-01T05:56:04.977289Z elapsed# 137.315676s EventsProcessed# 10100767 clients.size# 2 Clock# 1970-01-01T05:56:24.178982Z elapsed# 137.372107s EventsProcessed# 10105257 clients.size# 2 Clock# 
1970-01-01T05:56:37.157684Z elapsed# 137.407943s EventsProcessed# 10108349 clients.size# 2 Clock# 1970-01-01T05:56:55.821666Z elapsed# 137.464342s EventsProcessed# 10112729 clients.size# 2 Clock# 1970-01-01T05:57:13.990703Z elapsed# 137.527877s EventsProcessed# 10117014 clients.size# 2 Clock# 1970-01-01T05:57:29.125513Z elapsed# 137.577157s EventsProcessed# 10120586 clients.size# 2 Clock# 1970-01-01T05:57:46.415422Z elapsed# 137.633628s EventsProcessed# 10124617 clients.size# 2 Clock# 1970-01-01T05:58:06.052938Z elapsed# 137.763696s EventsProcessed# 10129293 clients.size# 2 Clock# 1970-01-01T05:58:23.405439Z elapsed# 137.824203s EventsProcessed# 10133569 clients.size# 2 Clock# 1970-01-01T05:58:36.767063Z elapsed# 137.874141s EventsProcessed# 10136677 clients.size# 2 Clock# 1970-01-01T05:58:49.831500Z elapsed# 137.931755s EventsProcessed# 10139686 clients.size# 2 Clock# 1970-01-01T05:59:01.961217Z elapsed# 138.024115s EventsProcessed# 10142567 clients.size# 2 Clock# 1970-01-01T05:59:15.549473Z elapsed# 138.070103s EventsProcessed# 10145730 clients.size# 2 Clock# 1970-01-01T05:59:32.284153Z elapsed# 138.157198s EventsProcessed# 10151697 clients.size# 3 Clock# 1970-01-01T05:59:43.292281Z elapsed# 138.197913s EventsProcessed# 10154308 clients.size# 2 Clock# 1970-01-01T05:59:57.939395Z elapsed# 138.249202s EventsProcessed# 10157706 clients.size# 2 |98.8%| [TM] {RESULT} ydb/core/blobstorage/backpressure/ut_client/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> LabeledDbCounters::OneTabletRestart [GOOD] >> LabeledDbCounters::TwoTablets >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] |98.8%| [TM] {RESULT} ydb/tests/sql/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-04-10T11:31:48.363243Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:48.557950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:48.586936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:48.587325Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute 
at tablet 9437184 2025-04-10T11:31:48.597439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:48.597719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:48.597989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:48.598114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:48.598261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:48.598429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:48.598545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:48.598670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:48.598781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:48.598890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.599078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:48.599199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:48.639128Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:48.639430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:48.639493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:48.639723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.639900Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:48.639991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:48.640042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:48.640139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:48.640234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:48.640286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:48.640328Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:48.640570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:48.640654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:48.640699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:48.640751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:48.640858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:48.640915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:48.640976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:48.641008Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:48.641128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:48.641228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:48.641267Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:48.641338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:48.641380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:48.641411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:48.641823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T11:31:48.641927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-10T11:31:48.642031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-10T11:31:48.642138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-10T11:31:48.642372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:48.642450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:48.642491Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:48.642730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:48.642786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.642823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:48.642997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:48.643044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:48.643076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:48.643321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-04-10T11:31:48.643368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:48.643408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:48.643557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:48.643604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:48.643660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 97Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15681:17639];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T11:41:05.723287Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:05.723428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2025-04-10T11:41:05.723936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=410; 2025-04-10T11:41:05.724016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=499; 2025-04-10T11:41:05.732342Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:05.732445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-04-10T11:41:05.745106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12550; 2025-04-10T11:41:05.757945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=10203; 2025-04-10T11:41:05.758082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12852; 2025-04-10T11:41:05.758292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=141; 2025-04-10T11:41:05.758432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=94; 2025-04-10T11:41:05.758627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=147; 2025-04-10T11:41:05.758832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=153; 
2025-04-10T11:41:05.759142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=251; 2025-04-10T11:41:05.759195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26701; 2025-04-10T11:41:05.767174Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:05.767317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-10T11:41:05.781099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13638; 2025-04-10T11:41:05.833991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=52721; 2025-04-10T11:41:05.834201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=76; 2025-04-10T11:41:05.834298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=41; 2025-04-10T11:41:05.834358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-10T11:41:05.834422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2025-04-10T11:41:05.834477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:41:05.834586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=63; 2025-04-10T11:41:05.834639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-10T11:41:05.834765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=83; 2025-04-10T11:41:05.834822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:41:05.834953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=59; 2025-04-10T11:41:05.835095Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=83; 2025-04-10T11:41:05.835225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=77; 2025-04-10T11:41:05.835298Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=67865; 2025-04-10T11:41:05.835546Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:41:05.836587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15681:17639];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:41:05.836688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15681:17639];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:41:05.836834Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:41:05.836952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:41:05.837249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:41:05.837352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:05.837647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T11:41:05.837752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:41:05.837823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:41:05.837897Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:05.837957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:05.838131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:41:05.845528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:05.855780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:41:05.859541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:41:05.859622Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T11:41:05.859664Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:41:05.859737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:41:05.859854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:05.860186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T11:41:05.860301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:41:05.860377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:41:05.860464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:05.860530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:05.860666Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-10T11:41:05.860752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15681:17639];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> KqpTpch::Query22 [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns |98.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/limits/py3test >> TPqWriterTest::TestWriteToTopicMultiBatch >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> TPqWriterTest::TestDeferredWriteToTopic >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |98.8%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink >> TPqWriterTest::TestCheckpoints [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-04-10T11:31:57.860259Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:58.031304Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:58.058366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:58.058716Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:58.068063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:58.068349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:58.068606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:58.068729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:58.068856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:58.068992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:58.069148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:58.069261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:58.069374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:58.069494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.069628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:58.069736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:58.100996Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:58.101297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:58.101366Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:58.101579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.101755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:58.101850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:58.101904Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:58.102012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:58.102090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:58.102164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:58.102204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:58.102434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:58.102540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:58.102594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:58.102632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:58.102741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:58.102847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:58.102916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:58.102954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:58.103045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:58.103088Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:58.103122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:58.103191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:58.103235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:58.103269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:58.103722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T11:31:58.103832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-10T11:31:58.103932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-10T11:31:58.104058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=55; 2025-04-10T11:31:58.104306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:58.104382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:58.104432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:58.104669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:58.104731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.104792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:58.104982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:58.105034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:58.105083Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:58.105306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:58.105353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:58.105395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:58.105590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:58.105648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:58.105705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 39Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15677:17635];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T11:41:19.537157Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:19.537306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2025-04-10T11:41:19.537823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=408; 2025-04-10T11:41:19.537912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=530; 2025-04-10T11:41:19.543906Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:19.544024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2025-04-10T11:41:19.556505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12340; 2025-04-10T11:41:19.571239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13043; 2025-04-10T11:41:19.571431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14794; 2025-04-10T11:41:19.571625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2025-04-10T11:41:19.571773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=85; 2025-04-10T11:41:19.571938Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=121; 2025-04-10T11:41:19.572066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=92; 2025-04-10T11:41:19.572287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=184; 2025-04-10T11:41:19.572335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28234; 2025-04-10T11:41:19.580846Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T11:41:19.580987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-04-10T11:41:19.591368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10245; 2025-04-10T11:41:19.648293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=56754; 2025-04-10T11:41:19.648508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=68; 2025-04-10T11:41:19.648629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=43; 2025-04-10T11:41:19.648691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-10T11:41:19.648770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T11:41:19.648823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-10T11:41:19.648949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=82; 2025-04-10T11:41:19.649017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=12; 2025-04-10T11:41:19.649145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=84; 2025-04-10T11:41:19.649214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T11:41:19.649296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2025-04-10T11:41:19.649435Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=94; 2025-04-10T11:41:19.649587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=90; 2025-04-10T11:41:19.649659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=68584; 2025-04-10T11:41:19.649920Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:41:19.651265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15677:17635];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T11:41:19.651371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15677:17635];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T11:41:19.651496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T11:41:19.651572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T11:41:19.651835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:41:19.651932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:19.652205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T11:41:19.652303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:41:19.652369Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:41:19.652464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:19.652543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:19.652692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T11:41:19.666167Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:19.679638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T11:41:19.680933Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T11:41:19.680997Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-10T11:41:19.681036Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T11:41:19.681117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T11:41:19.681233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T11:41:19.681583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T11:41:19.681720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T11:41:19.681815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T11:41:19.681918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:19.682004Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T11:41:19.682135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-10T11:41:19.684721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15677:17635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestExplain::test_explain_data_query >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 
2025-04-10T11:37:42.981448Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7491647298144400004:2053], metadatafields: , partitions: 666 2025-04-10T11:37:43.160555Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-10T11:37:43.160613Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-04-10T11:37:43.160628Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7491647298144400004:2053]) 2025-04-10T11:37:43.160677Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7491647298144400010:2048] 2025-04-10T11:37:43.161087Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7491647298144400005:2054] 2025-04-10T11:37:43.161115Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7491647298144400005:2054], partIds: 666 cookie 1 2025-04-10T11:37:43.161439Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7491647298144400005:2054], cookie 1 2025-04-10T11:37:43.161459Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-04-10T11:37:43.161467Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-04-10T11:37:43.161497Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7491647298144400007:2056], generation 1 2025-04-10T11:37:43.161561Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7491647298144400007:2056], connection id 1 partitions offsets (666 / ), 2025-04-10T11:37:43.166277Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7491647298144400007:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-10T11:37:43.170337Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7491647298144400007:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.176280Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7491647298144400007:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.176318Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-04-10T11:37:43.176324Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
TEvMessageBatch NextOffset 2 2025-04-10T11:37:43.177403Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-04-10T11:37:43.177586Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-04-10T11:37:43.177599Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-10T11:37:43.191081Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-04-10T11:37:43.191187Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 1 [1:7491647298144400007:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2025-04-10T11:37:43.191200Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7491647298144400010:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7491647298144400007:2056] generation 1 2025-04-10T11:37:43.838486Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7491647301597848341:2053], metadatafields: , partitions: 666 2025-04-10T11:37:43.989793Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-10T11:37:43.989840Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-04-10T11:37:43.989855Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7491647301597848341:2053]) 2025-04-10T11:37:43.989894Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [3:7491647301597848347:2048] 2025-04-10T11:37:43.990057Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [3:7491647301597848342:2054] 2025-04-10T11:37:43.990087Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [3:7491647301597848342:2054], partIds: 666 cookie 1 2025-04-10T11:37:43.990325Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [3:7491647301597848342:2054], cookie 1 2025-04-10T11:37:43.990344Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
UpdateSessions, Sessions size 0 2025-04-10T11:37:43.990352Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-04-10T11:37:43.990373Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [3:7491647301597848344:2056], generation 1 2025-04-10T11:37:43.990397Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [3:7491647301597848344:2056], connection id 1 partitions offsets (666 / ), 2025-04-10T11:37:43.990785Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [3:7491647301597848344:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-10T11:37:43.990957Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7491647301597848344:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.991773Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7491647301597848344:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.991798Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-04-10T11:37:43.991810Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-04-10T11:37:43.991837Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-04-10T11:37:43.991923Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-04-10T11:37:43.991932Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-10T11:37:43.992111Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [3:7491647301597848344:2056], reason Disconnected, cookie 999 2025-04-10T11:37:43.992273Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7491647301597848344:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.992737Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2025-04-10T11:37:43.992774Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7491647301597848344:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-10T11:37:43.992787Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 3 2025-04-10T11:37:43.992993Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
GetAsyncInputData freeSpace = 1000 2025-04-10T11:37:43.993055Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 3 2025-04-10T11:37:43.993071Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-04-10T11:37:43.993440Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-04-10T11:37:43.993487Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 2 [3:7491647301597848344:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=3 has pending data 2025-04-10T11:37:43.993496Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7491647301597848347:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [3:7491647301597848344:2056] generation 1 2025-04-10T11:37:44.570777Z node 5 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [5:7491647307396324893:2053], metadat ... r session id: test_client_1_22_4426594328182610901_v1 2025-04-10T11:41:25.286458Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-10T11:41:25.286775Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-10T11:41:25.294312Z :INFO: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-04-10T11:41:25.295087Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [de7606a4-181f0f4e-c05ecd5d-ec972f81] Write session: OnReadDone gRpcStatusCode: 0 2025-04-10T11:41:25.295197Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [de7606a4-181f0f4e-c05ecd5d-ec972f81] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744285285295 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:41:25.295361Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [de7606a4-181f0f4e-c05ecd5d-ec972f81] Write session established. Init response: last_seq_no: 5 session_id: "de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0" 2025-04-10T11:41:25.295423Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0 2025-04-10T11:41:25.295494Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] MessageGroupId [de7606a4-181f0f4e-c05ecd5d-ec972f81] Write session: set DirectWriteToPartitionId 0 2025-04-10T11:41:25.295718Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-04-10T11:41:25.295789Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-04-10T11:41:25.298552Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-04-10T11:41:25.301321Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] Got ReadResponse, serverBytesSize = 1095, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427705 2025-04-10T11:41:25.301572Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427705 2025-04-10T11:41:25.302059Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-04-10T11:41:25.302143Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] Returning serverBytesSize = 1095 to budget 2025-04-10T11:41:25.302224Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] In ContinueReadingDataImpl, ReadSizeBudget = 1095, ReadSizeServerDelta = 52427705 2025-04-10T11:41:25.302442Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-10T11:41:25.302540Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-10T11:41:25.302591Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-10T11:41:25.302661Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-10T11:41:25.302694Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (3-3) 2025-04-10T11:41:25.302737Z :DEBUG: [local] Take Data. Partition 0. Read: {4, 0} (4-4) 2025-04-10T11:41:25.303279Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-04-10T11:41:25.303342Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] Returning serverBytesSize = 0 to budget 2025-04-10T11:41:25.303555Z :INFO: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Closing read session. Close timeout: 0.000000s 2025-04-10T11:41:25.303634Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-10T11:41:25.303722Z :INFO: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Counters: { Errors: 0 CurrentSessionLifetimeMs: 33 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:41:25.303886Z :NOTICE: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-10T11:41:25.303959Z :DEBUG: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] [] Abort session to cluster 2025-04-10T11:41:25.305340Z :INFO: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Closing read session. Close timeout: 0.000000s 2025-04-10T11:41:25.305450Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-10T11:41:25.305546Z :INFO: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Counters: { Errors: 0 CurrentSessionLifetimeMs: 34 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:41:25.305745Z :NOTICE: [local] [local] [48b60878-9316a4a6-bb600d31-f9772a21] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T11:41:25.310165Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-04-10T11:41:25.310245Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-04-10T11:41:25.310323Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-04-10T11:41:25.310374Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-04-10T11:41:25.310433Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-04-10T11:41:25.311692Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-10T11:41:25.320392Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: direct write to partition: 0, generation 1 2025-04-10T11:41:25.320577Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: send init request: init_request { path: "Checkpoints" producer_id: "de7606a4-181f0f4e-c05ecd5d-ec972f81" partition_with_generation { generation: 1 } } 2025-04-10T11:41:25.320627Z :TRACE: [local] TRACE_EVENT InitRequest pwg_partition_id=0 pwg_generation=1 2025-04-10T11:41:25.321089Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: OnWriteDone gRpcStatusCode: 0 2025-04-10T11:41:25.338343Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-10T11:41:25.338402Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session will now close 2025-04-10T11:41:25.338483Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-10T11:41:25.339281Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-10T11:41:25.339401Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [de7606a4-181f0f4e-c05ecd5d-ec972f81|9e77db9c-efeae5b5-950fe5bb-c68366b6_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-10T11:41:26.450699Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-04-10T11:41:26.536042Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session: try to update token 2025-04-10T11:41:26.536577Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Start write session. Will connect to nodeId: 0 2025-04-10T11:41:26.624762Z node 54 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-04-10T11:41:26.625068Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "a325140c-9489718a-fc95f089-ff30d8da" } 2025-04-10T11:41:26.629172Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session: close. Timeout 0.000000s 2025-04-10T11:41:26.634301Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session will now close 2025-04-10T11:41:26.634396Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session: aborting 2025-04-10T11:41:26.634588Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session: gracefully shut down, all writes complete 2025-04-10T11:41:26.634716Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [] MessageGroupId [a325140c-9489718a-fc95f089-ff30d8da] Write session: destroy |98.9%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-04-10T11:39:24.078376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:24.079090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:24.079168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:39:24.081049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:24.081360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:24.081686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a89/r3tmp/tmpMHVYFy/pdisk_1.dat 2025-04-10T11:39:24.745933Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-04-10T11:39:25.149463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:25.154433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:25.163098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:25.163206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:25.211419Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:39:25.211891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:25.212377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-04-10T11:39:25.521152Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-04-10T11:39:26.822042Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-04-10T11:39:29.998593Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-10T11:39:29.998689Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-04-10T11:39:29.998777Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-10T11:39:29.998829Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-04-10T11:39:29.998917Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-04-10T11:39:30.000004Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-04-10T11:39:30.000130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-04-10T11:39:30.000784Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:87:2087] ServerId# [1:1071:2628] TabletId# 72057594037932033 PipeClientId# [2:87:2087] 2025-04-10T11:39:30.001246Z node 2 :TX_PROXY WARN: actor# [2:238:2129] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-10T11:39:30.001830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-04-10T11:39:30.011429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:30.044140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:39:30.045476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-04-10T11:39:42.252811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:42.253221Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:42.253558Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T11:39:42.255362Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:42.255699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:42.255972Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a89/r3tmp/tmp1yOXzx/pdisk_1.dat 2025-04-10T11:39:42.587801Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:42.782123Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-04-10T11:39:42.863810Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:42.863931Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:42.867309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:42.867389Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:42.883170Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-10T11:39:42.883986Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:42.884446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:39:43.587999Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2376] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-04-10T11:39:44.386042Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-04-10T11:39:44.400725Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-04-10T11:39:45.115154Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 124 2025-04-10T11:39:52.613487Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:52.613969Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:52.614042Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a89/r3tmp/tmpWjlS5o/pdisk_1.dat 2025-04-10T11:39:52.980158Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:53.031130Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:53.031292Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:53.043081Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-04-10T11:39:53.319960Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-10T11:39:53.321791Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-04-10T11:39:53.322590Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-10T11:39:53.322738Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-04-10T11:39:53.631012Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-10T11:39:53.632438Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-10T11:39:58.950883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:683:2416], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:39:58.951058Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:39:58.951140Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a89/r3tmp/tmpLiiVfI/pdisk_1.dat 2025-04-10T11:39:59.458857Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:39:59.746131Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:39:59.746320Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:39:59.761139Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-04-10T11:40:00.735030Z node 6 :CMS_TENANTS ... d ... coordinator 72057594046316545 gen 2 is planning step 1000 2025-04-10T11:41:37.516604Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 50 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 50}}} marker# M1 2025-04-10T11:41:37.516752Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1000 2025-04-10T11:41:37.516845Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1000 2025-04-10T11:41:37.516957Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-10T11:41:37.517091Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} ... waiting for blocked put responses 2025-04-10T11:41:37.532520Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-04-10T11:41:37.532645Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1050 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-04-10T11:41:37.548004Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-04-10T11:41:37.548173Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... waiting for migrated state 2025-04-10T11:41:37.603474Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-10T11:41:37.603995Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-10T11:41:37.612676Z node 20 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete ... 
blocking state response from [20:526:2400] to [20:690:2563] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-04-10T11:41:37.613862Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-10T11:41:37.614043Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy 2025-04-10T11:41:37.616436Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:544:2482] disconnnected 2025-04-10T11:41:37.616560Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:544:2482] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-04-10T11:41:37.644690Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:697:2573] connected 2025-04-10T11:41:37.645072Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-04-10T11:41:37.645148Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:693:2571] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-04-10T11:41:37.645455Z node 20 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-10T11:41:37.645522Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 2025-04-10T11:41:37.645645Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-04-10T11:41:37.645731Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1100 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-10T11:41:37.660282Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-04-10T11:41:37.660392Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-04-10T11:41:37.660573Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:570:2496] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-04-10T11:41:37.660780Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-04-10T11:41:37.660908Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND Ev to# [20:571:2497] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-04-10T11:41:37.661006Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1050 2025-04-10T11:41:37.661066Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1050 2025-04-10T11:41:37.661203Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}} marker# M4 2025-04-10T11:41:37.661411Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-10T11:41:37.661656Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-10T11:41:37.662306Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:701:2576] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:41:37.662424Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... waiting for planned another persistent tx ... 
coordinator 72057594046316545 gen 3 is planning step 1100 2025-04-10T11:41:37.674642Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-04-10T11:41:37.674816Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-04-10T11:41:37.675914Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-04-10T11:41:37.676029Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-10T11:41:37.676474Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-04-10T11:41:37.676547Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-04-10T11:41:37.676716Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:570:2496] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-04-10T11:41:37.676992Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-04-10T11:41:37.677125Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND Ev to# [20:571:2497] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-04-10T11:41:37.677241Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1100 2025-04-10T11:41:37.677311Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1100 2025-04-10T11:41:37.677478Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}} marker# M4 2025-04-10T11:41:37.677598Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-04-10T11:41:37.677733Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-04-10T11:41:37.677890Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-10T11:41:37.689875Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-04-10T11:41:37.689951Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1150 2025-04-10T11:41:37.690028Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:570:2496] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:572:2498] bucket.ActiveActor step# 1150 2025-04-10T11:41:37.690092Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-04-10T11:41:37.690162Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:572:2498] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |98.9%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> BulkUpsert::BulkUpsert [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 19867, MsgBus: 61462 2025-04-10T11:37:40.659930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647291143887484:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:40.671417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008c2/r3tmp/tmp0l1tHN/pdisk_1.dat 2025-04-10T11:37:41.158880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:41.159001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:41.162116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:37:41.193506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19867, node 1 2025-04-10T11:37:41.226019Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:37:41.226104Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T11:37:41.347885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:41.347907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:41.347914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:41.348058Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:61462 TClient is connected to server localhost:61462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:42.179360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:43.942766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647304028790007:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:43.942915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 23546, MsgBus: 6590 2025-04-10T11:37:45.475791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647310183812589:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:45.475836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008c2/r3tmp/tmp313alo/pdisk_1.dat 2025-04-10T11:37:45.686112Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:45.698424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:45.698504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:45.699446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23546, node 2 2025-04-10T11:37:45.769630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:45.769652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:45.769664Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:45.769775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6590 TClient is connected to server localhost:6590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:46.256166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:46.264730Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:37:48.999442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491647323068715137:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:48.999517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 12319, MsgBus: 3999 2025-04-10T11:37:49.956236Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491647327016188447:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:49.956315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008c2/r3tmp/tmpdkm5im/pdisk_1.dat 2025-04-10T11:37:50.072623Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:50.152033Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:50.152130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:50.153441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12319, node 3 2025-04-10T11:37:50.250897Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:37:50.250914Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:37:50.250923Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:37:50.251034Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3999 TClient is connected to server localhost:3999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:50.713060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:50.726532Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T11:37:53.389635Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491647344196058282:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:53.389815Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 16135, MsgBus: 3861 2025-04-10T11:37:54.160983Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491647349247786559:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:54.161060Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008c2/r3tmp/tmpgLblpS/pdisk_1.dat 2025-04-10T11:37:54.309079Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:54.333256Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:54.333364Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileSta ... d))) (let $9 (DataSink 'result)) (let $10 (ResPull! (Left! $8) $9 (Key) (Nth (Right! $8) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $10 $9) $1 $7)) ) 2025-04-10 11:41:23.936 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-04-10 11:41:23.942 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-04-10 11:41:23.951 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-10 11:41:23.961 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-04-10 11:41:23.973 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-10 11:41:23.988 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-10 11:41:24.007 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-10 11:41:24.023 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-10 11:41:24.058 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-04-10 11:41:24.105 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-10 11:41:24.222 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-10 11:41:24.645 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-04-10 11:41:24.824 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-10 11:41:24.888 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! 
$2 $1 '('('"mode" '"flush")))) ) 2025-04-10 11:41:24.960 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F4735BBC640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-04-10 11:41:25.029 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F4735BBC640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-04-10 11:41:25.121 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F4735BBC640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-04-10 11:41:25.245 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $6 $5) $1 $3)) ) 2025-04-10 11:41:25.256 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-10 11:41:25.264 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-10 11:41:25.272 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-04-10 11:41:25.280 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F47363BD640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-10 11:41:25.566 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F4735BBC640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-10 11:41:25.576 INFO ydb-core-kqp-ut-view(pid=1823129, tid=0x00007F4735BBC640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 23105, MsgBus: 21821 2025-04-10T11:41:27.918730Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7491648265152100885:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:41:27.920594Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0008c2/r3tmp/tmpLgAuCw/pdisk_1.dat 2025-04-10T11:41:28.632993Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:41:28.667740Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:41:28.667935Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:41:28.675239Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23105, node 23 2025-04-10T11:41:28.968743Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:41:28.968781Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:41:28.968801Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:41:28.969096Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21821 TClient is connected to server localhost:21821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:41:30.949402Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
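The compiled queries above trace the view lifecycle exercised by ydb/core/kqp/ut/view: a view is created over /Root/view_series with security_invoker enabled (the '('"security_invoker" (Bool '"true")) feature and query_text "SELECT * FROM `/Root/view_series`" in the optimized expression), read back with a sort on series_id, then dropped via 'dropObject. A minimal YQL sketch of that sequence, reconstructed from the compiled expressions (the session and transaction wrapping used by the unit test is omitted):

    CREATE VIEW `/Root/read_from_one_view` WITH (security_invoker = TRUE) AS
        SELECT * FROM `/Root/view_series`;

    SELECT * FROM `/Root/read_from_one_view` ORDER BY series_id;

    DROP VIEW `/Root/read_from_one_view`;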
2025-04-10T11:41:32.918614Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7491648265152100885:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:41:32.932463Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:41:40.070328Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7491648320986676343:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:40.070532Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:40.117015Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7491648320986676373:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:40.117199Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:40.117232Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7491648320986676378:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:40.127152Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T11:41:40.151069Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7491648320986676380:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T11:41:40.254107Z node 23 :TX_PROXY ERROR: Actor# [23:7491648320986676431:2377] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |98.9%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> DataShardStats::HasSchemaChanges_Families [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.9%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> KqpLimits::TooBigKey-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] Test command err: 2025-04-10T11:38:03.057821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:03.058100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:03.058407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/000a01/r3tmp/tmpoTMUjp/pdisk_1.dat 2025-04-10T11:38:03.711887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:03.787694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:03.795505Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-10T11:38:03.833660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:03.834168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:03.846985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:03.953027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:04.011352Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-10T11:38:04.012672Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-10T11:38:04.013239Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-10T11:38:04.013650Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:04.080177Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T11:38:04.081120Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:04.081255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:04.083896Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:04.083993Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:04.084054Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:04.085105Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:04.085270Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:04.085369Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-10T11:38:04.096220Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:04.132853Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:04.134289Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-04-10T11:38:04.134485Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-10T11:38:04.134541Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:04.134583Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:04.134620Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:04.134925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:04.138215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:04.140336Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:04.140453Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:04.140523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:04.140579Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:04.143238Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:38:04.143354Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:38:04.143401Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:38:04.143444Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:38:04.143517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:04.143720Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:04.143763Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:04.143817Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-10T11:38:04.146494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-04-10T11:38:04.146562Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-10T11:38:04.146710Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:04.147233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-10T11:38:04.147336Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:04.147451Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:38:04.147538Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:38:04.147585Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-10T11:38:04.147636Z node 1 :TX_DATASHARD TRACE: Add 
[0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-10T11:38:04.147690Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:38:04.148100Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-10T11:38:04.148176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-10T11:38:04.148235Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-10T11:38:04.148281Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:38:04.148362Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-10T11:38:04.148408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-10T11:38:04.148467Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-10T11:38:04.148539Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-10T11:38:04.148579Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-10T11:38:04.151528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-10T11:38:04.151604Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-10T11:38:04.166884Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-10T11:38:04.166974Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-10T11:38:04.167012Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-10T11:38:04.167092Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-10T11:38:04.167169Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-10T11:38:04.328130Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:04.328210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:38:04.328254Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-04-10T11:38:04.328695Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-10T11:38:04.328756Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T11:38:04.328882Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 
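The TRACE lines above show a scheme transaction (txId 281474976715657, an ESchemeOpCreateTable) moving through the datashard's execution-unit chain: CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan at propose time, then PlanQueue once the mediator delivers the plan step. Any table DDL drives this same path; a minimal sketch of a statement that would produce such a scheme transaction (the table name and columns are illustrative, not taken from this log):

    CREATE TABLE `/Root/table-1` (
        key Uint32,
        value Utf8,
        PRIMARY KEY (key)
    );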
2025-04-10T11:38:04.328943Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-10T11:38:04.328986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-10T11:38:04.329043Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-10T11:38:04.333959Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-10T11:38:04.334061Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:04.338077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:04.338136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-10T11:38:04.338225Z ... lanQueue at 72075186224037888 2025-04-10T11:41:46.249851Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:41:46.249919Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:41:46.254356Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-04-10T11:41:46.254488Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:41:46.255882Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:41:46.255934Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-10T11:41:46.256040Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-10T11:41:46.256131Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-04-10T11:41:46.256268Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:41:46.258543Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1167:2976], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1169:2978] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T11:41:46.258612Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T11:41:46.259718Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:935:2761]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-04-10T11:41:46.259786Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-10T11:41:46.259880Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-04-10T11:41:46.259981Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-04-10T11:41:46.273896Z node 13 
:TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1167:2976], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:1167:2976] ServerId: [13:1169:2978] } 2025-04-10T11:41:46.274033Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-10T11:41:47.158117Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:41:47.158253Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-10T11:41:47.158611Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1, with schema changes 2025-04-10T11:41:47.158859Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3060 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T11:41:47.160551Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1202:3011], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:41:47.160657Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:41:47.160769Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1201:3010], serverId# [13:1202:3011], sessionId# [0:0:0] 2025-04-10T11:41:47.160980Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1200:3009], Recipient [13:935:2761]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-10T11:41:47.161226Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1200:3009], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-10T11:41:47.165639Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.452024Z 2025-04-10T11:41:47.165732Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-10T11:41:47.165848Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-04-10T11:41:47.176283Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-10T11:41:47.178765Z node 13 :TX_DATASHARD TRACE: StateWork, 
received event# 268828683, Sender [13:932:2759], Recipient [13:935:2761]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:41:47.180473Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-10T11:41:47.180639Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, with schema changes, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-10T11:41:47.181390Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1207:3015], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-10T11:41:47.181529Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-10T11:41:47.181680Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-04-10T11:41:47.193565Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.452024Z 2025-04-10T11:41:47.193688Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-10T11:41:47.193732Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-04-10T11:41:47.193794Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1200:3009]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-10T11:41:47.196128Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-10T11:41:47.196530Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:932:2759], Recipient [13:935:2761]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-10T11:41:47.209841Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-10T11:41:47.210043Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-10T11:41:47.210482Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1214:3021], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-10T11:41:47.210562Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-10T11:41:47.210655Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-04-10T11:41:47.223252Z node 13 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186224037888, tableId# 2, last 
full compaction# 1970-01-01T00:00:30.452024Z 2025-04-10T11:41:48.173612Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-10T11:41:48.173777Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-10T11:41:48.173962Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-04-10T11:41:48.174085Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:41:48.174180Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-10T11:41:48.174267Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-10T11:41:48.174334Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-10T11:41:48.174581Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-10T11:41:48.174639Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-10T11:41:48.174848Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1 2025-04-10T11:41:48.175031Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1794 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3233, MsgBus: 1742 2025-04-10T11:34:32.186905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646481754265754:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:32.186974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001742/r3tmp/tmpS8YU65/pdisk_1.dat 2025-04-10T11:34:32.561072Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-10T11:34:32.600223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:34:32.600482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3233, node 1 2025-04-10T11:34:32.606640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:34:32.670729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:34:32.670760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:34:32.670766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:34:32.670888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1742 TClient is connected to server localhost:1742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:34:33.265479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.298745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:34:33.317026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.501400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.661361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:33.732481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:35.607131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646494639169413:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:35.607294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:35.920932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:34:35.967688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:34:35.997053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:34:36.077901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:34:36.111013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:34:36.153683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:34:36.230532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646498934137225:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:36.230620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:36.230779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491646498934137230:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:34:36.235238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:34:36.248519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491646498934137232:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:34:36.320096Z node 1 :TX_PROXY ERROR: Actor# [1:7491646498934137287:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:34:37.189508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491646481754265754:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:34:37.189592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:34:37.341265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:34:47.561316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:34:47.561368Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 31724, MsgBus: 21358 2025-04-10T11:38:22.074972Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647471966951154:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:38:22.133372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001742/r3tmp/tmpkgsBJp/pdisk_1.dat 2025-04-10T11:38:22.279412Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:22.308890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:22.309056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:22.310787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31724, node 2 2025-04-10T11:38:22.390061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:38:22.390085Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:38:22.390092Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:38:22.390287Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21358 TClient is connected to server localhost:21358 WaitRootIsUp 'Root'... 
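The repeated NOT_FOUND warnings in these logs come from the workload service probing the `default` resource pool before its backing path /Root/.metadata/workload_manager/pools/default has been auto-created; once TPoolCreatorActor's transaction completes, the later "path exist, request accepts it" message is the expected doublecheck outcome rather than a failure. A pool can also be declared explicitly; a hedged sketch (CREATE RESOURCE POOL and these settings exist in recent YDB releases, but the limit values here are illustrative, not from this log):

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );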
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:38:23.069872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:38:23.096180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:38:23.247867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 wai ... 37.293863Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491648308799944775:2511], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [3:7491648304504977432:2511]Got BAD REQUEST for table `/Root/Test`. ShardID=72075186224037914, Sink=[3:7491648308799944775:2511].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } 2025-04-10T11:41:37.299704Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7491648308799944759:2511], SessionActorId: [3:7491648304504977432:2511], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 . sessionActorId=[3:7491648304504977432:2511]. isRollback=0 2025-04-10T11:41:37.300646Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTAwMjcyMDQtYzhhMTk2ZmYtZmQwNTgxM2QtYTdlMDNhYTI=, ActorId: [3:7491648304504977432:2511], ActorState: ExecuteState, TraceId: 01jrfr2tf273f5jyf9z64n2dd1, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [3:7491648308799944760:2511] from: [3:7491648308799944759:2511] 2025-04-10T11:41:37.300774Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7491648308799944760:2511] TxId: 281474976710671. Ctx: { TraceId: 01jrfr2tf273f5jyf9z64n2dd1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTAwMjcyMDQtYzhhMTk2ZmYtZmQwNTgxM2QtYTdlMDNhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/Test`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } } 2025-04-10T11:41:37.301036Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTAwMjcyMDQtYzhhMTk2ZmYtZmQwNTgxM2QtYTdlMDNhYTI=, ActorId: [3:7491648304504977432:2511], ActorState: ExecuteState, TraceId: 01jrfr2tf273f5jyf9z64n2dd1, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 Trying to start YDB, gRPC: 8715, MsgBus: 25558 2025-04-10T11:41:39.066711Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491648317318775134:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:41:39.067937Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001742/r3tmp/tmpvVPRAd/pdisk_1.dat 2025-04-10T11:41:39.238475Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:41:39.281417Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:41:39.281537Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:41:39.287775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8715, node 4 2025-04-10T11:41:39.498326Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:41:39.498354Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:41:39.498365Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:41:39.498564Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25558 TClient is connected to server localhost:25558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:41:40.361721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:41:40.371975Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T11:41:40.389449Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
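The BAD_REQUEST above is the datashard's per-row key-size guard: the serialized primary key came to 2097156 bytes, above the allowed threshold of 1049600 bytes (1025 KiB), so the write is rejected at propose time with code 2017. A hedged repro sketch of what the TooBigKey test does (column names are illustrative; the actual /Root/Test schema is not shown in this log):

    -- bind a roughly 2 MB Utf8 value as the primary-key column from the client
    DECLARE $big_key AS Utf8;
    UPSERT INTO `/Root/Test` (Key, Value) VALUES ($big_key, "v");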
2025-04-10T11:41:40.490374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:41:40.777470Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:41:40.881096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:41:44.059388Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7491648317318775134:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:41:44.059526Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T11:41:45.857586Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491648343088580721:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:45.857745Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:45.968172Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.021933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.082833Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.136386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.211053Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.302589Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T11:41:46.420141Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491648347383548554:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:46.420277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:46.420553Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491648347383548559:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:41:46.428591Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T11:41:46.459231Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491648347383548561:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T11:41:46.560775Z node 4 :TX_PROXY ERROR: Actor# [4:7491648347383548617:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:41:49.249058Z node 4 :TX_DATASHARD ERROR: Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2025-04-10T11:41:49.249246Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2025-04-10T11:41:49.249458Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7491648360268450826:2498] TxId: 281474976710671. Ctx: { TraceId: 01jrfr360442d8kcgx4jdhnbmv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTdkODk4ZWMtNzM5MDU2NWYtYmNkNzNlYmYtODVkNzhlMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2025-04-10T11:41:49.250056Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTdkODk4ZWMtNzM5MDU2NWYtYmNkNzNlYmYtODVkNzhlMDg=, ActorId: [4:7491648355973483472:2498], ActorState: ExecuteState, TraceId: 01jrfr360442d8kcgx4jdhnbmv, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] |98.9%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column |98.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> KafkaProtocol::NativeKafkaBalanceScenario [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-04-10 11:41:51,713 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:41:51,960 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid       rss    ref    pdirt
1734948   46.5M  45.6M  23.6M  test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001117/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args
1735139   1.8G   1.7G   1.5G   └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2gt4/001117/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/ch
Test command err: 2025-04-10T11:31:53.330204Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:31:53.460772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:31:53.490018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:31:53.490452Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:31:53.499850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:31:53.500092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:31:53.500333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:31:53.500517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:31:53.500635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:31:53.500748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:31:53.500887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:31:53.501007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:31:53.501147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:31:53.501278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:31:53.501407Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:31:53.501520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:31:53.536045Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:31:53.536447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:31:53.536512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:31:53.536727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:53.536941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:31:53.537033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:31:53.537114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:31:53.537233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:31:53.537305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:31:53.537409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:31:53.537448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:31:53.537652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:31:53.537726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:31:53.537772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:31:53.537805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:31:53.537906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:31:53.537969Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:31:53.538026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:31:53.538062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:31:53.538179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:31:53.538236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:31:53.538271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:31:53.538335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:31:53.538384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:31:53.538421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:31:53.538860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-10T11:31:53.538949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-04-10T11:31:53.539074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-04-10T11:31:53.539204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=58; 2025-04-10T11:31:53.539418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:31:53.539484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:31:53.539525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:31:53.539737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:31:53.539789Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:31:53.539825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:31:53.540002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:31:53.540051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:31:53.540097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:31:53.540337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:31:53.540388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:31:53.540420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:31:53.540554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:31:53.540595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:31:53.540647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
.129614Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:200:2888:0]; 2025-04-10T11:41:48.129696Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:201:2784:0]; 2025-04-10T11:41:48.129777Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:202:2784:0]; 2025-04-10T11:41:48.129855Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:203:2864:0]; 2025-04-10T11:41:48.129916Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:204:2784:0]; 2025-04-10T11:41:48.129993Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:205:2784:0]; 2025-04-10T11:41:48.130075Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:206:2800:0]; 2025-04-10T11:41:48.130256Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:207:2784:0]; 2025-04-10T11:41:48.130343Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:208:2736:0]; 2025-04-10T11:41:48.130415Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:209:2728:0]; 2025-04-10T11:41:48.130494Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:210:7880:0]; 2025-04-10T11:41:48.130574Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:211:2880:0]; 2025-04-10T11:41:48.130646Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:212:2792:0]; 2025-04-10T11:41:48.130725Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:213:2792:0]; 2025-04-10T11:41:48.130819Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:214:2872:0]; 2025-04-10T11:41:48.130904Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:215:2792:0]; 2025-04-10T11:41:48.130990Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:216:2792:0]; 2025-04-10T11:41:48.131062Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:217:2880:0]; 2025-04-10T11:41:48.131139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:218:2792:0]; 2025-04-10T11:41:48.131221Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:219:2792:0]; 2025-04-10T11:41:48.131289Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:220:2824:0]; 2025-04-10T11:41:48.131372Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:221:2792:0]; 2025-04-10T11:41:48.131447Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:222:2792:0]; 2025-04-10T11:41:48.131523Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:223:2792:0]; 2025-04-10T11:41:48.131600Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:224:2816:0]; 2025-04-10T11:41:48.131686Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:225:2792:0]; 2025-04-10T11:41:48.131770Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:226:2792:0]; 
2025-04-10T11:41:48.131845Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:227:2880:0]; 2025-04-10T11:41:48.131914Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:228:2792:0]; 2025-04-10T11:41:48.131994Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:229:2792:0]; 2025-04-10T11:41:48.132073Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:230:2888:0]; 2025-04-10T11:41:48.132169Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:231:2792:0]; 2025-04-10T11:41:48.132247Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:232:2792:0]; 2025-04-10T11:41:48.132315Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:233:2888:0]; 2025-04-10T11:41:48.132396Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:234:2784:0]; 2025-04-10T11:41:48.132473Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:235:2784:0]; 2025-04-10T11:41:48.132539Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:236:2840:0]; 2025-04-10T11:41:48.132610Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:237:2784:0]; 2025-04-10T11:41:48.132684Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:238:2784:0]; 2025-04-10T11:41:48.132755Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:239:2784:0]; 2025-04-10T11:41:48.132827Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:240:2800:0]; 2025-04-10T11:41:48.132907Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:241:2784:0]; 2025-04-10T11:41:48.132983Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:242:2792:0]; 2025-04-10T11:41:48.133054Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:243:2856:0]; 2025-04-10T11:41:48.133121Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:244:2784:0]; 2025-04-10T11:41:48.133196Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:245:2784:0]; 2025-04-10T11:41:48.133269Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:246:2792:0]; 2025-04-10T11:41:48.133347Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:247:2784:0]; 2025-04-10T11:41:48.133425Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:248:2776:0]; 2025-04-10T11:41:48.133489Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:249:2832:0]; 2025-04-10T11:41:48.133563Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:250:2744:0]; 2025-04-10T11:41:48.133639Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:251:2736:0]; 2025-04-10T11:41:48.133706Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:252:7888:0]; 2025-04-10T11:41:48.140048Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:1:2784:0]; 2025-04-10T11:41:48.140205Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:2:2808:0]; 2025-04-10T11:41:48.140291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:3:2776:0]; 2025-04-10T11:41:48.140418Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:4:2736:0]; 2025-04-10T11:41:48.140485Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:5:9120:0]; 2025-04-10T11:41:48.140564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:6:2784:0]; 2025-04-10T11:41:48.140642Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:7:2808:0]; 2025-04-10T11:41:48.140712Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:8:2776:0]; 2025-04-10T11:41:48.140792Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:9:2736:0]; 2025-04-10T11:41:48.140887Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:10:9120:0]; 2025-04-10T11:41:49.728597Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:41:49.729605Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[244] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:41:49.780800Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:101 Blob count: 727 2025-04-10T11:41:49.788768Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:41:50.452306Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:41:50.453282Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[246] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:41:50.515343Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:102 Blob count: 727 2025-04-10T11:41:50.523404Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:41:50.544801Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:41:50.545198Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[248] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:41:50.548991Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:103 Blob count: 24 2025-04-10T11:41:50.549531Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at 
tablet 9437184
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001117/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001117/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk27/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD]
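The two chained tracebacks above come from yatest's two-layer timeout handling: an inner wait_for poll raises TimeoutError once the 600-second deadline expires, and the outer wait converts it into ExecutionTimeoutError for the test runner. Below is a minimal Python sketch of that wrap-and-rethrow pattern; the names mirror the traceback, but the bodies are illustrative assumptions, not yatest's actual source.

import subprocess
import time


class TimeoutError(Exception):
    # Mirrors yatest.common.process.TimeoutError (shadows the builtin on purpose).
    pass


class ExecutionTimeoutError(Exception):
    # The runner-level error seen at the bottom of the second traceback.
    pass


def wait_for(proc, timeout, poll_interval=0.5):
    # Inner layer: poll until the process exits or the deadline passes.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if proc.poll() is not None:
            return proc.returncode
        time.sleep(poll_interval)
    raise TimeoutError("%d second(s) wait timeout has expired: Command %r "
                       "stopped by %d seconds timeout" % (timeout, proc.args, timeout))


def wait(proc, timeout):
    # Outer layer: re-raise as the runner-level error, producing the
    # "During handling of the above exception" chain seen in the log.
    try:
        return wait_for(proc, timeout)
    except TimeoutError as e:
        raise ExecutionTimeoutError((str(e),), {})


if __name__ == "__main__":
    p = subprocess.Popen(["sleep", "10"])
    try:
        wait(p, timeout=1)
    except ExecutionTimeoutError as err:
        p.kill()
        print("timed out:", err)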
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T11:38:30.507435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T11:38:30.507636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/0009f6/r3tmp/tmpQ4VBXR/pdisk_1.dat 2025-04-10T11:38:31.104442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T11:38:31.171582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:38:31.228714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:38:31.229522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:38:31.242494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:38:31.343167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T11:38:31.407168Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:681:2579] 2025-04-10T11:38:31.407587Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:31.472007Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:31.472277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:31.474414Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-10T11:38:31.474501Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-10T11:38:31.474558Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-10T11:38:31.476977Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:31.477534Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-04-10T11:38:31.477773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:31.488312Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:31.488445Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2579] in generation 1 2025-04-10T11:38:31.489364Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:31.489526Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:31.491170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-10T11:38:31.491238Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-10T11:38:31.491317Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-10T11:38:31.491670Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:31.492037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:31.492096Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:725:2581] in generation 1 2025-04-10T11:38:31.494667Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:690:2583] 2025-04-10T11:38:31.494897Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:31.507387Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2585] 2025-04-10T11:38:31.507647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:38:31.519491Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:31.519645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:31.521362Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-10T11:38:31.521459Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-10T11:38:31.521528Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-10T11:38:31.521952Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:31.522212Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:31.522292Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:751:2583] in generation 1 2025-04-10T11:38:31.522763Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:38:31.522876Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-10T11:38:31.524421Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-10T11:38:31.524512Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-10T11:38:31.524561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-10T11:38:31.524895Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-10T11:38:31.525041Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-10T11:38:31.525134Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:752:2585] in generation 1 2025-04-10T11:38:31.536413Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:31.578275Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-10T11:38:31.579454Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:31.579621Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:757:2620] 2025-04-10T11:38:31.579663Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-10T11:38:31.579725Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-10T11:38:31.579766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:38:31.580848Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:31.580922Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-10T11:38:31.580996Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:31.581067Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:758:2621] 
2025-04-10T11:38:31.581094Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-10T11:38:31.581139Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-10T11:38:31.581169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:38:31.581606Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:31.581639Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-10T11:38:31.581697Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:31.581758Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:759:2622] 2025-04-10T11:38:31.581785Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-10T11:38:31.581826Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-10T11:38:31.581855Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-10T11:38:31.582109Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-10T11:38:31.582259Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-10T11:38:31.582634Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-10T11:38:31.582673Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-10T11:38:31.582742Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-10T11:38:31.582800Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:760:2623] 2025-04-10T11:38:31.582822Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-10T11:38:31.582864Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-10T11:38:31.582887Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-10T11:38:31.583454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-10T11:38:31.583519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:31.583580Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-10T11:38:31.583632Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:38:31.583689Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-10T11:38:31.583758Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-10T11:38:31.583899Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:714:2597], sessionId# [0:0:0] 2025-04-10T11:38:31.583956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-10T11:38:31.583996Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-10T11:38:31.584024Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-10T11:38:31.584060Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:38:31.584106Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-10T11:38:31.584158Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-10T11:38:31.584380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-10T11:38:31.584810Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-10T11:38:31.584956Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-10T11:38:31.585553Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:715:2598], sessionId# [0:0:0] 2025-04-10T11:38:31.585608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-10T11:38:31.585637Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active ... : Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2026} 2025-04-10T11:41:54.849990Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1015:2805], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T11:41:54.850035Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T11:41:54.850076Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:1012:2802], serverId# [6:1015:2805], sessionId# [0:0:0] 2025-04-10T11:41:54.850185Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2026} 2025-04-10T11:41:54.850508Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:757:2635], Recipient [6:666:2570]: {TEvReadSet step# 2026 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:41:54.850551Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:41:54.850612Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-04-10T11:41:54.850728Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2026 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-10T11:41:54.850989Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-10T11:41:54.851053Z node 6 :TX_DATASHARD TRACE: Complete execution for [2026:281474976715664] at 72075186224037888 on unit CompleteWrite 2025-04-10T11:41:54.851144Z node 6 :TX_DATASHARD DEBUG: Complete write [2026 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [6:1000:2754] 2025-04-10T11:41:54.851221Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:41:54.851350Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-10T11:41:54.851438Z node 6 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 
{TEvReadSet step# 2026 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-04-10T11:41:54.851558Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:666:2570], Recipient [6:757:2635]: {TEvReadSet step# 2026 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-10T11:41:54.851591Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-10T11:41:54.851625Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-04-10T11:41:54.851680Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2026 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-10T11:41:54.851735Z node 6 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-04-10T11:41:54.851977Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[6:1000:2754].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-10T11:41:54.852195Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[6:942:2754]. isRollback=0 2025-04-10T11:41:54.853154Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YmMxZmMzYzktYmNmNDRlODItZWVlZTQzNTUtNDdiYjdhMDE=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01jrfr3bp66e620dfegyqsa1gc, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [6:1001:2754] from: [6:1000:2754] 2025-04-10T11:41:54.853422Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [6:1000:2754], Recipient [6:666:2570]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-10T11:41:54.853476Z node 6 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-10T11:41:54.853713Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [6:666:2570], Recipient [6:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:41:54.853765Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-10T11:41:54.853857Z node 6 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-04-10T11:41:54.854016Z node 6 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-04-10T11:41:54.854134Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-10T11:41:54.854286Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-04-10T11:41:54.854356Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:41:54.854410Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-04-10T11:41:54.854464Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-10T11:41:54.854507Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-10T11:41:54.854559Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2026/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-04-10T11:41:54.854638Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-10T11:41:54.854696Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:41:54.854728Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-10T11:41:54.854755Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-04-10T11:41:54.854782Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-04-10T11:41:54.854822Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-04-10T11:41:54.854932Z node 6 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715662 DataShard: 
72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-04-10T11:41:54.855016Z node 6 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-04-10T11:41:54.855093Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-10T11:41:54.855180Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-10T11:41:54.855216Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-04-10T11:41:54.855270Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-04-10T11:41:54.855320Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:41:54.855355Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-04-10T11:41:54.855391Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-04-10T11:41:54.855459Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-10T11:41:54.855506Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-10T11:41:54.855559Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-10T11:41:54.855605Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-10T11:41:54.855639Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-10T11:41:54.855709Z node 6 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-04-10T11:41:54.855762Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-04-10T11:41:54.855827Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-04-10T11:41:54.855922Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-10T11:41:54.856117Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-10T11:41:54.856155Z node 6 :TX_DATASHARD TRACE: Complete execution for [2026:281474976715664] at 72075186224037889 on unit CompleteWrite 2025-04-10T11:41:54.856215Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-10T11:41:54.856279Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-10T11:41:54.856445Z node 6 :KQP_EXECUTER ERROR: ActorId: [6:1001:2754] TxId: 281474976715664. Ctx: { TraceId: 01jrfr3bp66e620dfegyqsa1gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=YmMxZmMzYzktYmNmNDRlODItZWVlZTQzNTUtNDdiYjdhMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-10T11:41:54.857016Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YmMxZmMzYzktYmNmNDRlODItZWVlZTQzNTUtNDdiYjdhMDE=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01jrfr3bp66e620dfegyqsa1gc, Create QueryResponse for error on request, msg: 2025-04-10T11:41:54.858033Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [6:61:2108], Recipient [6:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND 2025-04-10T11:41:54.858421Z node 6 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715664; 2025-04-10T11:41:54.858706Z node 6 :TX_DATASHARD ERROR: Complete volatile write [2026 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-04-10T11:41:54.858835Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest |99.0%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt [GOOD] >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnIncrementingInt >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 2025-04-10 11:42:03,236 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:42:03,548 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
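The ABORTED status above, with "Transaction locks invalidated" and issue code 2001, is the expected outcome of optimistic locking: a concurrent commit broke the transaction's read locks, so the datashard rejects the commit and the client is expected to rerun the whole transaction. A minimal sketch of such a client-side retry loop follows; run_with_retries, do_tx, and TxAborted are hypothetical helpers for illustration, not a real SDK API.

import random
import time

LOCKS_INVALIDATED = 2001  # issue code from the log above


class TxAborted(Exception):
    # Hypothetical client-side exception carrying the server issue code.
    def __init__(self, issue_code):
        super().__init__("transaction aborted, issue code %d" % issue_code)
        self.issue_code = issue_code


def run_with_retries(do_tx, max_attempts=5):
    # Rerun the whole transaction body when the commit is rejected with
    # "locks invalidated"; any other abort reason propagates immediately.
    for attempt in range(max_attempts):
        try:
            return do_tx()
        except TxAborted as e:
            if e.issue_code != LOCKS_INVALIDATED or attempt == max_attempts - 1:
                raise
            # Randomized exponential backoff so competing writers
            # are less likely to collide again on the next attempt.
            time.sleep(random.uniform(0, 0.05 * 2 ** attempt))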
Process tree before termination:
pid       rss    ref    pdirt
1736504   46.5M  46.2M  23.7M  test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001050/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args
1736680   1.8G   1.7G   1.5G   └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2gt4/001050/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/ch
Test command err: 2025-04-10T11:32:04.844749Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T11:32:04.979745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T11:32:05.003613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T11:32:05.003995Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T11:32:05.013849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T11:32:05.014098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T11:32:05.014406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T11:32:05.014572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T11:32:05.014693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T11:32:05.014819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T11:32:05.014953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T11:32:05.015071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T11:32:05.015177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T11:32:05.015310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.015429Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T11:32:05.015528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T11:32:05.047167Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T11:32:05.047451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T11:32:05.047514Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T11:32:05.047722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:05.047906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T11:32:05.048010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T11:32:05.048060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T11:32:05.048159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T11:32:05.048235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T11:32:05.048297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T11:32:05.048340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T11:32:05.048574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T11:32:05.048651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T11:32:05.048696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T11:32:05.048729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T11:32:05.048824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T11:32:05.048891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T11:32:05.048952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T11:32:05.048986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T11:32:05.049079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T11:32:05.049126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T11:32:05.049161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T11:32:05.049216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T11:32:05.049261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T11:32:05.049292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T11:32:05.049772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=70; 2025-04-10T11:32:05.049876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-10T11:32:05.049988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-04-10T11:32:05.050096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-10T11:32:05.050368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T11:32:05.050453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T11:32:05.050505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T11:32:05.050697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T11:32:05.050747Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.050778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T11:32:05.050940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T11:32:05.050994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T11:32:05.051028Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T11:32:05.051233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T11:32:05.051296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T11:32:05.051346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T11:32:05.051481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T11:32:05.051530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T11:32:05.051601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
.520947Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:200:2888:0]; 2025-04-10T11:41:59.521026Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:201:2784:0]; 2025-04-10T11:41:59.521129Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:202:2784:0]; 2025-04-10T11:41:59.521216Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:203:2864:0]; 2025-04-10T11:41:59.521286Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:204:2784:0]; 2025-04-10T11:41:59.521365Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:205:2784:0]; 2025-04-10T11:41:59.521445Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:206:2800:0]; 2025-04-10T11:41:59.521512Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:207:2784:0]; 2025-04-10T11:41:59.521596Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:208:2736:0]; 2025-04-10T11:41:59.521689Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:209:2728:0]; 2025-04-10T11:41:59.521784Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:210:7880:0]; 2025-04-10T11:41:59.521876Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:211:2880:0]; 2025-04-10T11:41:59.521941Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:212:2792:0]; 2025-04-10T11:41:59.522019Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:213:2792:0]; 2025-04-10T11:41:59.522090Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:214:2872:0]; 2025-04-10T11:41:59.522177Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:215:2792:0]; 2025-04-10T11:41:59.522262Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:216:2792:0]; 2025-04-10T11:41:59.522347Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:217:2880:0]; 2025-04-10T11:41:59.522427Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:218:2792:0]; 2025-04-10T11:41:59.522498Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:219:2792:0]; 2025-04-10T11:41:59.522560Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:220:2824:0]; 2025-04-10T11:41:59.522640Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:221:2792:0]; 2025-04-10T11:41:59.522713Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:222:2792:0]; 2025-04-10T11:41:59.522780Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:223:2792:0]; 2025-04-10T11:41:59.522862Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:224:2816:0]; 2025-04-10T11:41:59.522935Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:225:2792:0]; 2025-04-10T11:41:59.522999Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:226:2792:0]; 
2025-04-10T11:41:59.523072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:227:2880:0]; 2025-04-10T11:41:59.523165Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:228:2792:0]; 2025-04-10T11:41:59.523224Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:229:2792:0]; 2025-04-10T11:41:59.523304Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:230:2888:0]; 2025-04-10T11:41:59.523381Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:231:2792:0]; 2025-04-10T11:41:59.523441Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:232:2792:0]; 2025-04-10T11:41:59.523513Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:233:2888:0]; 2025-04-10T11:41:59.523605Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:234:2784:0]; 2025-04-10T11:41:59.523686Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:235:2784:0]; 2025-04-10T11:41:59.523758Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:236:2840:0]; 2025-04-10T11:41:59.523819Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:237:2784:0]; 2025-04-10T11:41:59.523924Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:238:2784:0]; 2025-04-10T11:41:59.523997Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:239:2784:0]; 2025-04-10T11:41:59.524063Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:240:2800:0]; 2025-04-10T11:41:59.524143Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:241:2784:0]; 2025-04-10T11:41:59.524218Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:242:2792:0]; 2025-04-10T11:41:59.524297Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:243:2856:0]; 2025-04-10T11:41:59.524369Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:244:2784:0]; 2025-04-10T11:41:59.524456Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:245:2784:0]; 2025-04-10T11:41:59.524535Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:246:2792:0]; 2025-04-10T11:41:59.524610Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:247:2784:0]; 2025-04-10T11:41:59.524676Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:248:2776:0]; 2025-04-10T11:41:59.524776Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:249:2832:0]; 2025-04-10T11:41:59.524870Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:250:2744:0]; 2025-04-10T11:41:59.524956Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:251:2736:0]; 2025-04-10T11:41:59.525031Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:82:255:252:7888:0]; 2025-04-10T11:41:59.535588Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:1:2784:0]; 2025-04-10T11:41:59.535773Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:2:2808:0]; 2025-04-10T11:41:59.535846Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:3:2776:0]; 2025-04-10T11:41:59.535905Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:4:2736:0]; 2025-04-10T11:41:59.535959Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:5:9120:0]; 2025-04-10T11:41:59.536026Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:6:2784:0]; 2025-04-10T11:41:59.536125Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:7:2808:0]; 2025-04-10T11:41:59.536194Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:8:2776:0]; 2025-04-10T11:41:59.536253Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:9:2736:0]; 2025-04-10T11:41:59.536323Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:83:255:10:9120:0]; 2025-04-10T11:42:01.093128Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:42:01.094236Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[244] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:42:01.155547Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:101 Blob count: 727 2025-04-10T11:42:01.164266Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:42:01.980587Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:42:01.981703Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[246] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:42:02.040447Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:102 Blob count: 727 2025-04-10T11:42:02.049250Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T11:42:02.069302Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-10T11:42:02.069703Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[248] (CS::INDEXATION) apply at tablet 9437184 2025-04-10T11:42:02.073571Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:103 Blob count: 24 2025-04-10T11:42:02.074127Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3438184;raw_bytes=5197030;count=2;records=48570} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=103328888;raw_bytes=152958540;count=54;records=1481430} inactive {blob_bytes=196464120;raw_bytes=286388607;count=100;records=2801221} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at 
tablet 9437184
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001050/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001050/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops
[GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter |99.0%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnIncrementingInt [GOOD] >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.0%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/fq/restarts/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure |99.1%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> 
test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:47.370969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:47.371113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:47.371184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:47.371243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:47.371308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:47.371356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:47.371468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:47.371565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:47.372029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:47.460407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:47.460477Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:47.475704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:47.475994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:47.476162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:47.484298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:47.484646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:47.485357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.486020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:47.495457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.497032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:47.497107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.497305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:47.497379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:47.497430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:47.497600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for 
TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:47.507085Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:47.678069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:47.678362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.678607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:47.678888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:47.678955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.681369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.681519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:47.681738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.681797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:47.681860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:47.681917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:47.684120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.684189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:47.684231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:47.687766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.687833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.687890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.687964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.692220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:47.694444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:47.694643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:47.695762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:47.695918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:47.696018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.696347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:47.696429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:47.696619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:47.696705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:47.699273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:47.699323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:47.699488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:47.699531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:47.699779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:47.699849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:47.699958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.699995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:47.700042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:47.700094Z no ... 
1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:42:46.509260Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:42:46.509573Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 336us result status StatusSuccess 2025-04-10T11:42:46.510484Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:42:46.522469Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1092:2881] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:42:46.522599Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1052:2881] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:42:46.522765Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1092:2881] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285366452389 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285366452389 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744285366452389 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:42:46.525653Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1092:2881] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-10T11:42:46.525768Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1052:2881] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> 
test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasNotFound >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasNotFound [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasFoundButEpochIsOlder >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfTxnProducerWasFoundButEpochIsOlder [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfNewEpochIsLessButProducerIdIsNew >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondOkIfNewEpochIsLessButProducerIdIsNew [GOOD] >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondWithProducerFencedErrorIfNewEpochIsLessAndProducerIdIsTheSame >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> KafkaTransactionCoordinatorActor::OnProducerInitializedEvent_ShouldRespondWithProducerFencedErrorIfNewEpochIsLessAndProducerIdIsTheSame [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfThereIsNoTransactionalIdInState >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfThereIsNoTransactionalIdInState [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerEpochExpired >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerEpochExpired [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToTheRelevantTransactionalIdActorIfProducerIsValid >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToTheRelevantTransactionalIdActorIfProducerIsValid [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToExistingTransactionActorIfProducerIsValid |99.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> KafkaTransactionCoordinatorActor::OnAnyTransactionalRequest_ShouldForwardItToExistingTransactionActorIfProducerIsValid [GOOD] >> KafkaTransactionCoordinatorActor::OnAddPartitions_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized >> test_clickbench.py::TestClickbench::test_clickbench[10] >> KafkaTransactionCoordinatorActor::OnAddPartitions_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized [GOOD] >> KafkaTransactionCoordinatorActor::OnTxnOffsetCommit_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized >> KafkaTransactionCoordinatorActor::OnTxnOffsetCommit_ShouldSendBack_PRODUCER_FENCED_ErrorIfProducerIsNotInitialized [GOOD] >> KafkaTransactionCoordinatorActor::AfterSecondInitializationOldTxnRequestsShouldBeFenced |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> KafkaTransactionCoordinatorActor::AfterSecondInitializationOldTxnRequestsShouldBeFenced [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> 
TAsyncIndexTests::MergeBothWithReboots[TabletReboots] 2025-04-10 11:43:30,936 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:43:31,060 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1748299 46.3M 43.7M 23.3M test_tool run_ut @/home/runner/.ya/build/build_root/2gt4/001f28/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.ar 1748441 741M 700M 441M └─ ydb-core-tx-schemeshard-ut_index --trace-path-append /home/runner/.ya/build/build_root/2gt4/001f28/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_st Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:33:32.756172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:33:32.756278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:32.756319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:33:32.756353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:33:32.756403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:33:32.756436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:33:32.756522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:33:32.756644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:33:32.757069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:33:32.857179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" 
AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:33:32.857279Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:33:32.876610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:33:32.876939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:33:32.877141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:33:32.886114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:33:32.886535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:33:32.887294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:32.888090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:33:32.891948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:32.893444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:32.893521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:32.893713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:33:32.893786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:32.893855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:33:32.894014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:33:32.902732Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:33:33.060696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:33:33.060954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.061205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:33:33.061498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-10T11:33:33.061553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.064653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:33.064805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:33:33.065113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.065176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:33:33.065233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:33:33.065272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:33:33.067742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.067815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:33:33.067853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:33:33.070008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.070071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.070119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:33.070211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:33:33.074260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:33:33.076608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:33:33.076821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:33:33.077937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:33:33.078113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-10T11:33:33.078190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:33.078485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:33:33.078567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:33:33.078748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:33:33.078826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:33:33.083439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:33:33.083496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:33:33.083700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:33:33.083742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:33:33.083997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:33:33.084060Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:33:33.084164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:33.084202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:33:33.084246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:33:33.084298Z no ... 
00 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:43:30.945660Z node 199 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:43:30.945977Z node 199 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 362us result status StatusSuccess 2025-04-10T11:43:30.946932Z node 199 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001f28/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2gt4/001f28/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_result_limits.py::TestResultLimits::test_many_rows >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> PublishKafkaEndpoints::HaveEndpointInLookup >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> 
test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> PublishKafkaEndpoints::HaveEndpointInLookup [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] 
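
Aside: the KafkaTransactionCoordinatorActor cases in this run (OnProducerInitializedEvent_*, OnAnyTransactionalRequest_*) together spell out a producer-epoch fencing table: an unknown transactional id is registered and acknowledged; an init carrying a newer epoch, or a brand-new producer id even at a lower epoch, is accepted; only reusing the stored producer id with an older epoch draws PRODUCER_FENCED, as does any transactional request whose id is missing from state or whose epoch has expired. A minimal Python sketch of that decision table as implied by the test names; the helper names are hypothetical and this is not YDB's actual implementation:

# Hypothetical sketch of the fencing rules implied by the
# KafkaTransactionCoordinatorActor test names; not YDB's actual code.
from dataclasses import dataclass

OK, PRODUCER_FENCED = "OK", "PRODUCER_FENCED"

@dataclass
class ProducerState:
    producer_id: int
    epoch: int

def on_producer_initialized(state, txn_id, producer_id, epoch):
    known = state.get(txn_id)
    # Unknown txn producer, a newer epoch, or a brand-new producer id
    # (even with a lower epoch) -> respond OK and remember the producer.
    if known is None or epoch >= known.epoch or producer_id != known.producer_id:
        state[txn_id] = ProducerState(producer_id, epoch)
        return OK
    # Same producer id with an older epoch -> the caller is fenced.
    return PRODUCER_FENCED

def on_transactional_request(state, txn_id, producer_id, epoch):
    known = state.get(txn_id)
    # No transactional id in state, or a stale/expired epoch -> fence.
    if known is None or producer_id != known.producer_id or epoch < known.epoch:
        return PRODUCER_FENCED
    return OK  # forward to the transaction actor for this txn_id

For example, with state = {}, on_producer_initialized(state, "txn", 1, 2) returns OK; a later on_transactional_request(state, "txn", 1, 1) then returns PRODUCER_FENCED, which matches the AfterSecondInitializationOldTxnRequestsShouldBeFenced case above.
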
>> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_public_api.py::TestBadSession::test_simple >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> SystemView::AuthGroupMembers |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata >> SystemView::AuthGroupMembers [GOOD] >> SystemView::AuthEffectivePermissions >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] >> 
test_public_api.py::TestBadSession::test_simple [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithError >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> SystemView::AuthEffectivePermissions [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-04-10T11:35:30.219003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491646733018207468:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:30.219059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/001ee2/r3tmp/tmpakmecU/pdisk_1.dat 2025-04-10T11:35:30.660337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:30.660461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:30.668160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:30.670984Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15092, node 1 2025-04-10T11:35:30.861690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T11:35:30.861714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T11:35:30.861723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T11:35:30.861852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
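
Aside: the TAsyncIndexTests::MergeBothWithReboots report above is a harness-level timeout rather than a test assertion. Per its traceback, run_test.py calls res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback), wait() delegates to wait_for(), and the resulting TimeoutError is re-raised as yatest.common.process.ExecutionTimeoutError once the 600-second budget is spent; the pid/rss table comes from the wrapper dumping the process tree before termination. A rough stand-alone sketch of that wrap-and-re-raise pattern, using generic subprocess code and invented names (run_with_timeout, WrapperTimeout), not yatest's real internals:

# Rough sketch of the wait/timeout wrapping visible in the traceback above.
# Names here are illustrative; this is not yatest's API.
import subprocess
import time

class WrapperTimeout(Exception):
    pass

def run_with_timeout(cmd, timeout_s):
    proc = subprocess.Popen(cmd)
    deadline = time.monotonic() + timeout_s
    while proc.poll() is None:          # poll instead of blocking on wait()
        if time.monotonic() > deadline:
            proc.kill()                 # terminate the overrun child
            # Re-raise as a harness-level error, analogous to
            # ExecutionTimeoutError in the report above.
            raise WrapperTimeout(
                f"{timeout_s} second(s) wait timeout has expired: {cmd!r}")
        time.sleep(0.1)
    return proc.returncode
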
2025-04-10T11:35:31.265637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.304852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.324280Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491646734497359081:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.324317Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-10T11:35:31.370653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.370738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.374300Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-10T11:35:31.375499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.434451Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-10T11:35:31.434521Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-10T11:35:31.504842Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-10T11:35:31.507131Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7491646734497358880:2075], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-10T11:35:31.507657Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-10T11:35:31.507737Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-10T11:35:31.510378Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-10T11:35:31.510445Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-10T11:35:31.510536Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-10T11:35:31.510582Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-10T11:35:31.510645Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-10T11:35:31.510688Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-10T11:35:31.510732Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-10T11:35:31.510791Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-10T11:35:31.510882Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-10T11:35:31.510919Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-10T11:35:31.510968Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-10T11:35:31.511008Z node 3 
:SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-10T11:35:31.511056Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-10T11:35:31.511084Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-04-10T11:35:31.512071Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-04-10T11:35:31.512117Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-10T11:35:31.512191Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-10T11:35:31.512250Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-10T11:35:31.512292Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 20, result count# 0 2025-04-10T11:35:31.512352Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 21, result count# 0 2025-04-10T11:35:31.513487Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-10T11:35:31.000000Z 2025-04-10T11:35:31.513705Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:35:31.518130Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7491646734497358880:2075], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2025-04-10T11:35:31.518223Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7491646734497358880:2075], database# /Root/Database1, no sysview processor 2025-04-10T11:35:31.521961Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-10T11:35:31.525632Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2025-04-10T11:35:31.541945Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-10T11:35:31.542016Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-10T11:35:31.567232Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-04-10T11:35:31.567291Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-04-10T11:35:31.000000Z, query count# 0 2025-04-10T11:35:31.567310Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-04-10T11:35:31.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567325Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-04-10T11:35:31.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567345Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-04-10T11:35:31.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567356Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-04-10T11:35:31.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567392Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567412Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 
2025-04-10T11:35:31.567432Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.567471Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-10T12:00:00.000000Z, query count# 0, persisted# 0 2025-04-10T11:35:31.571088Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-04-10T11:35:31.592098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:35:31.605375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T11:35:31.612660Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491646735506922098:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:35:31.612719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-10T11:35:31.664347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:35:31.664413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:35:31.674924Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T11:35:31.682475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T11:35:31.766564Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2025-04-10T11:35:31.766617Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2025-04-10T11:35:31.847855Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-10T11:35:31.847930Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7491646735506922233:2206], path ... 
e 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794911:2391], row count: 1, finished: 0
2025-04-10T11:44:43.099330Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.100603Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-10T11:44:43.100756Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794911:2391], row count: 5, finished: 0
2025-04-10T11:44:43.101216Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.104466Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-04-10T11:44:43.104523Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794911:2391], row count: 1, finished: 0
2025-04-10T11:44:43.106292Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.106831Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-10T11:44:43.106894Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794911:2391], row count: 1, finished: 0
2025-04-10T11:44:43.107102Z node 24 :SYSTEM_VIEWS INFO: Scan finished, actor: [24:7491649107334794911:2391], owner: [24:7491649107334794908:2389], scan id: 0, table id: [72057594046644480:1:0:auth_effective_permissions]
2025-04-10T11:44:43.108325Z node 24 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [24:7491649051500218141:2144], database# , query hash# 11342553055430868283, cpu time# 150540
2025-04-10T11:44:43.108857Z node 24 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285483076, txId: 281474976715676] shutting down
2025-04-10T11:44:43.186938Z node 26 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-04-10T11:44:43.186799Z
2025-04-10T11:44:43.228181Z node 24 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jrfr8g3n5p19djz1kwn7t5b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=24&id=MmM5M2Y3YjItYzJkYjBjYTItM2MxYTY0MmUtZGM2MGZmOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T11:44:43.230458Z node 24 :SYSTEM_VIEWS INFO: Scan started, actor: [24:7491649107334794960:2400], owner: [24:7491649107334794956:2398], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions]
2025-04-10T11:44:43.231151Z node 24 :SYSTEM_VIEWS INFO: Scan prepared, actor: [24:7491649107334794960:2400], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2
2025-04-10T11:44:43.231188Z node 24 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1
2025-04-10T11:44:43.231275Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.231629Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] }
2025-04-10T11:44:43.231749Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794960:2400], row count: 1, finished: 0
2025-04-10T11:44:43.231901Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.232169Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-04-10T11:44:43.232234Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794960:2400], row count: 2, finished: 0
2025-04-10T11:44:43.232822Z node 24 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-10T11:44:43.233271Z node 24 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-10T11:44:43.233325Z node 24 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [24:7491649107334794960:2400], row count: 1, finished: 0
2025-04-10T11:44:43.234143Z node 24 :SYSTEM_VIEWS INFO: Scan finished, actor: [24:7491649107334794960:2400], owner: [24:7491649107334794956:2398], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions]
2025-04-10T11:44:43.235729Z node 24 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [24:7491649051500218141:2144], database# , query hash# 17325808444334437222, cpu time# 98571
2025-04-10T11:44:43.236318Z node 24 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744285483226, txId: 281474976715678] shutting down
2025-04-10T11:44:43.246223Z node 27 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:44:43.248103Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 25
2025-04-10T11:44:43.248369Z node 26 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:44:43.249184Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:44:43.250809Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 26
2025-04-10T11:44:43.251691Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:44:43.251988Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28
2025-04-10T11:44:43.252257Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:44:43.252402Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27
2025-04-10T11:44:43.253166Z node 24 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-10T11:44:43.257059Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[26:7491649059841180816:2104], Type=268959746
2025-04-10T11:44:43.257109Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[26:7491649059841180816:2104], Type=268959746
2025-04-10T11:44:43.257133Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[26:7491649059841180816:2104], Type=268959746
2025-04-10T11:44:43.257160Z node 24 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[26:7491649059841180816:2104], Type=268959746
>> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD]
>> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD]
>> test_public_api.py::TestDriverCanRecover::test_driver_recovery
>> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[28]
|99.2%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD]
|99.2%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142]
2025-04-10T11:36:45.923154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-10T11:36:45.923323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:36:45.923387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-10T11:36:45.923452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-10T11:36:45.923514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-10T11:36:45.923559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-10T11:36:45.923669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-10T11:36:45.923788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-10T11:36:45.924224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-10T11:36:46.047898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-04-10T11:36:46.048010Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062]
2025-04-10T11:36:46.071343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-10T11:36:46.071737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-10T11:36:46.071980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-10T11:36:46.082325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-10T11:36:46.082782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-10T11:36:46.083636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-10T11:36:46.089498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-10T11:36:46.094838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:36:46.096481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:36:46.096551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:36:46.096736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-10T11:36:46.096786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:36:46.096831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-10T11:36:46.097002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212]
Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212]
2025-04-10T11:36:46.105284Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062]
2025-04-10T11:36:46.264591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-10T11:36:46.264830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.265081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-10T11:36:46.265341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-10T11:36:46.265393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.268107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-10T11:36:46.268235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-10T11:36:46.268395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.268442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-10T11:36:46.268488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-10T11:36:46.268527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-10T11:36:46.270355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.270403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-10T11:36:46.270433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-10T11:36:46.272197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.272259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.272312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:36:46.272383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-10T11:36:46.276363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-10T11:36:46.280588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-10T11:36:46.280806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-10T11:36:46.281971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-10T11:36:46.282125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:36:46.282203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:36:46.282544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-10T11:36:46.282618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-10T11:36:46.282813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-10T11:36:46.282900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-10T11:36:46.286737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-10T11:36:46.286788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-10T11:36:46.286981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-10T11:36:46.287038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-10T11:36:46.287300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-10T11:36:46.287369Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-10T11:36:46.287485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:36:46.287528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-10T11:36:46.287571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-10T11:36:46.287623Z no ... CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:44:55.844840Z node 173 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-10T11:44:55.845174Z node 173 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 395us result status StatusSuccess
2025-04-10T11:44:55.846182Z node 173 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-10T11:44:55.857801Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][173:1041:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0
2025-04-10T11:44:55.857931Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][173:1042:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0
2025-04-10T11:44:55.858019Z node 173 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][173:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 }
2025-04-10T11:44:55.858105Z node 173 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][173:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
2025-04-10T11:44:55.858261Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][173:1041:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285495825470 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744285495825470 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-04-10T11:44:55.858463Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][173:1042:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1744285495825470 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-04-10T11:44:55.863387Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][173:1041:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2
2025-04-10T11:44:55.863548Z node 173 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][173:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 }
2025-04-10T11:44:55.863785Z node 173 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][173:1042:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3
2025-04-10T11:44:55.864148Z node 173 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][173:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
|99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[29]
|99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test
>> PublishKafkaEndpoints::DiscoveryResponsesWithError [GOOD]
>> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort
test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T11:36:41.731339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T11:36:41.731433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:41.731470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T11:36:41.731507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T11:36:41.731551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T11:36:41.731582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T11:36:41.731666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T11:36:41.731741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T11:36:41.732111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T11:36:41.815851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T11:36:41.815909Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T11:36:41.829794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T11:36:41.830077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T11:36:41.830243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T11:36:41.837210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T11:36:41.837558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T11:36:41.838126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:41.839007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T11:36:41.841815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:41.843006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:41.843049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:41.843161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T11:36:41.843211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:41.843241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T11:36:41.843350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T11:36:41.849397Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T11:36:41.965958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T11:36:41.966206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.966403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T11:36:41.966637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T11:36:41.966675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.969133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:41.969264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T11:36:41.969443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.969487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T11:36:41.969536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T11:36:41.969570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T11:36:41.971471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.971515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T11:36:41.971542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T11:36:41.973174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.973222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.973262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:41.973319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T11:36:41.976266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T11:36:41.978059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T11:36:41.978277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T11:36:41.979287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T11:36:41.979423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:36:41.979468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:41.979716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T11:36:41.979790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T11:36:41.979987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T11:36:41.980070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T11:36:41.982338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T11:36:41.982385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T11:36:41.982539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T11:36:41.982583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T11:36:41.982796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T11:36:41.982848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T11:36:41.982933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:41.982963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T11:36:41.983002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T11:36:41.983032Z no ... 
" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:45:28.608948Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T11:45:28.609289Z node 164 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 376us result status StatusSuccess 2025-04-10T11:45:28.610325Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T11:45:28.621751Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T11:45:28.621904Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1150:2964] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-10T11:45:28.622085Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744285528563627 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744285528563627 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1744285528563627 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-10T11:45:28.625543Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1181:2964] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-10T11:45:28.625692Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1150:2964] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
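The exchange above shows the async-index change sender delivering records with increasing Order values (1, 3, 5) and the target shard acknowledging each one, with LastRecordOrder advancing monotonically. A minimal Python sketch of that acknowledgment bookkeeping follows; the names (ChangeSender, pending, handle_status) are hypothetical and only model the protocol visible in these log lines, not YDB's actual implementation:

# Illustrative only: models the ordered delivery/ack pattern seen in the
# CHANGE_EXCHANGE lines above (TEvRecords -> TEvStatus/RecordStatuses).
class ChangeSender:
    def __init__(self):
        self.pending = {}            # order -> record body, awaiting ack
        self.last_acked_order = 0    # mirrors LastRecordOrder in the log

    def send(self, records):
        # Records go out in increasing order; after a reconnect the same
        # records may be re-sent, so acks are keyed by order for dedup.
        for order, body in records:
            self.pending[order] = body

    def handle_status(self, record_statuses, last_record_order):
        # Each acked order leaves the in-flight set; the high-water mark
        # only moves forward, which keeps re-delivery idempotent.
        for order, status in record_statuses:
            if status == "STATUS_OK":
                self.pending.pop(order, None)
        self.last_acked_order = max(self.last_acked_order, last_record_order)

sender = ChangeSender()
sender.send([(1, b"..."), (3, b"..."), (5, b"...")])
sender.handle_status([(1, "STATUS_OK"), (3, "STATUS_OK"), (5, "STATUS_OK")], 5)
assert not sender.pending and sender.last_acked_order == 5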
>> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] |99.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} >> PublishKafkaEndpoints::MetadataActorDoubleTopic [GOOD] >> Serialization::RequestHeader [GOOD] >> Serialization::ResponseHeader [GOOD] >> Serialization::ApiVersionsRequest [GOOD] >> Serialization::ApiVersionsResponse [GOOD] >> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD] >> Serialization::ProduceRequest [GOOD] >> Serialization::UnsignedVarint32 [GOOD] >> Serialization::UnsignedVarint64 [GOOD] >> Serialization::Varint32 [GOOD] >> Serialization::Varint64 [GOOD] >> Serialization::UnsignedVarint32_Wrong [GOOD] >> Serialization::UnsignedVarint64_Wrong [GOOD] >> Serialization::UnsignedVarint32_Deserialize [GOOD] >> Serialization::TKafkaInt8_NotPresentVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::Struct_IsDefault [GOOD] >> Serialization::TKafkaString_IsDefault [GOOD] >> Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaArray_IsDefault [GOOD] >> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaBytes_IsDefault [GOOD] >> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TRequestHeaderData_reference [GOOD] >> Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::RequestHeader_reference [GOOD] >> Serialization::ProduceRequestData [GOOD] >> Serialization::ProduceRequestData_Record_v0 [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD] Test command err: 2025-04-10T11:37:18.231359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491647195800075781:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:18.231439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/tmpzOLynF/pdisk_1.dat 2025-04-10T11:37:18.634848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:18.663820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:18.664204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:18.666071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21194, node 1 2025-04-10T11:37:18.829138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandex3QP3HG.tmp 2025-04-10T11:37:18.829170Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandex3QP3HG.tmp 2025-04-10T11:37:18.830659Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandex3QP3HG.tmp 2025-04-10T11:37:18.830821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:37:19.040412Z INFO: TTestServer started on Port 8225 GrpcPort 21194 TClient is connected to server localhost:8225 PQClient connected to localhost:21194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:19.347423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T11:37:19.387162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T11:37:21.146464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647208684978503:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:21.146597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491647208684978494:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:21.147491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T11:37:21.155600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T11:37:21.165665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491647208684978509:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T11:37:21.238409Z node 1 :TX_PROXY ERROR: Actor# [1:7491647208684978573:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T11:37:21.603707Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491647208684978588:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:37:21.618718Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FiOWUzOWQtYzRjNzY4YzktZTFkN2NmYmQtOTk2OWY3YjM=, ActorId: [1:7491647208684978492:2341], ActorState: ExecuteState, TraceId: 01jrfqv0fbf8b5epvq4ctctbh1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:37:21.638234Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:37:21.682030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T11:37:21.713326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T11:37:21.803360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:37:22.281837Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfqv1648rxcd5rp2makw9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q1MDM0Y2MtNjcwNDI3ZGQtMjYzYWExYmItOGM0MjY3MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491647212979946185:2640] 2025-04-10T11:37:23.231420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491647195800075781:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:23.231545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Run with port = 21194, kafka port = 22991 2025-04-10T11:37:29.259521Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491647241795125398:2182];send_to=[0:7307199536658146131:7762515]; 2025-04-10T11:37:29.285865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/tmpHUTq93/pdisk_1.dat 2025-04-10T11:37:29.412846Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T11:37:29.434137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T11:37:29.434237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T11:37:29.435716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12383, node 2 2025-04-10T11:37:29.557354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandexg61v68.tmp 2025-04-10T11:37:29.557389Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandexg61v68.tmp 2025-04-10T11:37:29.557541Z node 2 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2gt4/00076a/r3tmp/yandexg61v68.tmp 2025-04-10T11:37:29.557695Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T11:37:29.603724Z INFO: TTestServer started on Port 23407 GrpcPort 12383 TClient is connected to server localhost:23407 PQClient connected to localhost:12383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T11:37:29.790591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:29.799197Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-10T11:37:29.820471Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-10T11:37:29.825322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... 
on failed, self: [40:7491649359833336612:2383], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T11:45:42.373695Z node 40 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=40&id=NmEyYThkMzItMWRiN2Q2MDgtYzQ1ZDU5OGItNzRmYzNlMGI=, ActorId: [40:7491649359833336436:2364], ActorState: ExecuteState, TraceId: 01jrfra9qa0ry78ke8a8pdjsjg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T11:45:42.374727Z node 40 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T11:45:42.564577Z node 40 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T11:45:43.087067Z node 40 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfraaa23ez1m3jhzzc4crnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=40&id=ODI2ZmY0ZGYtOGJhZGUxMjAtNjUzNWZhNzEtNTY4NTViYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [40:7491649364128304158:2675] 2025-04-10T11:45:45.127197Z node 40 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:45:45.127255Z node 40 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:18195 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18195 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976715675 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-04-10T11:45:48.784309Z node 40 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfrafx769mtr2k5qs2ma031, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=40&id=NmU5Zjg2ZDItNjY5ZjBiYWEtODIyZDhmOGEtZjdjNWVjODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715675 CreateStep: 1744285548474 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715675 CreateStep: 1744285548474 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic1" name rt3.dc1--topic1 version1 CallPersQueueGRPC request to localhost:18195 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18195 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-10T11:45:49.374176Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-10T11:45:49.374969Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-10T11:45:49.375075Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:18195 2025-04-10T11:45:49.386969Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "topic1" message_group_id: "src" } 2025-04-10T11:45:50.150850Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1744285550150 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-10T11:45:50.151005Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-10T11:45:50.151290Z :INFO: [] MessageGroupId [src] SessionId [src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0] Write session: close. Timeout = 0 ms 2025-04-10T11:45:50.151436Z :INFO: [] MessageGroupId [src] SessionId [src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0] Write session will now close 2025-04-10T11:45:50.151549Z :DEBUG: [] MessageGroupId [src] SessionId [src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0] Write session: aborting 2025-04-10T11:45:50.152543Z :INFO: [] MessageGroupId [src] SessionId [src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0] Write session: gracefully shut down, all writes complete 2025-04-10T11:45:50.152696Z :DEBUG: [] MessageGroupId [src] SessionId [src|53ac547-d9fcc945-3e1f430c-1b8fea9e_0] Write session: destroy Broker 40 - [::]:19749 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.2%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest
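The "Check value=..." lines above are the round-trip boundary values from the Serialization::UnsignedVarint*/Varint* checks in the kafka_proxy unit tests. As a reference for what those checks exercise, here is a small Python sketch of the standard Kafka/protobuf varint and zigzag encodings; it illustrates the wire format only and is not YDB's actual code:

def encode_unsigned_varint(value: int) -> bytes:
    """Little-endian base-128: 7 payload bits per byte, MSB = continuation."""
    out = bytearray()
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)

def decode_unsigned_varint(buf: bytes) -> int:
    result = shift = 0
    for byte in buf:
        result |= (byte & 0x7F) << shift
        if not byte & 0x80:
            return result
        shift += 7
    raise ValueError("truncated varint")  # the *_Wrong checks hit this path

def encode_varint64(value: int) -> bytes:
    """Signed values are zigzag-mapped first so small negatives stay short."""
    zigzag = ((value << 1) ^ (value >> 63)) & 0xFFFFFFFFFFFFFFFF
    return encode_unsigned_varint(zigzag)

# A few of the boundary values from the log: 127 is the last one-byte value,
# 128 needs two bytes, and -1 zigzags to 1.
assert encode_unsigned_varint(127) == b"\x7f"
assert encode_unsigned_varint(128) == b"\x80\x01"
assert encode_varint64(-1) == b"\x01"
assert decode_unsigned_varint(encode_unsigned_varint(2147483647)) == 2147483647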
>> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |99.3%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> 
test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_self_heal.py::TestEnableSelfHeal::test_replication |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestRecursiveCreation::test_mkdir >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> 
test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> 
test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_result_limits.py::TestResultLimits::test_large_row >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDocApiTables::test_create_table |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> 
test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-04-10 11:47:31,261 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:47:31,554 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1819280 1.0G 986M 937M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/2gt4/000744/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1898202 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in 
from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/2gt4/000744/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/000744/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/000744', '--source-root', '/home/runner/.ya/build/build_root/2gt4/000744/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 
'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/000744/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/000744', '--source-root', '/home/runner/.ya/build/build_root/2gt4/000744/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/000744/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.6%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
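The traceback above ends in ydb/tests/olap/common/thread_helper.py: join_all blocks on Thread.join with no effective deadline, so a stuck worker pins the run until the ya wrapper kills it at the 600-second limit. A minimal sketch of that helper pattern follows; the TestThread/join_all names follow the traceback, but the finite-deadline handling is an assumption added for illustration, not the repository's actual code:

import threading

class TestThread(threading.Thread):
    """Background worker for a scenario test, as in the traceback above."""
    def __init__(self, target, args=()):
        super().__init__(target=target, args=args, daemon=True)

def join_all(threads, timeout=None):
    # Hypothetical variant: join with a finite timeout and check is_alive()
    # afterwards, so a hung worker surfaces as a test failure instead of a
    # wrapper-level 600-second timeout like the one logged here.
    for thread in threads:
        thread.join(timeout=timeout)
        if timeout is not None and thread.is_alive():
            raise TimeoutError(f"worker {thread.name} did not finish in {timeout}s")

workers = [TestThread(target=lambda: None) for _ in range(2)]
for w in workers:
    w.start()
join_all(workers, timeout=30)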
|99.6%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-04-10 11:47:34,504 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:47:35,212 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1820643 826M 772M 751M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/2gt4/00073a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 1823886 5.3G 5.2G 4.8G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/00073a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_o Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 102, in test_all_supported_compression tasks.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007f0ed8148640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 208 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 462 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 476 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f0f084aea00 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 487 in stop File "ydb/tests/olap/column_family/compression/base.py", line 22 in teardown_class File "contrib/python/pytest/py3/_pytest/python.py", line 777 in _call_with_optional_argument File "contrib/python/pytest/py3/_pytest/python.py", line 860 in xunit_setup_class_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in 
_hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/00073a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/00073a/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/00073a', '--source-root', '/home/runner/.ya/build/build_root/2gt4/00073a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/00073a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/00073a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/00073a/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/00073a', '--source-root', '/home/runner/.ya/build/build_root/2gt4/00073a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/00073a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', 
'--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
2025-04-10 11:48:05,765 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps
2025-04-10 11:48:05,766 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores
|99.6%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test
|99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD]
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true]
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false]
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD]
|99.7%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
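The compression-suite timeout above hangs in teardown, not in the compression logic itself: the stacks show the test's worker threads (ydb/tests/olap/common/thread_helper.py, start_and_wait_all -> join_all -> Thread.join) parked while kikimr_runner waits for a ydbd node to stop, so only the outer ya wrapper ends the run at the 600 s limit. A minimal sketch of that fan-out/join pattern, assuming the helper matches the names in the stack (the class shape below is illustrative, not the real module):

import threading
import time

class TestThread(threading.Thread):
    # thin wrapper so each worker can be joined with an optional timeout,
    # mirroring the join(timeout=timeout) call visible in the stack
    def join(self, timeout=None):
        super().join(timeout)

class TestThreads:
    def __init__(self):
        self.threads = []

    def add(self, target):
        self.threads.append(TestThread(target=target, daemon=True))

    def start_and_wait_all(self):
        for t in self.threads:
            t.start()
        self.join_all()

    def join_all(self, timeout=None):
        # if any worker is stuck (e.g. waiting for a ydbd node to stop),
        # this join blocks indefinitely and only the external 600 s wrapper
        # timeout terminates the process tree -- the failure mode reported above
        for t in self.threads:
            t.join(timeout=timeout)

def worker():
    time.sleep(1)  # stand-in for real work such as altering compression settings

tasks = TestThreads()
tasks.add(worker)
tasks.start_and_wait_all()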
|99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false]
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD]
|99.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log}
|99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log}
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_result_limits.py::TestResultLimits::test_large_row [GOOD]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true]
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true]
>> test_workload.py::TestYdbKvWorkload::test[row]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true]
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0]
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_encryption.py::TestEncryption::test_simple_encryption
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >>
test_result_limits.py::TestResultLimits::test_quotas[kikimr0] 2025-04-10 11:48:55,446 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 11:48:55,773 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1853025 786M 770M 34.9M ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/2gt4/0009d4/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 2028473 720M 15.1M 33.1M ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/2gt4/0009d4/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-m 2028476 498M 519M 186M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_resu Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File 
"contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/fq/mem_alloc/test_result_limits.py", line 102, in test_quotas huge_query_id = huge_client.create_query( File "ydb/tests/tools/fq_runner/fq_client.py", line 185, in create_query return self.create_query_impl(name=name, File "library/python/retry/__init__.py", line 214, in wrapped return _retry(obj.retry_conf, functools.partial(f, obj, *f_args, **f_kwargs)) File "library/python/retry/__init__.py", line 239, in _retry conf.sleep(delay) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...d '['/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/0009d4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/0009d4', '--source-root', '/home/runner/.ya/build/build_root/2gt4/0009d4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 
'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...d '['/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/0009d4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/0009d4', '--source-root', '/home/runner/.ya/build/build_root/2gt4/0009d4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/0009d4/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.8%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD]
>> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD]
>> test_workload.py::TestYdbWorkload::test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export
2025-04-10 11:49:14,911 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 11:49:17,358 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination: pid rss ref pdirt 1863815 601M 552M 520M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/2gt4/000ca1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul 1868304 12.0G 11.9G 11.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tp 1874398 386M 331M 353M ├─ moto_server s3 --port 23229 1891004 2.7G 2.6G 2.6G └─ ydb -e grpc://localhost:25103 -d /Root workload tpch import generator --scale 1 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return 
self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 94, in test_import_and_export self.ydb_client.run_cli_comand(["workload", "tpch", "import", "generator", "--scale", "1"]) File "ydb/tests/olap/common/ydb_client.py", line 37, in run_cli_comand process = yatest.common.process.execute(cmd, check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007fd462cd2640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_common.py", line 112 in _wait_once File "contrib/python/grpcio/py3/grpc/_common.py", line 150 in wait File "contrib/python/grpcio/py3/grpc/_channel.py", line 872 in _next File "contrib/python/grpcio/py3/grpc/_channel.py", line 475 in __next__ File "contrib/python/ydb/py3/ydb/_utilities.py", line 164 in _next File "contrib/python/ydb/py3/ydb/_utilities.py", line 173 in __next__ File "contrib/python/ydb/py3/ydb/query/session.py", line 257 in _check_session_status_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fd465312640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1392 in channel_spin File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fd466632640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fd496bf8a00 (most recent call first): File "contrib/tools/python3/Lib/subprocess.py", line 2019 in _try_wait File "contrib/tools/python3/Lib/subprocess.py", line 2061 in _wait File "contrib/tools/python3/Lib/subprocess.py", line 1266 in wait File 
"library/python/testing/yatest_common/yatest/common/process.py", line 370 in _wait File "library/python/testing/yatest_common/yatest/common/process.py", line 400 in wait File "library/python/testing/yatest_common/yatest/common/process.py", line 656 in execute File "ydb/tests/olap/common/ydb_client.py", line 37 in run_cli_comand File "ydb/tests/olap/s3_import/test_tpch_import.py", line 94 in test_import_and_export File "library/python/pytest/plugins/ya.py", line 563 in pytest_pyfunc_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/python.py", line 1805 in runtest File "contrib/python/pytest/py3/_pytest/runner.py", line 169 in pytest_runtest_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/runner.py", line 262 in File "contrib/python/pytest/py3/_pytest/runner.py", line 341 in from_call File "contrib/python/pytest/py3/_pytest/runner.py", line 261 in call_runtest_hook File "contrib/python/pytest/py3/_pytest/runner.py", line 222 in call_and_report File "contrib/python/pytest/py3/_pytest/runner.py", line 133 in runtestprotocol File "contrib/python/pytest/py3/_pytest/runner.py", line 114 in pytest_runtest_protocol File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 350 in pytest_runtestloop File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 325 in _main File "contrib/python/pytest/py3/_pytest/main.py", line 271 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...unner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/000ca1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/000ca1', '--source-root', '/home/runner/.ya/build/build_root/2gt4/000ca1/environment/arcadia', '--output-dir', 
'/home/runner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...unner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/2gt4/000ca1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/000ca1', '--source-root', '/home/runner/.ya/build/build_root/2gt4/000ca1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/000ca1/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-04-10 11:49:47,973 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-10 11:49:47,973 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores 2025-04-10 11:49:48,757 WARNING libarchive: File (test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/logfile_62xwticb.log) size has changed. Can't write more data than was declared in the tar header (394302748). 
(probably file was changed during archiving)
|99.8%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD]
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> ConsistentIndexRead::InteractiveTx
>> test_workload.py::TestYdbWorkload::test[row]
>> Backup::UuidValue
>> Backup::UuidValue [GOOD]
>> KqpQueryService::ReplyPartLimitProxyNode
>> KqpQueryService::ReplyPartLimitProxyNode [GOOD]
>> NodeIdDescribe::HasDistribution
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD]
Test command err:
Found S3 object: "ProducerUuidValueBackup/data_00.csv"
Found S3 object: "ProducerUuidValueBackup/metadata.json"
Found S3 object: "ProducerUuidValueBackup/scheme.pb"
|99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest
>> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[column]
>> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD]
>> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD]
>> KqpQuerySession::NoLocalAttach
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD]
>> test_workload.py::TestYdbWorkload::test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD]
>> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[row]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test
>> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test
>> test_workload.py::TestYdbWorkload::test
|99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test
>> KqpQuerySession::NoLocalAttach [GOOD]
>> Transfer::CreateTransfer
>> Transfer::CreateTransfer [GOOD]
>> Replication::Types
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log}
>> S3PathStyleBackup::DisableVirtualAddressing
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest
>> KqpQuerySession::NoLocalAttach [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest
>> Replication::Types [GOOD]
>> Replication::PauseAndResumeReplication
>> S3PathStyleBackup::DisableVirtualAddressing [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest
>> S3PathStyleBackup::DisableVirtualAddressing [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest
>> Replication::PauseAndResumeReplication [GOOD]
>> NodeIdDescribe::HasDistribution [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::PauseAndResumeReplication [GOOD]
Test command err:
DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; CREATE TRANSFER `Transfer_9357046218939285660` FROM `SourceTopic` TO `TargetTable` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost' );
>>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer is only available in the Enterprise version }
>>>>> EXPECTED: The transfer is only available in the Enterprise version
DDL: CREATE TABLE `SourceTable_12082893114522020824` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) );
>>>>> Query: UPSERT INTO `SourceTable_12082893114522020824` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) );
DDL: CREATE ASYNC REPLICATION `Replication_12082893114522020824` FOR `SourceTable_12082893114522020824` AS `Table_12082893114522020824` WITH ( CONNECTION_STRING = 'grpc://localhost:13068/?database=local' );
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_12082893114522020824` ORDER BY `Key2`, `v01`, `v02`, `v03`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_12082893114522020824]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=19 count=-1
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_12082893114522020824` ORDER BY `Key2`, `v01`, `v02`, `v03`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_12082893114522020824]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=18 count=-1
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_12082893114522020824` ORDER BY `Key2`, `v01`, `v02`, `v03`
Attempt=17 count=1
DDL: DROP ASYNC REPLICATION `Replication_12082893114522020824`;
DDL: DROP TABLE `SourceTable_12082893114522020824`
DDL: CREATE TABLE `SourceTable_16478028602344155530` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) );
DDL: CREATE ASYNC REPLICATION `Replication_16478028602344155530` FOR `SourceTable_16478028602344155530` AS `Table_16478028602344155530` WITH ( CONNECTION_STRING = 'grpc://localhost:13068/?database=local' );
>>>>> Query: INSERT INTO `SourceTable_16478028602344155530` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: SELECT `Message` FROM `Table_16478028602344155530` ORDER BY `Message`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_16478028602344155530]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=19 count=-1
>>>>> Query: SELECT `Message` FROM `Table_16478028602344155530` ORDER BY `Message`
Attempt=18 count=1
State: Paused
DDL: ALTER ASYNC REPLICATION `Replication_16478028602344155530` SET ( STATE = "Paused" );
>>>>> Query: INSERT INTO `SourceTable_16478028602344155530` (`Key`, `Message`) VALUES (2, 'Message-2');
>>>>> Query: SELECT `Message` FROM `Table_16478028602344155530` ORDER BY `Message`
Attempt=19 count=1
State: StandBy
DDL: ALTER ASYNC REPLICATION `Replication_16478028602344155530` SET ( STATE = "StandBy" );
>>>>> Query: SELECT `Message` FROM `Table_16478028602344155530` ORDER BY `Message`
Attempt=19 count=1
>>>>> Query: SELECT `Message` FROM `Table_16478028602344155530` ORDER BY `Message`
Attempt=18 count=2
DDL: ALTER ASYNC REPLICATION `Replication_16478028602344155530` SET ( STATE = "Paused" );
DDL: ALTER ASYNC REPLICATION `Replication_16478028602344155530` SET ( STATE = "StandBy" );
DDL: DROP ASYNC REPLICATION `Replication_16478028602344155530`;
DDL: DROP TABLE `SourceTable_16478028602344155530`
|99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest
>> NodeIdDescribe::HasDistribution [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[1]
2025-04-10 11:51:44,548 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 11:51:46,397 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination: pid rss ref pdirt 1931687 548M 538M 468M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/2gt4/001ecc/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 1932502 9.5G 9.3G 9.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 1934308 4.0G 3.9G 3.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 2035740 159M 160M 96.7M └─ ydb -e grpc://localhost:23338 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/load/lib/tpch.py", line 48, in test_tpch self.run_workload_test(self._get_path(), query_num) File "ydb/tests/olap/load/lib/conftest.py", line 286, in run_workload_test result = YdbCliHelper.workload_run( File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run ).process() File "ydb/tests/olap/lib/ydb_cli.py", line 283, in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007fcad266d640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fcb03a8f940 (most recent call first): File "contrib/tools/python3/Lib/subprocess.py", line 2019 in _try_wait File "contrib/tools/python3/Lib/subprocess.py", line 2061 in _wait File "contrib/tools/python3/Lib/subprocess.py", line 1266 in wait File "library/python/testing/yatest_common/yatest/common/process.py", line 370 in _wait File "library/python/testing/yatest_common/yatest/common/process.py", line 400 in wait File "library/python/testing/yatest_common/yatest/common/process.py", line 656 in execute File "ydb/tests/olap/lib/ydb_cli.py", line 283 in process File "ydb/tests/olap/lib/ydb_cli.py", line 310 in workload_run File "ydb/tests/olap/load/lib/conftest.py", line 286 in run_workload_test File "ydb/tests/olap/load/lib/tpch.py", line 48 in test_tpch File "library/python/pytest/plugins/ya.py", line 563 in pytest_pyfunc_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/python.py", line 1805 in runtest 
File "contrib/python/pytest/py3/_pytest/runner.py", line 169 in pytest_runtest_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/runner.py", line 262 in File "contrib/python/pytest/py3/_pytest/runner.py", line 341 in from_call File "contrib/python/pytest/py3/_pytest/runner.py", line 261 in call_runtest_hook File "contrib/python/pytest/py3/_pytest/runner.py", line 222 in call_and_report File "contrib/python/pytest/py3/_pytest/runner.py", line 133 in runtestprotocol File "contrib/python/pytest/py3/_pytest/runner.py", line 114 in pytest_runtest_protocol File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 350 in pytest_runtestloop File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 325 in _main File "contrib/python/pytest/py3/_pytest/main.py", line 271 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: .../build/build_root/2gt4/001ecc/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/001ecc', '--source-root', '/home/runner/.ya/build/build_root/2gt4/001ecc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 
'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: ((".../build/build_root/2gt4/001ecc/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2gt4/001ecc', '--source-root', '/home/runner/.ya/build/build_root/2gt4/001ecc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2gt4/001ecc/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {}) 2025-04-10 11:52:16,977 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-04-10 11:52:16,977 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores 2025-04-10 11:52:18,761 WARNING libarchive: File (test_tpch.py.TestTpchS1.test_tpch.1/cluster/slot_1/logfile_6aochmqb.log) size has changed. Can't write more data than was declared in the tar header (311256769). (probably file was changed during archiving) |99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... 
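The call chain above shows the harness pattern behind this timeout: execute() starts the child, wait() blocks with a deadline, and on expiry the low-level TimeoutError is converted by run_test into ExecutionTimeoutError. A minimal sketch of the same pattern using only the standard library (names like run_cmd and the 600-second budget are illustrative stand-ins, not the yatest internals):

    # Sketch of the execute/wait-with-timeout pattern visible in the traceback.
    import subprocess

    class ExecutionTimeoutError(Exception):
        pass

    def run_with_budget(run_cmd, budget=600):
        proc = subprocess.Popen(run_cmd)
        try:
            proc.wait(timeout=budget)   # like res.wait(..., timeout=run_timeout)
        except subprocess.TimeoutExpired as e:
            proc.kill()                 # the real harness dumps thread stacks first
            raise ExecutionTimeoutError(
                f"{run_cmd!r} stopped by {budget} seconds timeout") from e
        return proc.returncode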
>> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD]
>> test_workload.py::TestYdbWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbLogWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column]
>> ConsistentIndexRead::InteractiveTx [GOOD]
>> KqpExtTest::SecondaryIndexSelectUsingScripting
>> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest
>> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test
>> test_workload.py::TestYdbWorkload::test[column] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test
|99.9%| CLEANING BUILD ROOT

Number of suites skipped by size: 130

ydb/library/yaml_config/ut_transform [size:medium]
------ sole chunk ran 6 tests (total:21.50s - test:20.49s canon:0.77s)
[fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-release-asan] (1.86s)
Test results differ from canonical:
    test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1
    stdout:
    stderr:
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff
------ FAIL: 5 - GOOD, 1 - FAIL ydb/library/yaml_config/ut_transform
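This failure is a canonical-data mismatch: the comparison step shells out to a JSON diff helper, and the helper itself exited with rc = 1. As an illustration of what such a canonical comparison boils down to (this is not the simple_json_diff source; the structure is an assumption):

    # Illustrative only: normalize two JSON documents and compare them,
    # exiting 1 on mismatch, the way a canonical-data diff is consumed above.
    import json, sys

    def canonical(path):
        with open(path) as f:
            return json.dumps(json.load(f), sort_keys=True, indent=2)

    if __name__ == "__main__":
        a, b = sys.argv[1], sys.argv[2]
        if canonical(a) != canonical(b):
            sys.exit(1)   # mirrors the rc = 1 reported above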
ydb/tests/fq/mem_alloc [size:medium]
------ sole chunk ran 12 tests (total:625.23s - recipes:9.79s test:600.06s recipes:3.83s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
    test_result_limits.py::TestResultLimits::test_many_rows (good) duration: 238.38s
    test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] (good) duration: 86.99s
    test_result_limits.py::TestResultLimits::test_large_row (good) duration: 73.24s
    test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] (good) duration: 37.74s
    test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] (good) duration: 34.74s
    test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] (good) duration: 32.55s
    test_alloc_default.py::TestAlloc::test_up_down[kikimr0] (good) duration: 31.99s
    test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] (good) duration: 30.26s
    test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] (good) duration: 26.12s
    test_result_limits.py::TestResultLimits::test_quotas[kikimr0] (timeout) duration: 13.23s
    1 more tests with 0.00s total duration are not listed.
test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] test was not launched inside chunk.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.9G (9319296K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid      rss    ref    pdirt
1848950  44.9M  41.4M  6.4M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1849183  36.7M  24.6M  12.3M  ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1853025  786M   768M   473M   │  └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes
2008222  718M   15.2M  470M   │     ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
2008260  5.4G   5.4G   5.0G   │     └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/tes
1850640  2.2G   2.2G   1.7G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr
[timeout] test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [default-linux-x86_64-release-asan] (13.23s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_result_limits.py.TestResultLimits.test_quotas.kikimr0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
------ TIMEOUT: 9 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT, 1 - SKIPPED ydb/tests/fq/mem_alloc
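The budget arithmetic explains the verdict: the nine passing tests alone consumed almost the entire 600s chunk allowance, so test_quotas was killed shortly after starting and test_dc_locality was never launched. A quick check over the durations printed above:

    # Back-of-envelope check of the 600s chunk budget using the durations above.
    passed = [238.38, 86.99, 73.24, 37.74, 34.74, 32.55, 31.99, 30.26, 26.12]
    print(round(sum(passed), 2))   # ~592.01s for the nine passing tests alone
    # test_quotas was killed 13.23s in (592.01 + 13.23 > 600), and
    # test_dc_locality[kikimr0] never ran inside the chunk.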
ydb/tests/fq/streaming_optimize [size:medium] nchunks:4
------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:142.75s - recipes:1.35s test:139.60s recipes:0.67s)
[fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (20.17s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yf4fn0in/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6221d203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6221d19ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f62215a3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f62216a1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f62216a1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f62216a1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f62215a2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f622164f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6221547e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6221547e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6221547c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f622169ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f622169ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
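Every failure in this suite has the same shape: fqrun exits with code 100, and the verify_sanitize_errors() step in the call chain above turns the AddressSanitizer leak report on stderr into an ExecutionError even though the exit code was already checked with check_exit_code=False. A sketch of that post-exit check (illustrative; the real logic lives in yatest.common.process):

    # Illustrative sketch of the verify_sanitize_errors() step: after the
    # child exits, its stderr is scanned for a sanitizer summary and the run
    # is failed regardless of how the exit code itself was handled.
    class ExecutionError(Exception):
        pass

    def verify_sanitize_errors(stderr_text, command, exit_code):
        if "SUMMARY: AddressSanitizer" in stderr_text:
            raise ExecutionError(
                f"Command {command!r} has failed with code {exit_code}.")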
[fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (17.38s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0k8o791k/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6a23da03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6a23d99ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6a23623d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6a23721464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6a23721464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6a23721464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6a23622b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6a236cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6a235c7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6a235c7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6a235c7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6a2371ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6a2371ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (17.94s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tplyi893/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5306e603b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5306e59ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f53066e3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f53067e1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f53067e1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f53067e1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f53066e2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f530678f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5306687e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5306687e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5306687c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f53067dee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f53067dee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (23.89s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3l8lfbsa/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb03d1203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb03d119ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb03c9a3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb03caa1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb03caa1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb03caa1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb03c9a2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb03ca4f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb03c947e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb03c947e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb03c947c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb03ca9ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb03ca9ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (20.45s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sxc1k_ye/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7bc92103b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7bc9209ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7bc8a93d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7bc8b91464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7bc8b91464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7bc8b91464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7bc8a92b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7bc8b3f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7bc8a37e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7bc8a37e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7bc8a37c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7bc8b8ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7bc8b8ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (16.82s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5wqm7l5p/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa7834f03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa7834e9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa782d73d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa782e71464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa782e71464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa782e71464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa782d72b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa782e1f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa782d17e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa782d17e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa782d17c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa782e6ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa782e6ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (17.41s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_psm5xc4z/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc301bb03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc301ba9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc301433d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc301531464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc301531464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc301531464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc301432b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc3014df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc3013d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc3013d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc3013d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc30152ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc30152ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
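Every failure in this chunk reports the identical leak: 423744 bytes in 8243 allocations, all rooted in Python's import machinery (PyImport_ImportModuleLevelObject), which points at the embedded interpreter's startup rather than the individual queries under test. For reference, LeakSanitizer can be pointed at a suppressions file through LSAN_OPTIONS; a minimal sketch, where the suppression pattern and the ./fqrun path are illustrative assumptions, not vetted values:

    # Sketch: run a leaky binary under LSan with a suppression matching the
    # Python-import frames seen in the reports above. Pattern and binary path
    # are stand-ins for illustration only.
    import os, subprocess, tempfile

    supp = tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False)
    supp.write("leak:PyImport_ImportModuleLevelObject\n")
    supp.close()

    env = dict(os.environ, LSAN_OPTIONS=f"suppressions={supp.name}")
    subprocess.run(["./fqrun", "--action=explain"], env=env, check=False)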
------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:140.36s - recipes:0.89s test:137.70s recipes:0.69s)
[fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (20.12s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6_q_ppg_/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0fe28b03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0fe28a9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0fe2133d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0fe2231464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0fe2231464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0fe2231464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0fe2132b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0fe21df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0fe20d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0fe20d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0fe20d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0fe222ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0fe222ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (16.70s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pkb9rjox/topic_3.txt' has failed with code 100.
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe3325f03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe3325e9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe331e73d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe331f71464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe331f71464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe331f71464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe331e72b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe331f1f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe331e17e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe331e17e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe331e17c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe331f6ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe331f6ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (17.10s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1q_ndzcp/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5806d803b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5806d79ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5806603d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5806701464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5806701464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5806701464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5806602b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f58066af90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f58065a7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f58065a7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f58065a7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f58066fee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f58066fee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (23.75s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (23.75s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_kjtin6lr; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (21.29s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_rsa_bo29; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (17.19s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_sqnbk9jk; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (16.06s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_pljssyug; exit code 100; AddressSanitizer: 423457 byte(s) leaked in 8237 allocation(s)
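Every leak stack bottoms out in the embedded Python runtime importing sitecustomize inside fqrun, not in the query code under test. If this is judged to be interpreter noise rather than a YDB bug, LeakSanitizer can stay enabled with the known leak muted via a suppressions file. The helper below is a hypothetical sketch (the patterns come from frames #29 and #41 above), not existing repo code:

import os
import subprocess
import tempfile

# LSAN matches "leak:<pattern>" lines against function names in the stack.
SUPPRESSIONS = "\n".join([
    "leak:__Pyx_Import",                    # frame #29 in every report
    "leak:PyImport_ImportModuleLevelObject",  # frame #41 in every report
])

def run_with_lsan_suppressions(cmd):
    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
        f.write(SUPPRESSIONS + "\n")
        supp = f.name
    # print_suppressions=0 keeps the muted report out of stderr, which should
    # also keep yatest's sanitizer scan from tripping on it.
    env = dict(os.environ, LSAN_OPTIONS=f"suppressions={supp},print_suppressions=0")
    return subprocess.run(cmd, env=env)

Suppression is a stopgap; fixing or annotating the leak at its source in the runtime_py3 import path is still preferable.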
------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:137.39s - recipes:0.97s test:134.77s recipes:0.79s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (20.05s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_t_fs49wn; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (17.81s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_yjyq1bju; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (16.34s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_u9yw9qc7; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (19.75s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_f359e8mz; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (21.42s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_9_zl9qzc; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (16.57s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_7bar0hb2; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
[fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (16.82s)
    same traceback; fqrun work dir $OUT/yql/test_fqrun_6qjwo3gr; exit code 100; AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)
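Each entry above logs the complete fqrun command line, so a failure can be replayed outside yatest to see the untruncated report (the frames #0-#28 that the harness elided). A hypothetical repro helper; the command string is a placeholder to be pasted from the log, and the logged paths must exist locally:

import shlex
import subprocess

# Paste the single-quoted fqrun command from one of the failures above.
CMD = "<fqrun command line copied verbatim from the log>"

def reproduce(cmd_line: str) -> int:
    proc = subprocess.run(shlex.split(cmd_line), capture_output=True, text=True)
    print(proc.stderr)       # full AddressSanitizer report, frames #0..#41
    return proc.returncode   # 100 in the runs above

if __name__ == "__main__":
    reproduce(CMD)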
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb76c9c03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb76c9b9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb76c243d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb76c341464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb76c341464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb76c341464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb76c242b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb76c2ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb76c1e7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb76c1e7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb76c1e7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb76c33ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb76c33ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:139.34s - recipes:0.86s test:136.79s recipes:0.68s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (20.35s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8xwynm43/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa886fc03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa886fb9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa886843d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa886941464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa886941464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa886941464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa886842b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa8868ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa8867e7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa8867e7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa8867e7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa88693ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa88693ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (18.13s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: 
in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_aea_895f/topic_3.txt' has failed with code 100. 
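The failure mode is identical across this suite. As the traceback shows, ydb/tests/fq/tools/fqrun.py assembles the fqrun command line above and runs it through yatest.common.process.execute with check_exit_code=False, yet the run still fails: wait() ends in _finalise(), which calls verify_sanitize_errors(), and the binary's stderr carries a sanitizer report. A minimal sketch of that call path, assuming a simplified wrapper (run_explain and its trimmed flag list are illustrative, not the real fqrun.py code):

    import yatest.common

    def run_explain(fqrun_binary, query_path, res_dir):
        # One flat argv list, as fqrun.py builds it via cmd.strip().split().
        cmd = [
            fqrun_binary,
            "--exclude-linked-udfs",
            "--action=explain",
            f"--query={query_path}",
            "--result-format=full-proto",
            "--canonical-output",
        ]
        # check_exit_code=False only defers the exit-code assertion; wait()
        # still runs verify_sanitize_errors(), so an AddressSanitizer or
        # LeakSanitizer report on stderr raises ExecutionError anyway.
        return yatest.common.process.execute(cmd, check_exit_code=False, cwd=res_dir)

The stderr that triggered this particular ExecutionError follows.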
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7f58eb03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7f58ea9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7f58733d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7f58831464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7f58831464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7f58831464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7f58732b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7f587df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7f586d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7f586d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7f586d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7f5882ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7f5882ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (17.42s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_g5i5k9qd/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb28b0e03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb28b0d9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb28a963d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb28aa61464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb28aa61464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb28aa61464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb28a962b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb28aa0f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb28a907e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb28a907e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb28a907c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb28aa5ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb28aa5ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (21.70s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jzao5ts9/topic_3.txt' has failed with code 100. 
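Every failure in this suite reports the same leak signature, 423744 byte(s) in 8243 allocation(s) rooted in __Pyx_Import and PyImport_ImportModuleLevelObject during sitecustomize startup, which points to a single import-time leak in the embedded Python runtime rather than per-test regressions. If that leak is triaged as known, LeakSanitizer can be silenced for exactly that stack with a suppressions file; a sketch, assuming a file named lsan.supp and that the variable reaches the environment of the spawned fqrun process (neither detail is shown by this log):

    import os

    # Hypothetical lsan.supp content (LSAN matches frames by substring):
    #   leak:__Pyx_Import
    #   leak:__pyx_pymod_exec_sitecustomize
    supp = os.path.join(os.path.dirname(__file__), "lsan.supp")
    # Must be set before yatest.common.process.execute() spawns the
    # sanitized binary, so the child process inherits it.
    os.environ["LSAN_OPTIONS"] = f"suppressions={supp},print_suppressions=0"

The dump for the next failing case continues below.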
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f3e080d03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3e080c9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3e07953d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3e07a51464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3e07a51464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3e07a51464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3e07952b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3e079ff90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3e078f7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3e078f7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3e078f7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3e07a4ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3e07a4ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (20.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0bzw057k/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f27dd1f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f27dd1f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f27dca7bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f27dcb79464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f27dcb79464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f27dcb79464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f27dca7ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f27dcb2790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f27dca1fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f27dca1fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f27dca1fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f27dcb76e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f27dcb76e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (15.85s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_db6j4kd8/topic_3.txt' has failed with code 100. 
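Each command also carries four --emulate-pq arguments in <topic>@<file> form, mapping the emulated PQ topics (test_topic_input, test_topic_input2, test_topic_output, test_topic_output2) to local fixture files. Building that argv fragment is mechanical; a sketch with a hypothetical helper (make_emulate_pq_args is illustrative, the real mapping lives in the test setup around fqrun.py):

    def make_emulate_pq_args(topics: dict[str, str]) -> list[str]:
        # topics maps an emulated topic name to the file backing it,
        # e.g. {"test_topic_input": "/abs/path/topic_0.txt"}.
        return [f"--emulate-pq={name}@{path}" for name, path in topics.items()]

    # Produces ["--emulate-pq=test_topic_input@topic_0.txt",
    #           "--emulate-pq=test_topic_output@topic_2.txt"]
    args = make_emulate_pq_args({
        "test_topic_input": "topic_0.txt",
        "test_topic_output": "topic_2.txt",
    })

The corresponding sanitizer dump follows.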
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9f131d03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9f131c9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9f12a53d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9f12b51464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9f12b51464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9f12b51464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9f12a52b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9f12aff90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9f129f7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9f129f7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9f129f7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9f12b4ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9f12b4ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (16.94s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v5ic4353/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f32104703b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3210469ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f320fcf3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f320fdf1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f320fdf1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f320fdf1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f320fcf2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f320fd9f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f320fc97e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f320fc97e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f320fc97c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f320fdeee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f320fdeee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ------ [test_discovery.py] chunk ran 3 tests (total:184.71s - setup:0.05s test:184.57s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.2G (13816788K) used. 
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the ya.make.
pid     rss    ref    pdirt
1954345 44.9M  44.9M  6.3M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1954358 33.9M  22.0M  9.3M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1954360 905M   910M   832M  └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
1978795 1.4G   1.4G   974M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978796 1.4G   1.4G   971M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978810 1.4G   1.4G   977M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978811 1.4G   1.4G   976M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978812 1.4G   1.4G   975M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978813 1.3G   1.3G   962M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978814 1.4G   1.4G   975M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978815 1.3G   1.4G   980M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1978816 1.4G   1.4G   980M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t
1983489 426M   0b     0b    └─ ydbd --server=grpc://localhost:18932 admin blobstorage config invoke --proto=Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "dynamic_
1983546 426M   0b     0b      └─ ydbd --server=grpc://localhost:18932 admin blobstorage config invoke --proto=Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "dynam
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr
ydb/tests/functional/hive [size:medium] nchunks:80
------ [test_drain.py 0/20] chunk ran 1 test (total:80.57s - test:80.33s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (73.42s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E
############################## E 0 seconds passed, 68 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224038627: 4) (72075186224038631: 4) (72075186224038643: 4) (72075186224038654: None) (72075186224038667: 4) (72075186224038678: None) (72075186224038686: 3) (72075186224038695: None) (72075186224038707: 4) (72075186224038711: 4). Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/postgresql [size:medium] ------ sole chunk ran 14 tests (total:88.14s - test:88.02s) [fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (33.64s) teardown failed: ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==1806969==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 23040 byte(s) in 6 object(s) allocated from: E #0 0x1d8785dd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x46c4c21f in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x46c4c21f in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x46c3dab9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x46c3dab9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x46a24ee6 in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x46a24ee6 in insert /-S/util/generic/hash.h:153:20 E #9 0x46a24ee6 in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:345:23 E #10 0x46a05d90 in 
NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:472:5 E #11 0x46c4c230 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:373:11 E #12 0x46c4c230 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x46c4c230 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x46c3dab9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x46c3dab9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBa... Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff ------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 4/10] chunk ran 2 tests (total:235.95s - test:234.36s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (54.96s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (170.19s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 20 - GOOD, 2 - FAIL ydb/tests/functional/serverless
ydb/tests/functional/sqs/cloud [size:medium] nchunks:40
------ [34/40] chunk ran 2 tests (total:111.12s - test:111.06s)
[fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (80.22s)
ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines)
E AssertionError: Got only 0 event lines after all attempts
E assert 0 >= 2
E + where 0 = len([])
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff
------ FAIL: 75 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud
------ sole chunk ran 1 test (total:108.29s - test:108.12s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.9G (14563484K) used. This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the ya.make.
pid     rss    ref    pdirt
1812144 44.9M  42.9M  6.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1812257 34.0M  22.0M  9.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1812306 834M   806M   756M  └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini
1814407 1.6G   1.6G   1.2G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1814414 1.5G   1.5G   965M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853648 1.5G   0b     0b    │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
1814421 1.5G   0b     0b    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853631 1.5G   0b     0b    │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
1814422 1.6G   1.6G   1.1G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1814423 1.4G   0b     0b    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853673 1.4G   0b     0b    │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
1814426 1.5G   0b     0b    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853672 1.5G   0b     0b    │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
1814427 1.5G   0b     0b    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853708 1.5G   0b     0b    │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
1814428 1.5G   0b     0b    └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result
1853668 1.5G   0b     0b      └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr
ydb/tests/functional/tpc/medium [size:medium] nchunks:2
------ [test_tpch.py] chunk ran 22 tests (total:634.33s - setup:0.01s test:600.08s)
Chunk exceeded 600s timeout, failed to shut down gracefully in 30s and was terminated using the SIGQUIT signal.
List of the tests involved in the launch: test_tpch.py::TestTpchS1::test_tpch[1] (timeout) duration: 627.55s
21 tests were not launched inside chunk.
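Here a single parametrized case, test_tpch[1], consumed the whole 600 s chunk budget, so the other 21 tests in the chunk never started. Both the budget and the chunking come from the suite's ya.make; a sketch of the relevant knobs (the macro names are standard ya test macros, the concrete values are assumptions to tune per suite, and under ASAN the default medium-size budget is clearly too tight for TPC-H):

    # ya.make of the suite (sketch; values illustrative)
    SIZE(LARGE)        # or keep MEDIUM and raise the explicit budget
    TIMEOUT(3600)      # per-chunk wall-clock budget, in seconds
    FORK_SUBTESTS()    # split parametrized cases across chunks
    SPLIT_FACTOR(22)   # so one slow query cannot starve the remaining 21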
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch.py::TestTpchS1::test_tpch[1] [default-linux-x86_64-release-asan] (627.55s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.1.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 43 - GOOD, 21 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/functional/tpc/medium
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:631.84s - test:600.02s)
Chunk exceeded 600s timeout, failed to shut down gracefully in 30s and was terminated using the SIGQUIT signal.
List of the tests involved in the launch: alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 625.56s
alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (625.56s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:633.94s - test:600.06s)
Chunk exceeded 600s timeout, failed to shut down gracefully in 30s and was terminated using the SIGQUIT signal.
List of the tests involved in the launch: test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 627.37s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.7G (16413720K) used.
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the ya.make.
pid     rss    ref    pdirt
1863587 44.9M  43.4M  6.4M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1863779 42.9M  30.7M  18.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1863815 606M   557M   524M   └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
1868304 12.2G  12.0G  11.7G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t
1874398 386M   331M   353M   ├─ moto_server s3 --port 23229
1891004 2.7G   2.6G   2.6G   └─ ydb -e grpc://localhost:25103 -d /Root workload tpch import generator --scale 1
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (627.37s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 9 tests (total:625.50s - test:600.05s)
Chunk exceeded 600s timeout and was killed.
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 455.42s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 58.32s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 46.90s
test_simple.py::TestSimple::test[tablestores] (good) duration: 23.19s
test_simple.py::TestSimple::test[alter_table] (good) duration: 18.49s
test_simple.py::TestSimple::test[alter_tablestore] (good) duration: 7.85s
test_simple.py::TestSimple::test[table] (good) duration: 6.40s
2 tests were not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (46.90s) ydb/tests/olap/scenario/conftest.py:88: in test ctx.executable(self, ctx) ydb/tests/olap/scenario/test_alter_tiering.py:297: in scenario_many_tables self._setup_tiering_test(ctx) ydb/tests/olap/scenario/test_alter_tiering.py:161: in _setup_tiering_test self._override_external_data_source(sth, self.sources[-1], s3_config) ydb/tests/olap/scenario/test_alter_tiering.py:177: in _override_external_data_source sth.execute_scheme_query(CreateExternalDataSource(path, config, True)) ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:430: in execute_scheme_query self._run_with_expected_status( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:357: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}') E Failed: Unexpected status: must be in {}, but get SchemeError('message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 362 } message: "Executing CREATE OBJECT EXTERNAL_DATA_SOURCE" end_position { row: 1 column: 362 } severity: 1 issues { message: "
: Error: (NKikimr::NExternalSource::TExternalSourceException) External source with type ObjectStorage is disabled. Please contact your system administrator to enable it, code: 2003\\n" issue_code: 2003 severity: 1 } } (server_code: 400070)')
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (455.42s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 5 - GOOD, 1 - FAIL, 2 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario
ydb/tests/olap/ttl_tiering [size:medium] nchunks:6
------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:622.85s - test:600.09s)
Chunk exceeded 600s timeout and was killed.
List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 609.75s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (609.75s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ [ttl_delete_s3.py] chunk ran 3 tests (total:617.25s - test:600.10s)
Chunk exceeded 600s timeout and was killed.
List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (timeout) duration: 612.44s
2 tests were not launched inside chunk.
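The TestAlterTiering::test[many_tables] failure above is a configuration gap rather than a product bug surfacing: the CREATE EXTERNAL DATA SOURCE statement is rejected because the ObjectStorage source type is not enabled on the test cluster. Enabling it is a server-side config change; a sketch of the YAML fragment for the config that ydbd receives via --yaml-config (the key names are my assumption about current YDB server configuration and should be verified against the deployed version):

    # Sketch of a ydbd config fragment; keys are assumptions to verify
    feature_flags:
      enable_external_data_sources: true
    query_service_config:
      available_external_data_sources:
        - ObjectStorage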
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (612.44s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 3 - GOOD, 2 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/olap_workload/tests [size:medium] ------ sole chunk ran 1 test (total:103.24s - test:103.02s) Test failed with 1 exit code. See logs for more info Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr [crashed] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (0.00s) Test crashed Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - CRASHED ydb/tests/stress/olap_workload/tests ------ [test_disk.py 0/10] chunk ran 1 test (total:46.24s - test:46.18s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.3G (13935060K) used. 
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
1452771 44.9M 44.9M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1452880 33.7M 21.8M 9.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1452882 742M 746M 662M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do
1453392 1.4G 1.4G 1003M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453393 1.4G 1.4G 1001M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453394 1.4G 1.4G 997M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453395 1.4G 1.4G 1001M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453396 1.4G 1.4G 978M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453397 1.4G 1.4G 1005M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453398 1.4G 1.4G 998M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453399 1.4G 1.4G 994M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453400 1.4G 1.4G 997M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr
------ [test_tablet.py 0/10] chunk ran 1 test (total:100.07s - test:99.91s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.1G (14746148K) used.
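Editor's note on the REQUIREMENTS(ram:X) hint repeated in these memory reports: the RAM requirement is declared per test in the suite's ya.make, next to the size and timeout macros that govern the 600s chunk kills seen elsewhere in this run. A minimal sketch of such a test section; only REQUIREMENTS(ram:X) itself is named by the log, and the concrete values here are hypothetical:

    # hedged ya.make sketch; ram:16 is a guess sized against the 13.3G/14.1G
    # peaks reported above, not a value taken from this repository
    SIZE(MEDIUM)
    TIMEOUT(600)
    REQUIREMENTS(ram:16)

The right number should come from the measured rss in the process tree, with some headroom, rather than from this sketch.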
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
1452930 44.9M 44.0M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1452941 33.9M 21.7M 9.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1452943 796M 780M 716M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do
1453425 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453428 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453429 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453434 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453437 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453453 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453465 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
1453492 1.6G 1.6G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr
------ [1/10] chunk ran 1 test (total:212.65s - test:212.61s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.6G (10069804K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
1434216 44.9M 43.4M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1434344 34.1M 21.6M 9.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1434368 46.0M 44.5M 23.0M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testi
1436340 9.7G 9.5G 9.5G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancin
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr
ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:12.13s - test:12.07s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (3.45s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:8.46s - test:8.43s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.04s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ??
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:10.97s - test:10.94s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.06s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:14.37s - test:14.34s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.39s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [12/50] chunk ran 1 test (total:25.25s - test:25.21s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==1546043==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7fff703bc3e0 sp 0x7fff703bc240 T0) ==1546043==The signal is caused by a READ memory access. ==1546043==Hint: address points to the zero page. 2025-04-10T11:19:21.117085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T11:19:21.117142Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18d5fe47 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18d5fe47 in std::__y1::__function::__func< ..[snippet truncated].. 
0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f8649851d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f8649851e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x164bd028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==1546043==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 21 - GOOD, 1 - CRASHED ydb/core/kqp/ut/cost
ydb/core/kqp/ut/federated_query/generic_ut nchunks:10
------ [7/10] chunk ran 4 tests (total:83.63s - test:60.05s)
Chunk exceeded 60s timeout and was killed
List of the tests involved in the launch:
GenericFederatedQuery::PostgreSQLFilterPushdown (timeout) duration: 31.29s
GenericFederatedQuery::PostgreSQLOnPremSelectAll (good) duration: 19.89s
GenericFederatedQuery::PostgreSQLSelectCount (good) duration: 17.44s
GenericFederatedQuery::PostgreSQLOnPremSelectConstant (good) duration: 12.69s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr
[timeout] GenericFederatedQuery::PostgreSQLFilterPushdown [default-linux-x86_64-release-asan] (31.29s) Killed by timeout (60 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.PostgreSQLFilterPushdown.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.PostgreSQLFilterPushdown.out
------ TIMEOUT: 37 - GOOD, 1 - TIMEOUT ydb/core/kqp/ut/federated_query/generic_ut
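Editor's note on the KqpCost::OlapWriteRow crash above: the ASan trace points at protobuf's RepeatedPtrField Get() reached through table_access() in the generated ydb_query_stats.pb.h, which is the signature of an unchecked indexed read of an empty repeated field (Get() does not bounds-check in release builds). A minimal sketch of the guarded pattern, assuming the Ydb::TableStats::QueryPhaseStats message from that header; the helper itself is hypothetical:

    // hypothetical helper: guard the indexed access that the SEGV above
    // suggests ran against an empty repeated table_access field
    #include <ydb/public/api/protos/ydb_query_stats.pb.h>
    #include <util/system/types.h>

    ui64 FirstTableReadRows(const Ydb::TableStats::QueryPhaseStats& phase) {
        if (phase.table_access_size() == 0) {  // bounds-check before Get()
            return 0;
        }
        return phase.table_access(0).reads().rows();
    }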
ydb/core/kqp/ut/indexes [size:medium] nchunks:50 ------ [33/50] chunk ran 2 tests (total:57.98s - test:57.93s) [fail] KqpMultishardIndex::DataColumnWrite-UseSink [default-linux-x86_64-release-asan] (33.48s) assertion failed at ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2079, virtual void NKikimr::NKqp::NTestSuiteKqpMultishardIndex::TTestCaseDataColumnWrite::Execute_(NUnitTest::TTestContext &) [UseSink = false]: (result.IsSuccess()) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x198E1F3B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19DA749F 2. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2079: Execute_ @ 0x19529914 3. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: operator() @ 0x194E0327 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x194E0327 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x194E0327 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x194E0327 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x194E0327 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19DDE4C5 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19DDE4C5 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19DDE4C5 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19DAE018 12. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: Execute @ 0x194DF4F3 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19DAF8E5 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19DD8A3C 15. ??:0: ?? @ 0x7F0166E8AD8F 16. ??:0: ?? @ 0x7F0166E8AE3F 17. ??:0: ?? @ 0x16613028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.DataColumnWrite-UseSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.DataColumnWrite-UseSink.out ------ FAIL: 119 - GOOD, 1 - FAIL ydb/core/kqp/ut/indexes ydb/core/kqp/ut/join [size:medium] nchunks:200 ------ [122/200] chunk ran 1 test (total:343.55s - test:343.51s) [fail] KqpJoinOrder::Chain65Nodes [default-linux-x86_64-release-asan] (334.36s) assertion failed at ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:379, void NKikimr::NKqp::TChainTester::JoinTables(): (result.GetStatus() == EStatus::SUCCESS) failed: (TIMEOUT != SUCCESS) , with diff: (TIM|SUCC)E(OUT|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193A6ECB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19878CFF 2. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:379: JoinTables @ 0x18FB72E1 3. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:325: Test @ 0x18F6F600 4. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:546: Execute_ @ 0x18F6F600 5. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544: operator() @ 0x18FB3937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544:1) &> @ 0x18FB3937 7. 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544:1) &> @ 0x18FB3937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18FB3937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18FB3937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198AFD25 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198AFD25 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198AFD25 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1987F878 14. /tmp//-S/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp:544: Execute @ 0x18FB2B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19881145 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198AA29C 17. ??:0: ?? @ 0x7FE5F9EC4D8F 18. ??:0: ?? @ 0x7FE5F9EC4E3F 19. ??:0: ?? @ 0x16572028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.Chain65Nodes.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/join/test-results/unittest/testing_out_stuff/KqpJoinOrder.Chain65Nodes.out ------ FAIL: 230 - GOOD, 1 - FAIL ydb/core/kqp/ut/join ydb/core/kqp/ut/pg [size:medium] nchunks:10 ------ [0/10] chunk ran 12 tests (total:194.40s - test:194.28s) [fail] KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [default-linux-x86_64-release-asan] (10.13s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequenceWithSchemaname.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequenceWithSchemaname.out [fail] KqpPg::AlterColumnSetDefaultFromSequence [default-linux-x86_64-release-asan] (7.73s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequence.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequence.out ------ [8/10] chunk ran 11 tests (total:432.63s - setup:0.01s test:432.44s) [fail] KqpPg::TempTablesSessionsIsolation [default-linux-x86_64-release-asan] (8.97s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.out
[fail] KqpPg::TempTablesDrop [default-linux-x86_64-release-asan] (8.67s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.out
------ [9/10] chunk ran 11 tests (total:338.19s - test:338.06s)
[fail] PgCatalog::CheckSetConfig [default-linux-x86_64-release-asan] (8.58s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.out
------ FAIL: 109 - GOOD, 5 - FAIL ydb/core/kqp/ut/pg
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [12/50] chunk ran 4 tests (total:612.53s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
KqpLimits::OutOfSpaceYQLUpsertFail+useSink (good) duration: 247.79s
KqpLimits::OutOfSpaceBulkUpsertFail (good) duration: 187.60s
KqpLimits::OutOfSpaceYQLUpsertFail-useSink (timeout) duration: 174.50s
KqpLimits::QSReplySize+useSink test was not launched inside chunk.
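Editor's note on the five KqpPg failures above: they share one signature, NYdb::Dev::TContractViolation thrown from TCreateSessionResult::GetSession, which means GetSession() was called on a result whose status was never checked. A minimal sketch of the checked pattern, assuming the public C++ SDK table client; the include path and helper name are assumptions:

    #include <ydb-cpp-sdk/client/table/table.h>  // path assumed; varies between SDK layouts
    #include <util/generic/yexception.h>

    NYdb::NTable::TSession CreateSessionChecked(NYdb::NTable::TTableClient& client) {
        auto result = client.CreateSession().GetValueSync();
        if (!result.IsSuccess()) {
            // surface the failure instead of violating the result's contract
            ythrow yexception() << "CreateSession failed: " << result.GetIssues().ToString();
        }
        return result.GetSession();  // safe only after IsSuccess() returned true
    }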
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/stderr [timeout] KqpLimits::OutOfSpaceYQLUpsertFail-useSink [default-linux-x86_64-release-asan] (174.50s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.OutOfSpaceYQLUpsertFail-useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.OutOfSpaceYQLUpsertFail-useSink.out ------ [15/50] chunk ran 4 tests (total:37.67s - test:37.63s) [crashed] KqpLimits::TooBigColumn+useSink [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.out ------ [47/50] chunk ran 3 tests (total:69.96s - test:69.92s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (51.86s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B82265 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BAB3BC 15. ??:0: ?? @ 0x7F3CF0827D8F 16. ??:0: ?? @ 0x7F3CF0827E3F 17. ??:0: ?? 
@ 0x16611028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ TIMEOUT: 168 - GOOD, 1 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT, 1 - CRASHED ydb/core/kqp/ut/query ydb/core/kqp/ut/service [size:medium] nchunks:50 ------ [46/50] chunk ran 3 tests (total:615.50s - test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: KqpService::PatternCache (timeout) duration: 558.74s KqpService::CloseSessionsWithLoad (good) duration: 53.72s KqpService::RangeCache+UseCache test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/stderr [timeout] KqpService::PatternCache [default-linux-x86_64-release-asan] (558.74s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpService.PatternCache.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/service/test-results/unittest/testing_out_stuff/KqpService.PatternCache.out ------ TIMEOUT: 159 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/kqp/ut/service ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [10/50] chunk ran 2 tests (total:35.76s - test:35.69s) [fail] KqpSinkMvcc::OlapMultiSinks [default-linux-x86_64-release-asan] (15.62s) assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:558, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[[[\"2\"]]]" != "[[[\"1\"]]]") , with diff: "[[[\"(2|1)\"]]]" 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:558: CompareYson @ 0x48DC52D7 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:368: DoExecute @ 0x18F41B6A 4. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:389: Execute_ @ 0x18F1017A 6. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: operator() @ 0x18F162D7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18F162D7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14:1) &> @ 0x18F162D7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F162D7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F162D7 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 12. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 15. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp:14: Execute @ 0x18F154A3 16. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 18. ??:0: ?? @ 0x7F7CE852AD8F 19. ??:0: ?? @ 0x7F7CE852AE3F 20. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkMvcc.OlapMultiSinks.out ------ [21/50] chunk ran 2 tests (total:35.58s - test:35.51s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (12.18s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18F652BE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18F43F0A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18F4B387 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F4B387 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F4B387 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18F4A553 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F52BDE20D8F 18. ??:0: ?? @ 0x7F52BDE20E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [25/50] chunk ran 2 tests (total:27.12s - test:27.07s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (12.78s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F58AE19CD8F 18. ??:0: ?? @ 0x7F58AE19CE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [26/50] chunk ran 2 tests (total:17.29s - test:17.25s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (8.41s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6D68729D8F 18. ??:0: ?? @ 0x7F6D68729E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (5.30s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6D68729D8F 18. ??:0: ?? @ 0x7F6D68729E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [27/50] chunk ran 2 tests (total:27.46s - test:27.41s) [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (14.65s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FE9485DED8F 18. ??:0: ?? @ 0x7FE9485DEE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (8.11s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FE9485DED8F 18. ??:0: ?? @ 0x7FE9485DEE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out ------ [28/50] chunk ran 2 tests (total:27.12s - test:27.08s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (7.93s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F092E5A4D8F 18. ??:0: ?? @ 0x7F092E5A4E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [29/50] chunk ran 2 tests (total:17.89s - test:17.86s) [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (8.32s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F978CDCAD8F 18. ??:0: ?? @ 0x7F978CDCAE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out [fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (5.77s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F978CDCAD8F 18. ??:0: ?? @ 0x7F978CDCAE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out ------ [30/50] chunk ran 2 tests (total:26.93s - test:26.88s) [fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (7.72s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
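The ", with diff:" tail on these assertions is the unit-test framework's compact character diff between the two printed values: inside each (left|right) group the left fragment belongs to the actual value and the right to the expected one, and bare characters are common to both, so (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) decodes to PRECONDITION_FAILED versus SUCCESS. A sketch of an assertion that yields this kind of output, assuming the string overload of the macro renders the diff the same way as the status comparison these tests use:

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(DiffNotationDemo) {
        Y_UNIT_TEST(StatusMismatch) {
            TString actual = "PRECONDITION_FAILED";
            TString expected = "SUCCESS";
            // On failure the framework prints "(PRECONDITION_FAILED != SUCCESS)"
            // followed by ", with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)"; the
            // _C suffix appends a custom message, which is how the server issues
            // end up attached to the log lines above.
            UNIT_ASSERT_VALUES_EQUAL_C(actual, expected, "issues would go here");
        }
    }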
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0067821D8F 18. ??:0: ?? @ 0x7F0067821E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out ------ [31/50] chunk ran 2 tests (total:27.30s - test:27.28s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (16.27s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F5EA7184D8F 18. ??:0: ?? @ 0x7F5EA7184E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out ------ FAIL: 92 - GOOD, 12 - FAIL ydb/core/kqp/ut/tx ydb/core/kqp/ut/view [size:medium] ------ sole chunk ran 23 tests (total:252.50s - test:252.29s) [fail] TCreateAndDropViewTest::CallDropViewOnTable [default-linux-x86_64-release-asan] (8.11s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.out [fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (7.82s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out ------ FAIL: 21 - GOOD, 2 - FAIL ydb/core/kqp/ut/view ydb/core/kqp/workload_service/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 6 tests (total:226.42s - setup:0.06s test:224.50s) [fail] KqpWorkloadService::TestCpuLoadThresholdRefresh [default-linux-x86_64-release-asan] (17.36s) (NThreading::TFutureException) library/cpp/threading/future/core/future-inl.h:58: wait timeout Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/KqpWorkloadService.TestCpuLoadThresholdRefresh.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/KqpWorkloadService.TestCpuLoadThresholdRefresh.out ------ FAIL: 57 - GOOD, 1 - FAIL ydb/core/kqp/workload_service/ut ydb/core/mind/bscontroller/ut [size:medium] nchunks:30 ------ [14/30] chunk ran 1 test (total:600.95s - test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TGroupMapperTest::MonteCarlo (good) duration: 590.43s Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/bscontroller/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/bscontroller/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/mind/bscontroller/ut/test-results/unittest/testing_out_stuff/stderr ------ TIMEOUT: 23 - GOOD ydb/core/mind/bscontroller/ut ydb/core/persqueue/ut/ut_with_sdk [size:medium] nchunks:10 ------ [4/10] chunk ran 6 tests (total:391.30s - setup:0.09s test:386.04s) [fail] TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [default-linux-x86_64-release-asan] (55.11s) assertion failed at ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:1484, virtual void NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic::Execute_(NUnitTest::TTestContext &): (stats->GetCommittedOffset() == 8) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic.out ------ [9/10] chunk ran 6 tests (total:114.32s - setup:0.09s test:114.00s) [crashed] WithSDK::DescribeConsumer [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==1069560==ERROR: AddressSanitizer: heap-use-after-free on address 0x50300594f889 at pc 0x000018eec552 bp 0x7ffd17275f50 sp 0x7ffd17275710 READ of size 9 at 0x50300594f889 thread T0 2025-04-10T10:54:44.693258Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636185934007660:2476], Partition 0, Sender [0:0:0], Recipient [6:7491636190228975011:2479], Cookie: 0 2025-04-10T10:54:44.701826Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636190228975011:2479]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:44.701970Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:44.702450Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:44.702991Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:44.703325Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:44.703657Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T10:54:44.858584Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7491636185934007660:2476], Partition 0, Sender [0:0:0], Recipient [6:7491636190228975011:2479], Cookie: 0 2025-04-10T10:54:44.858810Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7491636190228975011:2479]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:44.858869Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T10:54:44.859005Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T10:54:44.859154Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T10:54:44.859243Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T10:54:44.859326Z node 6 :PERSQUEUE TRACE: [PQ: 72 ..[snippet truncated].. 
nittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7f8c4938ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7f8c4938ae3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x50300594f600: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fa fa 0x50300594f680: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300594f700: fa fa fa fa fa fa fd fd fd fd fa fa fa fa fa fa 0x50300594f780: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fd fd 0x50300594f800: fd fa fa fa fa fa fa fa fa fa fd fd fd fa fa fa =>0x50300594f880: fd[fd]fd fd fa fa fa fa fa fa fa fa fa fa fa fa 0x50300594f900: fa fa fa fa fa fa fa fa fd fd fd fd fa fa fa fa 0x50300594f980: fa fa fa fa fd fd fd fd fa fa fa fa fa fa fa fa 0x50300594fa00: fa fa fa fa fa fa 00 00 00 00 fa fa fa fa fa fa 0x50300594fa80: fa fa fd fd fd fa fa fa fa fa fa fa fa fa fa fa 0x50300594fb00: fa fa fa fa fd fd fd fa fa fa fa fa fa fa fa fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==1069560==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.out ------ FAIL: 59 - GOOD, 1 - FAIL, 1 - CRASHED ydb/core/persqueue/ut/ut_with_sdk ydb/core/quoter/ut [size:medium] ------ sole chunk ran 37 tests (total:157.81s - test:157.71s) [fail] QuoterWithKesusTest::PrefetchCoefficient [default-linux-x86_64-release-asan] (7.64s) assertion failed at ydb/core/quoter/ut_helpers.cpp:121, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult) at /-S/ydb/core/quoter/ut_helpers.cpp:121:5 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18 UnRef at /-S/util/generic/ptr.h:624:13 operator() at 
/-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.out ------ FAIL: 36 - GOOD, 1 - FAIL ydb/core/quoter/ut ydb/core/sys_view/ut [size:medium] nchunks:10 ------ [5/10] chunk ran 7 tests (total:171.93s - test:171.89s) [fail] SystemView::PartitionStatsFields [default-linux-x86_64-release-asan] (10.16s) greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:537, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/sys_view/ut_kqp.cpp:1957:15 operator() at /-S/ydb/core/sys_view/ut_kqp.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.out ------ [7/10] chunk ran 7 tests (total:233.05s - setup:0.01s test:228.22s) [fail] SystemView::ShowCreateTableKeyBloomFilter [default-linux-x86_64-release-asan] (12.89s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableKeyBloomFilter.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableKeyBloomFilter.out [fail] SystemView::ShowCreateTableTtlSettings [default-linux-x86_64-release-asan] (18.22s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTtlSettings.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTtlSettings.out [fail] SystemView::ShowCreateTableReadReplicas [default-linux-x86_64-release-asan] (11.51s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableReadReplicas.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableReadReplicas.out [fail] SystemView::ShowCreateTableTemporary [default-linux-x86_64-release-asan] (16.95s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTemporary.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTemporary.out ------ FAIL: 67 - GOOD, 5 - FAIL ydb/core/sys_view/ut ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60 ------ [27/60] chunk ran 1 test (total:603.60s - test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 601.98s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (601.98s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out ------ [28/60] chunk ran 1 test (total:603.23s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 601.51s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (601.51s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out ------ [31/60] chunk ran 1 test (total:614.40s - setup:0.01s test:600.08s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 (timeout) duration: 610.46s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [default-linux-x86_64-release-asan] (610.46s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.out ------ TIMEOUT: 58 - GOOD, 3 - TIMEOUT ydb/core/tx/columnshard/ut_rw ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60 ------ [36/60] chunk ran 1 test (total:177.74s - test:176.84s) [crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (142.63s) Test crashed (return code: 100) ==1590531==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 56224 byte(s) in 2 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x1ca92df2 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x1ca92df2 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x1ca92df2 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x1ca92df2 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x1ca92df2 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25 #6 0x1ca92df2 in void std::__y1::vector>::__assign_with_size[abi:fe190000](NKikimr::NOlap::TUnifiedBlobId*, NKikimr::NOlap::TUnifiedBlobId*, long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1378:5 #7 0x1ca8b285 in assign /-S/contrib/libs/cxxsupp/libcxx/include/vector:1359:3 #8 0x1ca8b285 in operator= 
/-S/contrib/libs/cxxsupp/libcxx/include/vector:1330:5 #9 0x1ca8b285 in NKikimr::NOlap::TPortionMetaConstructor::Build() /-S/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp:53:20 #10 0x1ca4cad1 in ..[snippet truncated].. () /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #33 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #36 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #37 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #38 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #39 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #40 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #41 0x7fa053764d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 3120168 byte(s) leaked in 55709 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out ------ [38/60] chunk ran 1 test (total:177.28s - test:176.32s) [crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (149.65s) Test crashed (return code: 100) ==1607702==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. Indirect leak of 41088 byte(s) in 1284 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x18622f15 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x18622f15 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x18622f15 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x18622f15 in get_node /-S/util/generic/hash_table.h:503:43 #5 0x18622f15 in new_node /-S/util/generic/hash_table.h:953:19 #6 0x18622f15 in std::__y1::pair<__yhashtable_iterator, bool> THashTable, TIdentity, TEqualTo, std::__y1::allocator>::insert_unique_noresize(NKikimr::TLogoBlobID const&) /-S/util/generic/hash_table.h:1059:17 #7 0x2a1a1b98 in insert_unique /-S/util/generic/hash_table.h:680:16 #8 0x2a1a1b98 in insert /-S/util/generic/hash_set.h:162:51 #9 0x2a1a1b98 in operator() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp #10 0x2a1a1b98 in ExtractTo<(lambda at /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:276:23)> /-S/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.h ..[snippet truncated].. 
() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #38 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7f6e8d880d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2643002 byte(s) leaked in 62809 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out ------ FAIL: 41 - GOOD, 2 - CRASHED ydb/core/tx/columnshard/ut_schema ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 3 tests (total:281.21s - setup:0.04s test:280.31s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (153.36s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/datashard/ut_read_iterator [size:medium] nchunks:10 ------ [9/10] chunk ran 14 tests (total:603.88s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys (good) duration: 106.88s ReadIteratorExternalBlobs::ExtBlobsMultipleColumns (good) duration: 103.45s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle (good) duration: 61.77s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning (good) duration: 60.23s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd (good) duration: 58.95s ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded (good) duration: 54.17s ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot (good) duration: 52.98s ReadIteratorExternalBlobs::ExtBlobs (good) duration: 52.10s ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows (timeout) duration: 34.79s DataShardReadIteratorSysTables::ShouldNotAllowArrow (good) duration: 5.71s 2 more tests with 10.75s total duration are not listed. 2 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/stderr [timeout] ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [default-linux-x86_64-release-asan] (34.79s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsWithCompactingMiddleRows.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsWithCompactingMiddleRows.out ------ TIMEOUT: 145 - GOOD, 2 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/tx/datashard/ut_read_iterator ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2 ------ [1/2] chunk ran 26 tests (total:412.89s - setup:0.01s test:412.22s) [crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (13.29s) Test crashed (return code: 100) ==1125951==ERROR: LeakSanitizer: detected memory leaks Direct leak of 224 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2b21b420 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 
#2 0x2b21b420 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27 #3 0x2b2149d6 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37 #4 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #5 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #6 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #7 0x20817234 in NKikimr::NTabletFlatExecutor ..[snippet truncated].. in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #27 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #28 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #29 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #30 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #31 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #32 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #33 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #34 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #35 0x7f2b570dad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). 
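This crash is not a failing assertion: the test itself completed, and LeakSanitizer aborted the binary at exit after finding heap blocks with no remaining references. Note that the reported stack is the allocation site (TEvWriteResult::BuildCompleted reached through the datashard write path), not the place where the last owner was dropped, so the leak has to be traced forward from the std::make_unique call. A self-contained sketch of this failure class, with names chosen only to echo the report:

    // Build: clang++ -g -fsanitize=address leak_demo.cpp && ./a.out
    // LeakSanitizer runs at process exit, prints the allocation stack (as in
    // the report above), then fails the process with a non-zero exit code.
    #include <memory>

    struct TEvWriteResultLike { char Payload[224]; };  // 224 bytes, echoing "Direct leak of 224 byte(s)"

    int main() {
        auto ev = std::make_unique<TEvWriteResultLike>();
        ev.release();  // ownership dropped, nothing will delete it -> direct leak
        return 0;
    }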
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.out ------ FAIL: 51 - GOOD, 1 - CRASHED ydb/core/tx/datashard/ut_volatile ydb/core/tx/schemeshard/ut_index [size:medium] nchunks:80 ------ [9/80] chunk ran 1 test (total:604.26s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TAsyncIndexTests::MergeBothWithReboots[TabletReboots] (timeout) duration: 602.34s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/stderr [timeout] TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [default-linux-x86_64-release-asan] (602.34s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.MergeBothWithReboots.TabletReboots.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.MergeBothWithReboots.TabletReboots.out ------ TIMEOUT: 28 - GOOD, 1 - TIMEOUT ydb/core/tx/schemeshard/ut_index ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [3/60] chunk ran 1 test (total:84.20s - test:83.96s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 9 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [0/10] chunk ran 30 tests (total:295.08s - test:294.83s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [default-linux-x86_64-release-asan] (8.50s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
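All nine ut_schemereq failures in this suite fail before the scenario even starts: the CreateSession RPC exhausts its deadline under the ASan-slowed binary, gRPC status code 4 (DEADLINE_EXCEEDED), so sessionResult.IsSuccess() is false. The TContractViolation aborts earlier in this log (ydb/core/kqp/ut/view, ydb/core/sys_view/ut) are the other face of the same condition: TCreateSessionResult::GetSession() called without checking the status first. A defensive sketch of the session-acquisition pattern, assuming the in-tree C++ table client; the error handling and helper name are illustrative:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // in-tree SDK path
    #include <util/generic/maybe.h>
    #include <util/stream/output.h>

    // Check the status before touching the session: GetSession() on a failed
    // TCreateSessionResult is a contract violation and terminates the test,
    // which is exactly the "(NYdb::Dev::TContractViolation) Attempt to use
    // result with not successfull status" entries seen earlier in this log.
    TMaybe<NYdb::NTable::TSession> TryCreateSession(NYdb::NTable::TTableClient& client) {
        auto result = client.CreateSession().GetValueSync();
        if (!result.IsSuccess()) {
            // Deadline expiry surfaces as "GRpc error: (4): Deadline Exceeded".
            Cerr << "CreateSession failed: " << result.GetIssues().ToString() << Endl;
            return Nothing();
        }
        return result.GetSession();
    }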
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19215 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [default-linux-x86_64-release-asan] (8.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25146 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (8.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5426 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [default-linux-x86_64-release-asan] (8.85s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7065 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [default-linux-x86_64-release-asan] (10.34s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27038 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [default-linux-x86_64-release-asan] (10.61s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14216 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [default-linux-x86_64-release-asan] (9.00s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64828 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.11s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19776 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.out ------ [9/10] chunk ran 30 tests (total:227.33s - test:227.26s) [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (8.95s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13412
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out
------ FAIL: 291 - GOOD, 9 - FAIL ydb/core/tx/tx_proxy/ut_schemereq
ydb/core/viewer/ut [size:medium] nchunks:10
------ [4/10] chunk ran 5 tests (total:615.49s - setup:0.02s test:600.12s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
Viewer::JsonStorageListingV1NodeIdFilter (good) duration: 181.30s
Viewer::JsonStorageListingV1GroupIdFilter (good) duration: 153.24s
Viewer::JsonStorageListingV1PDiskIdFilter (timeout) duration: 146.65s
Viewer::JsonStorageListingV1 (good) duration: 102.36s
Viewer::JsonAutocompleteStartOfDatabaseName (good) duration: 13.56s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] Viewer::JsonStorageListingV1PDiskIdFilter [default-linux-x86_64-release-asan] (146.65s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.out
------ [5/10] chunk ran 5 tests (total:611.61s - setup:0.03s test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
Viewer::JsonStorageListingV2PDiskIdFilter (timeout) duration: 196.22s
Viewer::JsonStorageListingV2NodeIdFilter (good) duration: 173.38s
Viewer::JsonStorageListingV2GroupIdFilter (good) duration: 140.88s
Viewer::JsonStorageListingV2 (good) duration: 82.75s
Viewer::LevenshteinDistance (good) duration: 0.00s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] Viewer::JsonStorageListingV2PDiskIdFilter [default-linux-x86_64-release-asan] (196.22s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV2PDiskIdFilter.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV2PDiskIdFilter.out
------ [6/10] chunk ran 5 tests (total:171.53s - setup:0.04s test:171.30s)
[fail] Viewer::QueryExecuteScript [default-linux-x86_64-release-asan] (17.56s)
assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {}
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/viewer/viewer_ut.cpp:0:9
operator() at /-S/ydb/core/viewer/viewer_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.out
[fail] Viewer::Plan2SvgBad [default-linux-x86_64-release-asan] (11.25s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgBad.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.Plan2SvgBad.out
------ TIMEOUT: 47 - GOOD, 2 - FAIL, 2 - TIMEOUT ydb/core/viewer/ut
ydb/services/metadata/secret/ut [size:medium] nchunks:60
------ [2/60] chunk ran 1 test (total:58.03s - test:57.99s)
[crashed] Secret::Simple [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff/Secret.Simple.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff/Secret.Simple.out
------ [3/60] chunk ran 1 test (total:184.85s - test:184.68s)
[crashed] Secret::SimpleQueryService [default-linux-x86_64-release-asan] (76.45s)
Test crashed (return code: 100)
==1336036==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 26080 byte(s) in 1 object(s) allocated from:
    #0 0x18a3fa7d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
    #1 0x1f055018 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
    #2 0x1f055018 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
    #3 0x1f055018 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
    #4 0x1f055018 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
    #5 0x1f055018 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25
    #6 0x1f055018 in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:442:7
    #7 0x1f055018 in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30
    #8 0x1f055018 in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9
    #9 0x1f05488d in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr const&) /-S/contrib/libs/g ..[snippet truncated].. ke<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
    #23 0x2737a306 in __call<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
    #24 0x2737a306 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #25 0x2737a306 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #26 0x19b319be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #27 0x19b319be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #28 0x19b319be in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13
    #29 0x19b31f0c in Execute /-S/util/thread/factory.h:15:13
    #30 0x19b31f0c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41
    #31 0x18d5bd14 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20
    #32 0x18a09a38 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28
SUMMARY: AddressSanitizer: 413511 byte(s) leaked in 5078 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff/Secret.SimpleQueryService.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/metadata/secret/ut/test-results/unittest/testing_out_stuff/Secret.SimpleQueryService.out
------ FAIL: 4 - GOOD, 2 - CRASHED ydb/services/metadata/secret/ut
ydb/services/persqueue_v1/ut [size:medium] nchunks:10
------ [3/10] chunk ran 14 tests (total:477.74s - setup:0.01s test:476.72s)
[crashed] TPersQueueTest::InflightLimit [default-linux-x86_64-release-asan] (61.79s)
Test crashed (return code: 100)
==1231095==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 6164144 byte(s) in 94 object(s) allocated from:
    #0 0x191bbacf in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3
    #1 0x1a472253 in grpc_event_engine::experimental::MemoryAllocator::MakeSlice(grpc_event_engine::experimental::MemoryRequest) /-S/contrib/libs/grpc/src/core/lib/event_engine/memory_allocator.cc:63:13
    #2 0x1a44cb7d in maybe_make_read_slices /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1070:57
    #3 0x1a44cb7d in tcp_handle_read(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1094:5
    #4 0x1a450fd7 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5
    #5 0x1a450fd7 in tcp_read(grpc_endpoint*, grpc_slice_buffer*, grpc_closure*, bool, int) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1156:5
    #6 0x1a7ae748 in continue_read_action_locked /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2594:3
    #7 0x1a7ae748 in read_action_locked(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2583:7
    #8 0x1a33bbf6 in grpc_combiner_continue_exec_ctx() /-S/contrib/libs/grpc/src/core/lib/iomgr/combiner.cc:231:5
    #9 0x1a3141b4 in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:75:17
    #10 0x1a460404 in end_worker /-S/contrib/ ..[snippet truncated].. llset.cc:48:10
    #14 0x1a43aa97 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29
    #15 0x1aff611e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15
    #16 0x201a66bf in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13
    #17 0x201a66bf in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:190:18
    #18 0x1b00c22e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #19 0x1b00c22e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #20 0x1b00c22e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13
    #21 0x1b00c77c in Execute /-S/util/thread/factory.h:15:13
    #22 0x1b00c77c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41
    #23 0x1950a9c4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20
    #24 0x191b9618 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28
SUMMARY: AddressSanitizer: 6429421 byte(s) leaked in 1605 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.InflightLimit.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.InflightLimit.out
------ FAIL: 133 - GOOD, 1 - CRASHED ydb/services/persqueue_v1/ut
ydb/services/ydb/sdk_sessions_ut [size:medium] nchunks:10
------ [8/10] chunk ran 1 test (total:23.78s - test:23.74s)
[fail] YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [default-linux-x86_64-release-asan] (5.97s)
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=Y2QxODhmNWItYTVkZDkwZTMtYzAwN2I3NGEtZmU5ODU4ZjE=" != "ydb://session/3?node_id=1&id=MmU0ODZmZTYtODZhYzQzZTAtYTVkNmVhNGYtZGVmYTI1NQ==") , with diff: "ydb://session/3?node_id=1&id=(|MmU0ODZmZT)Y(2Qx|t)OD(|Z)h(mNWI|YzQzZTA)tYTVk(ZDkwZTMtYzAw|)N(2I3|mVh)NG(E|Y)tZ(|GV)m(U5ODU4ZjE|YTI1NQ=)="
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.out
------ FAIL: 15 - GOOD, 1 - FAIL ydb/services/ydb/sdk_sessions_ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [29/60] chunk ran 5 tests (total:69.62s - test:69.48s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (7.26s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 286 - GOOD, 1 - FAIL ydb/services/ydb/ut
------ sole chunk ran 2 tests (total:243.85s - recipes:15.66s test:225.37s recipes:2.64s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.1G (16840980K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid      rss   ref   pdirt
2036322  44.9M 43.6M 6.5M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2036391  34.2M 21.9M 9.8M   ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2041443  46.1M 43.2M 23.0M  │ └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args
2042066  2.7G  2.7G  2.7G   │   └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/
2038039  1.7G  1.7G  1.2G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038040  1.7G  1.6G  1.2G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038041  1.7G  1.7G  1.3G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038044  1.7G  1.7G  1.2G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038074  1.7G  1.6G  1.2G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038075  1.8G  1.7G  1.3G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038079  1.7G  1.6G  1.2G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
2038081  1.8G  1.7G  1.3G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr
Total 517 suites:
    476 - GOOD
    27 - FAIL
    14 - TIMEOUT
Total 10678 tests:
    10534 - GOOD
    83 - FAIL
    31 - NOT_LAUNCHED
    17 - TIMEOUT
    2 - SKIPPED
    11 - CRASHED
Cache efficiency ratio is 89.14% (44565 of 49996). Local: 0 (0.00%), dist: 5327 (10.65%), by dynamic uids: 0 (0.00%), avoided: 39238 (78.48%)
Dist cache download: count=4859, size=15.43 GiB, speed=159.77 MiB/s
Disk usage for tools/sdk at least 62.16 MiB
Additional disk space consumed for build cache 935.39 GiB
Critical path:
[109658 ms] [CC] [C3jcXr0RIOcCC6-5K95GZQ default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/sys_view/ut_kqp.cpp [started: 0 (1744282345551), finished: 109658 (1744282455209)]
[ 28671 ms] [LD] [ttB3bAJ3At2E2d9DKE0RKA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/sys_view/ut/ydb-core-sys_view-ut [started: 121016 (1744282466567), finished: 149687 (1744282495238)]
[562537 ms] [TM] [rnd-9153664536791235612 asan default-linux-x86_64 release]: ydb/core/sys_view/ut/unittest [started: 2580357 (1744284925908), finished: 3142894 (1744285488445)]
[  7167 ms] [TA] [rnd-b5j68kcq187g7vf9]: $(BUILD_ROOT)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} [started: 3142952 (1744285488503), finished: 3150119 (1744285495670)]
Time from start: 3920007.716064453 ms, time elapsed by graph 708033 ms, time diff 3211974.716064453 ms.
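The memory-limit Info above points at the standard fix the tool itself suggests: raise the test's declared RAM requirement with REQUIREMENTS(ram:X) so the runner reserves enough memory for the eight ydbd processes plus the test binary. A minimal sketch of that stanza in the affected ya.make follows; the surrounding macros and the value 32 are illustrative assumptions, not taken from this run:

    # ydb/tests/functional/kqp/kqp_indexes/ya.make (sketch; actual module macro may differ)
    UNITTEST()
        SIZE(MEDIUM)
        REQUIREMENTS(ram:32)  # assumed value: the run above peaked at ~16.1 GiB against a 16 GiB limit
    END()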
The longest 10 tasks:
[634869 ms] [TM] [rnd-17554724267422293455 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744285304000, finished: 1744285938869]
[634474 ms] [TM] [rnd-dgkzc5mf60k1t990 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744285154455, finished: 1744285788929]
[632298 ms] [TM] [rnd-ajylmmeq1z58q0zg asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744285054092, finished: 1744285686390]
[625948 ms] [TM] [rnd-budrjq8tzivfjeaw asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744285050820, finished: 1744285676768]
[625741 ms] [TM] [rnd-prmi9jat1gzr1mtm asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1744285125208, finished: 1744285750949]
[623490 ms] [TM] [rnd-6909275850342383382 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744283414996, finished: 1744284038486]
[617685 ms] [TM] [rnd-14393932376752011003 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744283479404, finished: 1744284097089]
[616569 ms] [TM] [rnd-14403466702853602324 asan default-linux-x86_64 release]: ydb/core/kqp/ut/query/unittest [started: 1744282543811, finished: 1744283160380]
[616430 ms] [TM] [rnd-17086118948488320259 asan default-linux-x86_64 release]: ydb/core/viewer/ut/unittest [started: 1744282458068, finished: 1744283074498]
[616132 ms] [TM] [rnd-11279042897734985571 asan default-linux-x86_64 release]: ydb/core/kqp/ut/service/unittest [started: 1744282596250, finished: 1744283212382]
Total time by type:
[161432078 ms] [TM] [count: 4381, ave time 36848.23 msec]
[ 11729152 ms] [prepare:get from dist cache] [count: 5327, ave time 2201.83 msec]
[ 11370099 ms] [LD] [count: 456, ave time 24934.43 msec]
[  1796482 ms] [TS] [count: 322, ave time 5579.14 msec]
[   526156 ms] [TA] [count: 243, ave time 2165.25 msec]
[   407246 ms] [prepare:tools] [count: 20, ave time 20362.30 msec]
[   274664 ms] [prepare:bazel-store] [count: 3, ave time 91554.67 msec]
[   227223 ms] [CC] [count: 9, ave time 25247.00 msec]
[   220367 ms] [prepare:put into local cache, clean build dir] [count: 5343, ave time 41.24 msec]
[   164799 ms] [prepare:AC] [count: 4, ave time 41199.75 msec]
[    73979 ms] [prepare:put to dist cache] [count: 404, ave time 183.12 msec]
[     5941 ms] [prepare:resources] [count: 1, ave time 5941.00 msec]
[     2941 ms] [AR] [count: 10, ave time 294.10 msec]
[      605 ms] [UN] [count: 2, ave time 302.50 msec]
[      590 ms] [ld] [count: 2, ave time 295.00 msec]
[      451 ms] [PK] [count: 1, ave time 451.00 msec]
[      417 ms] [BI] [count: 1, ave time 417.00 msec]
[      325 ms] [SB] [count: 1, ave time 325.00 msec]
[      296 ms] [CF] [count: 2, ave time 148.00 msec]
[      144 ms] [CP] [count: 1, ave time 144.00 msec]
[       11 ms] [prepare:clean] [count: 3, ave time 3.67 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 163754716 ms (93.38%)
Total run tasks time - 175357807 ms
Configure time - 35.2 s
Statistics overhead 2133 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.KC84DoWNuA --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools [2 ymakes processing] [8436/8436 modules configured] [3652/4837 modules rendered] [2 ymakes processing] [8436/8436 modules configured] [4751/4837 modules rendered] [2 ymakes processing] [8436/8436 modules configured] [4837/4837 modules rendered] Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [8442/8442 modules configured] [4837/4837 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution | 5.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup | 6.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction | 7.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut | 8.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator | 9.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb | 9.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |10.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut | 4.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut | 4.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg | 5.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large | 5.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds | 5.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 | 5.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay | 6.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut | 6.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut | 
6.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record | 6.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview | 6.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut | 6.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer | 7.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a | 7.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql | 7.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import | 7.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut | 7.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange | 7.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query | 8.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut | 9.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut | 9.6%| CLEANING SYMRES |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |13.8%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |14.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |14.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |13.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |13.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |14.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |14.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |15.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |15.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |15.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |15.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |16.5%| PREPARE $(GDB) |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |16.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |16.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |16.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |17.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |13.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |13.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |13.9%| PREPARE $(YMAKE_PYTHON3-4256832079) |14.0%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |14.0%| PREPARE $(JDK_DEFAULT-472926544) |14.3%| PREPARE $(JDK17-472926544) |14.4%| PREPARE $(WITH_JDK-sbr:7832760150) |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |14.6%| PREPARE $(FLAKE8_PY3-715603131) |14.6%| PREPARE $(WITH_JDK17-sbr:7832760150) |14.7%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403) |14.8%| PREPARE $(CLANG-2518231432) |14.9%| PREPARE $(CLANG_FORMAT-2212207123) |15.0%| PREPARE $(LLD_ROOT-3808007503) |15.0%| PREPARE $(PYTHON) |15.2%| PREPARE $(CLANG18-3363451693) |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |15.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |15.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |15.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |15.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |15.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |15.9%| PREPARE $(CLANG-1922233694) |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |16.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |16.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |16.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |16.7%| PREPARE $(CLANG16-1380963495) |16.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |16.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |16.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |16.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |17.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |17.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |17.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |17.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |17.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |17.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |17.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |17.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |17.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |17.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |17.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |17.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |17.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |17.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |18.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |18.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |18.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |18.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |18.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |18.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |18.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |18.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |18.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |18.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |19.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |19.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |19.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |19.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |19.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |19.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |19.7%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |19.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |19.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |19.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |20.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |20.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |20.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |21.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |21.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |21.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |21.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |22.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |24.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |24.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |25.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |25.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |25.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |25.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |25.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |25.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |26.3%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |26.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |28.0%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |28.4%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |29.0%| [AR] {RESULT} 
$(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |31.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |31.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |31.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |31.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |32.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |32.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |32.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |32.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |32.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |32.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |33.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |35.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |36.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |36.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |36.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |37.4%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |38.3%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |38.6%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |38.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |39.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |39.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |39.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |39.7%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |40.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |40.7%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |40.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |40.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |41.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |42.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |43.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |43.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |44.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |45.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |45.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |40.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |40.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |41.0%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |41.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |41.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |41.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |41.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |41.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |37.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |37.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |37.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |37.9%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |38.0%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |38.0%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |38.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |38.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |38.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |38.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |38.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |38.3%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |38.3%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |38.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |38.4%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |38.5%| COMPACTING CACHE 935.4GiB |38.5%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |38.6%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |38.6%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |38.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |38.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |38.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |38.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |38.8%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |38.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |38.9%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |38.9%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |39.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |39.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |39.1%| [LD] {RESULT} 
$(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |39.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |39.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |39.2%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |39.2%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |39.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |39.3%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |39.4%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |39.4%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |39.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |39.5%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |39.5%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |39.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |39.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |39.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |39.7%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |39.7%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |39.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |39.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |39.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |39.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |39.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |40.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |40.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |40.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |40.1%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |40.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |40.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |40.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |40.3%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |40.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |40.4%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |40.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |40.4%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |40.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |40.5%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |40.6%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |40.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |40.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |40.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |40.7%| [LD] {RESULT} 
$(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |40.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |40.8%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |40.9%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |40.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |40.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |41.0%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |41.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |41.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |41.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |41.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |41.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |41.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |41.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |41.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |41.4%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |41.4%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |41.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |41.5%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |41.5%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |41.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |41.6%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |41.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |41.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |41.7%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |41.8%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |41.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |41.9%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |41.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |41.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |42.0%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |42.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |42.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |42.1%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |42.2%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |42.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |42.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |42.3%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |42.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut 
|42.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |42.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |42.4%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |42.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |42.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |42.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |42.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |42.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |42.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |42.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |42.8%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |42.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |42.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |42.9%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |42.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |43.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |43.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |43.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |43.1%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |43.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |43.2%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |43.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |43.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |43.3%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |43.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |43.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |43.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |43.5%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |43.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |43.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |43.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |43.7%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |43.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |43.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |43.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |43.8%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |43.9%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |43.9%| 
[LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |43.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |44.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |44.0%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |44.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |44.1%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |44.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |44.2%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |44.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |44.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |44.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |44.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |44.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |44.4%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |44.5%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |44.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |44.6%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |44.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |44.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |44.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |44.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |44.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |44.8%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |44.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |44.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |44.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |45.0%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |45.1%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |45.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |45.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |45.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |45.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |45.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |45.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |45.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |45.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |45.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |45.5%| [LD] 
{RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |45.5%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |45.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |45.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |45.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |45.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |45.8%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |45.8%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |45.9%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |45.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |46.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |46.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |46.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |46.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |46.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |46.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |46.2%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |46.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |46.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |46.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |46.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |46.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |46.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |46.5%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |46.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |46.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |46.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |46.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |46.7%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |46.7%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |46.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |46.8%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |46.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |46.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |47.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |47.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |47.0%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |47.1%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |47.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |47.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |47.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |47.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |47.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |47.3%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |47.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |47.4%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |47.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |47.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |47.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |47.6%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |47.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |47.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |47.7%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |47.8%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |47.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |47.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |47.9%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |48.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |48.0%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |48.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |48.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |48.2%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |48.2%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |48.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |48.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |48.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |48.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |48.4%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |48.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |48.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |48.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |48.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |48.6%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |48.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |48.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |48.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |48.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |48.8%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |48.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |48.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |49.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |49.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |49.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |49.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |49.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |49.2%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |49.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |49.3%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |49.4%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |49.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |49.5%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |49.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |49.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |49.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |49.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |49.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |49.7%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |49.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |49.8%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |49.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |49.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |49.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.0%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |50.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |50.0%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |50.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |50.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |50.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.3%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |50.4%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |50.4%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |50.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |50.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |50.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |50.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.8%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |50.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |50.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |50.9%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |50.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.0%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |51.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |51.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |51.2%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |51.2%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |51.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |51.3%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |51.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |51.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |51.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |51.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |51.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |51.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |51.6%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |51.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |51.7%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |51.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |51.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |52.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |52.0%| [LD] 
{RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |52.0%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |52.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |52.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |52.2%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |52.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |52.3%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |52.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |52.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |52.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |52.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |52.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |52.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |52.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |52.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |52.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |52.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |52.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |52.8%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |52.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |52.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |52.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |52.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |53.0%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |53.0%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |53.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |53.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |53.2%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |53.2%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |53.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |53.3%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |53.3%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |53.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |53.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |53.5%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |53.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |53.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |53.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |53.6%| [LD] 
{RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |53.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |53.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |53.8%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |53.9%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |53.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |54.0%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |54.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |54.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |54.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |54.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |54.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |54.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |54.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |54.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |54.4%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |54.5%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |54.5%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |54.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |54.6%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |54.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |54.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |54.8%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |54.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |54.9%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |54.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |55.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |55.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |55.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |55.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |55.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |55.2%| [LD] {RESULT} 
$(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |55.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |55.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |55.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |55.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |55.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |55.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |55.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |55.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |55.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |55.6%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |55.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |55.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |55.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |55.8%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |55.8%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |55.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |55.9%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |56.0%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |56.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> TKeyValueTracingTest::ReadSmall
>> TKeyValueTracingTest::ReadHuge
>> TKeyValueTracingTest::WriteHuge
>> TKeyValueTracingTest::WriteSmall
|56.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest
>> KqpPg::TempTablesSessionsIsolation
|57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname
>> KqpPg::TempTablesDrop
|57.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |57.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest
>> TKeyValueTracingTest::ReadHuge [FAIL]
>> TKeyValueTracingTest::WriteHuge [FAIL]
>> TKeyValueTracingTest::ReadSmall [FAIL]
>> TKeyValueTracingTest::WriteSmall [FAIL]
|59.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.3%| [TM] {asan,
default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpPg::TempTablesDrop [GOOD] |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpPg::TempTablesSessionsIsolation [GOOD] |61.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.1%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |62.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesSessionsIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 28791, MsgBus: 27553 2025-04-10T12:10:06.916134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655646983711302:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:06.916213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0005a4/r3tmp/tmpEVqySC/pdisk_1.dat 2025-04-10T12:10:07.266911Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28791, node 1 2025-04-10T12:10:07.319056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:07.319165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:07.320967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:07.414315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:07.414343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:07.414359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:07.414532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27553 TClient is connected to server localhost:27553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:08.026274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:09.328848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655659868613859:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.328874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655659868613848:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.329639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.336387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:09.348583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655659868613862:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:09.446955Z node 1 :TX_PROXY ERROR: Actor# [1:7491655659868613913:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:09.641282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-10T12:10:10.078841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.087120Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T12:10:10.088139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.102390Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655664163581476:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:27: Error: At function: KiReadTable!
:3:27: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:10.102637Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2I0ZTM4ZTgtMzAwNWI3MTYtOTEyZDYwZGQtYTk4N2VjNmM=, ActorId: [1:7491655664163581445:2361], ActorState: ExecuteState, TraceId: 01jrfsq38x9g359mvnj0varr61, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesDrop [GOOD] Test command err: Trying to start YDB, gRPC: 3516, MsgBus: 12517 2025-04-10T12:10:06.927446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655648656963603:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:06.928016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00059b/r3tmp/tmpFSph2k/pdisk_1.dat 2025-04-10T12:10:07.309143Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3516, node 1 2025-04-10T12:10:07.318559Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:07.351111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:07.351298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:07.354315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:07.413751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:07.413781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:07.413789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:07.413897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12517 TClient is connected to server localhost:12517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:08.008427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T12:10:09.464235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655661541866157:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.464247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655661541866166:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.464363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.468214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:09.480679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655661541866171:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:09.542529Z node 1 :TX_PROXY ERROR: Actor# [1:7491655661541866222:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:09.641326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-10T12:10:10.075634Z node 1 :TX_PROXY ERROR: Actor# [1:7491655665836833751:2467] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:10.088273Z node 1 :TX_PROXY ERROR: Actor# [1:7491655665836833758:2472] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZDRiZDhlNDYtYjlhMTczNGYtYjEyM2EwMjctYmJjNmJlZWY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:10.102979Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T12:10:10.112152Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655665836833811:2366], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:10.112434Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRiZDhlNDYtYjlhMTczNGYtYjEyM2EwMjctYmJjNmJlZWY=, ActorId: [1:7491655661541866138:2328], ActorState: ExecuteState, TraceId: 01jrfsq39r2vg1srk1dt9grzkm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:10:10.141350Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655665836833822:2373], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:10.141593Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzdkZWMzOTItM2IxNGMzNGItMjEyMWNhZTAtMWJiNDA4Y2Q=, ActorId: [1:7491655665836833818:2370], ActorState: ExecuteState, TraceId: 01jrfsq3anazv9y7rwwx2t4nq4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |63.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] |63.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |63.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |63.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] Test command err: Trying to start YDB, gRPC: 10834, MsgBus: 12253 2025-04-10T12:10:06.919201Z 
node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655649011410141:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:06.919293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000598/r3tmp/tmpzGTNRl/pdisk_1.dat 2025-04-10T12:10:07.278730Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10834, node 1 2025-04-10T12:10:07.313840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:07.314736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:07.316827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:07.413705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:07.413734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:07.413748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:07.413892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12253 TClient is connected to server localhost:12253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:08.044756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:09.575218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655661896312699:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.575244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655661896312707:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.575332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:09.579535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:09.590505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655661896312713:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:09.675781Z node 1 :TX_PROXY ERROR: Actor# [1:7491655661896312764:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:09.722597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.293673Z node 1 :TX_PROXY ERROR: Actor# [1:7491655666191280232:2430] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/seq\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 } 2025-04-10T12:10:10.304039Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTcyN2UyMDAtOGVmNzhkODktZWIyY2E5YTYtY2FiZmI4MjA=, ActorId: [1:7491655661896312678:2328], ActorState: ExecuteState, TraceId: 01jrfsq3ewfg7f1k5yrwg74f5m, Create QueryResponse for error on request, msg: 2025-04-10T12:10:10.332707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.411489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.704231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T12:10:10.732956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T12:10:11.006084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:11.035626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 |64.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |64.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFE5449E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7FDADF58CD90)
__libc_start_main+128 (0x7FDADF58CE40)
_start+41 (0xD7D1029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFE53D38)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7F4512911D90)
__libc_start_main+128 (0x7F4512911E40)
_start+41 (0xD7D1029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFE53A28)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7F5F99592D90)
__libc_start_main+128 (0x7F5F99592E40)
_start+41 (0xD7D1029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFE540AE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7FCA7A518D90)
__libc_start_main+128 (0x7FCA7A518E40)
_start+41 (0xD7D1029)
|66.2%| [TM] {asan,
default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.8%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |67.7%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpCost::OlapWriteRow |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest
|69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn
>> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn
|70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> Viewer::JsonStorageListingV1PDiskIdFilter
>> Viewer::Plan2SvgBad
|70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> Viewer::QueryExecuteScript
|70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHop-default.txt]
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TReadOnlyOltp
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictReadWriteOltp
>> KqpSnapshotIsolation::TConflictWriteOltp
>> KqpSnapshotIsolation::TReadOnlyOltpNoSink
|72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink
>> KqpSinkMvcc::OlapMultiSinks
>> KqpSnapshotIsolation::TConflictWriteOltpNoSink
>> KqpSnapshotIsolation::TConflictReadWriteOlap
>> Viewer::Plan2SvgBad [GOOD]
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64
|72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TSimpleOltpNoSink
|73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> KqpSnapshotIsolation::TSimpleOltp
>> KqpSnapshotIsolation::TConflictWriteOlap
>> KqpSinkTx::OlapInvalidateOnError
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD]
Test command err:
2025-04-10T12:10:21.758004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655712037240253:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:21.758556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-10T12:10:22.157380Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23014, node 1
2025-04-10T12:10:22.171085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:10:22.171235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:10:22.179447Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T12:10:22.179709Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T12:10:22.182968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T12:10:22.285775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:10:22.285824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:10:22.285835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:10:22.286022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9566
2025-04-10T12:10:24.071326Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator
>> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD]
|73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 9683, MsgBus: 15199
2025-04-10T12:10:14.325919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655684114581385:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:14.326007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0001c4/r3tmp/tmp0U0so2/pdisk_1.dat
2025-04-10T12:10:14.570505Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9683, node 1
2025-04-10T12:10:14.632499Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:10:14.632531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:10:14.632539Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:10:14.632674Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T12:10:14.654542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:10:14.654644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:10:14.656369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15199
TClient is connected to server localhost:15199
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T12:10:15.050851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:15.068723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:15.183793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:15.271507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:15.313173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:16.364375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655692704517762:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:16.364493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:16.558713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.580853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.602248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.623640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.646477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.673663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-10T12:10:16.745341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655692704518278:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:16.745394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:16.745402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655692704518283:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:16.748181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-10T12:10:16.756016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655692704518285:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-10T12:10:16.857407Z node 1 :TX_PROXY ERROR: Actor# [1:7491655692704518341:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:10:17.737127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:19.326026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655684114581385:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:19.326091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T12:10:23.456080Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655722769293307:2808], TxId: 281474976715731, task: 1. Ctx: { TraceId : 01jrfsqg39ecfjvb75rqzv8nsn. SessionId : ydb://session/3?node_id=1&id=NDQ3OWM4ODUtMmUzOTlkZTQtYWM4Y2I5NGUtMTMxZDQyMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-04-10T12:10:23.456335Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655722769293309:2809], TxId: 281474976715731, task: 2. Ctx: { TraceId : 01jrfsqg39ecfjvb75rqzv8nsn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ3OWM4ODUtMmUzOTlkZTQtYWM4Y2I5NGUtMTMxZDQyMA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491655722769293304:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-04-10T12:10:23.456636Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQ3OWM4ODUtMmUzOTlkZTQtYWM4Y2I5NGUtMTMxZDQyMA==, ActorId: [1:7491655696999485891:2487], ActorState: ExecuteState, TraceId: 01jrfsqg39ecfjvb75rqzv8nsn, Create QueryResponse for error on request, msg:
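The PRECONDITION_FAILED above is YDB's expected reaction to an INSERT that collides with an existing primary key (issue code 2012), not a server fault. A minimal C++ sketch of the pattern that produces it, using the public SDK's table client; the table path and column names here are illustrative assumptions, not taken from the test:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <util/generic/yexception.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Running the same INSERT twice: the second attempt fails with
    // PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION
    // ("Conflict with existing key., code: 2012").
    EStatus InsertSameKeyTwice(TSession session) {
        const TString query = R"(
            INSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "one");
        )";
        auto txControl = TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx();
        auto first = session.ExecuteDataQuery(query, txControl).GetValueSync();
        Y_ENSURE(first.IsSuccess(), "first INSERT is expected to succeed");
        // Same key again -> constraint violation surfaced by KQP.
        auto second = session.ExecuteDataQuery(query, txControl).GetValueSync();
        return second.GetStatus(); // EStatus::PRECONDITION_FAILED
    }

An UPSERT of the same row would overwrite the existing key instead of failing, which is the usual fix when the collision is intentional.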
|74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8
|74.5%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log}
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log}
|74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest
>> SystemView::ShowCreateTableKeyBloomFilter
>> SystemView::ShowCreateTableTtlSettings
|75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest
|75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest
>> SystemView::PartitionStatsFields
>> SystemView::ShowCreateTableTemporary
>> SystemView::ShowCreateTableReadReplicas
>> Viewer::QueryExecuteScript [GOOD]
|75.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::QueryExecuteScript [GOOD]
Test command err:
2025-04-10T12:10:22.404165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655716285206898:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:22.404281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-10T12:10:22.709863Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T12:10:22.757755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:10:22.757877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 8458, node 1
2025-04-10T12:10:22.759963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T12:10:22.795797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:10:22.795824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:10:22.795831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:10:22.795982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9354
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T12:10:23.081664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:23.116947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-10T12:10:23.118940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:24.614975Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:24.615043Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:24.901526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655724875142202:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:24.901526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655724875142193:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:24.901650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:24.907602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-04-10T12:10:24.917465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655724875142207:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-04-10T12:10:24.996964Z node 1 :TX_PROXY ERROR: Actor# [1:7491655724875142258:2356] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:10:25.550381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T12:10:25.676427Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:25.676470Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.032702Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.032757Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.094210Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.094247Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.150585Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.150622Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.214034Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.214081Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.266650Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.266701Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.325650Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.325709Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.395769Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.395815Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.468724Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.468776Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.526230Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.526269Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.580332Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.580369Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.643354Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.643393Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.700221Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.700275Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.751955Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.751984Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.837423Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.837470Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.907014Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:26.907053Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:26.917905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-10T12:10:26.919469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-10T12:10:26.920744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-10T12:10:27.404662Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655716285206898:2059];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:27.404740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T12:10:28.271378Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:28.271424Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:28.914556Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:28.914598Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:29.287063Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-04-10T12:10:29.287115Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-04-10T12:10:29.932769Z node 1 :RPC_REQUEST WARN: Client lost
2025-04-10T12:10:29.932832Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491655746349979819:2571] TxId: 281474976710692. Ctx: { TraceId: 01jrfsqpb02506ryarpze3vh89, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI3NWIwOTUtMjk0ZDZjMzktODVhY2E1LTQyNGViZTA2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-10T12:10:29.933057Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjI3NWIwOTUtMjk0ZDZjMzktODVhY2E1LTQyNGViZTA2, ActorId: [1:7491655746349979794:2571], ActorState: ExecuteState, TraceId: 01jrfsqpb02506ryarpze3vh89, Create QueryResponse for error on request, msg:
2025-04-10T12:10:29.933058Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655746349979825:2576], TxId: 281474976710692, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjI3NWIwOTUtMjk0ZDZjMzktODVhY2E1LTQyNGViZTA2. TraceId : 01jrfsqpb02506ryarpze3vh89. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7491655746349979819:2571], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-10T12:10:29.933646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287029919, txId: 281474976710691] shutting down
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|75.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|75.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|75.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
>> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic
>> WithSDK::DescribeConsumer
|75.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|76.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest
|76.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change
|76.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> Secret::SimpleQueryService
|76.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|76.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow
Test command err:
Trying to start YDB, gRPC: 29912, MsgBus: 23028
2025-04-10T12:10:17.968700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655696939903168:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:17.968821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0005ad/r3tmp/tmp9J7Wih/pdisk_1.dat
2025-04-10T12:10:18.349421Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29912, node 1
2025-04-10T12:10:18.366287Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T12:10:18.366440Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-10T12:10:18.381871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:10:18.382965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:10:18.386182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T12:10:18.465264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:10:18.465284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:10:18.465291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:10:18.465380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23028
TClient is connected to server localhost:23028
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T12:10:19.039463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:19.071879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:19.204276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:19.342518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:19.412239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:10:20.062191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655709824806810:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:20.062274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:20.521541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.542773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.567646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.595014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.620541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.687469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T12:10:20.731118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655709824807356:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:20.731167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:20.731333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655709824807361:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:20.734703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T12:10:20.741470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655709824807363:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T12:10:20.797574Z node 1 :TX_PROXY ERROR: Actor# [1:7491655709824807416:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:10:21.936920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-10T12:10:22.111833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:10:22.111997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:10:22.112254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T12:10:22.112352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T12:10:22.112438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T12:10:22.112518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:10:22.112634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T12:10:22.112746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T12:10:22.112865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T12:10:22.112962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T12:10:22.113071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T12:10:22.113136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7491655718414742463:2506];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T12:10:22.115376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7491655718414742467:2508];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:10:22.115487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7491655718414742467:2508];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:10:22.115687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7491655718414742467:2508];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04 ... eSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T12:10:22.272783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T12:10:22.272836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T12:10:22.273033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T12:10:22.273098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T12:10:22.273218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T12:10:22.273265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T12:10:22.273335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T12:10:22.273370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T12:10:22.273420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T12:10:22.273456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T12:10:22.274071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T12:10:22.274165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T12:10:22.274397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T12:10:22.274443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T12:10:22.274582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T12:10:22.274657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T12:10:22.274934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T12:10:22.274987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T12:10:22.275129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-10T12:10:22.275164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-10T12:10:22.314260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.314304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.318639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.320406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.322782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.326188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.326838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.330586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.332928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.334221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671;
2025-04-10T12:10:22.515886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673;
2025-04-10T12:10:22.515888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673;
2025-04-10T12:10:22.516568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673;
2025-04-10T12:10:22.626668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710675;
query_phases { duration_us: 35790 cpu_time_us: 2025 affected_shards: 1 } query_phases { duration_us: 4526 cpu_time_us: 220 affected_shards: 1 } compilation { duration_us: 60895 cpu_time_us: 58392 } process_cpu_time_us: 591 total_duration_us: 105180 total_cpu_time_us: 61228
AddressSanitizer:DEADLYSIGNAL
=================================================================
==2066973==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7ffdaaddf980 sp 0x7ffdaaddf7e0 T0)
==2066973==The signal is caused by a READ memory access.
==2066973==Hint: address points to the zero page.
2025-04-10T12:10:22.968863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655696939903168:2063];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:10:22.968981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T12:10:33.342888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T12:10:33.342920Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18d5fe47 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#9 0x18d5fe47 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#10 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7f25fa160d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7f25fa160e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x164bd028 in _start (/home/runner/.ya/build/build_root/bh2h/0005ad/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==2066973==ABORTING
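The frames above show the test at kqp_cost_ut.cpp:636 reading query statistics through a protobuf repeated-field accessor. A SEGV at address 0x8 is what an out-of-range RepeatedPtrField::Get() typically produces in release builds: the bounds DCHECK is compiled out, and indexing into an empty field dereferences its null internal rep pointer at a small offset. A minimal sketch of a guarded accessor, assuming the Ydb::TableStats::QueryStats message layout from ydb_query_stats.proto; the helper name and exact crash mechanics are inferences from the trace, not taken from the test source:

    #include <ydb/public/api/protos/ydb_query_stats.pb.h>
    #include <util/generic/yexception.h>

    // Hypothetical helper: fail loudly instead of reading past the end of the
    // repeated table_access field, which is the access pattern the stack
    // trace points at (table_access -> RepeatedPtrField::Get).
    const Ydb::TableStats::TableAccessStats& GetTableAccessChecked(
            const Ydb::TableStats::QueryStats& stats, int phase, int access) {
        Y_ENSURE(phase < stats.query_phases_size(), "no such query phase");
        const auto& p = stats.query_phases(phase);
        Y_ENSURE(access < p.table_access_size(), "no such table_access entry");
        return p.table_access(access);
    }

Note that the stats dump printed just before the crash contains two query_phases entries but no table_access fields at all, which is consistent with an unchecked read of a missing entry.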
results_accumulator.log} |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> SystemView::PartitionStatsFields [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2025-04-10T12:10:31.690007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655754219590275:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:31.690094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004cf/r3tmp/tmpYW5Dh0/pdisk_1.dat 2025-04-10T12:10:32.081487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:32.118208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:32.118327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:32.120893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18070, node 1 2025-04-10T12:10:32.180131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:32.180178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:32.180188Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:32.180337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:32.449699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:32.476893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:34.359247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655767104493159:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.359248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655767104493151:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.359378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.362621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T12:10:34.379236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655767104493165:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T12:10:34.472628Z node 1 :TX_PROXY ERROR: Actor# [1:7491655767104493244:2705] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:34.900305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfsqtznaczrba3nr7rxkafm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MwYmZhMjgtOGRlYzI2YjYtZTEzOTA2YmUtZjFiZGUwNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:10:35.039384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jrfsqvh7bw5xxhf20jkcbvm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjllOGIzZjEtMTIyMWU1M2ItOWNiODY1ZDUtNTFlMTY2OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:10:35.041380Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491655771399460619:2358], owner: [1:7491655771399460615:2356], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:35.041939Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491655771399460619:2358], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T12:10:35.042788Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491655771399460619:2358], row count: 1, finished: 1 2025-04-10T12:10:35.042838Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491655771399460619:2358], owner: [1:7491655771399460615:2356], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:35.047696Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287035033, txId: 281474976710662] shutting down 2025-04-10T12:10:36.137582Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jrfsqwn0b5vya79tdz22j96s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg2YWE3YWEtZTA2YzkzNTYtOTE4Mzg5NmMtNTk2MWIxMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root 2025-04-10T12:10:36.139188Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491655775694427960:2369], owner: [1:7491655775694427956:2367], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:36.139866Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491655775694427960:2369], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T12:10:36.140202Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491655775694427960:2369], row count: 1, finished: 1 2025-04-10T12:10:36.140234Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491655775694427960:2369], owner: [1:7491655775694427956:2367], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:36.142404Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287036135, txId: 281474976710664] shutting down 2025-04-10T12:10:36.690036Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655754219590275:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:36.690179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:37.222079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jrfsqxpyd9b3qy09vjej54h7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YwNTQyNzktMjQ2NzA3NDktMmRjM2JhODctNmIxMDU2OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:10:37.223571Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491655779989395305:2380], owner: [1:7491655779989395301:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:37.224080Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491655779989395305:2380], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T12:10:37.224326Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491655779989395305:2380], row count: 1, finished: 1 2025-04-10T12:10:37.224366Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491655779989395305:2380], owner: [1:7491655779989395301:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:37.226249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287037220, txId: 281474976710666] shutting down 2025-04-10T12:10:38.304384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jrfsqyrrby16ftqddzjngh0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc5MzRmNmYtYmRlZjJlZjUtOTE1NDYzNGUtZjZmZTVkN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-10T12:10:38.305681Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491655784284362654:2395], owner: [1:7491655784284362650:2393], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:38.306185Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491655784284362654:2395], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T12:10:38.306433Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491655784284362654:2395], row count: 1, finished: 1 2025-04-10T12:10:38.306476Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491655784284362654:2395], owner: [1:7491655784284362650:2393], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:38.308388Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287038303, txId: 281474976710668] shutting down 2025-04-10T12:10:38.463475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jrfsqyvbf7ywd58x4pz88phe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE0MzEzNWYtYzY1Y2VjZjAtZmY2NDMwYi01MzFmMmExMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:10:38.465557Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7491655784284362691:2405], owner: [1:7491655784284362687:2403], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:38.466060Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7491655784284362691:2405], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-10T12:10:38.466362Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7491655784284362691:2405], row count: 1, finished: 1 2025-04-10T12:10:38.466402Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7491655784284362691:2405], owner: [1:7491655784284362687:2403], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-10T12:10:38.469150Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287038461, txId: 281474976710670] shutting down |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache |78.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 15174, MsgBus: 19207 2025-04-10T12:10:25.142349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655727293292834:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.142502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000522/r3tmp/tmpNlG8PS/pdisk_1.dat 2025-04-10T12:10:25.602841Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15174, node 1 2025-04-10T12:10:25.615947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.616081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.619674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:25.620532Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:25.703231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.703255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.703264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-10T12:10:25.703407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19207 TClient is connected to server localhost:19207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.451234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:27.797063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655735883228045:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.797165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655735883228065:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.797296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.801890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:27.812243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655735883228082:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:27.895646Z node 1 :TX_PROXY ERROR: Actor# [1:7491655735883228133:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.391667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.554908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.555173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:28.555269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.555368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:28.555481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:28.555618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:28.555641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:28.555732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:28.555754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:28.555808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:28.555882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:10:28.555923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:28.555994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:28.556031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:28.556109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:28.556157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:28.556250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:28.556253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:28.556334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:28.556357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:28.556425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:28.556472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:28.556521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655740178195631:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:28.556565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7491655740178195623:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:28.597244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491655740178195625:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.597304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491655740178195625:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2 ... iodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.815477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[1:7491655757358070276:3188];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.815666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7491655757358070251:3182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.815845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7491655757358070251:3182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.819231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7491655757358070288:3195];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.819513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7491655757358070342:3202];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.822884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7491655757358070288:3195];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.823102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7491655757358070342:3202];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.823288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7491655757358070355:3206];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.823452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7491655757358070355:3206];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.823618Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038076;self_id=[1:7491655757358070364:3212];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.823782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[1:7491655757358070364:3212];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.826788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7491655757358070616:3279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.826999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7491655757358070616:3279];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.827264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491655757358070392:3230];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.827478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491655757358070392:3230];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.828782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=cfa02342-160411f0-858a74e8-c3bf3918;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.829189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.830519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7491655757358070374:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.830712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7491655757358070374:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.833554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7491655757358070438:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.833755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7491655757358070438:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7491655757358069821:3114];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845423Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038084;self_id=[1:7491655757358070214:3174];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7491655757358069821:3114];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7491655757358070214:3174];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7491655757358070387:3227];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7491655757358070441:3254];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.845999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7491655757358070387:3227];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.847215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7491655757358070559:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.847240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[1:7491655757358070219:3175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.847410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[1:7491655757358070219:3175];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.847416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7491655757358070559:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.851024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7491655757358070469:3262];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.851203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7491655757358070469:3262];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.851307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7491655757358070441:3254];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.853300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038032;self_id=[1:7491655757358070601:3273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.853512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7491655757358070601:3273];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.853660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7491655757358070454:3260];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.853806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7491655757358070454:3260];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.853976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7491655757358070269:3183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.854119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7491655757358070269:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.857028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7491655757358070614:3278];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:36.857231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7491655757358070614:3278];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-10T12:10:40.598246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.598306Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/service/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::PatternCache |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 5083, MsgBus: 8078 2025-04-10T12:10:26.761425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655731967235967:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:26.761490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004e7/r3tmp/tmp9Npf5t/pdisk_1.dat 2025-04-10T12:10:27.133144Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5083, node 1 2025-04-10T12:10:27.188779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:27.189466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:27.191339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:27.209819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:27.209842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:27.209854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:27.209983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8078 TClient is connected to server localhost:8078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:27.748334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:29.450978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655744852138515:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.451189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.451761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655744852138527:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.455186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:29.466779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655744852138529:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:29.543618Z node 1 :TX_PROXY ERROR: Actor# [1:7491655744852138580:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:29.936327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.052971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:31.071885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:31.828337Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655731967235967:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:31.834724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:32.506123Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2VlOWYzODEtZGYyOTZhYzMtZWUyYTQ5YjktZTBkOTYxNGM=, ActorId: [1:7491655757737048933:2969], ActorState: ExecuteState, TraceId: 01jrfsqrzf5f2pwze84nkppacx, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:42.121012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:42.121048Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
<main>: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FEC1B126D8F 18. ??:0: ?? @ 0x7FEC1B126E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 4543, MsgBus: 24492 2025-04-10T12:10:25.109691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655728240567315:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.110622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00052c/r3tmp/tmpeF1l4L/pdisk_1.dat 2025-04-10T12:10:25.521266Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.543454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.543539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.547483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4543, node 1 2025-04-10T12:10:25.703140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.703172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.703179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.703298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24492 TClient is connected to server localhost:24492
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.441959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:26.459522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:28.011108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655741125469766:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.011237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655741125469780:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.011403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.016162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:28.028360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655741125469795:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:28.083230Z node 1 :TX_PROXY ERROR: Actor# [1:7491655741125469846:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.415193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.573636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.611325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.440990Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655728240567315:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.499482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.323552Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njk1MzJkYjMtYjIxMWU5MTEtZmYxYjk2MzItNmViYWQ5NWU=, ActorId: [1:7491655754010380365:2968], ActorState: ExecuteState, TraceId: 01jrfsqqxe2yjd9nj27td9s8m9, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.520206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.520239Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F7D87F83D8F 18. ??:0: ?? @ 0x7F7D87F83E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 29870, MsgBus: 24251 2025-04-10T12:10:25.101739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655729075119111:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.102346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00051e/r3tmp/tmpqScBho/pdisk_1.dat 2025-04-10T12:10:25.514445Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.546803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.546911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.568188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29870, node 1 2025-04-10T12:10:25.705082Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.705108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.705124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.705255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24251 TClient is connected to server localhost:24251 
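
Note on reading the failure block above: the assertion text and the `with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)` line come from the `library/cpp/testing/unittest` framework (the `RaiseError` frame in `registar.cpp` confirms this). The parenthesised segments are (actual|expected) alternatives in a compact string diff, characters outside them are shared, so the actual status reads `PRECONDITION_FAILED` and the expected one `SUCCESS`. A minimal sketch of the failing check, assuming the stock values-equal macro; the literal source of `kqp_snapshot_isolation_ut.cpp:131` is an assumption here:

```cpp
// Minimal sketch of the assertion pattern behind the failures above; the
// exact source of kqp_snapshot_isolation_ut.cpp:131 is an assumption.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>

void AssertSuccess(const NYdb::TStatus& result) {
    // UNIT_ASSERT_VALUES_EQUAL_C stringifies the comparison, which is why the
    // log prints "(result.GetStatus() == EStatus::SUCCESS) failed:
    // (PRECONDITION_FAILED != SUCCESS)" followed by the compact diff.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());
}
```
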
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.428786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:27.793809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655737665054374:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.793860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655737665054366:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.793989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.800114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:27.810738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655737665054380:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:27.903915Z node 1 :TX_PROXY ERROR: Actor# [1:7491655737665054431:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.383487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.515157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.613899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.425116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655729075119111:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.451322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.327388Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2E5MmFhOTgtYjQ5NmVmZmQtNGEzYjY3OGUtNzliMzVkNmU=, ActorId: [1:7491655754844932217:2969], ActorState: ExecuteState, TraceId: 01jrfsqqw841t2dxfc23s9z6ks, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.513994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.514059Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F8228FCAD8F 18. ??:0: ?? @ 0x7F8228FCAE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 15236, MsgBus: 29066 2025-04-10T12:10:26.604449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655731642082474:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:26.604514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004f4/r3tmp/tmp8Bqtq3/pdisk_1.dat 2025-04-10T12:10:26.932989Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15236, node 1 2025-04-10T12:10:27.003685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:27.003812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:27.005698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:27.021424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:27.021472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:27.021490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:27.021665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29066 TClient is connected to server localhost:29066 
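
Every `[FAIL]` in this section has the same shape: the suite runs its scenario inside a SnapshotRW transaction against row-oriented (OLTP) tables, and the request is rejected with PRECONDITION_FAILED because, per the KQP_SESSION warning, SnapshotRW is currently accepted only for OLAP (column) tables; the failing variants are accordingly the Oltp/OltpNoSink ones named in the verdicts. A hedged client-side sketch of the pattern follows; `TTxSettings::SnapshotRW()` is assumed by analogy with the documented `TTxSettings::SerializableRW()` and `TTxSettings::SnapshotRO()` factories, and the SQL is illustrative, not the test's literal statement:

```cpp
// Hypothetical sketch, not the test source: requesting a SnapshotRW
// transaction through the C++ SDK. TTxSettings::SnapshotRW() is an
// assumption by analogy with TTxSettings::SerializableRW()/SnapshotRO().
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

TStatus UpsertUnderSnapshotRW(TSession& session) {
    auto txControl = TTxControl::BeginTx(TTxSettings::SnapshotRW()).CommitTx();
    auto result = session.ExecuteDataQuery(
        // Illustrative statement; the suite's actual SQL differs.
        "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"one\");",
        txControl).GetValueSync();
    // Against a row (OLTP) table the server currently answers
    // EStatus::PRECONDITION_FAILED with the issue
    // "SnapshotRW can only be used with olap tables", which is exactly
    // the (PRECONDITION_FAILED != SUCCESS) mismatch asserted above.
    return result;
}
```

On a column table the same call is expected to succeed, which is why only the OLTP variants of KqpSnapshotIsolation fail here.
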
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:27.474125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:29.429981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655744526985000:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.430560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.430627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655744526985035:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.434374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:29.448248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655744526985037:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:29.543070Z node 1 :TX_PROXY ERROR: Actor# [1:7491655744526985088:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:29.842513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.968863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.915574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:31.704761Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655731642082474:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:31.713417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:32.443782Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjhmMDhiMC02MGZjYzA2ZS03NmVjMDU3Yy00MjFmNDIyMA==, ActorId: [1:7491655757411895600:2968], ActorState: ExecuteState, TraceId: 01jrfsqrycas5q31wmwf4e38s6, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:41.938384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:41.938425Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F5A57FCED8F 18. ??:0: ?? @ 0x7F5A57FCEE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 11820, MsgBus: 4596 2025-04-10T12:10:25.105692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655729703456345:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.105748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000530/r3tmp/tmpgm2aFY/pdisk_1.dat 2025-04-10T12:10:25.526492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.535512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.535605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.548902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11820, node 1 2025-04-10T12:10:25.703480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.703509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.703523Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.703664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4596 TClient is connected to server localhost:4596 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.419327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:28.134710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655742588358856:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.134800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655742588358876:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.134958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.139497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:28.149675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655742588358893:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:28.210025Z node 1 :TX_PROXY ERROR: Actor# [1:7491655742588358944:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.494877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.627698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.704960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.469992Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655729703456345:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.558055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.455669Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzQzM2FhNmQtNDBiMmU1OGUtZWRmMDgyNDAtNmM5ODYxMGU=, ActorId: [1:7491655755473269324:2968], ActorState: ExecuteState, TraceId: 01jrfsqqxd7dys3jwfvkkw8fcz, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.523396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.523451Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F49842B2D8F 18. ??:0: ?? @ 0x7F49842B2E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 1710, MsgBus: 3013 2025-04-10T12:10:25.102436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655729005943831:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.103939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00050e/r3tmp/tmpEgXonO/pdisk_1.dat 2025-04-10T12:10:25.532647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.533187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.535172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:25.579409Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1710, node 1 2025-04-10T12:10:25.704011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.704038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.704047Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.704199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3013 TClient is connected to server localhost:3013 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.424686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:28.058911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655741890846394:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.058996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655741890846386:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.059273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.062139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:28.072149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655741890846400:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:28.166580Z node 1 :TX_PROXY ERROR: Actor# [1:7491655741890846453:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.508210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.623859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.738183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.467847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655729005943831:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.525231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.323824Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ1NzMxNmYtYjZjYTY5OTctYWViM2E4YjMtYzMwNmMyNjk=, ActorId: [1:7491655754775756869:2969], ActorState: ExecuteState, TraceId: 01jrfsqqwj33jb4gta8rr1v20h, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.555286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.555321Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4689B3FD8F 18. ??:0: ?? @ 0x7F4689B3FE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 1914, MsgBus: 10478 2025-04-10T12:10:25.105466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655728965515326:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.105524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000512/r3tmp/tmpTUSv7a/pdisk_1.dat 2025-04-10T12:10:25.522215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.543089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.543218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.546093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1914, node 1 2025-04-10T12:10:25.703527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.703562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.703587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.703692Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10478 TClient is connected to server localhost:10478 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.419267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:27.941361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655737555450374:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.941361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655737555450380:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.941494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.944423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:27.953025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655737555450389:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:28.026728Z node 1 :TX_PROXY ERROR: Actor# [1:7491655741850417736:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.383488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.501870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.587642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.435914Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655728965515326:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.482436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.461923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWQ2N2JjYmMtZmY0MWNhYzQtMTNlMjEwNDMtNzAzMDZiMWI=, ActorId: [1:7491655754735328261:2969], ActorState: ExecuteState, TraceId: 01jrfsqqw70x83pkqtfhcc5d3j, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.515171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.515204Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7EFD2ECC8D8F 18. ??:0: ?? @ 0x7EFD2ECC8E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 62451, MsgBus: 14277 2025-04-10T12:10:25.152320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655727665324355:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.155490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000567/r3tmp/tmpaKnvI0/pdisk_1.dat 2025-04-10T12:10:25.519770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.543489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.543598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.547492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62451, node 1 2025-04-10T12:10:25.705808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.705832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.705838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.705934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14277 TClient is connected to server localhost:14277 
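
The warning spam before each failure is environment warm-up, not part of the defect: on first use the workload service probes for the `default` resource pool, gets NOT_FOUND, proposes ESchemeOpCreateResourcePool, doublechecks once the transaction completes, and whichever concurrent creator loses the race receives the TX_PROXY issue "path exist, request accepts it". Client code that provisions shared objects typically tolerates the same race; a small sketch, under the assumption that a lost race surfaces as `EStatus::ALREADY_EXISTS` (the helper name is illustrative):

```cpp
// Hedged sketch of tolerating a lost "create shared object" race, mirroring
// the "path exist, request accepts it" TX_PROXY message above. The helper
// name and the ALREADY_EXISTS mapping are assumptions for illustration.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

bool EnsureCreated(TSession& session, const TString& schemeQuery) {
    TStatus status = session.ExecuteSchemeQuery(schemeQuery).GetValueSync();
    // A concurrent creator may have won the race between the NOT_FOUND probe
    // and this create; the object exists either way, so treat that as success.
    return status.IsSuccess()
        || status.GetStatus() == EStatus::ALREADY_EXISTS;
}
```
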
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.489369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:28.100185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655740550226834:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.100320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655740550226823:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.100479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:28.105001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:28.116381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655740550226838:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:28.214030Z node 1 :TX_PROXY ERROR: Actor# [1:7491655740550226891:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.542990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.699774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:29.853778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.617542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655727665324355:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.668167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:31.330519Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjEyMTEzZDUtYTMxZDA0OS05YTk5NmNlMy04NTY4OGM5ZQ==, ActorId: [1:7491655753435137179:2968], ActorState: ExecuteState, TraceId: 01jrfsqqws6tz6yxtyn6kyy50d, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-10T12:10:40.513312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:40.513357Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F090A7B7D8F 18. ??:0: ?? @ 0x7F090A7B7E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 6445, MsgBus: 10239 2025-04-10T12:10:26.922250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655734393574436:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:26.922359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004e1/r3tmp/tmp84aJlH/pdisk_1.dat 2025-04-10T12:10:27.258889Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6445, node 1 2025-04-10T12:10:27.334543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:27.334650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:27.335905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:27.354769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:27.354795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:27.354802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:27.354940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10239 TClient is connected to server localhost:10239 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:27.849803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:29.781304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655747278476987:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.781501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.781993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655747278476999:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.787384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:29.803437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655747278477001:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:29.877480Z node 1 :TX_PROXY ERROR: Actor# [1:7491655747278477052:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:30.285773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.484421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.484421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.484669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.484673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.484991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:30.485099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:30.485228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:30.485273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:30.485384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:30.485412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:30.485835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-10T12:10:30.485849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:30.485980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:30.485990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:30.486119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:30.486182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:30.486287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:30.486297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:30.486400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:30.486403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:30.486490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:30.486501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751573444551:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:30.486601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:30.486772Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7491655751573444539:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:30.528377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655751573444541:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.528448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655751573444541:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.528717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_ ... t_id=72075186224038002;self_id=[1:7491655768753318885:3097];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.748345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[1:7491655768753318885:3097];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.748501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655768753319255:3145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.748716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7491655768753319069:3122];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.749285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655768753319255:3145];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.749421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7491655768753319069:3122];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.749790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7491655768753318968:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.750070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7491655768753318968:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.750297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7491655768753318952:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.750522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7491655768753318952:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.750838Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038006;self_id=[1:7491655768753318893:3101];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.751125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7491655768753318893:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.752403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7491655768753319235:3138];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.752672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7491655768753319235:3138];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.752900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[1:7491655768753319267:3150];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.753123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[1:7491655768753319267:3150];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.756534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7491655768753319289:3166];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.756734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7491655768753318889:3099];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.757065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7491655768753319289:3166];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.757190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7491655768753318889:3099];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.759558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7491655768753319765:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.759738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7491655768753318909:3109];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.760049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7491655768753319765:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.760211Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038097;self_id=[1:7491655768753318909:3109];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.770946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[1:7491655768753319190:3129];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.770978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7491655768753319028:3115];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.771261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491655768753319251:3144];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.771280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7491655768753319244:3141];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.771444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7491655768753319067:3121];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.771497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7491655768753319220:3131];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.771942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[1:7491655768753319190:3129];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.772042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7491655768753319028:3115];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.772190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491655768753319251:3144];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.772226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7491655768753319244:3141];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.772349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7491655768753319067:3121];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.772356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7491655768753319220:3131];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18F652BE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18F43F0A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18F4B387 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F4B387 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F4B387 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18F4A553 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FB642753D8F 18. ??:0: ?? @ 0x7FB642753E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 26218, MsgBus: 1382 2025-04-10T12:10:25.140980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655730369419123:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:25.141094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000526/r3tmp/tmpjJXKOX/pdisk_1.dat 2025-04-10T12:10:25.595564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:25.608125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:25.608260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:25.610141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26218, node 1 2025-04-10T12:10:25.706253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:25.706271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:25.706276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:25.706390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1382 TClient is connected to server localhost:1382 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:26.419419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:27.831163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655738959354390:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.831234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655738959354372:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.831619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:27.835730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:27.845691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655738959354393:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:27.944500Z node 1 :TX_PROXY ERROR: Actor# [1:7491655738959354444:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.391065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:28.569733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.569924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.569995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:28.569998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:28.570335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:28.570395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:28.570468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:28.570519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:28.570601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:28.570634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:28.570839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T12:10:28.570954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:28.571057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:28.571171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:28.571346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:28.571451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:28.571544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7491655743254321929:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:28.572573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:28.572712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:28.572777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:28.572897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:28.572984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:28.573073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:28.573136Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7491655743254321933:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:28.616356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655743254321935:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:28.616435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491655743254321935:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:28.616703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_i ... 6224038041;self_id=[1:7491655764729164154:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.110625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7491655764729164123:3287];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.110899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7491655764729164123:3287];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.110944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[1:7491655764729164154:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.111111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7491655764729164360:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.111212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7491655764729164485:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.111320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7491655764729164360:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.111371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7491655764729164485:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.114426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7491655764729164427:3322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.114427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7491655764729164423:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.114696Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038015;self_id=[1:7491655764729164427:3322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.114729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7491655764729164423:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.122460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491655764729164363:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.122756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491655764729164363:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.126407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655764729164367:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.126599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491655764729164341:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.126679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655764729164367:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.126898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7491655764729164343:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.126931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491655764729164341:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.127151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7491655764729164343:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.128849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7491655764729164347:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.129109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7491655764729164347:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.129430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[1:7491655764729164587:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.129739Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038005;self_id=[1:7491655764729164587:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.132921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7491655764729163964:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.133316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7491655764729163964:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.133569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7491655760434196656:3266];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.133927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7491655760434196656:3266];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.135999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7491655764729164781:3411];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.136388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7491655764729164781:3411];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.136920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7491655764729164350:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.137160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7491655764729164350:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.138991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7491655764729164152:3297];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.139208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7491655764729164152:3297];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.527670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[1:7491655747549291128:2552];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:42.528007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[1:7491655747549291128:2552];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4C78B18D8F 18. ??:0: ?? @ 0x7F4C78B18E3F 19. ??:0: ?? 
@ 0x1656B028 >> PgCatalog::CheckSetConfig >> TPersQueueTest::InflightLimit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> KqpPg::AlterColumnSetDefaultFromSequence >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] |80.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-04-10T12:10:13.574339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655676488515204:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:13.574417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:10:13.601624Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491655676032806957:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:13.601685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:10:13.712206Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000411/r3tmp/tmpmEACFe/pdisk_1.dat 2025-04-10T12:10:13.733700Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T12:10:13.890623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:13.890797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:13.893800Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T12:10:13.894766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:13.913197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61197, node 1 2025-04-10T12:10:13.931449Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:13.931832Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:13.943419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:13.943494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:13.950765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:13.957922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/bh2h/000411/r3tmp/yandex8H5cc2.tmp 2025-04-10T12:10:13.957954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/bh2h/000411/r3tmp/yandex8H5cc2.tmp 2025-04-10T12:10:13.958161Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/bh2h/000411/r3tmp/yandex8H5cc2.tmp 2025-04-10T12:10:13.958281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:14.015250Z INFO: TTestServer started on Port 1628 GrpcPort 61197 TClient is connected to server localhost:1628 PQClient connected to localhost:61197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:14.261092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T12:10:14.308538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T12:10:15.928079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655685078450955:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.928138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655685078450947:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.928236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.931419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-10T12:10:15.948864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655685078450961:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:10:16.100902Z node 1 :TX_PROXY ERROR: Actor# [1:7491655689373418346:2803] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:16.126021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:16.188264Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655689373418363:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:16.188518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQ5OTg1MzUtMjIwM2JiYS01MjVlZWEyMy03NDEyZWI5MQ==, ActorId: [1:7491655685078450944:2340], ActorState: ExecuteState, TraceId: 01jrfsq8zp6fsvpnvp6418vng1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:10:16.190234Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:10:16.191080Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7491655688917709180:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:16.191251Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjQ1YmFmOWYtNDg1ZWM3YTQtN2ExOGFmZC1mZDk2MGVmNA==, ActorId: [2:7491655688917709140:2307], ActorState: ExecuteState, TraceId: 01jrfsq9509av5hsb4t1hr9r52, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:10:16.191490Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:10:16.203210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:16.265699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-10T12:10:16.440685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfsq9d36ahgmw5czgbzamzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ExZDVhNzQtYmM0ZDUzNWYtYjBhOGVhYWQtOTRmZTY5NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7491655689373418732:3085] 2025-04-10T12:10:18.574242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655676488515204:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:18.574318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:18.601340Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7491655676032806957:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:18.601410Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:61197 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-10T12:10:22.409572Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:61197 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { Partit ... rtition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 WriteTimestampMS: 1744287035266 CreateTimestampMS: 1744287035265 SizeLag: 82536 WriteTimestampEstimateMS: 1744287038694 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-04-10T12:10:38.697888Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_982424053072630713_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2025-04-10T12:10:38.697974Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 sending to client partition status 2025-04-10T12:10:38.701441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:38.701462Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:38.701827Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 0 } } 2025-04-10T12:10:38.701951Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_982424053072630713_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-10T12:10:38.702005Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-04-10T12:10:38.702130Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 got read request: guid# ee7cc0a8-c622f252-6d27735f-1e854ae0 2025-04-10T12:10:38.702201Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_982424053072630713_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-10T12:10:38.702225Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-04-10T12:10:38.702286Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1744287035266, sizeLag# 82536 2025-04-10T12:10:38.702299Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1TEvPartitionReady. 
Aval parts: 1 2025-04-10T12:10:38.702333Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 performing read request: guid# f8508886-daddec94-ed7be32b-6fa2311d, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-10T12:10:38.702448Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid f8508886-daddec94-ed7be32b-6fa2311d 2025-04-10T12:10:38.702926Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-10T12:10:38.702970Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-10T12:10:41.841051Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-10T12:10:41.841101Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-10T12:10:41.841232Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-10T12:10:41.841258Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T12:10:41.841448Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T12:10:41.842477Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744287035266 CreateTimestampMS: 1744287035265 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744287035283 CreateTimestampMS: 1744287035280 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744287035303 CreateTimestampMS: 1744287035299 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744287035315 CreateTimestampMS: 1744287035312 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3144 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-04-10T12:10:41.842755Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-10T12:10:41.842795Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 45be2bc9-f14f455-744ff294-f9123dd6 has messages 1 2025-04-10T12:10:41.842984Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 read done: guid# 45be2bc9-f14f455-744ff294-f9123dd6, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82614 2025-04-10T12:10:41.843013Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 response to read: guid# 45be2bc9-f14f455-744ff294-f9123dd6 2025-04-10T12:10:41.843416Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 Process answer. Aval parts: 0 Bytes readed: 82614 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-10T12:10:41.847818Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _3_2_4837446504667792951_v1 grpc read done: success# 0, data# { } 2025-04-10T12:10:41.847847Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_4837446504667792951_v1 grpc read failed 2025-04-10T12:10:41.847871Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_4837446504667792951_v1 grpc closed 2025-04-10T12:10:41.847902Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer session _3_2_4837446504667792951_v1 is DEAD 2025-04-10T12:10:41.848592Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _3_2_4837446504667792951_v1 2025-04-10T12:10:41.848652Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491655786608560805:2584] destroyed 2025-04-10T12:10:41.848706Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _3_2_4837446504667792951_v1 2025-04-10T12:10:45.843388Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-10T12:10:45.843442Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-10T12:10:45.843571Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
2025-04-10T12:10:45.843597Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-10T12:10:45.843796Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-10T12:10:45.845044Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1744287035266 CreateTimestampMS: 1744287035265 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1744287035283 CreateTimestampMS: 1744287035280 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1744287035303 CreateTimestampMS: 1744287035299 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1744287035315 CreateTimestampMS: 1744287035312 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7140 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-04-10T12:10:45.845572Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-10T12:10:45.845630Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid f8508886-daddec94-ed7be32b-6fa2311d has messages 1 2025-04-10T12:10:45.845821Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 read done: guid# f8508886-daddec94-ed7be32b-6fa2311d, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82614 2025-04-10T12:10:45.845857Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 response to read: guid# f8508886-daddec94-ed7be32b-6fa2311d 2025-04-10T12:10:45.846332Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _3_3_982424053072630713_v1 Process answer. 
Aval parts: 0 Bytes readed: 82614 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-10T12:10:45.850176Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_982424053072630713_v1 grpc closed 2025-04-10T12:10:45.850246Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer session _3_3_982424053072630713_v1 is DEAD 2025-04-10T12:10:45.851235Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _3_3_982424053072630713_v1 2025-04-10T12:10:45.851287Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7491655786608560811:2588] destroyed 2025-04-10T12:10:45.851330Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _3_3_982424053072630713_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 5292, MsgBus: 9925 2025-04-10T12:10:26.780496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655734343658321:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:26.780613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004ea/r3tmp/tmpecIIif/pdisk_1.dat 2025-04-10T12:10:27.073523Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5292, node 1 2025-04-10T12:10:27.149439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:27.150274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:27.157810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:27.187953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:27.187975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:27.187982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:27.188111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9925 TClient is connected to server localhost:9925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-10T12:10:27.677328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:29.730933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655747228560870:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.731150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.731477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655747228560882:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:29.737401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:29.749146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655747228560884:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:29.831089Z node 1 :TX_PROXY ERROR: Actor# [1:7491655747228560937:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:30.193990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:30.376410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.376593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.376853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:30.376966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:30.377079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:30.377162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.377204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.377212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:30.377299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:30.377381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:30.377430Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:30.377487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:30.377567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:30.377611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:30.377665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:30.377708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:30.377758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:30.377805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:30.377855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491655751523528438:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:30.377904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:30.378013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:30.378161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:30.378276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:30.378364Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037889;self_id=[1:7491655751523528422:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:30.423538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491655751523528431:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:30.423601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491655751523528431:2347];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:30.423832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id= ... EvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7491655768703403052:3132];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7491655768703403127:3182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7491655768703403400:3293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7491655768703403400:3293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7491655768703403066:3137];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7491655768703403631:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7491655768703403066:3137];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.000831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7491655768703403631:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.001145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7491655768703403370:3284];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.001306Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038031;self_id=[1:7491655768703403370:3284];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.001654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7491655768703403555:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.001823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7491655768703403555:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.005871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7491655768703403635:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.006053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7491655768703403635:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.006291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491655768703403323:3254];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.006473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491655768703403323:3254];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.010352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[1:7491655768703403533:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.010611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[1:7491655768703403533:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.010797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7491655768703403541:3300];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.010950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7491655768703403541:3300];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.014029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655768703403070:3139];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.014273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491655768703403070:3139];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.014482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038046;self_id=[1:7491655768703403145:3194];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.014628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7491655768703403145:3194];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.018542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7491655768703403413:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.018779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7491655768703403413:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.019065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491655768703403350:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.019218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491655768703403350:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.025195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7491655768703403546:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.025199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7491655768703403198:3235];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.025393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7491655768703403546:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.025403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7491655768703403198:3235];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.028834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7491655768703403329:3259];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.028835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7491655768703403626:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.029073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7491655768703403329:3259];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:10:46.029076Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038012;self_id=[1:7491655768703403626:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FA340A23D8F 18. ??:0: ?? @ 0x7FA340A23E3F 19. ??:0: ?? @ 0x1656B028 |81.3%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly |81.4%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SystemView::ShowCreateTableTemporary [GOOD] >> SystemView::ShowCreateTableReadReplicas [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] Test command err: 2025-04-10T12:10:30.842263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655749803838438:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.842362Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004dd/r3tmp/tmp09HKjt/pdisk_1.dat 2025-04-10T12:10:31.209731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:31.261500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:31.261589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14057, node 1 2025-04-10T12:10:31.272164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:31.330652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:31.330678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:31.330685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:31.330783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7080 TClient is connected to server localhost:7080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:31.973191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:33.692238Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T12:10:33.692267Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T12:10:33.716248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655762688741452:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.716251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655762688741444:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.716412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.720287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:33.737794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655762688741458:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:33.801649Z node 1 :TX_PROXY ERROR: Actor# [1:7491655762688741542:2756] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:33.802770Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:33.802903Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000099D88 2025-04-10T12:10:33.802944Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491655762688741425:2338], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfsqtbh75zp25gap9kbfkz0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg1ODk4NmQtOGZlZTc3ZjctOTBjYTMxZWQtMWNmZWQ5YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T12:10:33.803121Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:33.803199Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491655762688741425:2338], queueSize: 1 2025-04-10T12:10:33.803855Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491655762688741425:2338], compileActor: [1:7491655762688741561:2349] 
2025-04-10T12:10:34.117865Z node 1 :KQP_YQL INFO: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.117 INFO ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T12:10:34.118975Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.118 TRACE ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:34.119330Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.119 DEBUG ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 288us 2025-04-10T12:10:34.119912Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.119 DEBUG ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T12:10:34.121386Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.121 DEBUG ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-10T12:10:34.122131Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqtbh75zp25gap9kbfkz0, SessionId: CompileActor 2025-04-10 12:10:34.121 TRACE ydb-core-sys_view-ut(pid=2072321, tid=0x00007F9416F96640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId ... merSuppliedId: , PoolId: default}. 
Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T12:10:51.209404Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 2 at actor [1:7491655839998158068:3281] 2025-04-10T12:10:51.209427Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Task: 2, input channelId: 1, src task: 1, at actor [1:7491655839998158067:3280] 2025-04-10T12:10:51.209459Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7491655839998158067:3280], channels: 1 2025-04-10T12:10:51.209489Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7491655839998158068:3281], channels: 2 2025-04-10T12:10:51.209526Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.209562Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.209586Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T12:10:51.210703Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [1:7491655839998158067:3280], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T12:10:51.210748Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.210779Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.210812Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655839998158068:3281], task: 2, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T12:10:51.210832Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.210864Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655839998158067:3280], CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.212993Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655839998158067:3280], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1163 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 327 FinishTimeMs: 1744287051212 OutputRows: 7 OutputBytes: 178 Tables { TablePath: "//Root/.metadata/initialization/migrations" ReadRows: 7 ReadBytes: 151 AffectedPartitions: 1 } IngressRows: 7 ComputeCpuTimeUs: 157 BuildCpuTimeUs: 170 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744287051211 CreateTimeMs: 1744287051209 } MaxMemoryUsage: 1048576 } 2025-04-10T12:10:51.213109Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710781. 
Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491655839998158067:3280] 2025-04-10T12:10:51.213166Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.213209Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7491655839998158068:3281], 2025-04-10T12:10:51.214091Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655839998158068:3281], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 609 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 269 FinishTimeMs: 1744287051213 InputRows: 7 InputBytes: 178 OutputRows: 7 OutputBytes: 178 ResultRows: 7 ResultBytes: 178 ComputeCpuTimeUs: 173 BuildCpuTimeUs: 96 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744287051212 CreateTimeMs: 1744287051210 } MaxMemoryUsage: 1048576 } 2025-04-10T12:10:51.214853Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491655839998158068:3281] 2025-04-10T12:10:51.215009Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T12:10:51.215044Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158058:3249] TxId: 281474976710781. Ctx: { TraceId: 01jrfsrb658qfva036qeyj22k1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001772s ReadRows: 7 ReadBytes: 151 ru: 7 rate limiter was not found force flag: 1 2025-04-10T12:10:51.217259Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710782. Resolved key sets: 0 2025-04-10T12:10:51.217340Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710782. 
Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T12:10:51.217371Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7491655839998158083:3249] TxId: 281474976710782. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T12:10:51.217436Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158083:3249] TxId: 281474976710782. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-04-10T12:10:51.217472Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655839998158083:3249] TxId: 281474976710782. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDAyY2ZmYzAtYmQ0NjVhMzUtZWZjNmE4MTQtYTk3OTc0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T12:10:51.256330Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpService::RangeCache+UseCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 27195, msgbus: 12446 2025-04-10T12:10:49.442066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655831024352234:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.442140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000585/r3tmp/tmpNRXjfM/pdisk_1.dat 2025-04-10T12:10:49.845476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:49.904841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:49.905738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T12:10:49.913113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27195, node 1 2025-04-10T12:10:50.104153Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.104181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.104189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.104335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12446 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T12:10:50.320530Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.320582Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320259:2445] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.321873Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320259:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.388707Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320259:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.426542Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320259:2445] Handle TEvDescribeSchemeResult Forward to# [1:7491655835319320258:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } 
DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:10:50.474935Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.474973Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:50.475101Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655835319320284:2456] 2025-04-10T12:10:50.617064Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.617171Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:50.617198Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.617299Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.617614Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.617758Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:50.617823Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:50.617987Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:50.619372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.634054Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:50.634126Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320284:2456] txid# 281474976710657 SEND to# [1:7491655835319320283:2455] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T12:10:50.674225Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.674258Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:50.674294Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655835319320326:2491] 2025-04-10T12:10:50.676530Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.676585Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:50.676600Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.676670Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.676931Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.677028Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:50.677082Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:50.677188Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:50.677899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.687540Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:50.687604Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320326:2491] txid# 281474976710658 SEND to# [1:7491655835319320325:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:50.726087Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.726124Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:50.726189Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655835319320344:2501] 2025-04-10T12:10:50.728725Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320344:2501] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36372" 2025-04-10T12:10:50.728823Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835319320344:2501] txid# 281474976710659 Bootstrap, UserS ... rationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:52.793383Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:52.793394Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:52.793504Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:52.793534Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:52.795052Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:52.795139Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:52.795324Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.795433Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:52.795457Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:52.795559Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:52.796735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T12:10:52.800152Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:52.800203Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255033:2531] txid# 281474976710660 SEND to# [1:7491655843909255032:2344] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:52.818599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655843909255032:2344], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:52.918579Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvProposeTransaction 2025-04-10T12:10:52.918616Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:52.918673Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655843909255109:2587] 2025-04-10T12:10:52.921088Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:52.921148Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:52.921166Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:52.921379Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:52.921413Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:52.921901Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:52.922002Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:52.922189Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.922327Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:52.922370Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:52.922516Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:52.928278Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:52.929987Z node 1 :TX_PROXY ERROR: Actor# [1:7491655843909255109:2587] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:52.930016Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655843909255109:2587] txid# 281474976710661 SEND to# [1:7491655843909255032:2344] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.218437Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.218469Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.218509Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655848204222435:2602] 2025-04-10T12:10:53.221153Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36410" 2025-04-10T12:10:53.221206Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:53.221227Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.221362Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:53.221384Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:53.221436Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.221626Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.221678Z node 1 :TX_PROXY ERROR: Actor# [1:7491655848204222435:2602] txid# 281474976710662, Access denied for 
ordinaryuser@builtin, attempt to manage user 2025-04-10T12:10:53.221783Z node 1 :TX_PROXY ERROR: Actor# [1:7491655848204222435:2602] txid# 281474976710662, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.221810Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848204222435:2602] txid# 281474976710662 SEND to# [1:7491655848204222434:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.240768Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjcyNTVhNDMtZWNmZDYzZDctYjYzMGYzMzUtMmMzOWEyNGU=, ActorId: [1:7491655843909255012:2337], ActorState: ExecuteState, TraceId: 01jrfsrczkd9w1cwnftzq5bbpp, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.241280Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.241325Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831024352454:2115] TxId# 281474976710663 ProcessProposeKqpTransaction |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] Test command err: Starting YDB, grpc: 9480, msgbus: 61938 2025-04-10T12:10:49.470273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655832257149677:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.470575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000589/r3tmp/tmpcf1ReH/pdisk_1.dat 2025-04-10T12:10:49.945395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:49.945496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:49.946771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:49.952549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9480, node 1 2025-04-10T12:10:50.126956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.126998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.127007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.127152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61938 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T12:10:50.378907Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.378981Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117488:2449] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.380242Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117488:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.424439Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117488:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.435557Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117488:2449] Handle TEvDescribeSchemeResult Forward to# [1:7491655836552117487:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T12:10:50.474508Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:50.474559Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:50.474702Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655836552117504:2457] 2025-04-10T12:10:50.644781Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.644882Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.644921Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.644998Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.645299Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.645441Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:50.645494Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:50.645623Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:50.646590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.650962Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:50.651034Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117504:2457] txid# 281474976710657 SEND to# [1:7491655836552117502:2456] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T12:10:50.673319Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:50.673346Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:50.673382Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655836552117559:2499] 2025-04-10T12:10:50.675873Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.675938Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.675955Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.676025Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.676302Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.676383Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:50.676420Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:50.676565Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:50.677066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.680462Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:50.680527Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117559:2499] txid# 281474976710658 SEND to# [1:7491655836552117558:2498] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:50.733068Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:50.733105Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:50.733151Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655836552117577:2509] 2025-04-10T12:10:50.735664Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836552117577:2509] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47672" 2025-04-10T12:10:50.735726Z node 1 :TX_PROXY DEBUG: A ... 1:2541] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.024266Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.024392Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.024427Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:53.024550Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:53.025889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T12:10:53.028291Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:53.028363Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019561:2541] txid# 281474976710660 SEND to# [1:7491655849437019560:2341] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:53.054333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655849437019560:2341], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:53.111713Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:53.111757Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.111816Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655849437019642:2599] 2025-04-10T12:10:53.114516Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.114568Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.114635Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.115215Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.115287Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.115452Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.115558Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.115606Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.115731Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.121914Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# 
StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.123612Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849437019642:2599] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.123640Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019642:2599] txid# 281474976710661 SEND to# [1:7491655849437019560:2341] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.382240Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:53.382280Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.382330Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655849437019665:2610] 2025-04-10T12:10:53.384796Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47676" 2025-04-10T12:10:53.384877Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.384896Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:53.384959Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.386470Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.386599Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.386645Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-10T12:10:53.386783Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 HANDLE EvClientConnected 2025-04-10T12:10:53.422070Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019665:2610] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-10T12:10:53.422132Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7491655849437019665:2610] txid# 281474976710662 SEND to# [1:7491655849437019664:2334] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-10T12:10:53.470341Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Handle TEvProposeTransaction 2025-04-10T12:10:53.470377Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] TxId# 281474976710663 ProcessProposeTransaction 2025-04-10T12:10:53.470434Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832257149709:2134] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7491655849437019698:2624] 2025-04-10T12:10:53.472782Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47698" 2025-04-10T12:10:53.472861Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.472893Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-04-10T12:10:53.472952Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.473335Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.473460Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.473513Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-10T12:10:53.473685Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 HANDLE EvClientConnected 2025-04-10T12:10:53.479566Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-10T12:10:53.479623Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849437019698:2624] txid# 281474976710663 SEND to# [1:7491655849437019697:2351] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] Test command err: Starting YDB, grpc: 18687, msgbus: 5600 2025-04-10T12:10:49.619511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655832609561095:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.620360Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000578/r3tmp/tmpBRYIdu/pdisk_1.dat 2025-04-10T12:10:50.054258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.054369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.057641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:50.082544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18687, node 1 2025-04-10T12:10:50.094303Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:50.100062Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:50.204129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.204153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.204168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.204303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5600 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T12:10:50.498632Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.498689Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904528986:2445] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.499869Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904528986:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.563534Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904528986:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.578857Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904528986:2445] Handle TEvDescribeSchemeResult Forward to# [1:7491655836904528985:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 
PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:10:50.609041Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.609101Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:50.609238Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655836904529001:2453] 2025-04-10T12:10:50.726974Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.727066Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:50.727091Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.727172Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.727455Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.727600Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:50.727645Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:50.727795Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:50.728581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.731733Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:50.731793Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529001:2453] txid# 281474976710657 SEND to# [1:7491655836904529000:2452] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-04-10T12:10:50.744687Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:50.751105Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.751134Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:50.751205Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655836904529050:2491] 2025-04-10T12:10:50.753623Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.753676Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:50.753694Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.753758Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.754043Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.754303Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:50.754359Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:50.754505Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:50.754955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.758945Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:50.759001Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529050:2491] txid# 281474976710658 SEND to# [1:7491655836904529049:2490] Source 
{TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:50.798245Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.798292Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:50.798341Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655836904529068:2501] 2025-04-10T12:10:50.800675Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655836904529068:2501] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \ ... rationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:52.986271Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:52.986294Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:52.986457Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:52.986499Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:52.987742Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:52.987836Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:52.988003Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.988168Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-04-10T12:10:52.988233Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:52.988386Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:52.989857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T12:10:52.997360Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:52.997429Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845494463764:2535] txid# 281474976710660 SEND to# [1:7491655845494463763:2344] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:53.024828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655845494463763:2344], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:53.085688Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.085732Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.085784Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655849789431139:2594] 2025-04-10T12:10:53.088456Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.088512Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:53.088534Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.088699Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:53.088722Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:53.089170Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.089250Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.089478Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.089598Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.089653Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.089813Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.094681Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.097409Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849789431139:2594] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.097450Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431139:2594] txid# 281474976710661 SEND to# [1:7491655845494463763:2344] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.334081Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.334110Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.334178Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655849789431166:2606] 2025-04-10T12:10:53.337048Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57420" 2025-04-10T12:10:53.337108Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-10T12:10:53.337125Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.337253Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-10T12:10:53.337277Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-10T12:10:53.337342Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.337569Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.337605Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849789431166:2606] txid# 281474976710662, Access denied for 
ordinaryuser@builtin, attempt to manage user 2025-04-10T12:10:53.337776Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849789431166:2606] txid# 281474976710662, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.337835Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849789431166:2606] txid# 281474976710662 SEND to# [1:7491655849789431165:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.344503Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJiNGRkOWUtYjY1YTI1YzYtNTA1NzgxZDUtNWI2M2MwMmI=, ActorId: [1:7491655845494463743:2337], ActorState: ExecuteState, TraceId: 01jrfsrd5k8rmxfb6cj1y3jq60, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.345058Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.345079Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832609561181:2115] TxId# 281474976710663 ProcessProposeKqpTransaction >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] Test command err: Starting YDB, grpc: 64759, msgbus: 24103 2025-04-10T12:10:49.591463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655833757319368:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.591712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00057c/r3tmp/tmpX0T0Y6/pdisk_1.dat 2025-04-10T12:10:50.158483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:50.183114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.183215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.189447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64759, node 1 2025-04-10T12:10:50.334827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.334855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.334863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.334979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24103 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T12:10:50.669256Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.669305Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287257:2445] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.669633Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287257:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.710082Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287257:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.734830Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287257:2445] Handle TEvDescribeSchemeResult Forward to# [1:7491655838052287256:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T12:10:50.759005Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.759032Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:50.759141Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655838052287265:2452] 2025-04-10T12:10:50.866056Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.866165Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.866192Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.866283Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.866564Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.866691Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:50.866752Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:50.866920Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:50.868006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T12:10:50.875691Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:50.875749Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287265:2452] txid# 281474976710657 SEND to# [1:7491655838052287264:2451] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-04-10T12:10:50.902930Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvProposeTransaction 2025-04-10T12:10:50.902958Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:50.902992Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655838052287310:2490] 2025-04-10T12:10:50.905535Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.905591Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.905607Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.905668Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.905929Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.906014Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:50.906054Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:50.906272Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:50.906693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.909236Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:50.909306Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287310:2490] txid# 281474976710658 SEND to# [1:7491655838052287309:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:50.968490Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle 
TEvProposeTransaction 2025-04-10T12:10:50.968522Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:50.968563Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655838052287328:2500] 2025-04-10T12:10:50.971021Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838052287328:2500] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46306" 2025-04-10T12:10:50.971089Z node 1 :TX_PROXY DEBUG: ... 1:2527] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:52.905441Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.905579Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:52.905635Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:52.905769Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:52.907348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T12:10:52.915211Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:52.915267Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655846642222011:2527] txid# 281474976710660 SEND to# [1:7491655846642222010:2341] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:52.945600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655846642222010:2341], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:53.012785Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.012841Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.012893Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655850937189376:2576] 2025-04-10T12:10:53.015768Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.015826Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.015849Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.016457Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.016545Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.016734Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.016875Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.016914Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.017038Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.020744Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# 
StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.022514Z node 1 :TX_PROXY ERROR: Actor# [1:7491655850937189376:2576] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.022546Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189376:2576] txid# 281474976710661 SEND to# [1:7491655846642222010:2341] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.273437Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.273463Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.273512Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655850937189399:2587] 2025-04-10T12:10:53.276206Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46326" 2025-04-10T12:10:53.276287Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.276307Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:53.276354Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.276645Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.276762Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.276838Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-10T12:10:53.276998Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 HANDLE EvClientConnected 2025-04-10T12:10:53.286700Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189399:2587] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-10T12:10:53.286752Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7491655850937189399:2587] txid# 281474976710662 SEND to# [1:7491655850937189398:2334] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-10T12:10:53.353861Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Handle TEvProposeTransaction 2025-04-10T12:10:53.353906Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-10T12:10:53.353967Z node 1 :TX_PROXY DEBUG: actor# [1:7491655833757319440:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7491655850937189435:2601] 2025-04-10T12:10:53.356623Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46360" 2025-04-10T12:10:53.356697Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.356718Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-04-10T12:10:53.356779Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.357049Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.357191Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.357239Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-10T12:10:53.357382Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 HANDLE EvClientConnected 2025-04-10T12:10:53.360442Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-10T12:10:53.360490Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850937189435:2601] txid# 281474976710663 SEND to# [1:7491655850937189434:2351] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] Test command err: Starting YDB, grpc: 23356, msgbus: 29865 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000580/r3tmp/tmpkQYjWc/pdisk_1.dat 2025-04-10T12:10:50.061437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path 
status: LookupError; 2025-04-10T12:10:50.378824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:50.386572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.386686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.410583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23356, node 1 2025-04-10T12:10:50.650712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.650757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.650766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.650882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29865 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T12:10:50.950832Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.950887Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838449921029:2442] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.951905Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838449921029:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.994955Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838449921029:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:51.013320Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655838449921029:2442] Handle TEvDescribeSchemeResult Forward to# [1:7491655838449921028:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:10:51.033103Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:51.033146Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:51.033278Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655842744888335:2451] 2025-04-10T12:10:51.112675Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.112778Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.112821Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.112897Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.113298Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.113441Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:51.113494Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:51.113663Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:51.114576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.119217Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:51.119283Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888335:2451] txid# 281474976710657 SEND to# [1:7491655842744888334:2450] Source {TEvProposeTransactionStatus txid# 281474976710657 
Status# 53} waiting... 2025-04-10T12:10:51.147185Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:51.147215Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:51.147248Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655842744888386:2488] 2025-04-10T12:10:51.149800Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.149853Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.149870Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.149952Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.150324Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.150436Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:51.150491Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:51.150622Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:51.151146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.153737Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:51.153806Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888386:2488] txid# 281474976710658 SEND to# [1:7491655842744888385:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:51.201731Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:51.201788Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:51.201845Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655842744888404:2498] 2025-04-10T12:10:51.204435Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888404:2498] 
txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51250" 2025-04-10T12:10:51.204496Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888404:2498] txid# 281474976710659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.204512Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655842744888404:2498] txid# 281474976710659 Bootstrap, UserSID: root@ ... 334823090:2343], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.295960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655851334823079:2340], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.296065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.296317Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:53.296354Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710660 ProcessProposeTransaction 2025-04-10T12:10:53.296407Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7491655851334823094:2528] 2025-04-10T12:10:53.299720Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.299770Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.299807Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.301563Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.301685Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.301978Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.302171Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.302241Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:53.302375Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:53.303948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 
2025-04-10T12:10:53.308590Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:53.308655Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823094:2528] txid# 281474976710660 SEND to# [1:7491655851334823093:2344] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:53.331131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655851334823093:2344], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:53.383961Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:53.383996Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.384042Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655851334823162:2576] 2025-04-10T12:10:53.386592Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.386638Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.386652Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.387107Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.387164Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.387365Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.387478Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.387514Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.387662Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.395569Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: 
'/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.397189Z node 1 :TX_PROXY ERROR: Actor# [1:7491655851334823162:2576] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.397228Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823162:2576] txid# 281474976710661 SEND to# [1:7491655851334823093:2344] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.606399Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvProposeTransaction 2025-04-10T12:10:53.606425Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.606460Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655851334823186:2587] 2025-04-10T12:10:53.608846Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51276" 2025-04-10T12:10:53.608911Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.608929Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.608972Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.609218Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.609253Z node 1 :TX_PROXY ERROR: Actor# [1:7491655851334823186:2587] txid# 281474976710662, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-10T12:10:53.609350Z node 1 :TX_PROXY ERROR: Actor# [1:7491655851334823186:2587] txid# 281474976710662, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.609375Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655851334823186:2587] txid# 281474976710662 SEND to# [1:7491655851334823185:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.620246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDgxNmRmMTEtZmI5OTFkNzEtZGE1ZGNiNGQtYjM3MzNiYTM=, ActorId: [1:7491655851334823058:2337], ActorState: ExecuteState, TraceId: 01jrfsrdfd3sjr6n84yk0788p9, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.622245Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.622274Z node 1 :TX_PROXY DEBUG: actor# [1:7491655834154953153:2113] 
TxId# 281474976710663 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] Test command err: Starting YDB, grpc: 30081, msgbus: 18591 2025-04-10T12:10:49.435951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655832888464538:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.436061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00057e/r3tmp/tmpc5u0en/pdisk_1.dat 2025-04-10T12:10:50.066185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.066358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.073126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:50.075670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30081, node 1 2025-04-10T12:10:50.324920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.324940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.324953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.325063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18591 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T12:10:50.623238Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.623297Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432593:2450] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.623653Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432593:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.667279Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432593:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.678305Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432593:2450] Handle TEvDescribeSchemeResult Forward to# [1:7491655837183432592:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T12:10:50.706454Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:50.706483Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715657 ProcessProposeTransaction 2025-04-10T12:10:50.706599Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7491655837183432602:2455] 2025-04-10T12:10:50.805489Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.805574Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.805604Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.805679Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.805946Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.806071Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:50.806120Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-10T12:10:50.806276Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 HANDLE EvClientConnected 2025-04-10T12:10:50.807031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.811901Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-10T12:10:50.811978Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432602:2455] txid# 281474976715657 SEND to# [1:7491655837183432601:2454] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-04-10T12:10:50.842750Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:50.842775Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715658 ProcessProposeTransaction 2025-04-10T12:10:50.842808Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7491655837183432644:2493] 2025-04-10T12:10:50.845366Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:50.845418Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:50.845434Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:50.845487Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:50.845776Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.845873Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:50.845931Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-04-10T12:10:50.846050Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 HANDLE EvClientConnected 2025-04-10T12:10:50.846967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:50.850668Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-04-10T12:10:50.850714Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432644:2493] txid# 281474976715658 SEND to# [1:7491655837183432643:2492] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-04-10T12:10:50.918949Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:50.918980Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715659 ProcessProposeTransaction 2025-04-10T12:10:50.919024Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7491655837183432662:2503] 2025-04-10T12:10:50.921464Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432662:2503] txid# 281474976715659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56176" 2025-04-10T12:10:50.921519Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655837183432662:2503] txid# 281474976715659 Bootstrap, UserS ... /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.158958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.159306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655850068334647:2343], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.159756Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:53.159773Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715660 ProcessProposeTransaction 2025-04-10T12:10:53.159808Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [1:7491655850068334650:2536] 2025-04-10T12:10:53.162910Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.162959Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.162975Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.165614Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.165703Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.165866Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.166019Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.166065Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-04-10T12:10:53.166212Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 HANDLE EvClientConnected 2025-04-10T12:10:53.167645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 
2025-04-10T12:10:53.172760Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-04-10T12:10:53.172840Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334650:2536] txid# 281474976715660 SEND to# [1:7491655850068334649:2344] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-04-10T12:10:53.196594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655850068334649:2344], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-10T12:10:53.274785Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:53.274816Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715661 ProcessProposeTransaction 2025-04-10T12:10:53.274857Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [1:7491655850068334721:2584] 2025-04-10T12:10:53.277292Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.277325Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.277339Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.277906Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.277965Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.279692Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.279847Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.279931Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-10T12:10:53.280089Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 HANDLE EvClientConnected 2025-04-10T12:10:53.285742Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: 
'/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.287432Z node 1 :TX_PROXY ERROR: Actor# [1:7491655850068334721:2584] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.287471Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334721:2584] txid# 281474976715661 SEND to# [1:7491655850068334649:2344] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-10T12:10:53.536233Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvProposeTransaction 2025-04-10T12:10:53.536269Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] TxId# 281474976715662 ProcessProposeTransaction 2025-04-10T12:10:53.536318Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [1:7491655850068334746:2596] 2025-04-10T12:10:53.538708Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56200" 2025-04-10T12:10:53.538783Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.538803Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.538865Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.539327Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.539378Z node 1 :TX_PROXY ERROR: Actor# [1:7491655850068334746:2596] txid# 281474976715662, Access denied for ordinaryuser@builtin on path /dc-1, with access AlterSchema 2025-04-10T12:10:53.539501Z node 1 :TX_PROXY ERROR: Actor# [1:7491655850068334746:2596] txid# 281474976715662, issues: { message: "Access denied for ordinaryuser@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.539557Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655850068334746:2596] txid# 281474976715662 SEND to# [1:7491655850068334745:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.547118Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTRkZmE0NTctOTBkOWNhMDMtMjk0ZjE3ZjEtZTgzNTBiMGE=, ActorId: [1:7491655850068334613:2337], ActorState: ExecuteState, TraceId: 01jrfsrdb3cy1h01aczt3853be, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.547566Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832888464788:2129] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.547585Z node 1 :TX_PROXY DEBUG: actor# 
[1:7491655832888464788:2129] TxId# 281474976715663 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] Test command err: Starting YDB, grpc: 3650, msgbus: 19540 2025-04-10T12:10:49.627286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655831065532829:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.627478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000582/r3tmp/tmphJL8RD/pdisk_1.dat 2025-04-10T12:10:50.278950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.279070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.285313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:50.308378Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3650, node 1 2025-04-10T12:10:50.339284Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:50.339309Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:50.452497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.452532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.452539Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.452659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19540 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T12:10:50.809634Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:50.809684Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500724:2451] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:50.810016Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500724:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:50.878814Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500724:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:50.890815Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500724:2451] Handle TEvDescribeSchemeResult Forward to# [1:7491655835360500723:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T12:10:50.953459Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:50.953496Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:50.953607Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655835360500735:2461] 2025-04-10T12:10:51.038385Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.038475Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.038495Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.038567Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.038857Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.039005Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:51.039070Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:51.039221Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:51.039953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.047301Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:51.047383Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655835360500735:2461] txid# 281474976710657 SEND to# [1:7491655835360500734:2460] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T12:10:51.091756Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:51.091802Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:51.091846Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655839655468072:2495] 2025-04-10T12:10:51.094384Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.094449Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.094463Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.094527Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.094805Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.094903Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:51.094951Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:51.095120Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:51.095566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.098455Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:51.098503Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468072:2495] txid# 281474976710658 SEND to# [1:7491655839655468071:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:51.135911Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:51.135947Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:51.135988Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7491655839655468090:2505] 2025-04-10T12:10:51.138466Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655839655468090:2505] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" ... /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.152370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655848245402772:2343], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.152457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.152791Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:53.152822Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710660 ProcessProposeTransaction 2025-04-10T12:10:53.152872Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7491655848245402779:2535] 2025-04-10T12:10:53.156485Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.157820Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.157843Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.159994Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.160070Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.160215Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.160370Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.160421Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-10T12:10:53.160538Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 HANDLE EvClientConnected 2025-04-10T12:10:53.162005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 
2025-04-10T12:10:53.171015Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-10T12:10:53.171078Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402779:2535] txid# 281474976710660 SEND to# [1:7491655848245402778:2344] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-10T12:10:53.193780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655848245402778:2344], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:53.253949Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:53.253983Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.254027Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655848245402847:2583] 2025-04-10T12:10:53.256624Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.256676Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.256703Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.257282Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.257364Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.257531Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.257633Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.257691Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.257827Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.260996Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: 
'/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.262637Z node 1 :TX_PROXY ERROR: Actor# [1:7491655848245402847:2583] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.262677Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402847:2583] txid# 281474976710661 SEND to# [1:7491655848245402778:2344] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-10T12:10:53.510665Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvProposeTransaction 2025-04-10T12:10:53.510692Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.510730Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655848245402873:2594] 2025-04-10T12:10:53.513207Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:59762" 2025-04-10T12:10:53.513256Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.513273Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.513315Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.513578Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.513628Z node 1 :TX_PROXY ERROR: Actor# [1:7491655848245402873:2594] txid# 281474976710662, Access denied for ordinaryuser@builtin on path /dc-1, with access AlterSchema 2025-04-10T12:10:53.513745Z node 1 :TX_PROXY ERROR: Actor# [1:7491655848245402873:2594] txid# 281474976710662, issues: { message: "Access denied for ordinaryuser@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.513773Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655848245402873:2594] txid# 281474976710662 SEND to# [1:7491655848245402872:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.548201Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc1ZDlkNi1hODI2YmZkZi1jYmNiZTljMi03NWI2Y2RjOQ==, ActorId: [1:7491655848245402750:2337], ActorState: ExecuteState, TraceId: 01jrfsrdav2beg6ghtpwrdxr0p, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.548731Z node 1 :TX_PROXY DEBUG: actor# [1:7491655831065532900:2116] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.548752Z node 1 :TX_PROXY DEBUG: actor# 
[1:7491655831065532900:2116] TxId# 281474976710663 ProcessProposeKqpTransaction |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] Test command err: Starting YDB, grpc: 19728, msgbus: 18146 2025-04-10T12:10:49.830313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655832484207879:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.830362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000576/r3tmp/tmpus0k7x/pdisk_1.dat 2025-04-10T12:10:50.451786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:50.465109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.466943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.480210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19728, node 1 2025-04-10T12:10:50.802896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:50.802925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:50.802936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:50.803080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18146 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-10T12:10:51.114936Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:51.115026Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143230:2448] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:51.115407Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143230:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.150175Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143230:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:51.168232Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143230:2448] Handle TEvDescribeSchemeResult Forward to# [1:7491655841074143229:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-10T12:10:51.202464Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:51.202504Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:51.202627Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655841074143241:2455] 2025-04-10T12:10:51.365453Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.365552Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.365601Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.365670Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.366028Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.366177Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:51.366238Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:51.366384Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:51.367333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.371693Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-10T12:10:51.371776Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143241:2455] txid# 281474976710657 SEND to# [1:7491655841074143240:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-10T12:10:51.387053Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:51.394368Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:51.394395Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710658 ProcessProposeTransaction 2025-04-10T12:10:51.394428Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7491655841074143286:2496] 2025-04-10T12:10:51.396780Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:51.396864Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:51.396891Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:51.396961Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:51.397349Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:51.397455Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:51.397511Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-10T12:10:51.397661Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 HANDLE EvClientConnected 2025-04-10T12:10:51.398263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:51.400690Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-10T12:10:51.400734Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143286:2496] txid# 281474976710658 SEND to# [1:7491655841074143285:2495] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-10T12:10:51.446871Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:51.446903Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710659 ProcessProposeTransaction 2025-04-10T12:10:51.446949Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 
281474976710659 SEND to# [1:7491655841074143304:2506] 2025-04-10T12:10:51.449431Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655841074143304:2506] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000\n\035\010\000\022\031\010\001\020\200\010\032\020db_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\0 ... ] ActorId: [1:7491655849664077990:2341], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.362785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655849664077998:2344], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.362926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:53.363201Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:53.363226Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710661 ProcessProposeTransaction 2025-04-10T12:10:53.363276Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7491655849664078005:2545] 2025-04-10T12:10:53.365844Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.365890Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.365904Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.367911Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.367999Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.368175Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.368297Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.368368Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-10T12:10:53.368501Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 HANDLE EvClientConnected 2025-04-10T12:10:53.369904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 
2025-04-10T12:10:53.372429Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710661} 2025-04-10T12:10:53.372469Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078005:2545] txid# 281474976710661 SEND to# [1:7491655849664078004:2345] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 53} 2025-04-10T12:10:53.396730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655849664078004:2345], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-10T12:10:53.497095Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:53.497124Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710662 ProcessProposeTransaction 2025-04-10T12:10:53.497170Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7491655849664078077:2597] 2025-04-10T12:10:53.499764Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-10T12:10:53.499852Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.499887Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-10T12:10:53.500445Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-10T12:10:53.500543Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.500690Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.500842Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-10T12:10:53.500896Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-10T12:10:53.501036Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 HANDLE EvClientConnected 2025-04-10T12:10:53.503907Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710662 Reason# Check failed: path: 
'/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-10T12:10:53.505529Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849664078077:2597] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.505568Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078077:2597] txid# 281474976710662 SEND to# [1:7491655849664078004:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-10T12:10:53.774100Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvProposeTransaction 2025-04-10T12:10:53.774133Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710663 ProcessProposeTransaction 2025-04-10T12:10:53.774198Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7491655849664078103:2608] 2025-04-10T12:10:53.776493Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42964" 2025-04-10T12:10:53.776561Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:53.776578Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-04-10T12:10:53.776643Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:53.776972Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:53.777006Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849664078103:2608] txid# 281474976710663, Access denied for db_admin@builtin, attempt to manage user 2025-04-10T12:10:53.777123Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849664078103:2608] txid# 281474976710663, issues: { message: "Access denied for db_admin@builtin" issue_code: 200000 severity: 1 } 2025-04-10T12:10:53.777164Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655849664078103:2608] txid# 281474976710663 SEND to# [1:7491655849664078102:2336] Source {TEvProposeTransactionStatus Status# 5} 2025-04-10T12:10:53.785617Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTk3NThkZGQtYjAyMjFiOGItMTIyYmEzZTgtYWUyNTI1OQ==, ActorId: [1:7491655849664077959:2336], ActorState: ExecuteState, TraceId: 01jrfsrdhgd37rnw07f8z2jd6k, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.786032Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] Handle TEvExecuteKqpTransaction 2025-04-10T12:10:53.786069Z node 1 :TX_PROXY DEBUG: actor# [1:7491655832484208124:2123] TxId# 281474976710664 
ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTemporary [GOOD] Test command err: 2025-04-10T12:10:31.871313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655756702874513:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:31.871507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004d7/r3tmp/tmp2kK8jC/pdisk_1.dat 2025-04-10T12:10:32.260879Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20817, node 1 2025-04-10T12:10:32.268709Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:32.269807Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:32.276424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:32.276504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:32.281797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:32.323730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:32.323769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:32.323787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:32.323981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5398 TClient is connected to server localhost:5398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:32.823033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:34.815690Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T12:10:34.815744Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T12:10:34.855108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655769587777330:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.855199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655769587777322:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.855349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.859948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:34.885263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655769587777336:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:34.954744Z node 1 :TX_PROXY ERROR: Actor# [1:7491655769587777415:2762] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:34.956029Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.956192Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0000154E8 2025-04-10T12:10:34.956265Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491655769587777295:2338], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfsqvf4cwtxh957yxab9jt1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdmOTM5NzUtMmNlMDFiMy1kYzVmYzg1Ni1lNzc0N2VmZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T12:10:34.956441Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.956517Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491655769587777295:2338], queueSize: 1 2025-04-10T12:10:34.957276Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491655769587777295:2338], compileActor: [1:7491655769587777434:2350] 
2025-04-10T12:10:35.221456Z node 1 :KQP_YQL INFO: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.220 INFO ydb-core-sys_view-ut(pid=2072747, tid=0x00007FD5222DE640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T12:10:35.222512Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.221 TRACE ydb-core-sys_view-ut(pid=2072747, tid=0x00007FD5222DE640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:35.222846Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.222 DEBUG ydb-core-sys_view-ut(pid=2072747, tid=0x00007FD5222DE640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 263us 2025-04-10T12:10:35.223454Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.223 DEBUG ydb-core-sys_view-ut(pid=2072747, tid=0x00007FD5222DE640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T12:10:35.224774Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.224 DEBUG ydb-core-sys_view-ut(pid=2072747, tid=0x00007FD5222DE640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-10T12:10:35.225566Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqvf4cwtxh957yxab9jt1, SessionId: CompileActor 2025-04-10 12:10:35.225 TRACE ydb-core-sys_view-ut(pid=2072 ... cutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T12:10:52.703279Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710788. 
Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 2 at actor [1:7491655846897193827:3275] 2025-04-10T12:10:52.703301Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Task: 2, input channelId: 1, src task: 1, at actor [1:7491655846897193824:3274] 2025-04-10T12:10:52.703334Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7491655846897193827:3275], channels: 2 2025-04-10T12:10:52.703353Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7491655846897193824:3274], channels: 1 2025-04-10T12:10:52.703387Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.703421Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.703442Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-10T12:10:52.703531Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655846897193824:3274], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T12:10:52.703558Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. 
Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.703589Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.703628Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655846897193827:3275], task: 2, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-10T12:10:52.703645Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.703666Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7491655846897193827:3275], CA [1:7491655846897193824:3274], 2025-04-10T12:10:52.704930Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655846897193824:3274], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1042 Tasks { TaskId: 1 CpuTimeUs: 269 FinishTimeMs: 1744287052704 OutputRows: 7 OutputBytes: 178 Tables { TablePath: "//Root/.metadata/initialization/migrations" ReadRows: 7 ReadBytes: 151 AffectedPartitions: 1 } IngressRows: 7 ComputeCpuTimeUs: 151 BuildCpuTimeUs: 118 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744287052704 CreateTimeMs: 1744287052703 } MaxMemoryUsage: 1048576 } 2025-04-10T12:10:52.705030Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [1:7491655846897193824:3274] 2025-04-10T12:10:52.705089Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7491655846897193827:3275], 2025-04-10T12:10:52.705117Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7491655846897193827:3275], 2025-04-10T12:10:52.705472Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7491655846897193827:3275], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 550 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 223 FinishTimeMs: 1744287052705 InputRows: 7 InputBytes: 178 OutputRows: 7 OutputBytes: 178 ResultRows: 7 ResultBytes: 178 ComputeCpuTimeUs: 158 BuildCpuTimeUs: 65 HostName: "ghrun-x5vqckr7qy" NodeId: 1 StartTimeMs: 1744287052704 CreateTimeMs: 1744287052703 } MaxMemoryUsage: 1048576 } 2025-04-10T12:10:52.705549Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7491655846897193827:3275] 2025-04-10T12:10:52.705669Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-10T12:10:52.705715Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193801:3252] TxId: 281474976710788. Ctx: { TraceId: 01jrfsrcn2f27j5f6kwtgadhry, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001592s ReadRows: 7 ReadBytes: 151 ru: 7 rate limiter was not found force flag: 1 2025-04-10T12:10:52.707015Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710789. Resolved key sets: 0 2025-04-10T12:10:52.707093Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710789. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-10T12:10:52.707120Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7491655846897193831:3252] TxId: 281474976710789. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-10T12:10:52.707193Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193831:3252] TxId: 281474976710789. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-04-10T12:10:52.707221Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7491655846897193831:3252] TxId: 281474976710789. Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MTdhODU0N2YtMmIzOThiN2YtMTY0YzI2YzktZGFjMmY5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-10T12:10:52.719614Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableReadReplicas [GOOD] Test command err: 2025-04-10T12:10:32.031143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655760481982979:2184];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:32.032247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004d3/r3tmp/tmpmN0i5a/pdisk_1.dat 2025-04-10T12:10:32.382079Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65044, node 1 2025-04-10T12:10:32.424403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:32.424498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:32.429045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:32.460965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:32.461014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:32.461031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:32.461186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4282 TClient is connected to server localhost:4282 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:32.997317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:34.742924Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T12:10:34.742966Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T12:10:34.765736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655769071918581:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.765858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.765954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655769071918589:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:34.770808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:34.794434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655769071918595:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:34.879613Z node 1 :TX_PROXY ERROR: Actor# [1:7491655769071918670:2753] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:34.881025Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.881193Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000349178 2025-04-10T12:10:34.881244Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491655769071918577:2338], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkyZGI5MzUtMTk0YWI5ZmMtZGVkMmEyMDktOTkxNjA4ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T12:10:34.881411Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.881497Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491655769071918577:2338], queueSize: 1 2025-04-10T12:10:34.882202Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491655769071918577:2338], compileActor: [1:7491655769071918689:2349] 
2025-04-10T12:10:35.259356Z node 1 :KQP_YQL INFO: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.258 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T12:10:35.260658Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.259 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:35.261178Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.261 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 422us 2025-04-10T12:10:35.262027Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.261 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T12:10:35.264163Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.264 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-10T12:10:35.265273Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqvcb8zcjt7ym572wvc1n, SessionId: CompileActor 2025-04-10 12:10:35.264 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId ... 
tid=0x00007FAB00E13640) [core exec] yql_execution.cpp:87: Finish, output #84, status: Ok 2025-04-10T12:10:52.490729Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcna3bfvfyz3p630rdwj, SessionId: CompileActor 2025-04-10 12:10:52.490 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #84 2025-04-10T12:10:52.492697Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7491655842086366770:3136], status: SUCCESS, compileActor: [1:7491655846381335139:3228] 2025-04-10T12:10:52.492741Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7491655842086366770:3136], queryUid: d8316030-2a8ab42a-b2d6eb07-c6c5f69, status:SUCCESS 2025-04-10T12:10:52.495646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710771:0, at schemeshard: 72057594046644480 2025-04-10T12:10:52.599000Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: DROP TABLE `test_show_create`;, Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:52.599110Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000769A78 2025-04-10T12:10:52.599138Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491655842086366770:3136], queryUid: , queryText: "DROP TABLE `test_show_create`;", keepInCache: 1, split: 0{ TraceId: 01jrfsrcsp9m06zahkfnwwajqw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ1MzY2NDYtNzI5MjU3NzctZTJjMzZhZDEtOGI3MTg5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T12:10:52.599260Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: DROP TABLE `test_show_create`;, Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:52.599307Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491655842086366770:3136], queueSize: 1 2025-04-10T12:10:52.599955Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491655842086366770:3136], compileActor: [1:7491655846381335340:3239] 2025-04-10T12:10:52.606616Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.606 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) ) 2025-04-10T12:10:52.607049Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.606 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! 
world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:52.607234Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.607 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 133us 2025-04-10T12:10:52.607537Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.607 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T12:10:52.608646Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.608 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-10T12:10:52.609227Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.608 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAB00E13640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:52.611854Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.611 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 211us 2025-04-10T12:10:52.612079Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.612 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [perf] yql_expr_constraint.cpp:3228: Execution of [ConstraintTransformer::DoTransform] took 123us 2025-04-10T12:10:52.612144Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.612 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 26us 2025-04-10T12:10:52.612376Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.612 DEBUG ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 181us 2025-04-10T12:10:52.613876Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.613 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropTable! world $1 '"/Root/test_show_create" '() '"table" '0)) (return (Commit! 
$2 $1 '('('"mode" '"flush")))) ) 2025-04-10T12:10:52.613915Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.613 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-10T12:10:52.613950Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.613 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-10T12:10:52.613983Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.613 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-10T12:10:52.614031Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.613 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-04-10T12:10:52.614099Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-04-10T12:10:52.614584Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-04-10T12:10:52.614655Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-04-10T12:10:52.614739Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-10T12:10:52.614787Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-10T12:10:52.614821Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-10T12:10:52.614856Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-10T12:10:52.614905Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 TRACE ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-10T12:10:52.614954Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-10T12:10:52.614994Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, 
SessionId: CompileActor 2025-04-10 12:10:52.614 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-10T12:10:52.615022Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.615 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-10T12:10:52.615049Z node 1 :KQP_YQL INFO: TraceId: 01jrfsrcsp9m06zahkfnwwajqw, SessionId: CompileActor 2025-04-10 12:10:52.615 INFO ydb-core-sys_view-ut(pid=2072786, tid=0x00007FAAFFB09640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-10T12:10:52.617406Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7491655842086366770:3136], status: SUCCESS, compileActor: [1:7491655846381335340:3239] 2025-04-10T12:10:52.617479Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7491655842086366770:3136], queryUid: fde500ba-5053b583-887038f2-bb74f6c4, status:SUCCESS 2025-04-10T12:10:52.675527Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-10T12:10:52.675556Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-10T12:10:52.676034Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-10T12:10:52.677313Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 21408, MsgBus: 21951 2025-04-10T12:10:42.181554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655800776403961:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:42.181656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000449/r3tmp/tmpuRNm9V/pdisk_1.dat 2025-04-10T12:10:42.523272Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21408, node 1 2025-04-10T12:10:42.602299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:42.602324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:42.602331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:42.602461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:42.610173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:42.610335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:42.612523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21951 TClient is connected to server localhost:21951 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:43.049318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:43.071025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:43.079842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:43.193516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:43.327082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:43.389292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:45.190968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655813661307625:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:45.194380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:45.584642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.618222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.659482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.728610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.765779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.837060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.921238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655813661308147:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:45.921305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:45.921421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655813661308152:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:45.925303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T12:10:45.935949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655813661308154:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T12:10:46.008600Z node 1 :TX_PROXY ERROR: Actor# [1:7491655817956275506:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:47.181468Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655800776403961:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:47.181544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 7.439642s took: 7.433857s took: 7.434564s took: 7.444837s took: 7.439877s took: 7.437974s took: 7.440765s took: 7.442447s took: 7.447187s took: 7.444175s |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] Test command err: Trying to start YDB, gRPC: 18129, MsgBus: 11547 2025-04-10T12:10:49.323343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655832016215374:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:49.323785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00058f/r3tmp/tmpxDlhYu/pdisk_1.dat 2025-04-10T12:10:49.742035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:49.742222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:49.743660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:49.766780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18129, node 1 2025-04-10T12:10:49.878862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:49.878887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:49.878896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:49.879036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11547 TClient is connected to server localhost:11547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:50.523541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:50.558536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:52.425185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655844901117848:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.425196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655844901117840:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.425302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.429131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:52.441123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655844901117854:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:52.524370Z node 1 :TX_PROXY ERROR: Actor# [1:7491655844901117905:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:52.576742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.013685Z node 1 :TX_PROXY ERROR: Actor# [1:7491655849196085369:2429] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/seq\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 } 2025-04-10T12:10:53.022950Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzdhZmU1MDEtYWU1ZTU0Y2QtZmZmNTc3NTgtZTMyNWY2NWI=, ActorId: [1:7491655844901117834:2329], ActorState: ExecuteState, TraceId: 01jrfsrd66757c38t8jxakjhx5, Create QueryResponse for error on request, msg: 2025-04-10T12:10:53.067692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.158327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.519481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.559779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.852312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:53.889596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.180928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSequence, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.319436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655832016215374:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:54.319516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:54.519433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSequence, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.549936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSequence, opId: 281474976710681:0, at schemeshard: 72057594046644480 >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] Test command err: Starting YDB, grpc: 1772, msgbus: 64685 2025-04-10T12:10:51.641695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655841529620696:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:51.641817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000574/r3tmp/tmpj2rXYC/pdisk_1.dat 2025-04-10T12:10:52.088294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T12:10:52.088354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T12:10:52.088370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-10T12:10:52.088394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T12:10:52.088423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T12:10:52.088433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T12:10:52.088465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T12:10:52.088541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T12:10:52.088907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T12:10:52.101448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:52.101562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:52.108415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:52.138438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-10T12:10:52.138504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-10T12:10:52.138515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:52.149243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete TServer::EnableGrpc on GrpcPort 1772, node 1 2025-04-10T12:10:52.149297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T12:10:52.149404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-10T12:10:52.174507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T12:10:52.174646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T12:10:52.182371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-10T12:10:52.191037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-10T12:10:52.229166Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:52.245796Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-10T12:10:52.263976Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-10T12:10:52.266485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-10T12:10:52.266514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-10T12:10:52.266571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T12:10:52.266612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-10T12:10:52.266629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T12:10:52.267300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-10T12:10:52.368424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:52.368481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:52.368495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:52.368626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64685 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-10T12:10:52.628102Z node 1 :TX_PROXY DEBUG: actor# [1:7491655841529620951:2133] Handle TEvNavigate describe path dc-1 2025-04-10T12:10:52.628169Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588727:2446] HANDLE EvNavigateScheme dc-1 2025-04-10T12:10:52.629287Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588727:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.670383Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588727:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-10T12:10:52.680376Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588727:2446] Handle TEvDescribeSchemeResult Forward to# [1:7491655845824588726:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls 
response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:10:52.706696Z node 1 :TX_PROXY DEBUG: actor# [1:7491655841529620951:2133] Handle TEvProposeTransaction 2025-04-10T12:10:52.706725Z node 1 :TX_PROXY DEBUG: actor# [1:7491655841529620951:2133] TxId# 281474976710657 ProcessProposeTransaction 2025-04-10T12:10:52.706905Z node 1 :TX_PROXY DEBUG: actor# [1:7491655841529620951:2133] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7491655845824588750:2455] 2025-04-10T12:10:52.786229Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-10T12:10:52.786346Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:52.786368Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:52.786474Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:52.786727Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:52.786853Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-10T12:10:52.786901Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-10T12:10:52.787059Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655845824588750:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-04-10T12:10:52.788706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 
281474976710657 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T12:10:52.788915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-10T12:10:52.789073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T12:10:52.789236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T12:10:52.789267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... OXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-10T12:10:57.151181Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-10T12:10:57.151245Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-04-10T12:10:57.151598Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:57.151718Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-10T12:10:57.151773Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-04-10T12:10:57.152306Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 HANDLE EvClientConnected 2025-04-10T12:10:57.154976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:39992" , at schemeshard: 72075186224037891 2025-04-10T12:10:57.155248Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-10T12:10:57.155384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T12:10:57.155399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T12:10:57.155564Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 
2025-04-10T12:10:57.155588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-10T12:10:57.155653Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-10T12:10:57.155664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-10T12:10:57.155681Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-10T12:10:57.155690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-10T12:10:57.155719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-10T12:10:57.155775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-10T12:10:57.155792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-10T12:10:57.155803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-10T12:10:57.155813Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-10T12:10:57.155824Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-10T12:10:57.155834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-10T12:10:57.159086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-10T12:10:57.159287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-10T12:10:57.159509Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-10T12:10:57.159534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T12:10:57.159506Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-04-10T12:10:57.159570Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425838:2849] txid# 281474976710665 SEND to# [1:7491655867299425837:2355] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-04-10T12:10:57.159787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-10T12:10:57.159897Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-10T12:10:57.159921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7491655850119384701:2306], at 
schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-10T12:10:57.159948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7491655850119384701:2306], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-10T12:10:57.160743Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-10T12:10:57.160859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-10T12:10:57.160870Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-04-10T12:10:57.160903Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-04-10T12:10:57.160921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-10T12:10:57.161023Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 TEST clusteradmin triggers auth on tenant 2025-04-10T12:10:57.163419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 TClient is connected to server localhost:64685 TClient::Ls request: /dc-1/tenant-db 2025-04-10T12:10:57.323323Z node 1 :TX_PROXY DEBUG: actor# [1:7491655841529620951:2133] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-10T12:10:57.323373Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425847:2854] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-10T12:10:57.323656Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425847:2854] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-10T12:10:57.323756Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425847:2854] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-10T12:10:57.325515Z node 1 :TX_PROXY DEBUG: Actor# [1:7491655867299425847:2854] Handle TEvDescribeSchemeResult Forward to# [1:7491655867299425846:2853] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 
TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-04-10T12:10:57.354654Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-04-10T12:10:57.355497Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T12:10:57.355176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected |85.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |85.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 13754, MsgBus: 1317 2025-04-10T12:10:50.393369Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655835407531314:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:50.393424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000494/r3tmp/tmpFy12OD/pdisk_1.dat 2025-04-10T12:10:50.940425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:50.975461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:50.982439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:50.986734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13754, node 1 2025-04-10T12:10:51.279182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:51.279214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:51.279226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:51.279414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1317 TClient is connected to server localhost:1317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:52.367926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:54.012365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655852587400953:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:54.012909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:54.519274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-10T12:10:54.722964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655852587401077:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:54.723088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:54.723608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655852587401082:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:54.729826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-10T12:10:54.737501Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-10T12:10:54.738100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655852587401084:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T12:10:54.800759Z node 1 :TX_PROXY ERROR: Actor# [1:7491655852587401124:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:55.393713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655835407531314:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:55.393802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:55.877563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:56.273456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-10T12:10:56.734772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:57.203032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-10T12:10:57.625100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-10T12:10:58.051316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-10T12:10:58.084344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-10T12:10:59.727981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.5%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> ColumnShardTiers::TTLUsage |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> 
YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::DescribeConsumer Test command err: 2025-04-10T12:10:35.842327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655773090868682:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:35.842424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:10:36.047057Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00055f/r3tmp/tmpHM4ZEO/pdisk_1.dat 2025-04-10T12:10:36.236571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:36.281086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:36.281226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:36.283146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5969, node 1 2025-04-10T12:10:36.457422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/bh2h/00055f/r3tmp/yandexh0IkMr.tmp 2025-04-10T12:10:36.457450Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/bh2h/00055f/r3tmp/yandexh0IkMr.tmp 2025-04-10T12:10:36.458329Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/bh2h/00055f/r3tmp/yandexh0IkMr.tmp 2025-04-10T12:10:36.458480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:36.724594Z INFO: TTestServer started on Port 61544 GrpcPort 5969 TClient is connected to server localhost:61544 PQClient connected to localhost:5969 WaitRootIsUp 'Root'... 
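The "CRAB Expected" / "CRAB Actual" dumps in the GenericFederatedQuery::PostgreSQLFilterPushdown output above come from the generic connector mock: the test asserts that the WHERE filtered_column = 42 predicate survives as the where { filter_typed { comparison { operation: EQ ... int32_value: 42 } } } subtree of the ReadSplits request (filtering: FILTERING_OPTIONAL, so the engine remains free to re-apply the filter itself). A minimal sketch of that kind of expected-vs-actual protobuf check, using only stock protobuf APIs; the helper name is illustrative and this is not the test's actual code:

    #include <google/protobuf/message.h>
    #include <google/protobuf/text_format.h>
    #include <google/protobuf/util/message_differencer.h>
    #include <iostream>
    #include <memory>
    #include <string>

    // Parse `expectedText` into a message of the same runtime type as `actual`
    // and compare field by field, printing a human-readable diff on mismatch.
    bool ExpectSameRequest(const google::protobuf::Message& actual,
                           const std::string& expectedText) {
        std::unique_ptr<google::protobuf::Message> expected(actual.New());
        if (!google::protobuf::TextFormat::ParseFromString(expectedText,
                                                           expected.get())) {
            std::cerr << "expected textproto does not parse" << std::endl;
            return false;
        }
        std::string diff;
        google::protobuf::util::MessageDifferencer differencer;
        differencer.ReportDifferencesToString(&diff);
        if (differencer.Compare(*expected, actual)) {
            return true;  // pushdown request matched the golden textproto
        }
        std::cerr << "requests differ:" << std::endl << diff;
        return false;
    }

With a real generated request type, this reduces a pushdown regression to a one-line field diff instead of the two full dumps printed above.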
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:37.025168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T12:10:37.063789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T12:10:38.862895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655785975771386:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:38.863048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:38.863373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655785975771401:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:38.867481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T12:10:38.877831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655785975771403:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T12:10:39.148836Z node 1 :TX_PROXY ERROR: Actor# [1:7491655785975771467:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:39.242143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:39.271844Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655790270738771:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:39.273511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTRkMzJlNWYtOTNkODM2N2EtOTgwNzE3MTAtMmU1YTQwZjA=, ActorId: [1:7491655785975771369:2336], ActorState: ExecuteState, TraceId: 01jrfsqzbqd1pmgh68v83515f0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:10:39.275860Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:10:39.277644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:39.350414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491655790270739059:2632] 2025-04-10T12:10:40.842633Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655773090868682:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:40.842753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T12:10:45.765012Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T12:10:45.783523Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T12:10:45.784981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491655816040543128:2793], Recipient [1:7491655777385836418:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:45.785029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:45.785049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:10:45.785083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491655816040543124:2790], Recipient [1:7491655777385836418:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T12:10:45.785101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:10:45.849395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T12:10:45.849828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.850190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T12:10:45.850231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T12:10:45.850264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T12:10:45.850299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T12:10:45.850352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 
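The AddressSanitizer report below (heap-use-after-free in __asan_memcpy, reached from TWriteSessionImpl::FlushWriteIfRequiredImpl -> TBuffer::Append) points at a lifetime bug in the test at topic_ut.cpp lines 50-52: a message body is built with a temporary TStringBuilder, the builder's string is freed when the temporary is destroyed on the same line, and the topic write session only copies the bytes later, inside Write(). A distilled, self-contained sketch of the same pattern in standard C++ (hypothetical names, not the actual test code; ASan flags the final copy):

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <string_view>

    struct PendingWrite {
        std::string_view body;  // non-owning view; dangles once its source string dies
    };

    int main() {
        PendingWrite w;
        {
            std::ostringstream builder;                     // stands in for TStringBuilder
            builder << "message-" << std::string(64, 'x');  // long enough to defeat SSO
            const std::string payload = builder.str();
            w.body = payload;                               // keep only a view into `payload`
        }                                                   // `payload`'s heap buffer freed here
        // The SDK copies the bytes on flush (cf. TBuffer::Append in the trace):
        std::string copied(w.body);                         // heap-use-after-free under ASan
        std::cout << copied.size() << std::endl;
        return 0;
    }

The usual fix is to keep an owning string alive until the write is acknowledged, or to hand ownership of the buffer to the message instead of a view.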
2025-04-10T12:10:45.852155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T12:10:45.852614Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:10:45.852652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDb ... to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:02.361977Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:02.362002Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 #0 0x18eec551 in __asan_memcpy /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 #1 0x19172701 in MemCopy /-S/util/generic/mem_copy.h:18:9 #2 0x19172701 in TBuffer::Append(char const*, unsigned long) /-S/util/generic/buffer.cpp:54:5 #3 0x1fc4af46 in Acquire /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.h:208:18 #4 0x1fc4af46 in NYdb::Dev::NTopic::TWriteSessionImpl::FlushWriteIfRequiredImpl() /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:1366:64 #5 0x1fc4a4f0 in NYdb::Dev::NTopic::TWriteSessionImpl::WriteInternal(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:660:9 #6 0x1fc23871 in NYdb::Dev::NTopic::TWriteSession::Write(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:72:19 #7 0x1fc2581e in NYdb::Dev::NTopic::TSimpleBlockingWriteSession::Write(NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*, TDuration const&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:131:17 #8 0x18e500cc in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:52:13 #9 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #10 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #11 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #12 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #13 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #14 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #16 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #17 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #18 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #19 0x18e55bf3 in 
NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #20 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7f2ae0072d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7f2ae0072e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/.ya/build/build_root/bh2h/00055f/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) 0x50300353bb19 is located 9 bytes inside of 32-byte region [0x50300353bb10,0x50300353bb30) freed by thread T0 here: #0 0x18f22a3d in operator delete(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:143:3 #1 0x18e50034 in UnRef /-S/util/generic/string.h:100:13 #2 0x18e50034 in UnRef /-S/util/generic/string.h:53:16 #3 0x18e50034 in UnRef /-S/util/generic/ptr.h:625:13 #4 0x18e50034 in ~TIntrusivePtr /-S/util/generic/ptr.h:523:9 #5 0x18e50034 in ~TBasicString /-S/util/generic/fwd.h:8:7 #6 0x18e50034 in ~TStringBuilder /-S/util/string/builder.h:8:11 #7 0x18e50034 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:27 #8 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #9 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #10 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #11 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #12 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #13 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #14 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #16 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #17 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #18 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #19 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #20 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #21 0x7f2ae0072d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) previously allocated by thread T0 here: #0 0x18f221dd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x165b3305 in Construct, std::__y1::allocator > > &> /-S/util/generic/string.h:207:17 #2 
0x165b3305 in TBasicString>::Clone() /-S/util/generic/string.h:228:9 #3 0x191c3ecc in Detach /-S/util/generic/string.h:376:13 #4 0x191c3ecc in MutRef /-S/util/generic/string.h:249:9 #5 0x191c3ecc in append /-S/util/generic/string.h:784:9 #6 0x191c3ecc in TStringOutput::DoWrite(void const*, unsigned long) /-S/util/stream/str.cpp:37:9 #7 0x18e4fedd in Write /-S/util/stream/output.h:74:13 #8 0x18e4fedd in Write /-S/util/stream/output.h:84:9 #9 0x18e4fedd in Out /-S/util/stream/output.h:199:11 #10 0x18e4fedd in operator<< /-S/util/stream/output.h:218:5 #11 0x18e4fedd in operator<< /-S/util/string/builder.h:33:21 #12 0x18e4fedd in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:48 #13 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #14 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #15 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #16 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #17 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #18 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #19 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #20 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #21 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #22 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #23 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #24 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #25 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #26 0x7f2ae0072d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x50300353b880: fa fa fa fa fa fa fa fa fa fa fd fd fd fd fa fa 0x50300353b900: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353b980: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353ba00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353ba80: fd fd fd fd fa fa fa fa fa fa fa fa fa fa fa fa =>0x50300353bb00: fa fa fd[fd]fd fd fa fa fa fa fa fa fa fa fa fa 0x50300353bb80: fa fa fa fa fd fd fd fd fa fa fa fa fa fa fa fa 0x50300353bc00: fa fa fa fa fa fa fd fd fd fd fa fa 00 00 00 fa 0x50300353bc80: fa fa fd fd fd fd fa fa fa fa fa fa fa fa fa fa 0x50300353bd00: fa fa fa fa 00 00 00 fa fa fa fa fa fa fa fa fa 0x50300353bd80: fd fd fd fa fa fa fa fa fa fa fa fa fd fd fd fa Shadow byte legend (one shadow byte represents 8 application 
bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==2073450==ABORTING |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> PgCatalog::CheckSetConfig [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-04-10T12:10:48.461012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:10:48.461281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:10:48.461511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000471/r3tmp/tmpsCt9ZJ/pdisk_1.dat 2025-04-10T12:10:48.968175Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:594:2519] 2025-04-10T12:10:48.968242Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:594:2519] 2025-04-10T12:10:48.968323Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:594:2519] 2025-04-10T12:10:48.968440Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:594:2519] 2025-04-10T12:10:48.968691Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:594:2519] 2025-04-10T12:10:48.968759Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:594:2519] 2025-04-10T12:10:48.969627Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:594:2519] 2025-04-10T12:10:48.969687Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:594:2519] 2025-04-10T12:10:48.969727Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:594:2519] 2025-04-10T12:10:48.969790Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:594:2519] 2025-04-10T12:10:48.969823Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:594:2519] 2025-04-10T12:10:48.971083Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-10T12:10:48.971171Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:10:48.980962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:48.981772Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-10T12:10:48.981880Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:48.982470Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-10T12:10:48.992385Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:10:48.992468Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-10T12:10:48.992780Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T12:10:48.992869Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:48.993339Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited cookie 1 for step 6 2025-04-10T12:10:48.993500Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:10:48.993558Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:10:48.993733Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T12:10:48.993837Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:48.994127Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-10T12:10:48.994291Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:10:48.994327Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:10:48.995217Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T12:10:48.995270Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:48.995545Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-10T12:10:48.995824Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] ::Bootstrap [1:601:2525] 2025-04-10T12:10:48.995866Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] lookup [1:601:2525] 2025-04-10T12:10:48.995948Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] queue send [1:601:2525] 2025-04-10T12:10:48.996233Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] forward result local node, try to connect [1:601:2525] 2025-04-10T12:10:48.996278Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545]::SendEvent [1:601:2525] 2025-04-10T12:10:48.996446Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] connected with status OK role: Leader [1:601:2525] 2025-04-10T12:10:48.996491Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send queued [1:601:2525] 2025-04-10T12:10:48.996519Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [1:601:2525] 2025-04-10T12:10:48.997923Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:604:2527] 
2025-04-10T12:10:48.997969Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:604:2527] 2025-04-10T12:10:48.998044Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:604:2527] 2025-04-10T12:10:48.998105Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:604:2527] 2025-04-10T12:10:48.998180Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:604:2527] 2025-04-10T12:10:48.998236Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:604:2527] 2025-04-10T12:10:48.998420Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:604:2527] 2025-04-10T12:10:48.998464Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:604:2527] 2025-04-10T12:10:48.998500Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:604:2527] 2025-04-10T12:10:48.998546Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:604:2527] 2025-04-10T12:10:48.998578Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:604:2527] 2025-04-10T12:10:48.998729Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-10T12:10:48.998801Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:10:48.998896Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T12:10:48.998952Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:49.009454Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] client retry [1:91:2137] 2025-04-10T12:10:49.009547Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2137] 2025-04-10T12:10:49.009624Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [1:342:2378] 2025-04-10T12:10:49.009648Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [1:342:2378] 2025-04-10T12:10:49.009736Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [1:91:2137] 2025-04-10T12:10:49.009783Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [1:91:2137] 2025-04-10T12:10:49.009825Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result local node, try to connect [1:342:2378] 2025-04-10T12:10:49.009865Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131]::SendEvent [1:342:2378] 2025-04-10T12:10:49.010088Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] connected with status OK role: Leader [1:342:2378] 2025-04-10T12:10:49.010130Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send queued [1:342:2378] 2025-04-10T12:10:49.010189Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:342:2378] 2025-04-10T12:10:49.010300Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [1:91:2137] 2025-04-10T12:10:49.010332Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [1:91:2137] 2025-04-10T12:10:49.010373Z node 
1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [1:91:2137] 2025-04-10T12:10:49.010597Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-10T12:10:49.010676Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:10:49.010902Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T12:10:49.011040Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:10:49.011572Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send [1:342:2378] 2025-04-10T12:10:49.011620Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:342:2378] 2025-0 ... atExecutor::ITransaction, TDelete>, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #16 0x21e45361 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #17 0x21dedd4b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #18 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #19 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #20 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #21 0x36eab423 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #22 0x37073ae3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #23 0x37073082 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #24 0x37073082 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #25 0x3706b2e2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #26 0x3706aeba in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #27 0x18f9e247 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #28 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #29 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #30 0x18fb3b07 in __call<(lambda at 
/-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #31 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #38 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7f745af1fd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x19e3a60e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20 #2 0x19e3a60e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38 #3 0x2ae05b65 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16 #4 0x2ae05b65 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56 #5 0x2ae05b65 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34 #6 0x2b215464 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13 #7 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #8 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #9 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #10 0x20817234 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13 #11 0x2081e42e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5 #12 0x207b4a9a in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #13 0x207b4a9a in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) 
/-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #14 0x21e45361 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #15 0x21dedd4b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #16 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #17 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #18 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #19 0x36eab423 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #20 0x37073ae3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #21 0x37073082 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #22 0x37073082 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #23 0x3706b2e2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #24 0x3706aeba in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #25 0x18f9e247 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #26 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18fb3b07 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7f745af1fd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) 
(BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::CheckSetConfig [GOOD] Test command err: Trying to start YDB, gRPC: 10105, MsgBus: 7613 2025-04-10T12:10:48.488073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655827024891113:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:48.488172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000592/r3tmp/tmpswEh2Z/pdisk_1.dat 2025-04-10T12:10:48.776983Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10105, node 1 2025-04-10T12:10:48.855317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:48.855343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:48.855350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:48.855470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:48.860089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:48.860217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:48.862218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7613 TClient is connected to server localhost:7613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:49.334873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:51.412340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655839909793671:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:51.412352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655839909793683:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:51.412455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:51.419152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-10T12:10:51.431371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655839909793685:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-10T12:10:51.526206Z node 1 :TX_PROXY ERROR: Actor# [1:7491655839909793736:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:10:51.587867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-10T12:10:52.090362Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655844204761201:2367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At function: PgSetItem
:2:31: Error: At function: PgReadTable!
:2:31: Error: Unsupported table: pgtable 2025-04-10T12:10:52.091683Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDc0ZTM1ZjctYzc4N2Y1MGUtNzMzYTM4MTYtOWEzZjcwMQ==, ActorId: [1:7491655839909793667:2328], ActorState: ExecuteState, TraceId: 01jrfsrc96a8edk0d49m9b3259, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 62639, MsgBus: 2374 2025-04-10T12:10:52.823501Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491655844820638316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:52.823579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000592/r3tmp/tmpojo1Cd/pdisk_1.dat 2025-04-10T12:10:53.069678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:53.069796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:53.073020Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:53.074280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62639, node 2 2025-04-10T12:10:53.190675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:53.190705Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:53.190712Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:53.190825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2374 TClient is connected to server localhost:2374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:53.659994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:53.667540Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T12:10:56.313840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491655862000508168:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:56.313896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491655862000508160:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:56.313990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:56.316736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T12:10:56.325027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491655862000508174:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:10:56.383794Z node 2 :TX_PROXY ERROR: Actor# [2:7491655862000508225:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2870, MsgBus: 6950 2025-04-10T12:10:58.884980Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7491655869339505843:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:58.885145Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000592/r3tmp/tmpbaDePR/pdisk_1.dat 2025-04-10T12:10:58.985695Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:59.014429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:59.014515Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:59.016530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2870, node 3 2025-04-10T12:10:59.060264Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:59.060295Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:59.060303Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:59.060426Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6950 TClient is connected to server localhost:6950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:59.417506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:01.209380Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491655882224408389:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:01.209477Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7491655882224408381:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:01.209608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:01.213135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T12:11:01.228674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7491655882224408395:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:11:01.293721Z node 3 :TX_PROXY ERROR: Actor# [3:7491655882224408446:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2307, MsgBus: 7105 2025-04-10T12:11:02.516353Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7491655887133939769:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:02.516428Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000592/r3tmp/tmpK3NhUH/pdisk_1.dat 2025-04-10T12:11:02.644962Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2307, node 4 2025-04-10T12:11:02.669347Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:02.669436Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:02.671745Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:02.715608Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:02.715640Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:02.715651Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:02.715824Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7105 TClient is connected to server localhost:7105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:03.296788Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:06.333012Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491655904313809611:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:06.333020Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7491655904313809605:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:06.333101Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:06.337983Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T12:11:06.353189Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7491655904313809619:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-10T12:11:06.445129Z node 4 :TX_PROXY ERROR: Actor# [4:7491655904313809670:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:11:06.511029Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-10T12:11:06.791356Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7491655904313809812:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At function: PgSetItem
:2:31: Error: At function: PgReadTable!
:2:31: Error: Unsupported table: pgtable 2025-04-10T12:11:06.792540Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzY3MDdlMDYtODM2YmM4YmUtY2FhMmVjYzItNWIzNmE4ZDA=, ActorId: [4:7491655904313809586:2329], ActorState: ExecuteState, TraceId: 01jrfsrtmrb3y09nx9sjs2raj5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |89.8%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestCpuLoadThresholdRefresh |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ReadIteratorExternalBlobs::NotExtBlobs |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [FAIL] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> YdbLogStore::AlterLogTable >> KqpLimits::QSReplySize+useSink |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpLimits::TooBigColumn+useSink |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ydb/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> YdbLogStore::AlterLogTable [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [FAIL] Test command err: 2025-04-10T12:10:35.852518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655770922280864:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:35.852616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:10:36.045061Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000568/r3tmp/tmpe2g21K/pdisk_1.dat 2025-04-10T12:10:36.257108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:36.279164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:36.279262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:36.283153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30075, node 1 2025-04-10T12:10:36.457384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/bh2h/000568/r3tmp/yandexoWrlUE.tmp 2025-04-10T12:10:36.457437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/bh2h/000568/r3tmp/yandexoWrlUE.tmp 2025-04-10T12:10:36.458305Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/bh2h/000568/r3tmp/yandexoWrlUE.tmp 2025-04-10T12:10:36.458458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:36.724731Z INFO: TTestServer started on Port 19688 GrpcPort 30075 TClient is connected to server localhost:19688 PQClient connected to localhost:30075 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:37.025096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T12:10:37.065355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T12:10:38.730759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655783807183357:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:38.730868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:38.731311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655783807183370:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:10:38.738676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-04-10T12:10:38.749725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655783807183372:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-04-10T12:10:38.831348Z node 1 :TX_PROXY ERROR: Actor# [1:7491655783807183436:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:10:39.200072Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491655783807183444:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:10:39.206539Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q2M2VkNjQtNWU5OTNiY2YtODdlZjEzODYtZjVkMjhmMzI=, ActorId: [1:7491655783807183355:2336], ActorState: ExecuteState, TraceId: 01jrfsqz857zb7ve7eb16a0fgv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:10:39.210175Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:10:39.242033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:39.270363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:39.350558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491655788102151033:2634] 2025-04-10T12:10:40.852375Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655770922280864:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:40.852524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T12:10:45.738007Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T12:10:45.764718Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T12:10:45.772812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491655813871955115:2808], Recipient [1:7491655775217248379:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:45.772863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:45.772901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:10:45.772946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491655813871955111:2805], Recipient [1:7491655775217248379:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T12:10:45.772960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:10:45.852859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 2 ScaleUpPartitionWriteSpeedThresholdPercent: 2 ScaleDownPartitionWriteSpeedThresholdPercent: 1 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T12:10:45.853331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:45.853629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T12:10:45.853681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T12:10:45.853722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T12:10:45.853781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T12:10:45.853834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-10T12:10:45.853987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemesh ... de 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629640:2848], Partition 2, Sender [0:0:0], Recipient [1:7491655856821629716:2855], Cookie: 0 2025-04-10T12:11:11.349151Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629716:2855]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349169Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349215Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.349287Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.349318Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.349345Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.349592Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655813871955170:2470], Partition 0, Sender [0:0:0], Recipient [1:7491655818166922502:2473], Cookie: 0 2025-04-10T12:11:11.349658Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655818166922502:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349660Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629648:2849], Partition 1, Sender [0:0:0], Recipient [1:7491655856821629729:2858], Cookie: 0 2025-04-10T12:11:11.349691Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349727Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.349732Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629729:2858]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349757Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.349766Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.349782Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.349796Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.349805Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.349848Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.349909Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.349946Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.449549Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629640:2848], Partition 2, Sender [0:0:0], Recipient [1:7491655856821629716:2855], Cookie: 0 2025-04-10T12:11:11.449650Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629716:2855]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.449674Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.449711Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.449766Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.449784Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.449828Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.449899Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655813871955170:2470], Partition 0, Sender [0:0:0], Recipient [1:7491655818166922502:2473], Cookie: 0 2025-04-10T12:11:11.449935Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655818166922502:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.449947Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.449968Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.449992Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.450004Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.450016Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.450011Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629648:2849], Partition 1, Sender [0:0:0], Recipient [1:7491655856821629729:2858], Cookie: 0 2025-04-10T12:11:11.450093Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629729:2858]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.450135Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.450202Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.450287Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.450319Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.450348Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.550377Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629640:2848], Partition 2, Sender [0:0:0], Recipient [1:7491655856821629716:2855], Cookie: 0 2025-04-10T12:11:11.550379Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655813871955170:2470], Partition 0, Sender [0:0:0], Recipient [1:7491655818166922502:2473], Cookie: 0 2025-04-10T12:11:11.550443Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655818166922502:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550448Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629716:2855]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550466Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550467Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550508Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.550508Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.550576Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.550576Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.550596Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.550595Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.550620Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.550643Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:11:11.550709Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655856821629648:2849], Partition 1, Sender [0:0:0], Recipient [1:7491655856821629729:2858], Cookie: 0 2025-04-10T12:11:11.550751Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655856821629729:2858]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550769Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:11:11.550788Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:11:11.550815Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:11:11.550826Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:11:11.550842Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 assertion failed at ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:1484, virtual void NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic::Execute_(NUnitTest::TTestContext &): (stats->GetCommittedOffset() == 8) TBackTrace::Capture()+28 (0x191F204C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x196AF950) NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic::Execute_(NUnitTest::TTestContext&)+94800 (0x18CFE1E0) std::__y1::__function::__func, void ()>::operator()()+280 (0x18D7EC98) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x196E6996) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x196B64C9) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+1204 (0x18D7DB44) NUnitTest::TTestFactory::Execute()+2438 (0x196B7D96) NUnitTest::RunMain(int, char**)+5213 (0x196E0F0D) ??+0 (0x7F3555422D90) __libc_start_main+128 (0x7F3555422E40) _start+41 (0x165B1029) |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] |93.5%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] |93.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] Test command err: 2025-04-10T12:11:15.901825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:15.902126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:15.902371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000306/r3tmp/tmpLKEMYK/pdisk_1.dat 2025-04-10T12:11:16.337527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.377525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:16.418674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:16.418837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:16.430584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:16.516077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.877262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.877416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.877511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.884385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T12:11:17.054108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:11:17.129733Z node 1 :TX_PROXY ERROR: Actor# [1:832:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:17.657517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfss4ga4d4hnna54617zd66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk4MTA4NTYtYTE1Zjk1MC01NGYxZGUwNC1kYTc3OGNhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] Test command err: 2025-04-10T12:11:10.097961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655920826950015:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:10.098105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000384/r3tmp/tmpISi3qg/pdisk_1.dat 2025-04-10T12:11:10.529561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:10.530043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:10.530382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:10.543746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30257, node 1 2025-04-10T12:11:10.714613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:10.714643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:10.714656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:10.714800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1436 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:11.175798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:12.651400Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-10T12:11:12.660094Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491655929416885250:2328], Start check tables existence, number paths: 2 2025-04-10T12:11:12.660247Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-10T12:11:12.660287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-10T12:11:12.660319Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-10T12:11:12.662270Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491655929416885250:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-10T12:11:12.662356Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491655929416885250:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-10T12:11:12.662393Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7491655929416885250:2328], Successfully finished 2025-04-10T12:11:12.663188Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-10T12:11:12.664766Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY= 2025-04-10T12:11:12.664886Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: unknown state, session actor bootstrapped 2025-04-10T12:11:12.684409Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T12:11:12.689876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
2025-04-10T12:11:12.691381Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-10T12:11:12.691536Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-10T12:11:12.701733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:11:12.774349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-10T12:11:12.780123Z node 1 :TX_PROXY ERROR: Actor# [1:7491655929416885319:2332] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:12.780300Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655929416885268:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-10T12:11:12.782820Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Bootstrap. Database: /Root 2025-04-10T12:11:12.784529Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg= 2025-04-10T12:11:12.784662Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg=, ActorId: [1:7491655929416885332:2331], ActorState: unknown state, session actor bootstrapped 2025-04-10T12:11:12.785527Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-10T12:11:12.785561Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-10T12:11:12.786434Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg=, ActorId: [1:7491655929416885332:2331], ActorState: ReadyState, TraceId: 01jrfss0ghbdvaqah5g9t638yj, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7491655929416885327:2339] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-10T12:11:12.787052Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWYwMjNiMWItZWYzYjZjOGYtNjQ4NWU1ZDktMzYwNmI2NDc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWYwMjNiMWItZWYzYjZjOGYtNjQ4NWU1ZDktMzYwNmI2NDc= 2025-04-10T12:11:12.787278Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7491655929416885332:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg= 2025-04-10T12:11:12.787496Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655929416885334:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T12:11:12.787648Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWYwMjNiMWItZWYzYjZjOGYtNjQ4NWU1ZDktMzYwNmI2NDc=, ActorId: [1:7491655929416885335:2333], ActorState: unknown state, session actor bootstrapped 2025-04-10T12:11:12.787681Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491655929416885336:2334], Database: /Root, Start database fetching 2025-04-10T12:11:12.788572Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7491655929416885336:2334], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-10T12:11:12.788652Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-10T12:11:12.788771Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491655929416885345:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg=, Start pool fetching 2025-04-10T12:11:12.788862Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655929416885346:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-10T12:11:12.789185Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655929416885346:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T12:11:12.789185Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655929416885334:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-10T12:11:12.789247Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7491655929416885345:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OGE1MWZiZGMtNjIyNzEwYzEtOTk0OTMzOGItYjE1YjU1NTg=, Pool info successfully resolved 2025-04-10T12:11:12.789256Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-10T12:11:12.789346Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-10T12:11:12.789606Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); ... 
0 2025-04-10T12:11:17.664734Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, Sending to Executer TraceId: 0 8 2025-04-10T12:11:17.664814Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, Created new KQP executer: [1:7491655950891722510:2507] isRollback: 0 2025-04-10T12:11:17.666245Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T12:11:17.666282Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T12:11:17.666300Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T12:11:17.666314Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T12:11:17.666362Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTlmYTkxZDQtN2UxN2IwYTctMTUzOTJkYjgtNWE1NDQ0MmY=, ActorId: [1:7491655929416885266:2329], ActorState: unknown state, Session actor destroyed 2025-04-10T12:11:17.672329Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T12:11:17.672404Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, ExecutePhyTx, tx: 0x000050C0000D45D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T12:11:17.673237Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T12:11:17.673363Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, txInfo Status: Committed Kind: ReadOnly TotalDuration: 8.88 ServerDuration: 8.744 QueriesCount: 2 2025-04-10T12:11:17.673456Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T12:11:17.673518Z node 1 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T12:11:17.673538Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, EndCleanup, isFinal: 0 2025-04-10T12:11:17.673587Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ExecuteState, TraceId: 01jrfss58z998pvsn03e37f4za, Sent query response back to proxy, proxyRequestId: 29, proxyId: [1:7491655920826950263:2277] 2025-04-10T12:11:17.673956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, TxId: 2025-04-10T12:11:17.674026Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, TxId: 2025-04-10T12:11:17.674119Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7491655929416885349:2337], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-10T12:11:17.674488Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T12:11:17.674508Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T12:11:17.674522Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T12:11:17.674543Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T12:11:17.674582Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjYxYmU4YTQtN2M0OWJkN2MtYTkwYThiMTktY2I2ODFjNGM=, ActorId: [1:7491655950891722481:2507], ActorState: unknown state, Session actor destroyed 2025-04-10T12:11:17.723905Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, ExecutePhyTx, tx: 0x000050C000281458 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-10T12:11:17.723955Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, 
ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, Sending to Executer TraceId: 0 8 2025-04-10T12:11:17.724041Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, Created new KQP executer: [1:7491655950891722530:2495] isRollback: 0 2025-04-10T12:11:17.744265Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-10T12:11:17.744346Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, ExecutePhyTx, tx: 0x000050C000281218 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-10T12:11:17.745191Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-10T12:11:17.745385Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, txInfo Status: Committed Kind: ReadOnly TotalDuration: 21.538 ServerDuration: 21.477 QueriesCount: 2 2025-04-10T12:11:17.745493Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-10T12:11:17.745541Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T12:11:17.745561Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, EndCleanup, isFinal: 0 2025-04-10T12:11:17.745608Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ExecuteState, TraceId: 01jrfss4xf9cezr6sa6v77zg3e, Sent query response back to proxy, proxyRequestId: 26, proxyId: [1:7491655920826950263:2277] 2025-04-10T12:11:17.745976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, TxId: 2025-04-10T12:11:17.746081Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, TxId: 2025-04-10T12:11:17.746247Z node 1 :KQP_WORKLOAD_SERVICE TRACE: 
[WorkloadService] [Service] Succesfully fetched cpu load: 0%, cpu number: 1 2025-04-10T12:11:17.746794Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ReadyState, Session closed due to explicit close event 2025-04-10T12:11:17.746828Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-10T12:11:17.746851Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-10T12:11:17.746874Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: unknown state, Cleanup temp tables: 0 2025-04-10T12:11:17.746935Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlOGIxZTUtZGIwYWZhNTctZjQ0MDY3ZmItNGJlMmE5ZWI=, ActorId: [1:7491655950891722442:2495], ActorState: unknown state, Session actor destroyed |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpStats::SysViewClientLost |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.1%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::TooBigColumn+useSink [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-04-10T12:11:02.009375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3155:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.011310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.011872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.012827Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3112:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.013343Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1516:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.014268Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.014438Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.014512Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.015571Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.016645Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1513:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.018217Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.019301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3151:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.019822Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.020690Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.020919Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3161:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.022138Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.022451Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3106:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.022664Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3109:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.023287Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.024388Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.024444Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.024526Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.025674Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.025735Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:02.026539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3158:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:02.027575Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:02.028309Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T12:11:02.582666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:02.863918Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T12:11:02.884046Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T12:11:03.393476Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27129, node 1 TClient is connected to server localhost:16918 2025-04-10T12:11:03.720780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:03.720881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:03.720927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:03.721581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22113, MsgBus: 30840 2025-04-10T12:11:14.267876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655938712396537:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:14.269513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0002f1/r3tmp/tmpIlyGdj/pdisk_1.dat 2025-04-10T12:11:14.577408Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22113, node 1 2025-04-10T12:11:14.646908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:14.647068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:14.650081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:14.658026Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:14.658068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:14.658080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:14.658215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30840 TClient is connected to server localhost:30840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:15.231303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:15.252784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:15.390601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:15.551537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:15.621751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:17.116448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655951597300222:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.116568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.434135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.459538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.487404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.531519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.562948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.631441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.673308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655951597300739:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.673383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.673500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655951597300744:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.677770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T12:11:17.692718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655951597300746:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T12:11:17.806270Z node 1 :TX_PROXY ERROR: Actor# [1:7491655951597300799:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:19.268203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655938712396537:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:19.268293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:11:19.366507Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911;tx_id=3; 2025-04-10T12:11:19.374668Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 2025-04-10T12:11:19.374959Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655960187235752:2496], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7491655955892268393:2496]Got BAD REQUEST for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7491655960187235752:2496].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } 2025-04-10T12:11:19.375855Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655960187235745:2496], SessionActorId: [1:7491655955892268393:2496], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 . sessionActorId=[1:7491655955892268393:2496]. isRollback=0 2025-04-10T12:11:19.416484Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRmY2RiYzYtN2UxYmE4Yi05MDVkNDIwNi1kNGUyYzQyMA==, ActorId: [1:7491655955892268393:2496], ActorState: ExecuteState, TraceId: 01jrfss6keabj1st1ae529gzb5, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7491655960187235746:2496] from: [1:7491655960187235745:2496] 2025-04-10T12:11:19.416793Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491655960187235746:2496] TxId: 281474976710671. Ctx: { TraceId: 01jrfss6keabj1st1ae529gzb5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRmY2RiYzYtN2UxYmE4Yi05MDVkNDIwNi1kNGUyYzQyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } } 2025-04-10T12:11:19.417208Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRmY2RiYzYtN2UxYmE4Yi05MDVkNDIwNi1kNGUyYzQyMA==, ActorId: [1:7491655955892268393:2496], ActorState: ExecuteState, TraceId: 01jrfss6keabj1st1ae529gzb5, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> QuoterWithKesusTest::PrefetchCoefficient |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> TCreateAndDropViewTest::CallDropViewOnTable |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] Test command err: 2025-04-10T12:11:05.555294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655900139243615:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:05.558645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000476/r3tmp/tmpBzLc9K/pdisk_1.dat 2025-04-10T12:11:05.957504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:05.957652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:05.961065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:05.961844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7874, node 1 2025-04-10T12:11:06.107575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:06.107598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:06.107607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:06.107761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:06.372983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:09.038131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655917319113686:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:09.038287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:09.319123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:11:09.543689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655917319113881:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:09.543778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655917319113886:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:09.543779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:09.547060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T12:11:09.566406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655917319113888:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T12:11:09.660929Z node 1 :TX_PROXY ERROR: Actor# [1:7491655917319113965:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:09.772430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfsrxb61s85hhaa42g9ahnv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhNWRlMzgtMTJmMmQ5ODYtMjg4YjE4N2QtYTQ5YTU4ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=OGFiZjAyYWItZGQ1NDFlZmYtZWFlYWM5ZjEtYTczMzRmNzY=" != "ydb://session/3?node_id=1&id=ODdhNWRlMzgtMTJmMmQ5ODYtMjg4YjE4N2QtYTQ5YTU4ZjU=") , with diff: "ydb://session/3?node_id=1&id=O(GFiZjAyY|DdhN)W(I|RlMzg)t(ZG|MTJmMm)Q(1N|5O)D(FlZm|)Yt(ZWFl|Mjg4)Y(WM5Z|)jE(|4N2Q)tYT(czMzRmNz|Q5)Y(|TU4ZjU)=" TBackTrace::Capture()+28 (0x18DC746C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1928F5D0) NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&)+7545 (0x18987419) std::__y1::__function::__func, void ()>::operator()()+280 (0x189FCB78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192C6616) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19296149) NTestSuiteYdbSdkSessions::TCurrentTest::Execute()+1204 (0x189FBA24) NUnitTest::TTestFactory::Execute()+2438 (0x19297A16) NUnitTest::RunMain(int, char**)+5213 (0x192C0B8D) ??+0 (0x7FCA499C6D90) __libc_start_main+128 (0x7FCA499C6E40) _start+41 (0x162E7029) |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |95.0%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> alter_compression.py::TestAlterCompression::test_all_supported_compression |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> test_tpch.py::TestTpchS1::test_tpch[1] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpLimits::QSReplySize+useSink [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> test_drain.py::TestHive::test_drain_on_stop >> test_alter_tiering.py::TestAlterTiering::test[many_tables] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> SystemView::ShowCreateTableTtlSettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] 2025-04-10 12:11:28,134 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |95.5%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 61308, MsgBus: 10425 2025-04-10T12:11:12.822849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655930092519207:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:12.823009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00030f/r3tmp/tmpG57j3a/pdisk_1.dat 2025-04-10T12:11:13.140934Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61308, node 1 2025-04-10T12:11:13.198714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:13.202016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:13.204440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:13.259902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-04-10T12:11:13.259946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:13.259958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:13.260086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10425 TClient is connected to server localhost:10425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:13.822795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:13.852675Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:11:13.869671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:14.030375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:14.185378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:14.256283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:15.931547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655942977422874:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:15.931732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.256074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.288036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.318186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.346627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.377129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.414752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.494183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655947272390687:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.494280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.494360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655947272390692:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:16.499075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T12:11:16.510119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655947272390694:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T12:11:16.607519Z node 1 :TX_PROXY ERROR: Actor# [1:7491655947272390749:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:17.463559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:17.819914Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655930092519207:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:17.819997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:11:26.547879Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655985927097749:2635], SessionActorId: [1:7491655985927097731:2635], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[1:7491655985927097731:2635]. isRollback=0 2025-04-10T12:11:26.615632Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRlZDMwNjQtN2UwYzRmY2UtMmE3ODQ3MmQtZTYwNmZl, ActorId: [1:7491655985927097731:2635], ActorState: ExecuteState, TraceId: 01jrfsscj65gb5bshba9nkgrep, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7491655985927097750:2635] from: [1:7491655985927097749:2635] 2025-04-10T12:11:26.615742Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491655985927097750:2635] TxId: 281474976710672. Ctx: { TraceId: 01jrfsscj65gb5bshba9nkgrep, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRlZDMwNjQtN2UwYzRmY2UtMmE3ODQ3MmQtZTYwNmZl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-10T12:11:26.615912Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7491655985927097758:2648], TxId: 281474976710672, task: 5. Ctx: { TraceId : 01jrfsscj65gb5bshba9nkgrep. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZmRlZDMwNjQtN2UwYzRmY2UtMmE3ODQ3MmQtZTYwNmZl. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7491655985927097750:2635], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-10T12:11:26.616407Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRlZDMwNjQtN2UwYzRmY2UtMmE3ODQ3MmQtZTYwNmZl, ActorId: [1:7491655985927097731:2635], ActorState: ExecuteState, TraceId: 01jrfsscj65gb5bshba9nkgrep, Create QueryResponse for error on request, msg:
: Error: Stream write queries aren't allowed., code: 2029 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTtlSettings [GOOD] Test command err: 2025-04-10T12:10:30.953101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655751540399695:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:30.953178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004dc/r3tmp/tmpkYxLK3/pdisk_1.dat 2025-04-10T12:10:31.380771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:31.392199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:31.392279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16163, node 1 2025-04-10T12:10:31.401886Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:31.431070Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:10:31.431420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:31.460516Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:31.460553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:31.460570Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:31.460733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30652 TClient is connected to server localhost:30652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:32.086115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
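
The KqpLimits::QSReplySize+useSink output above surfaces issue code 2029 ("Stream write queries aren't allowed") to the session as a PRECONDITION_FAILED status. Below is a minimal sketch of how a C++ SDK client would observe that status; it assumes the table-client API (ExecuteDataQuery with a serializable transaction) and is illustrative, not the test's own code:

    #include <util/generic/string.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // Sketch: run a query and report whether it was rejected the way the
    // log above shows (PRECONDITION_FAILED carrying issue code 2029).
    bool IsStreamWriteRejected(NYdb::NTable::TSession& session, const TString& query) {
        auto result = session.ExecuteDataQuery(
            query,
            NYdb::NTable::TTxControl::BeginTx(NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
        // The issue text ("Stream write queries aren't allowed., code: 2029")
        // travels alongside the status and is available via result.GetIssues().
        return result.GetStatus() == NYdb::EStatus::PRECONDITION_FAILED;
    }
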
2025-04-10T12:10:32.127466Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:33.889828Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-04-10T12:10:33.889869Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-04-10T12:10:33.904289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655764425302716:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.904295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655764425302711:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.904398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:33.908164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:10:33.927201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655764425302725:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:10:34.022253Z node 1 :TX_PROXY ERROR: Actor# [1:7491655768720270098:2753] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:34.023342Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.023455Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0000BF1C8 2025-04-10T12:10:34.023509Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7491655764425302692:2338], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jrfsqthd8e25rbv8acqftze0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVmYTIyMWEtYWQ1MTljM2YtOThkNzdmNjAtYmY4OWRiODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-04-10T12:10:34.023647Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-10T12:10:34.023717Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7491655764425302692:2338], queueSize: 1 2025-04-10T12:10:34.024355Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7491655764425302692:2338], compileActor: [1:7491655768720270117:2350] 
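
The compile-service records above carry the full bootstrap text the SystemView test runs: two UPSERT OBJECT ... (TYPE SECRET) statements followed by CREATE EXTERNAL DATA SOURCE `tier1`. The statements below are copied from that log; wrapping them in a query-service call is an assumption about how the test submits them (the logged QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY suggests the generic, non-interactive path):

    #include <util/system/yassert.h>
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_query/client.h>

    // Hedged reconstruction: statement text from the log, client calls assumed.
    void BootstrapTier1(NYdb::TDriver& driver) {
        NYdb::NQuery::TQueryClient client(driver);
        const TString query = R"sql(
            UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);
            UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);
            CREATE EXTERNAL DATA SOURCE `tier1` WITH (
                SOURCE_TYPE = "ObjectStorage",
                LOCATION = "http://fake.fake/olap-tier1",
                AUTH_METHOD = "AWS",
                AWS_ACCESS_KEY_ID_SECRET_NAME = "accessKey",
                AWS_SECRET_ACCESS_KEY_SECRET_NAME = "secretKey",
                AWS_REGION = "ru-central1"
            );
        )sql";
        // No interactive transaction: scheme/object statements run outside a tx.
        auto result = client.ExecuteQuery(query, NYdb::NQuery::TTxControl::NoTx()).GetValueSync();
        Y_ABORT_UNLESS(result.IsSuccess(), "%s", result.GetIssues().ToString().c_str());
    }
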
2025-04-10T12:10:34.219190Z node 1 :KQP_YQL INFO: TraceId: 01jrfsqthd8e25rbv8acqftze0, SessionId: CompileActor 2025-04-10 12:10:34.218 INFO ydb-core-sys_view-ut(pid=2072499, tid=0x00007FCF3518C640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-04-10T12:10:34.220061Z node 1 :KQP_YQL TRACE: TraceId: 01jrfsqthd8e25rbv8acqftze0, SessionId: CompileActor 2025-04-10 12:10:34.219 TRACE ydb-core-sys_view-ut(pid=2072499, tid=0x00007FCF3518C640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (let $4 (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-04-10T12:10:34.220364Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqthd8e25rbv8acqftze0, SessionId: CompileActor 2025-04-10 12:10:34.220 DEBUG ydb-core-sys_view-ut(pid=2072499, tid=0x00007FCF3518C640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 233us 2025-04-10T12:10:34.220917Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqthd8e25rbv8acqftze0, SessionId: CompileActor 2025-04-10 12:10:34.220 DEBUG ydb-core-sys_view-ut(pid=2072499, tid=0x00007FCF3518C640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-04-10T12:10:34.222202Z node 1 :KQP_YQL DEBUG: TraceId: 01jrfsqthd8e25rbv8acqftze0, SessionId: CompileActor 2025-04-10 12:10:34.222 DEBUG ydb-core-sys_view-ut(pid=2072499, tid=0x00007FCF3518C640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-04-1 ... 
: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038378 not found 2025-04-10T12:11:25.715501Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038376 not found 2025-04-10T12:11:25.715519Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038407 not found 2025-04-10T12:11:25.715537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038402 not found 2025-04-10T12:11:25.715556Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038404 not found 2025-04-10T12:11:25.715587Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038397 not found 2025-04-10T12:11:25.715604Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038360 not found 2025-04-10T12:11:25.715622Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038345 not found 2025-04-10T12:11:25.715639Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038355 not found 2025-04-10T12:11:25.715654Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038383 not found 2025-04-10T12:11:25.715672Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038381 not found 2025-04-10T12:11:25.715689Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038379 not found 2025-04-10T12:11:25.715707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038398 not found 2025-04-10T12:11:25.715724Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038394 not found 2025-04-10T12:11:25.715740Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038362 not found 2025-04-10T12:11:25.715757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038374 not found 2025-04-10T12:11:25.715778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038357 not found 2025-04-10T12:11:25.715796Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038375 not found 2025-04-10T12:11:25.715813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038351 not found 2025-04-10T12:11:25.715828Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038406 not found 2025-04-10T12:11:25.715844Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038392 not found 2025-04-10T12:11:25.715858Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038377 not found 2025-04-10T12:11:25.715876Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038391 not found 2025-04-10T12:11:25.715898Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038396 not found 2025-04-10T12:11:25.718712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038388;self_id=[1:7491655974878774190:6511];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.719623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038349;self_id=[1:7491655974878774197:6514];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.723635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038364;self_id=[1:7491655974878774100:6489];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.724746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038405;self_id=[1:7491655974878774051:6481];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.726951Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038389 not found 2025-04-10T12:11:25.726994Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038387 not found 2025-04-10T12:11:25.727013Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038385 not found 2025-04-10T12:11:25.727037Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224038400 not found 2025-04-10T12:11:25.728546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038353;self_id=[1:7491655974878774111:6493];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.729980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038386;self_id=[1:7491655974878774182:6510];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.733177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038380;self_id=[1:7491655974878774118:6496];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.734842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038384;self_id=[1:7491655974878774115:6495];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.738896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038383;self_id=[1:7491655974878774195:6513];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.739837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038382;self_id=[1:7491655974878774126:6497];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.744476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038378;self_id=[1:7491655974878774203:6516];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.744743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038381;self_id=[1:7491655974878774109:6492];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.749407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038376;self_id=[1:7491655970583806474:6472];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.749914Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038379;self_id=[1:7491655974878774157:6503];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.755206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038407;self_id=[1:7491655970583806202:6466];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.755290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038398;self_id=[1:7491655974878774255:6520];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.760899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038394;self_id=[1:7491655974878774132:6500];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.760975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038402;self_id=[1:7491655974878774002:6475];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.766985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038362;self_id=[1:7491655974878774170:6505];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.767218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038404;self_id=[1:7491655970583806685:6474];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.772720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038374;self_id=[1:7491655974878774061:6483];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.772720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038397;self_id=[1:7491655974878774066:6485];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.778407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038357;self_id=[1:7491655974878774173:6506];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.778441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038360;self_id=[1:7491655970583806210:6470];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.783908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038345;self_id=[1:7491655970583806284:6471];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.783917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038375;self_id=[1:7491655974878774180:6509];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.789424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038355;self_id=[1:7491655974878774113:6494];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.789423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038351;self_id=[1:7491655974878774260:6522];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.794986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038387;self_id=[1:7491655974878774289:6527];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.794986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038406;self_id=[1:7491655974878774192:6512];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.800499Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038392;self_id=[1:7491655974878774128:6498];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.800500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038385;self_id=[1:7491655974878774175:6507];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.806054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038377;self_id=[1:7491655974878774309:6528];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.806054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038400;self_id=[1:7491655970583806682:6473];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.811514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038391;self_id=[1:7491655974878774275:6526];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.812607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038396;self_id=[1:7491655974878774012:6477];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-04-10T12:11:25.816776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038389;self_id=[1:7491655974878774104:6491];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-10T12:11:13.015703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655934860931359:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:13.015749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00030a/r3tmp/tmpdVQfSu/pdisk_1.dat 2025-04-10T12:11:13.350198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:13.390125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:13.390258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11135, node 1 2025-04-10T12:11:13.451107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:13.451757Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:11:13.451789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:11:13.495208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:13.495244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:13.495259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:13.495443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:64799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:13.829515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:14.043683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54450" , at schemeshard: 72057594046644480 2025-04-10T12:11:14.044194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:11:14.044228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-10T12:11:14.047608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T12:11:14.047802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-10T12:11:14.050327Z node 1 :TX_PROXY ERROR: Actor# [1:7491655934860932287:2611] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): 
(res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C8C03EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CD7D4A0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C3F27D1) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C41B668) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CDB44C6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CD84019) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C41A834) NUnitTest::TTestFactory::Execute()+2438 (0x1CD858E6) NUnitTest::RunMain(int, char**)+5213 (0x1CDAEA3D) ??+0 (0x7FEBE9D34D90) __libc_start_main+128 (0x7FEBE9D34E40) _start+41 (0x1924C029) >> TCreateAndDropViewTest::DropViewIfExists [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.0%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TCreateAndDropViewTest::DropViewIfExists [GOOD] Test command err: Trying to start YDB, gRPC: 63818, MsgBus: 3276 2025-04-10T12:11:22.727161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655972985591867:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:22.729721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0001a7/r3tmp/tmpgKoqUG/pdisk_1.dat 2025-04-10T12:11:23.229855Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:23.248821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:23.250006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:23.265759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63818, node 1 2025-04-10T12:11:23.465455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:23.465493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:23.465499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:23.465624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3276 TClient is connected to server localhost:3276 WaitRootIsUp 'Root'... 
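
The YdbLogStore::AlterLogTable failure above is the standard unittest value-equality macro at work: UNIT_ASSERT_VALUES_EQUAL prints both operands and a character-level diff, which is where "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" comes from; it interleaves PRECONDITION_FAILED and SUCCESS. A reduced sketch of the failing check follows (helper name and include path are assumptions; only the macro usage mirrors the log):

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/sdk/cpp/client/ydb_types/status_codes.h>

    // Hypothetical reduction of the check at ydb_logstore_ut.cpp:435: on a
    // cluster that rejects column stores ("Column stores are not supported",
    // StatusPreconditionFailed at propose time), the test never reaches the
    // ALTER step because this assertion fails first, exactly as logged.
    void AssertSchemeCallSucceeded(NYdb::EStatus status) {
        UNIT_ASSERT_VALUES_EQUAL(status, NYdb::EStatus::SUCCESS);
    }
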
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:24.169216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:25.804903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655985870494437:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:25.804971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655985870494425:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:25.805124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:25.814373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:11:25.829145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655985870494439:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:11:25.905306Z node 1 :TX_PROXY ERROR: Actor# [1:7491655985870494490:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:26.415676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:11:26.611590Z node 1 :TX_PROXY ERROR: Actor# [1:7491655990165461909:2410] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:26.611685Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710661, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T12:11:26.611933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjMwYWI3M2YtODQzZjM3MGMtMTdiNTY3MDItZDliNDNmZA==, ActorId: [1:7491655985870494421:2329], ActorState: ExecuteState, TraceId: 01jrfssdzt6m3c4b92fbmp7sh3, Create QueryResponse for error on request, msg: 2025-04-10T12:11:26.640469Z node 1 :TX_PROXY ERROR: Actor# [1:7491655990165461923:2418] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:26.640556Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710663, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-10T12:11:26.640833Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjMwYWI3M2YtODQzZjM3MGMtMTdiNTY3MDItZDliNDNmZA==, ActorId: [1:7491655985870494421:2329], ActorState: ExecuteState, TraceId: 01jrfsse10bsy0rm6symhrj5k8, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 31587, MsgBus: 22616 2025-04-10T12:11:27.406591Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491655995424076634:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:27.408449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0001a7/r3tmp/tmp3RFmHT/pdisk_1.dat 2025-04-10T12:11:27.551792Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:27.573644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:27.573741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:27.575938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
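
The two ExecError entries above come from pointing view DDL at /Root/table: the schemeshard type check fires ("path is not a view", EPathTypeTable) even though the path exists, so an IF EXISTS clause only covers the path-is-absent case, not a type mismatch. A hedged reconstruction of the offending statements (the DDL text is inferred from the test name and errors, not copied from the test source, and the query-service calls are an assumption):

    #include <util/generic/string.h>
    #include <ydb/public/sdk/cpp/client/ydb_query/client.h>

    // Assumed helper: submit one DDL statement through the query service.
    NYdb::NQuery::TExecuteQueryResult RunDdl(NYdb::NQuery::TQueryClient& client, const TString& ddl) {
        return client.ExecuteQuery(ddl, NYdb::NQuery::TTxControl::NoTx()).GetValueSync();
    }

    void DropViewCases(NYdb::NQuery::TQueryClient& client) {
        // Absent view plus IF EXISTS: succeeds with no effect.
        auto ok = RunDdl(client, "DROP VIEW IF EXISTS `/Root/noSuchView`;");
        // Existing path of the wrong type: fails with "path is not a view"
        // (ExecError), matching the TX_PROXY/KQP_GATEWAY errors in the log.
        auto bad = RunDdl(client, "DROP VIEW `/Root/table`;");
    }
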
TServer::EnableGrpc on GrpcPort 31587, node 2 2025-04-10T12:11:27.633156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:27.633183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:27.633197Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:27.633338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22616 TClient is connected to server localhost:22616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:28.104299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:28.119071Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-10T12:11:30.793576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491656008308979190:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:30.793650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7491656008308979182:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:30.794024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:30.798466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T12:11:30.821218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7491656008308979196:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:11:30.908933Z node 2 :TX_PROXY ERROR: Actor# [2:7491656008308979247:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:31.212969Z node 2 :TX_PROXY ERROR: Actor# [2:7491656012603946620:2384] txid# 281474976715662, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } |96.1%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |96.2%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.3%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] Test command err: 2025-04-10T12:11:22.569402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655973926818242:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:22.569469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:11:22.661631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491655974146132511:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:22.661741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0001ab/r3tmp/tmpYG8iRA/pdisk_1.dat 2025-04-10T12:11:23.262441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:23.263226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-10T12:11:23.263468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:23.263562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:23.267150Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T12:11:23.267301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:23.268936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:23.309290Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64435 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:11:23.605717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:23.647147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-10T12:11:23.675212Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72075186224037888
2025-04-10T12:11:23.675333Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Execute
2025-04-10T12:11:23.682260Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Complete
2025-04-10T12:11:23.682386Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Execute
2025-04-10T12:11:23.688965Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Complete
2025-04-10T12:11:23.718341Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Execute (sender=[1:7491655978221786033:2271], cookie=1, path="/dc-1/KesusQuoter")
2025-04-10T12:11:23.725111Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Complete (sender=[1:7491655978221786033:2271], cookie=1, status=SUCCESS)
AddQuoterResource: { Resource { ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 } } }
TClient::Ls request: /dc-1/KesusQuoter
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287083777 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED)
2025-04-10T12:11:23.761361Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491655978221786443:2585], cookie=0, path="Resource", config={ MaxUnitsPerSecond: 10 })
2025-04-10T12:11:23.761809Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 1 "Resource"
2025-04-10T12:11:23.766941Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491655978221786443:2585], cookie=0)
AddQuoterResource: { Resource { ResourcePath: "root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 PrefetchCoefficient: 1000 } } }
TClient::Ls request: /dc-1/KesusQuoter
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287083777 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED)
2025-04-10T12:11:23.771604Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491655978221786443:2585], cookie=0, path="root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 1000 })
2025-04-10T12:11:23.771971Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 2 "root"
2025-04-10T12:11:23.775192Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491655978221786443:2585], cookie=0)
AddQuoterResource: { Resource { ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { } } }
TClient::Ls request: /dc-1/KesusQuoter
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287083777 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED)
2025-04-10T12:11:23.777711Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491655978221786443:2585], cookie=0, path="root/leaf", config={ })
2025-04-10T12:11:23.778073Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 3 "root/leaf"
2025-04-10T12:11:23.781041Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491655978221786443:2585], cookie=0)
2025-04-10T12:11:23.781373Z node 1 :QUOTER_SERVICE TRACE: Request({ Operator: And Deadline: no Cookie: 0 [ { 1, "/dc-1/KesusQuoter":"root/leaf" } ] })
2025-04-10T12:11:23.782245Z node 1 :QUOTER_SERVICE INFO: resolve new quoter /dc-1/KesusQuoter
2025-04-10T12:11:23.782382Z node 1 :QUOTER_SERVICE INFO: path resolved as Kesus /dc-1/KesusQuoter
2025-04-10T12:11:23.783492Z node 1 :QUOTER_SERVICE INFO: resolve resource root/leaf on quoter /dc-1/KesusQuoter
2025-04-10T12:11:23.783536Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Created kesus quoter proxy. Tablet id: 72075186224037888
2025-04-10T12:11:23.783716Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Connecting to kesus
2025-04-10T12:11:23.784643Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: ProxyRequest "root/leaf"
2025-04-10T12:11:23.785119Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Successfully connected to tablet
2025-04-10T12:11:23.789059Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvSubscribeOnResourcesResult to [1:7491655978221786467:2305]. Cookie: 1.
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1000 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-04-10T12:11:23.789107Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Subscribe on quoter resources (sender=[1:7491655978221786467:2305], cookie=1)
2025-04-10T12:11:23.789892Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: SubscribeOnResourceResult({ Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1000 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 })
2025-04-10T12:11:23.789920Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Initialized new session with resource "root/leaf"
2025-04-10T12:11:23.789988Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxySession("root/leaf", 3)
2025-04-10T12:11:23.790995Z node 1 :QUOTER_SERVIC ... e: 0
2025-04-10T12:11:25.000666Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.000785Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.000855Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.000898Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.000992Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.001029Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.100745Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.100889Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.100927Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.100963Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.101400Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.101428Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.202162Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.202272Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.202323Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.202364Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.202421Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.202447Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.300939Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.301034Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.301056Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.301083Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.301171Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.301196Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.400579Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.400692Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.400718Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.400756Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.400869Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.400902Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.500960Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.501079Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.501105Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.501132Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.501199Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.501245Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.600725Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.600863Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.600891Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.600920Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.601012Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.601036Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.700621Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.700748Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.700776Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.700826Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.700910Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.700950Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.800471Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.800610Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.800639Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.800671Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.800775Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.800836Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:25.900994Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:25.901120Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:25.901149Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:25.901185Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:25.901279Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:25.901300Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
2025-04-10T12:11:26.000585Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491655978221786467:2305]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-10T12:11:26.000775Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } })
2025-04-10T12:11:26.000830Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10}
2025-04-10T12:11:26.000881Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }])
2025-04-10T12:11:26.000967Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter
2025-04-10T12:11:26.001013Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0
assertion failed at ydb/core/quoter/ut_helpers.cpp:121, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline)
TBackTrace::Capture()+28 (0x18E4FBDC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1930CD10)
NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+3257 (0x18A53289)
NKikimr::TKesusQuoterTestSetup::GetQuota(TBasicString> const&, TBasicString> const&, unsigned long, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+575 (0x18A5480F)
NKikimr::NTestSuiteQuoterWithKesusTest::TTestCasePrefetchCoefficient::Execute_(NUnitTest::TTestContext&)+2163 (0x1899A883)
std::__y1::__function::__func, void ()>::operator()()+280 (0x189D92F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19343D56)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19313889)
NKikimr::NTestSuiteQuoterWithKesusTest::TCurrentTest::Execute()+1204 (0x189D82A4)
NUnitTest::TTestFactory::Execute()+2438 (0x19315156)
NUnitTest::RunMain(int, char**)+5213 (0x1933E2CD)
??+0 (0x7F09CD842D90)
__libc_start_main+128 (0x7F09CD842E40)
_start+41 (0x16320029)
|96.7%| [TM] {RESULT} ydb/core/quoter/ut/unittest
|96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage
Test command err:
2025-04-10T12:11:07.053994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T12:11:07.054297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T12:11:07.054541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0004a2/r3tmp/tmpXUUlpL/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 26614, node 1
TClient is connected to server localhost:9284
2025-04-10T12:11:08.046475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T12:11:08.107634Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T12:11:08.116141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:11:08.116209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:11:08.116251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:11:08.116627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-10T12:11:08.155838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:11:08.156777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:11:08.169954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T12:11:08.344280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480
Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2
2025-04-10T12:11:08.471639Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvBoot
2025-04-10T12:11:08.473187Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvRestored
2025-04-10T12:11:08.473517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T12:11:08.500505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T12:11:08.501061Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888
2025-04-10T12:11:08.510716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:11:08.511064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:11:08.511364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T12:11:08.511498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T12:11:08.511615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T12:11:08.511768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:11:08.511907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T12:11:08.512050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T12:11:08.512188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T12:11:08.512315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T12:11:08.512461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T12:11:08.512587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T12:11:08.536702Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T12:11:08.538729Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888
2025-04-10T12:11:08.539074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T12:11:08.539159Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T12:11:08.539395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:11:08.539597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T12:11:08.539689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T12:11:08.539744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T12:11:08.539852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T12:11:08.539932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T12:11:08.539980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T12:11:08.540016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T12:11:08.540252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:11:08.540332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T12:11:08.540385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T12:11:08.540432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T12:11:08.540545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T12:11:08.540633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T12:11:08.540698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T12:11:08.540747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T12:11:08.540851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T12:11:08.540904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T12:11:08.540942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T12:11:08.540998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T12:11:08.541065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T12:11:08.541099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T12:11:08.541640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=77;
2025-04-10T12:11:08.541743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39;
2025-04-10T12:11:08.542625Z node 1 :TX_COLUMNSHARD INFO: tablet_id= ... 45528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;;
2025-04-10T12:11:38.353627Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1282:3083], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished
2025-04-10T12:11:38.354203Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1283:3084], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex
2025-04-10T12:11:38.354249Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888
2025-04-10T12:11:38.354509Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[29] (CS::GENERAL) apply at tablet 72075186224037888
2025-04-10T12:11:38.358548Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1
2025-04-10T12:11:38.358733Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911848;raw_bytes=96858239;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=16012912;raw_bytes=539964581;count=8;records=453277} inactive {blob_bytes=24530552;raw_bytes=822698724;count=16;records=696683} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888
TEvBlobStorage::TEvPut
tId=72075186224037888;c=1;:75/0:size=4378;count=18;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;;
TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:75/0:size=4447;count=19;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;;
2025-04-10T12:11:38.371170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1;
2025-04-10T12:11:38.371246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=with_appended.cpp:65;portions=27,;task_id=f37fa97c-160411f0-a8875602-79228d78;
2025-04-10T12:11:38.371589Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:27;path_id:3;records_count:100082;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3527144;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3;
2025-04-10T12:11:38.371816Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/491609.000000s;;
2025-04-10T12:11:38.371947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::f37fa97c-160411f0-a8875602-79228d78;
2025-04-10T12:11:38.372028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:18931160;portions_count:27;);
2025-04-10T12:11:38.372080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-04-10T12:11:38.372154Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-04-10T12:11:38.372222Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1;
2025-04-10T12:11:38.372291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000;
2025-04-10T12:11:38.372343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-10T12:11:38.372399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-04-10T12:11:38.372443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-04-10T12:11:38.372523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s;
2025-04-10T12:11:38.372581Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-04-10T12:11:38.372823Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1
VERIFY failed (2025-04-10T12:11:38.373034Z): tablet_id=72075186224037888;task_id=f37fa97c-160411f0-a8875602-79228d78;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18D9C6A9)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18D8A93B)
NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A0A28D6)
NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48A2F961)
NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x3079BF8D)
NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EAB4133)
NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E997B10)
NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E7DDF04)
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E77AC85)
NActors::IActor::Receive(TAutoPtr&)+237 (0x19FD40AD)
NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35AD47C5)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35ACD03A)
NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35AC9DE1)
NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36452850)
NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x1897A088)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1898C378)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192493C6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19218EF9)
NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1898B324)
NUnitTest::TTestFactory::Execute()+2438 (0x1921A7C6)
NUnitTest::RunMain(int, char**)+5213 (0x1924393D)
??+0 (0x7F70D971FD90)
__libc_start_main+128 (0x7F70D971FE40)
_start+41 (0x162EC029)
>> test_workload.py::TestYdbWorkload::test
|97.0%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|97.0%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD]
>> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt]
>> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt]
|97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD]
|97.1%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test
>> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
Test command err:
2025-04-10T12:11:14.013887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-10T12:11:14.014136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-10T12:11:14.014353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000339/r3tmp/tmp5ldRPb/pdisk_1.dat
2025-04-10T12:11:14.420361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-10T12:11:14.470432Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T12:11:14.511832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:11:14.512022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:11:14.523828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-10T12:11:14.615531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-10T12:11:14.974209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:14.974365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:14.974443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:14.980362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-10T12:11:15.143237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-10T12:11:15.206233Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:11:19.151670Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfss2mv7xsy58enex9axgjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY0YjcwNDItODUzZjNlMTItNDI2OTI2ZTUtZTczNGFjODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:22.474646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfss6vdav8081d797jgpkkq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNjMmM4NTItZTVlMWNkNi05YjliNDg5MC0yM2M3ZjhiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:25.761805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfssa14638x118ec8kgtjxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY1YmIzMjYtNTg5ZjUxNTYtNzcyYjJkY2ItZTJiYjVhYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:28.945744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfssd80dp9kd813pq2gdw5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc4ZjYxY2MtZTc3NTRkMWMtZTJhMjViMzgtNzU4OGQyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:32.823402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfssgasahtrsxrsb7anf2cm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU2ODFjZjktODA5Y2Y4YmUtMWEzMjQ1NGMtYjBmZDc0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:36.523046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfssm431twgb1vzqy2xxp5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFhNGRhM2YtNWE5NDhmMjctMWU4NDA1Y2UtZjJmZDc5ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:40.158534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfssqqnb58sr4s70yde7x4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE5MDQ2MTctYmM1Njc0ZTctYTcyYjc3ODAtNTg2OTdlMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:43.798746Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfssva9d0jehbqv0v39r42v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhiZTBjNjctNTFmYmUzYWItNWVkOTViMGUtZGU4ZTIyMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:47.669150Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfssyvrbaygmd1pq3d78vw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzNjc3YmUtZjRhN2VlNGQtYzk3MWIxY2EtMTk4ZmQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-10T12:11:51.662884Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jrfst2mv51h50dy40se7a4v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q5ZDJlYWYtNjAxMzBjZDAtMWYxMDNkZGMtODIxYzA3NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... waiting for stats after upsert
2025-04-10T12:11:54.031412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T12:11:54.031479Z node 1 :IMPORT WARN: Table profiles were not loaded
Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7222 Memory: 17425464 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0
... waiting for stats after compaction
Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 8069 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0
Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 8069 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0
2025-04-10T12:11:58.687791Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfstd5997t9qasf2a7b6pnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc0NzU3ZTAtZTM1NGFmNTUtM2MxYWMwODItYTUwZGJkNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD]
|97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD]
|97.2%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
|97.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt]
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt]
>> test_alter_compression.py::TestAlterCompression::test[alter_compression]
>> KqpStats::SysViewClientLost [FAIL]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL]
Test command err:
Trying to start YDB, gRPC: 10471, MsgBus: 2210
2025-04-10T12:11:19.803908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655959718403214:2068];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:11:19.803973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0002cd/r3tmp/tmpop5w8U/pdisk_1.dat
2025-04-10T12:11:20.218599Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T12:11:20.240792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-10T12:11:20.241039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-10T12:11:20.242793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10471, node 1
2025-04-10T12:11:20.298034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-10T12:11:20.298062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-10T12:11:20.298074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-10T12:11:20.298216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2210
TClient is connected to server localhost:2210
WaitRootIsUp 'Root'...
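The harness now polls the scheme root until it answers ("WaitRootIsUp" above, satisfied by the TClient::Ls exchange that follows). A hedged reconstruction of that polling loop; the helper names, stub, and retry policy are assumptions for illustration, not the actual test_client.cpp code:

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    // Stub standing in for the Ls RPC: pretend the root becomes visible on the third probe.
    static bool LsSucceeds(const std::string& path) {
        static int calls = 0;
        std::cout << "TClient::Ls request: " << path << "\n";
        return ++calls >= 3;
    }

    // Poll until the scheme root responds, as the "WaitRootIsUp '<root>'..." /
    // "WaitRootIsUp '<root>' success." pair in the log suggests.
    static bool WaitRootIsUp(const std::string& root) {
        for (int attempt = 0; attempt < 100; ++attempt) {
            if (LsSucceeds(root)) {
                std::cout << "WaitRootIsUp '" << root << "' success.\n";
                return true;
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;
    }

    int main() { return WaitRootIsUp("Root") ? 0 : 1; }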
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-10T12:11:20.872201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:20.889097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-10T12:11:20.904617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:21.051367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:21.215306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:21.297837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:22.980445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655972603306897:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:22.980560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:23.340434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.369565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.400315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.437575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.470436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.539929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-10T12:11:23.584083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655976898274706:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:23.584165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655976898274711:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:23.584192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-10T12:11:23.588026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-10T12:11:23.598796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655976898274713:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-10T12:11:23.673830Z node 1 :TX_PROXY ERROR: Actor# [1:7491655976898274766:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-10T12:11:24.572410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-04-10T12:11:24.807792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655959718403214:2068];send_to=[0:7307199536658146131:7762515];
2025-04-10T12:11:24.807875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-10T12:11:35.186330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-10T12:11:35.186377Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-10T12:11:58.830210Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287118818, txId: 281474976710672] shutting down
2025-04-10T12:11:58.889499Z node 1 :RPC_REQUEST WARN: Client lost
2025-04-10T12:12:00.100934Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287120092, txId: 281474976710674] shutting down
2025-04-10T12:12:01.290913Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287121284, txId: 281474976710676] shutting down
2025-04-10T12:12:02.543347Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287122502, txId: 281474976710678] shutting down
2025-04-10T12:12:03.810260Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287123798, txId: 281474976710680] shutting down
2025-04-10T12:12:05.077433Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287125070, txId: 281474976710682] shutting down
2025-04-10T12:12:06.361079Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287126353, txId: 281474976710684] shutting down
2025-04-10T12:12:07.571400Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287127564, txId: 281474976710686] shutting down
2025-04-10T12:12:08.763705Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287128753, txId: 281474976710688] shutting down
2025-04-10T12:12:10.023554Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287130014, txId: 281474976710690] shutting down
2025-04-10T12:12:11.269366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287131253, txId: 281474976710692] shutting down
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1.
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B82265 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BAB3BC 15. ??:0: ?? @ 0x7FB0809D7D8F 16. ??:0: ?? @ 0x7FB0809D7E3F 17. ??:0: ?? @ 0x16611028 >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.5%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] |97.7%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |97.7%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> Secret::SimpleQueryService [GOOD] >> Secret::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2025-04-10T12:10:39.862814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:10:39.863012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:10:39.863149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0003ea/r3tmp/tmpOJwAEY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10673, node 1 TClient is connected to server localhost:6156 2025-04-10T12:10:40.578441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:40.626672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:40.632011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:40.632089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:40.632133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:40.632562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:40.669611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:40.670640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:40.682603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-10T12:10:52.523279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.523417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.523508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:52.533232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-10T12:10:52.562814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:822:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-10T12:10:52.612708Z node 1 :TX_PROXY ERROR: Actor# [1:873:2716] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:53.139874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-10T12:10:54.737445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:55.375435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-10T12:10:56.258623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T12:10:56.928779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:57.371870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T12:10:58.795235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-10T12:10:59.297853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-10T12:11:15.080058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:11:15.080122Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-10T12:11:38.382535Z node 1 :KQP_EXECUTER ERROR: TxId: 
281474976715721. Ctx: { TraceId: 01jrfsss46dpsh38hkawqam2jt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwNGMzZC03MmQ1N2RkLWFkMmZkNTg5LWYzMGJhYjQw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T12:12:01.817056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480 2025-04-10T12:12:03.240525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2025-04-10T12:12:05.097587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715765:0, at schemeshard: 72057594046644480 2025-04-10T12:12:05.587607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715768:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T12:12:18.454606Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715780. Ctx: { TraceId: 01jrfsv0bf0wxwj2qtmtjxx959, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRmZjJiODMtYjI4YzhkYmUtZmE3MjgxNDMtYjNkYzkxN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-10T12:12:59.297415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715826. Ctx: { TraceId: 01jrfsw8b94wysq1r38pyg9qv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc0MmZkODAtMWQ4ZjY4NzUtMTMzYzMzZDctZjk3Y2Q0MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2025-04-10T12:10:43.389833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:10:43.390067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:10:43.390295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00038a/r3tmp/tmpYedVPl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28667, node 1 TClient is connected to server localhost:21400 2025-04-10T12:10:43.995193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:44.044123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:44.048432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:44.048497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:44.048534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:44.048944Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:10:44.085649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:44.085824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:44.097862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-10T12:10:56.220330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:811:2677], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:56.220510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:56.224409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-10T12:10:56.414224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:56.414333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:56.414684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2760], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:56.419695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-10T12:10:56.479510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:935:2762], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:10:56.798647Z node 1 :TX_PROXY ERROR: Actor# [1:1030:2828] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:57.314770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:57.782991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-10T12:10:58.426639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-10T12:10:59.109942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-10T12:10:59.470405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-10T12:11:00.571615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-10T12:11:00.952527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-10T12:11:17.428936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:11:17.429027Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-10T12:11:40.637322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715719. 
Ctx: { TraceId: 01jrfssvbf4a556fhh3qxbc3jp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRmMDg3N2YtNWIyMDQ0NWQtMjRkZDFmMjUtNjY0YzY3ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T12:12:04.140019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715737:0, at schemeshard: 72057594046644480 2025-04-10T12:12:05.376557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-04-10T12:12:07.158892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715755:0, at schemeshard: 72057594046644480 2025-04-10T12:12:07.791371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715758:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-10T12:12:21.214946Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715770. Ctx: { TraceId: 01jrfsv2x26thwmq3a9mkdrvap, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkZDFkNDQtY2EyMTEyMjYtNDdkMGU3NmQtYzZmMGU0ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-10T12:13:00.664742Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715810. Ctx: { TraceId: 01jrfsw9me0dbaqkbywmr2tz70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmEyMmU1NmQtY2M0NGM0ZmEtY2JjZmIwYzEtNTNlNWQ3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |97.9%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] Test command err: 2025-04-10T12:11:07.218449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3161:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.220086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.220883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.222211Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3118:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.222886Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1522:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.224355Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.224619Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.224725Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.226079Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.227263Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1519:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.228849Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.229815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3157:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.230310Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.231132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.231377Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3167:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.232889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.233218Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3112:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.233430Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3115:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.234168Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.235175Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.235230Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.235331Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.236932Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.237005Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:11:07.237960Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3164:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:07.239236Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:07.240201Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T12:11:07.724563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:07.964702Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T12:11:07.979080Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T12:11:08.388174Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 4069, node 1 TClient is connected to server localhost:12549 2025-04-10T12:11:08.739803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:08.739883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:08.739938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:08.740547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:12:48.540600Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3122:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.543587Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.544934Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.546950Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:1972:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.547229Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1975:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.547933Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.549172Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3128:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.549415Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.549507Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.550191Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.550280Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.550339Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.551222Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.551307Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.551748Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.555622Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1981:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.556268Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3118:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.556683Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.557742Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.558240Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.559040Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.559104Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.559161Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.559572Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-10T12:12:48.562398Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1978:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:12:48.564285Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:12:48.565071Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-10T12:12:49.107043Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:12:49.391233Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-10T12:12:49.430766Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-10T12:12:50.149785Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 32761, node 10 TClient is connected to server localhost:7402 2025-04-10T12:12:50.682671Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:12:50.682759Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:12:50.682824Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:12:50.683943Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |98.0%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-04-10T12:10:53.366494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:10:53.366780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:10:53.367185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/00032c/r3tmp/tmpoZPquz/pdisk_1.dat 2025-04-10T12:10:53.920497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:53.920629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:53.920677Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:10:53.920925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-10T12:10:53.920960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:10:54.041690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T12:10:54.043880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.045487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T12:10:54.047710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T12:10:54.047812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.047970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:10:54.049470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T12:10:54.050417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T12:10:54.050488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:10:54.050527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:10:54.050764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.050813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.050911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.050965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T12:10:54.051058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T12:10:54.051094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T12:10:54.052050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:10:54.052669Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:10:54.052713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:10:54.052870Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.052909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.052980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.053034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T12:10:54.053098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T12:10:54.053177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:10:54.053549Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:10:54.053577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:10:54.053681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.053720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:10:54.053761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.053789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:10:54.053838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T12:10:54.053867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:10:54.053913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T12:10:54.065029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T12:10:54.065837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:10:54.065897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-10T12:10:54.066821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T12:10:54.068262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T12:10:54.068342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T12:10:54.068385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-10T12:10:54.068541Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-10T12:10:54.068997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:54.069041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:54.069078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:10:54.069210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-10T12:10:54.069237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T12:10:54.069316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T12:10:54.069356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T12:10:54.069397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T12:10:54.123487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-10T12:10:54.123620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T12:10:54.123660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:10:54.124389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T12:10:54.124466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-10T12:10:54.169359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:54.170092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:54.183333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:54.265439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-10T12:10:54.266237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:54.266282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:10:54.266314Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:10:54.266546Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-10T12:10:54.266582Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T12:10:54.266686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T12:10:54.266829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... s: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T12:13:10.287041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T12:13:10.308055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.308151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.308188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T12:13:10.308275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T12:13:10.308319Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T12:13:10.308437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-04-10T12:13:10.308525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T12:13:10.308558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-10T12:13:10.308633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-10T12:13:10.308751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:13:10.319437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.319532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.319570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T12:13:10.395399Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T12:13:10.395989Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1580:3193], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 24 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T12:13:10.396038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-10T12:13:10.396078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0024 2025-04-10T12:13:10.396211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T12:13:10.396257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T12:13:10.451188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.451275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.451308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T12:13:10.451389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T12:13:10.451419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T12:13:10.451546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-04-10T12:13:10.451629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0, RowCount 2, DataSize 54 2025-04-10T12:13:10.451661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: 
datashardId 72075186224037895, followerId 0 2025-04-10T12:13:10.451724Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-10T12:13:10.451824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:13:10.462372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.462465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:13:10.462501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T12:13:10.525029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.525139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.525233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T12:13:10.525272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T12:13:10.547068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T12:13:10.632550Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-10T12:13:10.707606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.707698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.707792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T12:13:10.707834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T12:13:10.729610Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T12:13:10.814743Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-10T12:13:10.899981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.900088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-10T12:13:10.901356Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmEzNTFkYjEtODIxYmVjMDgtYmFiMmI4N2UtNjdkMzQ2YQ==, ActorId: [1:2034:3520], ActorState: ExecuteState, TraceId: 01jrfsrktp1n2rjv9csvkp8fws, Create QueryResponse for error on request, msg: 2025-04-10T12:13:10.901651Z 
node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=NmEzNTFkYjEtODIxYmVjMDgtYmFiMmI4N2UtNjdkMzQ2YQ==, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b 2025-04-10T12:13:10.903309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-10T12:13:10.903364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture()+28 (0x191926EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1964F830) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x4905B847) NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26179 (0x18DE2DB3) std::__y1::__function::__func, void ()>::operator()()+280 (0x18D9DEF8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19686856) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x196563A9) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18D9CDA4) NUnitTest::TTestFactory::Execute()+2438 (0x19657C76) NUnitTest::RunMain(int, char**)+5213 (0x19680DCD) ??+0 (0x7F72EB4BDD90) __libc_start_main+128 (0x7F72EB4BDE40) _start+41 (0x16510029) |98.2%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T12:10:21.071717Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:10:21.228703Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T12:10:21.249269Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T12:10:21.249692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:10:21.275692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:10:21.275975Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:10:21.284319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:21.284532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:21.284752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:21.284920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:21.285046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:21.285183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:21.285298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:21.285419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:21.285571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:21.285697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.285814Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:21.285935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:21.312237Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T12:10:21.318103Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:10:21.318491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:10:21.318547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:10:21.318774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:21.319099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:10:21.319182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:10:21.319224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:10:21.319316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:10:21.319387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:10:21.319449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:10:21.319485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:10:21.319683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:21.319761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:10:21.319818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:10:21.319851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:10:21.319962Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:10:21.320014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:10:21.320079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:10:21.320114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:10:21.320180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:10:21.320238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:10:21.320294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:10:21.320343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:10:21.320379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:10:21.320410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:10:21.320777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-10T12:10:21.320881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-10T12:10:21.320975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-10T12:10:21.321078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-10T12:10:21.321241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:10:21.321297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:10:21.321329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:10:21.321598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:10:21.321651Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.321694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.321842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:10:21.321890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:10:21.321924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:10:21.322132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:10:21.322210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:10:21.322242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T1 ... me.h:532:20 #26 0x3146be97 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5 #27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13 #28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5 #29 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #30 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #31 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #32 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #33 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #36 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #37 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #38 0xff92093 in 
NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a1b7206 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a1b7206 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a1b7206 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a1b7206 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x2a1b7206 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25 #6 0x2a1b7206 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51 #7 0x2a1a9544 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5 #8 0x2a1a9544 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23 #9 0x2a18eb4d in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30 #10 0x2a18eb4d in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #11 0x2a1842ba in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #12 0x2a1842ba in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #13 0x2a1842ba in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #14 0x2a1842ba in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #15 0x2a1842ba in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12 #16 0x25bd53a5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23 #17 0x25c2d860 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53 #18 0x25de8b94 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21 #19 0x25dea128 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15 #20 0x25d99e92 in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:615:22 #21 0x25d1cbc6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13 #22 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #23 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) 
/-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #24 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #25 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #26 0x3148fad2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13 #27 0x3146f664 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20 #28 0x3146f664 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:102:26 #29 0x31470de1 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:128:16 #30 0x31471e10 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:143:16 #31 0xff9c125 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9 #32 0xff9888b in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #45 0x7f78c262ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 
490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 3125688 byte(s) leaked in 55830 allocation(s). ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: 2025-04-10T12:10:21.071688Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:10:21.228745Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-10T12:10:21.242735Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-10T12:10:21.244924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:10:21.267073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:10:21.267823Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:10:21.279234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:21.279469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:21.279698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:21.279852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:21.279997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:21.280126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:21.280240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:21.280360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:21.280464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:21.280595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.280720Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:21.280848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:21.310217Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T12:10:21.315897Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:10:21.316360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:10:21.316439Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:10:21.316695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:21.318441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:10:21.318532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:10:21.318599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:10:21.318705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:10:21.318808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:10:21.318856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:10:21.318885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:10:21.319075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:21.319158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:10:21.319199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:10:21.319228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:10:21.319352Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:10:21.319409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:10:21.319456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:10:21.319491Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:10:21.319557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:10:21.319620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:10:21.319652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:10:21.319702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:10:21.319739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:10:21.319790Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:10:21.320222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-10T12:10:21.320314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-10T12:10:21.320946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=581; 2025-04-10T12:10:21.321081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-10T12:10:21.321247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:10:21.321323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:10:21.321360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:10:21.321629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:10:21.321683Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.321713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:10:21.321864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:10:21.321911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:10:21.321946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:10:21.322136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:10:21.322204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:10:21.322236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T ... ikimr::TEvTxProcessing::TEvPlanStepAck::TPtr const&)> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x3146cf82 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x3146cf82 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x3146be97 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5 #27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13 #28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5 #29 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #30 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #31 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #32 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #33 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #35 0x1086fe25 in operator() 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #36 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #37 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #38 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0xf4fb11d in NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20 #2 0x1c9aded9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38 #3 0x1c9aded9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23 #4 0x1c9ac26f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40 #5 0x1c9b292b in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5 #6 0x1c9bf54b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17 #7 0x25b38dab in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29 #8 0x25b3486e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5 #9 0x2a213c7a in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30 #10 0x2a213c7a in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24 #11 0x2a215507 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9 #12 0x2a0e6a09 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19 #13 0x2a0e5370 in 
NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9 #14 0x2a0e4ae1 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13 #15 0xff0a29e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19 #16 0x25cccf3d in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13 #17 0x1849b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #18 0x184609f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9 #19 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #20 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #21 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #22 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #23 0x3148df63 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #24 0x3146cf82 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #25 0x3146cf82 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #26 0x3146be97 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5 #27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13 #28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5 #29 0xff98779 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13 #30 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #31 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0xff931e7 in __call<(lambda at 
/-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 SUMMARY: AddressSanitizer: 2637722 byte(s) leaked in 62743 allocation(s). >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |98.4%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14899, MsgBus: 9668 2025-04-10T12:11:08.953330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655915290829661:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:08.953457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0002d9/r3tmp/tmp96L7Yi/pdisk_1.dat 2025-04-10T12:11:09.340190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:09.340367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:09.346302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:09.366204Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14899, node 1 2025-04-10T12:11:09.402080Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:11:09.405315Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:11:09.445872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:11:09.445908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:11:09.445921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:11:09.446090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9668 TClient is connected to server localhost:9668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:11:09.909201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:09.953623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:11:12.042094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655932470699912:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:12.042106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655932470699904:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:12.042235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:12.045505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T12:11:12.062561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655932470699918:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T12:11:12.123505Z node 1 :TX_PROXY ERROR: Actor# [1:7491655932470699993:2689] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:13.953565Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655915290829661:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:11:13.953727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:11:24.365984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:11:24.366025Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:40.459898Z node 1 :TX_DATASHARD ERROR: CPU usage 61.0982 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] 2025-04-10T12:12:40.954697Z node 1 :TX_DATASHARD ERROR: CPU usage 64.674 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] 2025-04-10T12:13:17.030381Z node 1 :TX_DATASHARD ERROR: CPU usage 69.3598 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/LargeTable] 2025-04-10T12:13:31.828838Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-10T12:13:41.790549Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014389440}: tablet 72075186224037890 could not find a group for channel 1 pool /Root:test 2025-04-10T12:13:41.790619Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014389440}: tablet 72075186224037890 wasn't changed 2025-04-10T12:13:41.790663Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014389440}: tablet 72075186224037890 skipped channel 1 2025-04-10T12:13:42.022026Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014239808}: tablet 72075186224037890 could not find a group for channel 0 pool /Root:test 2025-04-10T12:13:42.022066Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014239808}: tablet 72075186224037890 could not find a group for channel 1 pool /Root:test 2025-04-10T12:13:42.022078Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014239808}: tablet 72075186224037890 wasn't changed 2025-04-10T12:13:42.022091Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014239808}: tablet 72075186224037890 skipped channel 0 2025-04-10T12:13:42.022107Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923014239808}: tablet 72075186224037890 skipped channel 1 2025-04-10T12:13:43.719456Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-04-10T12:13:43.903750Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 
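The entries that follow are the payload of the disk-quota tests: VDisk space zones degrade from YELLOW to LIGHT_ORANGE and the datashard starts rejecting proposals with OUT_OF_SPACE. To lift that cascade out of a transcript this size, a plain grep over the saved log suffices; `ya_log.txt` below is a placeholder for whatever path the run wrote its log to:

```bash
# Pull disk-space zone transitions and the transactions they caused to fail;
# -n keeps line numbers so the events can be read back in order.
grep -nE 'TDskSpaceTrackerActor|OUT_OF_SPACE|out of disk space' ya_log.txt
```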
2025-04-10T12:13:44.720002Z node 1 :BS_SKELETON WARN: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01
2025-04-10T12:13:44.788103Z node 1 :TX_DATASHARD ERROR: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710757
2025-04-10T12:13:44.788166Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710757 at tablet 72075186224037890 errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710757) |
2025-04-10T12:13:44.788261Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710757 at tablet 72075186224037890 status: ERROR errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710757) |
2025-04-10T12:13:44.788457Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7491656585305741418:2337] TxId: 281474976710757. Ctx: { TraceId: 01jrfsxmb5dr352jy2jn7cd7wv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJjZDExY2YtZGI0ZGJlMzktOWEwODIxZGYtNjBlNmQ4YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710757;
2025-04-10T12:13:44.810134Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWJjZDExY2YtZGI0ZGJlMzktOWEwODIxZGYtNjBlNmQ4YWU=, ActorId: [1:7491655928175732600:2337], ActorState: ExecuteState, TraceId: 01jrfsxmb5dr352jy2jn7cd7wv, Create QueryResponse for error on request, msg: Got out of space. Successfully inserted 30 x 0 lines, each of size 1048576bytes
2025-04-10T12:13:44.904794Z node 1 :BS_SKELETON ERROR: PDiskId# 1 VDISK[82000000:_:0:0:0]: (2181038080) TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01
|98.5%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
|98.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL]
|98.6%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ...
results_accumulator.log} >> KqpJoinOrder::Chain65Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes [GOOD] Test command err: Trying to start YDB, gRPC: 3128, MsgBus: 1145 2025-04-10T12:10:13.041888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655679964944745:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:13.042024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/0001f7/r3tmp/tmpJGJutO/pdisk_1.dat 2025-04-10T12:10:13.379343Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3128, node 1 2025-04-10T12:10:13.412080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:13.412258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:13.414572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:13.421026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:13.421059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:13.421076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:13.421221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1145 TClient is connected to server localhost:1145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:10:13.910011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:15.353877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880002:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.353955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.499656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.586014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880104:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.586065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.595267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.618594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880179:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.618641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.627516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.654358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880254:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.654447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.663610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.690398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880329:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.690481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.700118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.727639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880404:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.727714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.737335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.764692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880480:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.764749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.775979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.802699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880556:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.802778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.814113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.843952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880632:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.844027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.855281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.882028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880708:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.882121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.891131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.914743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880785:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.914793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:15.927179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-10T12:10:15.954560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655688554880862:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10 ... :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655697144818826:2811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.693495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.707268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-10T12:10:17.742923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655697144818907:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.743001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.757517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-10T12:10:17.798724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655697144818988:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.798809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.810946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-10T12:10:17.880461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655697144819071:2838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.880552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.893996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-10T12:10:17.970445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655697144819159:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.970547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:17.984917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.024919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786536:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.024984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.038870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.041798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655679964944745:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:18.041851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:18.083571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786619:2866], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.083639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.098220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.162270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786701:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.162351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.175710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.210980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786781:2884], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.211052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.222979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.255069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786860:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.255143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.268590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.302279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439786939:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.302363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.314972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.348557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439787021:2912], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.348626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.358828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-10T12:10:18.395384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439787099:2921], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.395480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.395560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655701439787104:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:18.398821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710723:3, at schemeshard: 72057594046644480 2025-04-10T12:10:18.408042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655701439787106:2925], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710723 completed, doublechecking } 2025-04-10T12:10:18.510432Z node 1 :TX_PROXY ERROR: Actor# [1:7491655701439787169:5767] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 70], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:10:28.323252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:28.323279Z node 1 :IMPORT WARN: Table profiles were not loaded
: Warning: Execution, code: 1060
: Warning: Cost Based Optimizer could not be applied to this query: Enumeration is too large, use PRAGMA MaxDPHypDPTableSize='4294967295' to disable the limitation, code: 8000 |98.7%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |98.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> KqpService::PatternCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::PatternCache [GOOD] Test command err: Trying to start YDB, gRPC: 24736, MsgBus: 22621 2025-04-10T12:10:46.012587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491655817793867458:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:46.012721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000417/r3tmp/tmp0dParX/pdisk_1.dat 2025-04-10T12:10:46.307938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24736, node 1 2025-04-10T12:10:46.371851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:10:46.371977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:10:46.373805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:10:46.386712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:10:46.386739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:10:46.386743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:10:46.386836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22621 TClient is connected to server localhost:22621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
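The Chain65Nodes verdict above carries the one substantive detail of its suite: with 65 tables in the join chain, DPhyp enumeration exceeds the optimizer's table-size limit, so the cost-based optimizer is skipped (code 8000) and the warning names the escape hatch itself. A sketch of applying it, assuming bash; only the pragma text is taken from the log, the query body is elided:

```bash
# Prepend the pragma quoted in the warning to the join query before re-running.
# The 65-table chain join itself is elided here.
cat > query_with_pragma.yql <<'EOF'
PRAGMA MaxDPHypDPTableSize = '4294967295';
-- ... the chain join over 65 tables goes here ...
EOF
```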
2025-04-10T12:10:46.950066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:10:46.968792Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:10:49.182261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770067:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770060:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770068:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770064:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770069:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770066:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770034:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.182887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.184570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770090:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.184660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.185799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770098:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.186003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770106:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.186089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770108:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.186238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491655830678770103:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:10:49.191268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-10T12:10:49.200111Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770095:2329] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.200285Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770087:2323] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.200392Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770085:2322] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.200504Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770096:2330] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.200662Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770119:2339] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.202298Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770138:2342] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.202452Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770136:2340] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.202552Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770139:2343] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.207448Z node 1 :TX_PROXY ERROR: Actor# [1:7491655830678770137:2341] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: 
EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:10:49.219712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-10T12:10:49.220566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655830678770080:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:49.220640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655830678770134:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:49.220685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655830678770133:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-10T12:10:49.220786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491655830678 ... r: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.324768Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491656971292842110:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.324863Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.327191Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491656971292842210:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.327214Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7491656971292842212:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.327265Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:15:14.330700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-10T12:15:14.336577Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842176:2329] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.336857Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842198:2336] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.337065Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842216:2343] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.337281Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842174:2328] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.337443Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842183:2334] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.337695Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842182:2333] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.337884Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842200:2338] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.338171Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842201:2339] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.338313Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842199:2337] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: 
EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-10T12:15:14.348507Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842187:2392], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348531Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842171:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348585Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842215:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348631Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842177:2390], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348693Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842170:2385], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348759Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842172:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348765Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842180:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348833Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842173:2388], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348896Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842175:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.348921Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7491656971292842169:2384], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-10T12:15:14.403540Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842309:2416] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.411852Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842328:2425] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.412108Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842329:2426] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.415454Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842344:2437] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.419924Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842352:2443] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.441429Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842360:2449] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.441656Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842362:2451] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.444053Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842372:2459] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.445863Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842382:2466] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:14.450684Z node 6 :TX_PROXY ERROR: Actor# [6:7491656971292842392:2473] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: 
path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:15:15.127895Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7491656954112972240:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:15:15.128000Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:15:25.232088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:15:25.232120Z node 6 :IMPORT WARN: Table profiles were not loaded |99.0%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] Test command err: 2025-04-10T12:11:16.130783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:11:16.131005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:11:16.131195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bh2h/000309/r3tmp/tmpE1wvs3/pdisk_1.dat 2025-04-10T12:11:16.532673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:11:16.567800Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:11:16.608007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:11:16.608142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:11:16.619719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:11:16.703125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T12:11:17.043686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.043819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.043901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:11:17.049581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T12:11:17.210368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:11:17.286583Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:11:21.027360Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrfss4nhca3yt16e4z98srd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFkZjdkNi02NTRjODVkMy04NTMwYmRkNS04NjhiYzVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:24.370133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrfss8m658dmxp2fcyffm72d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlhNzk3ODUtYjE5MWViMi1kMTNjODA3YS00ZmRhNjMx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:28.388830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrfssbw643kn9hek1cfc365w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNlZTFiNWItYzVlODUwZGUtM2QwZmUzMmEtZjNhYWRkNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:32.430729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrfssfs0a15xcfxam5vq4vnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA2MTZlNDMtNDNhOGFlMWItMTY4ZjRmYzctOGYzZDU1ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:36.416602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrfsskra5cavh5zqmmjwq7nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc5NjQyMGEtMjE1ZGFjYjctMzFjMjJmMi03NzY0NTg2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:39.832170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrfssqmw6vp1dete7g2t214k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY2NGFlOTEtNGJjNWE5NDEtOWUzNWY5NDQtNmJjMjVmNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:43.276046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrfsstzd09x734z1bgxb8zam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2MWYwZTgtOTUyOWY2MmYtZTVmY2M3ZmUtNjUyMjQ5Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:47.389914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrfssyb204xbksd6w9pptvgt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2E3ODU1MGQtODRlMzZlMWYtM2E2Y2ZlZmItYzY2YWQxZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:51.319596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrfst2c6d2nzd77d0zg5mg14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM0OThmOGYtY2U4NGQwNDUtZTMxYmQ0NzEtYjM5NjY0NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:55.132421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrfst66k62amqtn4v3p4dxn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYyNzRlZTQtODM2ZGFlOTItMmQ0MWRhZDEtOWZhNDNkMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:11:59.157886Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrfst9wr9cztsqkesjf546tb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNhNzQ0YzYtOGJkY2NmOGUtMTZiYzljYjctMWFjNzMyM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:03.138318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jrfstdtj3xmeewes4ksfk914, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM1MDA0M2YtYjNhZmVmNDYtMWZmMjY4NjQtNzYwZjAxZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:06.896480Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrfsthqv0mpy0zah69b5wjvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWEwZmVmNWItODRmZTJhOGQtZjRlOTY1ODctYjU2NzRiNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:10.363713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrfstnccb3er5zef8x9b3bas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1Mzg5NmItNjcxMGRjMmUtYzRiZWVjNmMtYzMwYjYyM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:14.560891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrfstrsm89qprgy3qyg4z5en, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRhZjZmYjAtY2JmNzFmN2QtNmQ0YjcwNC1kYTUwYTczMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:18.369158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrfstwx5af32j7gn8r5zdkpt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVkMWVmZjUtYjM5NDA2MWMtOGRlZTVjYmUtNmRiZjI5MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:22.079053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrfsv0m4fn39wyxp58t3wjk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI5MTVhMWMtMmE1Njc2ZTktN2U5YTVmMjktZTI3OTQyMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:25.691808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrfsv47faxhqsa59jtwzm74j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU3ZTQ1NjMtNWEwM2IzNjEtNDM2YjYyMGQtYTFlNGQ5MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2025-04-10T12:12:27.910547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:12:27.910633Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1502 LastUpdateTime: 1502 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4827 Memory: 2223139 Storage: 16778930 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1502 LastUpdateTime: 1502 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1322 Memory: 126163 Storage: 18876266 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T12:12:34.310310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrfsvcdd20bnkp4tn8kycr7z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFkNWExMC00MGRmMjMxYy00N2IxNWVkNi01NTQ5YzA0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:12:37.838105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jrfsvg653phhjaezvvsrbwxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdiN ... .622757Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:16:20.622891Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:16:20.634324Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:16:20.717317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-10T12:16:20.969735Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:16:20.969845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:16:20.969920Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:16:20.975468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T12:16:21.133726Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:16:21.168332Z node 5 :TX_PROXY ERROR: Actor# [5:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:16:24.022236Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jrft2df8br198ktxce3gn5g5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjRmYTU2OWEtMjI1ZmRkMjktZmUwMTQ1ODItYzkxZTY0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:26.544615Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jrft2gfx0m2323510qmncaw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YzdiZTcxMmItNzlkZTQxNWQtN2VhYWVkYWMtMjkzYzNjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:29.230441Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jrft2jz50tbbmc9wk4cc6xac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzkyMDc4NDYtOWUwMDNlMmMtMTQ4YjIzNjEtMjQ4YzJmMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:31.672337Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jrft2nj26dw9rcsa4azqhxge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDhlOWFlNDQtZmRjZmIwNmUtNjFjMjc5Y2YtZTBiMGZmMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:34.185344Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jrft2qz77z7wya1qa1pren5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OTRmNDczZjktN2JkODkxN2YtYzc1Yzk5MWMtMzUzZWFiYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:36.556914Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jrft2tct5an72pkzhndqq52y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NmE0ZTI5NGQtNjBmOGZlYjctNjU3Mzc0NDEtM2MzYmM3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:38.983165Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jrft2wqmafdbbkm5zaqjsn68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Mzk1ZGY0NTMtNjU2NmFjZmQtNjRlNjhiZmYtYmM3OGUyYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:41.364142Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jrft2z3fcxb5yp0js2w7stvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDMzNTNjMjEtZTU4ZTc1ZWUtOWU0Yzc1NDAtYmEzNTU4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:43.769652Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jrft31d779bf0d75rhagbb7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=M2MwM2M4NDEtMWUxMTE3YzgtMjgyNjIzNWEtZjJjYTI2NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:46.109493Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jrft33s71ftyqnj6e9byea30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Y2I0Nzg0MzgtZGU3ODE2ZTgtYTEzNjMyN2ItOTk3MmIyMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-10T12:16:48.157504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:16:48.157556Z node 5 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1542 LastUpdateTime: 1542 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2040 Memory: 10609800 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1542 LastUpdateTime: 1542 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 549 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1542 LastUpdateTime: 1542 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 10485840 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 549 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-10T12:16:54.642576Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jrft3bs55pnthjtkf9r5r2aj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjI1YTAzYmItZTQ0ZDY3MDMtZWUyZjZlNTUtMWQ2ODA4NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:56.990762Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jrft3ec406htjn1p2ppnpj3e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTUxODg0YzgtMjFmYWNjMTgtODc3MjljYTEtNDU4ODY4ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:16:59.459257Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jrft3gpbczphbbe1dm8emmy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjgwZmQ5OWItMTMxMTcyZGQtOWNiZWRkMi01MWYwZTFhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:01.860387Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jrft3k3gackfkbtjez7f83n7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZDYyNzk3YTctMzE3YmU3YjgtZmRhMjFkNzItYTFjMTM5Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:04.300872Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jrft3neddy37eqhkmng4cnks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzkyZjUzNTMtMTcyZjJlZWItNDdjM2ZhMDMtZjk5NmIyYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:06.639875Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jrft3qtpepk3y99wtk75qdec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NjZlMDc4ZjYtODFjMjEzMmYtMzUyYzlkMTgtYWI4N2UyNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:09.070918Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jrft3t3pcbtjq4219wagd9ak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDExYzU1ZjUtODU4NGY1YTItODNjZWVmYWQtNTIwNWY3NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:11.471449Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jrft3wfr9jsztvjw9pvqpam9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWQ3NmIwYmEtYjlhOTdkYTgtMTdlYTM5ZGItOTNiZWZlOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:13.924010Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jrft3yt4cs30tqn08db761pt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWYxNDMxOTctNTg2MjhjZC02ZTZmY2ZiMC1mMzkzZTJhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:16.396794Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jrft417j9ffkn533sn3vbmtm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YzZkMWQ2OWEtMzU0M2UxYS05YzkwNjI1Yy01N2I2NzE3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:17:16.561949Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jrft43kxd8f21qwny6a68q6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZmYxNzk3YjQtZTI5NGQ1ZTUtOWJkMDkyNjgtZTQ0ODhkZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |99.1%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-04-10T12:10:26.535083Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:10:26.611782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:10:26.634121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:10:26.634433Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:10:26.641856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:26.642064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:26.642295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:26.642401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:26.642522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:26.642644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:26.642755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:26.642863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:26.642961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:26.643061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:26.643180Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:26.643270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:26.671395Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:10:26.671620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:10:26.671671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:10:26.671838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:26.671986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:10:26.672081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:10:26.672121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:10:26.672226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:10:26.672295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:10:26.672335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:10:26.672364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:10:26.672532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:26.672603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:10:26.672640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:10:26.672665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:10:26.672748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:10:26.672823Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:10:26.672878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:10:26.672904Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:10:26.672979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:10:26.673027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:10:26.673056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:10:26.673101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:10:26.673153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:10:26.673182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:10:26.673522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-10T12:10:26.673597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-10T12:10:26.673661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-10T12:10:26.673743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-10T12:10:26.673911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:10:26.673978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:10:26.674016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:10:26.674220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:10:26.674270Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:10:26.674297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:10:26.674459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:10:26.674502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:10:26.674530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:10:26.674704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:10:26.674744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:10:26.674777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T12:10:26.674911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:10:26.674952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:10:26.674996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
.202570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15672:17630];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T12:17:46.679500Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T12:17:46.679601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-10T12:17:46.679895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=231; 2025-04-10T12:17:46.679934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=287; 2025-04-10T12:17:46.684497Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T12:17:46.684581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-04-10T12:17:46.691431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6762; 2025-04-10T12:17:46.698624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6035; 2025-04-10T12:17:46.698734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7220; 2025-04-10T12:17:46.698885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2025-04-10T12:17:46.698989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-04-10T12:17:46.699129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=109; 2025-04-10T12:17:46.699244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=87; 2025-04-10T12:17:46.699444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=164; 2025-04-10T12:17:46.699479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=14853; 2025-04-10T12:17:46.705767Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T12:17:46.705854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-04-10T12:17:46.712232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6281; 2025-04-10T12:17:46.741750Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=29388; 2025-04-10T12:17:46.741930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=51; 2025-04-10T12:17:46.742031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-04-10T12:17:46.742089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-10T12:17:46.742141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-04-10T12:17:46.742216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-04-10T12:17:46.742329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=71; 2025-04-10T12:17:46.742390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T12:17:46.742515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=79; 2025-04-10T12:17:46.742570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T12:17:46.742650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-10T12:17:46.742776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=77; 2025-04-10T12:17:46.742889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=67; 2025-04-10T12:17:46.742941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=37019; 2025-04-10T12:17:46.743183Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T12:17:46.744114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15672:17630];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T12:17:46.744208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15672:17630];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T12:17:46.744324Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15672:17630];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T12:17:46.744394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T12:17:46.744655Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:17:46.744750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:17:46.745031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T12:17:46.745138Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:17:46.745205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:17:46.745275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:17:46.745344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:17:46.745478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T12:17:46.751011Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:17:46.755457Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T12:17:46.756930Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T12:17:46.756982Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-10T12:17:46.757007Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T12:17:46.757059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:17:46.757135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:17:46.757373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-04-10T12:17:46.757457Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:17:46.757506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:17:46.757565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:17:46.757609Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:17:46.757727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-10T12:17:46.757783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15672:17630];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [FAIL] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: 
[1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:111:2142] 2025-04-10T12:10:57.277942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-10T12:10:57.278048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T12:10:57.278091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-10T12:10:57.278133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-10T12:10:57.278210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-10T12:10:57.278245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-10T12:10:57.278336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-10T12:10:57.278425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-10T12:10:57.278811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-10T12:10:57.376691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-04-10T12:10:57.376786Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:174:2058] recipient: [1:15:2062] 2025-04-10T12:10:57.396379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-10T12:10:57.396662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-10T12:10:57.396843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-10T12:10:57.407435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-10T12:10:57.407787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-10T12:10:57.408339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-10T12:10:57.408985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-10T12:10:57.412145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-10T12:10:57.413331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T12:10:57.413388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T12:10:57.413537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-10T12:10:57.413591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-10T12:10:57.413630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-10T12:10:57.413746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:213:2058] recipient: [1:211:2212] Leader for TabletID 72057594037968897 is [1:217:2216] sender: [1:218:2058] recipient: [1:211:2212] 2025-04-10T12:10:57.420920Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-10T12:10:57.551315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-10T12:10:57.551617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.551883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-10T12:10:57.552184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-10T12:10:57.552251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.555480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-10T12:10:57.555638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-10T12:10:57.555880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.555944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-10T12:10:57.556006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T12:10:57.556050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T12:10:57.558817Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.558892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-10T12:10:57.558933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T12:10:57.561345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.561414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.561470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-10T12:10:57.561549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T12:10:57.565834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T12:10:57.568499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-10T12:10:57.568750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:253:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-10T12:10:57.569871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-10T12:10:57.570043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T12:10:57.570104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T12:10:57.570416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-10T12:10:57.570497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-10T12:10:57.570691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-10T12:10:57.570778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-10T12:10:57.573529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-10T12:10:57.573586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-10T12:10:57.573780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-10T12:10:57.573835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-10T12:10:57.574090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-10T12:10:57.574174Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-10T12:10:57.574279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T12:10:57.574316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-10T12:10:57.574359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-10T12:10:57.574428Z no ... } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: 
false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T12:19:17.139740Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-10T12:19:17.139969Z node 202 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 234us result status StatusSuccess 2025-04-10T12:19:17.140661Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: 
false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 
UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-10T12:19:17.151648Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1130:2897] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-10T12:19:17.151779Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1076:2897] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-10T12:19:17.151938Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1130:2897] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1744287557129942 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1744287557129942 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1744287557129942 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-10T12:19:17.154474Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1130:2897] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-10T12:19:17.154568Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1076:2897] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } |99.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-04-10 12:20:10,471 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:20:10,594 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 2065517 46.5M 46.6M 23.6M test_tool run_ut @/home/runner/.ya/build/build_root/bh2h/00026c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args 2065669 2.1G 2.1G 1.9G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/bh2h/00026c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/ch Test command err: 2025-04-10T12:10:11.672920Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:10:11.759510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:10:11.783002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:10:11.783237Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:10:11.789056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:11.789218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:11.789368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:11.789466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:11.789529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:11.789588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:11.789667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:11.789757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:11.789858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:11.789975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:11.790092Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:11.790195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:11.809430Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:10:11.809745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:10:11.809781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:10:11.809920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:11.810045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:10:11.810105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:10:11.810163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:10:11.810233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:10:11.810274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:10:11.810326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:10:11.810346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:10:11.810469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:11.810509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:10:11.810535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:10:11.810555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:10:11.810617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:10:11.810647Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:10:11.810674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:10:11.810692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:10:11.810742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:10:11.810764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:10:11.810779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:10:11.810810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:10:11.810833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:10:11.810852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:10:11.811120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-10T12:10:11.811173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-04-10T12:10:11.811249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-10T12:10:11.811321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-04-10T12:10:11.811450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:10:11.811486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:10:11.811507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:10:11.811612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:10:11.811635Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:10:11.811652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:10:11.811749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:10:11.811775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:10:11.811794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:10:11.811928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:10:11.811959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:10:11.811975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T12:10:11.812044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:10:11.812069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:10:11.812096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
line=scanner.cpp:67;event=wait_interval;remained=54;interval_idx=30; 2025-04-10T12:20:10.371331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:29;records_count:766644;schema=timestamp: binary message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-10T12:20:10.371359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T12:20:10.371383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T12:20:10.373340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T12:20:10.373371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=46; 2025-04-10T12:20:10.373395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=46;intervalId=135; 2025-04-10T12:20:10.373426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=30;merger=0;interval_id=119; 2025-04-10T12:20:10.373448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=54;interval_idx=30; 2025-04-10T12:20:10.373534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:29;records_count:766644;schema=timestamp: binary message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-10T12:20:10.373560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T12:20:10.373585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T12:20:10.374612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T12:20:10.374643Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=54; 2025-04-10T12:20:10.374668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=54;intervalId=143; 2025-04-10T12:20:10.374697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=30;merger=0;interval_id=119; 2025-04-10T12:20:10.374721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=54;interval_idx=30; 2025-04-10T12:20:10.374802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:29;records_count:766644;schema=timestamp: binary message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-10T12:20:10.374827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-10T12:20:10.374851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-10T12:20:10.388267Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-10T12:20:10.406426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::SPEC;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-10T12:20:10.407944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-10T12:20:10.460714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4; 2025-04-10T12:20:10.465443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ALLOCATE_MEMORY::Fetching;details={stage=Fetching;column_ids=[RawSequential:6];};;scan_step_idx=5; 2025-04-10T12:20:10.465688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 
2025-04-10T12:20:10.466078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=7; 2025-04-10T12:20:10.469041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-04-10T12:20:10.469080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-04-10T12:20:10.469110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=35; 2025-04-10T12:20:10.469147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=35;interval_id=124;memory=9864910;count=3; 2025-04-10T12:20:10.469613Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=4;size=2776;buffer=0; 2025-04-10T12:20:10.474391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=35; 2025-04-10T12:20:10.474819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:60;event=update_memory_merger;before_data=13;before_memory=256;after_memory=256;after_data=13;guard=9864910; 2025-04-10T12:20:10.474859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=30; 2025-04-10T12:20:10.474889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=30;interval_id=119;memory=13394745;count=4; 2025-04-10T12:20:10.475109Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=2;size=2792;buffer=0; 2025-04-10T12:20:10.475283Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=5;size=2776;buffer=0; 2025-04-10T12:20:10.475446Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=3;size=2792;buffer=0; 2025-04-10T12:20:10.475913Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=3;size=2816;buffer=0; 2025-04-10T12:20:10.476378Z node 1 :ARROW_HELPER WARN: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=accessor.cpp:16;event=many_deserializations;counter=3;size=2800;buffer=0; 2025-04-10T12:20:10.476549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13471:15437];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=30; Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/bh2h/00026c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/bh2h/00026c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 2025-04-10 12:20:28,245 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:20:28,358 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 2071884 46.6M 46.7M 23.7M test_tool run_ut @/home/runner/.ya/build/build_root/bh2h/0001eb/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args 2072086 2.0G 2.0G 1.7G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/bh2h/0001eb/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/ch Test command err: 2025-04-10T12:10:29.751816Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:10:29.918514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:10:29.942806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:10:29.943119Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:10:29.951750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:10:29.951989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:10:29.952237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:10:29.952386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:10:29.952498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:10:29.952614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:10:29.952735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:10:29.952860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:10:29.952969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:10:29.953089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:10:29.953199Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:10:29.953299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:10:29.981779Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:10:29.982060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:10:29.982138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:10:29.982362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:29.982563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:10:29.982672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:10:29.982719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:10:29.982818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:10:29.982893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:10:29.982945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:10:29.983001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:10:29.983219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:10:29.983296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:10:29.983346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:10:29.983380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:10:29.983472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:10:29.983530Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:10:29.983590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:10:29.983622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:10:29.983700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:10:29.983747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:10:29.983780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:10:29.983836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:10:29.983881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:10:29.983912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:10:29.984323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-10T12:10:29.984436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-04-10T12:10:29.984536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-10T12:10:29.984657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=55; 2025-04-10T12:10:29.984887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:10:29.984965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:10:29.985001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:10:29.985223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:10:29.985279Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:10:29.985317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:10:29.985499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:10:29.985561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:10:29.985596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:10:29.985790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:10:29.985851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:10:29.985897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T12:10:29.986043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:10:29.986130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:10:29.986226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
he::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=;
2025-04-10T12:20:28.286258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=8;
2025-04-10T12:20:28.286525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=9;
2025-04-10T12:20:28.288588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult;
2025-04-10T12:20:28.288633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply;
2025-04-10T12:20:28.288666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0;
2025-04-10T12:20:28.288707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=181;memory=10889551;count=1;
2025-04-10T12:20:28.289149Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0;
2025-04-10T12:20:28.328911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:60;event=update_memory_merger;before_data=1560;before_memory=1792;after_memory=1792;after_data=1560;guard=10889551;
2025-04-10T12:20:28.329007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0;
2025-04-10T12:20:28.329118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.329155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0;
2025-04-10T12:20:28.329186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);;
2025-04-10T12:20:28.335404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult;
2025-04-10T12:20:28.335463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0;
2025-04-10T12:20:28.335497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=181;
2025-04-10T12:20:28.335542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=98;merger=0;interval_id=181;
2025-04-10T12:20:28.335582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished;
2025-04-10T12:20:28.335685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.335718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=98;finished=1;
2025-04-10T12:20:28.335754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:198;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);;
2025-04-10T12:20:28.338802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-04-10T12:20:28.338962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:1;records_count:98;schema=timestamp: string message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.338996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-04-10T12:20:28.339084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:229;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);columns=2;rows=98;
2025-04-10T12:20:28.339135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:249;stage=data_format;batch_size=1560;num_rows=98;batch_columns=timestamp,message;
2025-04-10T12:20:28.339222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:365;event=send_data;compute_actor_id=[1:13821:15787];bytes=1560;rows=98;faults=0;finished=0;fault=0;schema=timestamp: string message: string;
2025-04-10T12:20:28.339316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.339409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.339485Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.339653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-04-10T12:20:28.339741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:187;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.339817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:13822:15788];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:192;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);
2025-04-10T12:20:28.339847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:13822:15788] finished for tablet 9437184

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/bh2h/0001eb/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8444524403/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/bh2h/0001eb/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})

|99.4%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
|99.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
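The two chained tracebacks above are the standard ya test-runner timeout path: yatest's process.wait polls the child until a deadline, raises TimeoutError, and run_test.py re-raises it as ExecutionTimeoutError. A minimal sketch of that deadline-polling and exception-chaining pattern follows; wait_for_process and run_with_timeout are illustrative names, not the actual yatest API.

import subprocess
import time

class WaitTimeoutError(Exception):
    """Stands in for yatest.common.process.TimeoutError."""

class ExecutionTimeoutError(Exception):
    """Stands in for the wrapper error the runner raises on top."""

def wait_for_process(proc: subprocess.Popen, timeout: float, poll: float = 0.1) -> int:
    # Poll the child until it exits or the deadline passes.
    deadline = time.monotonic() + timeout
    while proc.poll() is None:
        if time.monotonic() >= deadline:
            raise WaitTimeoutError(
                f"{timeout:.0f} second(s) wait timeout has expired: Command {proc.args!r}")
        time.sleep(poll)
    return proc.returncode

def run_with_timeout(cmd: list[str], timeout: float) -> int:
    proc = subprocess.Popen(cmd)
    try:
        return wait_for_process(proc, timeout)
    except WaitTimeoutError as e:
        proc.kill()  # the real runner also dumps a process tree first, as seen below
        raise ExecutionTimeoutError(str(e)) from e  # mirrors the chained traceback above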
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
2025-04-10 12:20:30,152 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 12:20:30,584 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid      rss   ref   pdirt
  2072556  685M  688M  602M  ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/bh2h/0001b5/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
  2073718  6.4G  6.4G  5.9G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001b5/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_d
  2074888  394M  394M  359M  └─ moto_server s3 --port 10558
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete
    self.ydb_client.query("""
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/bh2h/0001b5', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b5/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b5/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/bh2h/0001b5', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b5/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b5/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {})
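The hang above sits inside the YDB SDK's retry loop: execute_with_retries drives a generator (retry_operation_impl) that yields either a final result or a back-off delay. A minimal sketch of that generator-driven retry pattern, assuming a generic callee and treating ConnectionError as the only retriable failure (the real SDK inspects YDB status codes and configurable retry settings):

import time
from typing import Callable, Iterator, TypeVar, Union

T = TypeVar("T")

class FinalResult:
    # Wraps a successful result, in the spirit of YdbRetryOperationFinalResult.
    def __init__(self, value):
        self.value = value

def retry_operation_impl(callee: Callable[[], T],
                         max_attempts: int = 5) -> Iterator[Union[FinalResult, float]]:
    for attempt in range(max_attempts):
        try:
            yield FinalResult(callee())
            return
        except ConnectionError:
            # Retriable failure: tell the driver how long to sleep before retrying.
            yield min(0.1 * 2 ** attempt, 5.0)
    raise TimeoutError("retry budget exhausted")

def retry_operation_sync(callee: Callable[[], T]) -> T:
    for next_opt in retry_operation_impl(callee):  # the frame seen in the traceback
        if isinstance(next_opt, FinalResult):
            return next_opt.value
        time.sleep(next_opt)

Note the failure mode visible in the log: if the callee itself blocks indefinitely on a gRPC read, the retry loop never regains control, and only the outer 600 s wrapper timeout fires.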
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-04-10 12:20:30,202 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 12:20:30,457 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid      rss   ref   pdirt
  2072550  737M  741M  656M  ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/bh2h/0001b6/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
  2073713  2.8G  2.8G  2.2G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_
  2074886  458M  458M  424M  └─ moto_server s3 --port 2113
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test
    if not self.wait_for(
  File "ydb/tests/olap/ttl_tiering/base.py", line 71, in wait_for
    if condition_func():
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in <lambda>
    lambda: get_rows_in_portion(bucket1_path) == self.row_count
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 160, in get_rows_in_portion
    logger.info(f"portions: {portions_stat}, blobs: {table.get_blob_stat_by_tier()}")
  File "ydb/tests/olap/common/column_table_helper.py", line 41, in get_blob_stat_by_tier
    results = self.ydb_client.query(stmt)
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ..._root/bh2h/0001b6', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("..._root/bh2h/0001b6', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})

|99.6%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
|99.6%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[8]
>> test_tpch.py::TestTpchS1::test_tpch[8] [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[8] [FAIL]
2025-04-10 12:21:21,719 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 12:21:22,336 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid      rss    ref    pdirt
  2082296  618M   621M   537M   ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/bh2h/0001ba/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes
  2083002  10.2G  10.2G  9.8G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001ba/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/
  2086300  4.6G   4.6G   4.0G   ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001ba/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/
  2108409  153M   156M   90.6M  └─ ydb -e grpc://localhost:8549 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/bh2h/0001ba/ydb/tests/functional/t
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/load/lib/tpch.py", line 48, in test_tpch
    self.run_workload_test(self._get_path(), query_num)
  File "ydb/tests/olap/load/lib/conftest.py", line 286, in run_workload_test
    result = YdbCliHelper.workload_run(
  File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run
    ).process()
  File "ydb/tests/olap/lib/ydb_cli.py", line 283, in process
    process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute
    res.wait(check_exit_code, timeout, on_timeout)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait
    _wait()
  File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait
    pid, sts, rusage = os.wait4(self._process.pid, 0)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {})
2025-04-10 12:21:36,261 WARNING libarchive: File (test_tpch.py.TestTpchS1.test_tpch.1/cluster/slot_1/logfile_7iox3iuz.log) size has changed. Can't write more data than was declared in the tar header (346345438). (probably file was changed during archiving)
|99.7%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression
2025-04-10 12:21:20,644 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 12:21:20,923 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid      rss   ref   pdirt
  2082211  764M  770M  686M  ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/bh2h/0001a9/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor
  2082952  6.0G  6.0G  5.5G  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001a9/ydb/tests/olap/column_family/compression/test-results/py3test/testing_o
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/column_family/compression/alter_compression.py", line 110, in test_all_supported_compression
    volumes: tuple[int, int] = tables[i].get_volumes_column("value")
  File "ydb/tests/olap/common/column_table_helper.py", line 73, in get_volumes_column
    time.sleep(10)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bh2h/0001a9/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001a9', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001a9/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001a9/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bh2h/0001a9/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001a9', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001a9/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001a9/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.8%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
2025-04-10 12:21:21,521 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-10 12:21:21,677 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid      rss   ref   pdirt
  2082260  1.1G  1.2G  1.1G  ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/bh2h/0001aa/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module
  2106278  2.0G  2.0G  1.4G  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001aa/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "/home/runner/.ya/build/build_root/bh2h/0001aa/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test
    ctx.executable(self, ctx)
  File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert
    thread2.join_all()
  File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all
    thread.join(timeout=timeout)
  File "ydb/tests/olap/common/thread_helper.py", line 16, in join
    super().join(timeout)
  File "contrib/tools/python3/Lib/threading.py", line 1149, in join
    self._wait_for_tstate_lock()
  File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock
    if lock.acquire(block, timeout):
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)

Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...st-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001aa', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001aa/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001aa/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...st-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001aa', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001aa/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001aa/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
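The scenario test above is blocked in thread2.join_all(), i.e. joining worker threads with no effective deadline (thread.join(timeout=timeout) where the timeout is unset). A sketch of a thread-group helper that joins against one shared deadline instead; this ThreadGroup is illustrative and not the repo's thread_helper implementation:

import threading
import time

class ThreadGroup:
    def __init__(self) -> None:
        self._threads: list[threading.Thread] = []

    def start(self, target, *args) -> None:
        t = threading.Thread(target=target, args=args, daemon=True)
        t.start()
        self._threads.append(t)

    def join_all(self, timeout: float | None = None) -> None:
        # One overall deadline shared by all workers, not a per-thread timeout.
        deadline = None if timeout is None else time.monotonic() + timeout
        for t in self._threads:
            remaining = None if deadline is None else max(0.0, deadline - time.monotonic())
            t.join(timeout=remaining)
            if t.is_alive():
                raise TimeoutError(f"worker {t.name} is still running after the deadline")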
Process tree before termination:
    pid      rss    ref    pdirt
    2082163  603M   606M   521M   ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/bh2h/0001b4/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul
    2082951  11.6G  11.6G  11.0G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bh2h/0001b4/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tp
    2086161  465M   465M   430M   └─ moto_server s3 --port 9704
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/s3_import/test_tpch_import.py", line 97, in test_import_and_export
    self.ydb_client.query("INSERT INTO s3_table SELECT * FROM lineitem")
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/bh2h/0001b4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bh2h/0001b4/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001b4', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b4/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/bh2h/0001b4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bh2h/0001b4/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bh2h/0001b4', '--source-root', '/home/runner/.ya/build/build_root/bh2h/0001b4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bh2h/0001b4/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
|99.9%| CLEANING BUILD ROOT
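Note on the ydb/tests/olap/s3_import timeout above: the stack shows the YDB Python SDK's query-pool retry path (execute_with_retries -> retry_operation_sync), which drains every result set of the statement before returning, so a bulk INSERT ... SELECT that outlives the suite's 600-second budget surfaces as a test timeout rather than a query error. A minimal sketch of the same call path, assuming a locally reachable server (endpoint, database, and the wait timeout are illustrative, not taken from the log):

    import ydb

    # Hypothetical connection parameters; the real test derives them from its recipe.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    pool = ydb.QuerySessionPool(driver)
    # Same API the trace shows: execute_with_retries() wraps the statement in
    # retry_operation_sync() and iterates the result stream to completion --
    # the grpc _channel.py wait in the stack above is that iteration blocking.
    pool.execute_with_retries("INSERT INTO s3_table SELECT * FROM lineitem")

    pool.stop()
    driver.stop()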
ydb/library/yaml_config/ut_transform [size:medium]
------ sole chunk ran 1 test (total:7.39s - test:6.56s canon:0.65s)
[fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-release-asan] (2.02s)
Test results differ from canonical:
    test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1
    stdout:
    stderr:
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/library/yaml_config/ut_transform
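Note on the canonical comparison above: the harness compares the test's output files against canonical data and shells out to the internal simple_json_diff tool, which here exited with rc = 1 before a usable diff was recorded. As a rough stand-in for what a structural JSON comparison does (this is not the actual tool; the argument handling and output format are invented for illustration):

    import json
    import sys

    def first_difference(a, b, path="$"):
        """Return the path of the first structural difference, or None if equal."""
        if type(a) is not type(b):
            return path
        if isinstance(a, dict):
            for key in sorted(set(a) | set(b)):
                if key not in a or key not in b:
                    return f"{path}.{key}"
                diff = first_difference(a[key], b[key], f"{path}.{key}")
                if diff:
                    return diff
            return None
        if isinstance(a, list):
            if len(a) != len(b):
                return f"{path}.length"
            for i, (x, y) in enumerate(zip(a, b)):
                diff = first_difference(x, y, f"{path}[{i}]")
                if diff:
                    return diff
            return None
        return None if a == b else path

    if __name__ == "__main__":
        left, right = (json.load(open(p)) for p in sys.argv[1:3])
        diff = first_difference(left, right)
        if diff:
            print(f"documents differ at {diff}")
            sys.exit(1)  # a nonzero rc signals a mismatch to the caller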
ydb/tests/fq/streaming_optimize [size:medium] nchunks:4
------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:125.44s - recipes:0.58s test:124.00s recipes:0.67s)
[fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (25.63s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oatof5ug/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe3555a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe355599ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe354e23d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe354f21464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe354f21464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe354f21464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe354e22b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe354ecf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe354dc7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe354dc7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe354dc7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe354f1ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe354f1ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
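Note: every failure in this suite has the same shape. fqrun itself runs to completion, but yatest's _finalise -> verify_sanitize_errors step (visible in the traceback above) turns the AddressSanitizer leak report on stderr into an ExecutionError, even though the caller passed check_exit_code=False. A standalone sketch of that pattern, not yatest's actual code:

    import subprocess
    import sys

    SANITIZER_MARKER = "SUMMARY: AddressSanitizer"

    def run_checked(cmd):
        """Run cmd; fail on sanitizer findings even when exit codes are tolerated."""
        proc = subprocess.run(cmd, capture_output=True, text=True)
        # The exit code is deliberately not enforced (check_exit_code=False above),
        # but a sanitizer summary anywhere in stderr is always treated as a failure.
        if SANITIZER_MARKER in proc.stderr:
            raise RuntimeError(f"sanitizer error in {cmd[0]}:\n{proc.stderr}")
        return proc

    if __name__ == "__main__":
        run_checked(sys.argv[1:])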
[fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (13.80s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j_kfajwy/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe1c62903b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe1c6289ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe1c5b13d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe1c5c11464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe1c5c11464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe1c5c11464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe1c5b12b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe1c5bbf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe1c5ab7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe1c5ab7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe1c5ab7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe1c5c0ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe1c5c0ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (14.03s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f7iwsqe/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9e5c6a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9e5c699ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9e5bf23d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9e5c021464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9e5c021464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9e5c021464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9e5bf22b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9e5bfcf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9e5bec7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9e5bec7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9e5bec7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9e5c01ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9e5c01ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
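Note: the GroupByHop* and GroupByHoppingWindow* cases in this suite feed small YQL queries (the query_0.sql files referenced above) over the topics emulated via --emulate-pq, exercising hopping-window aggregation. For orientation, a query of roughly the shape these suites compile; the connection name, schema, and window sizes are invented for the example, only the topic names come from the flags above:

    # Illustrative YQL of the general form the GroupByHop* suites pass to
    # fqrun --action=explain; the real queries live in each test's query_0.sql.
    HOP_QUERY = """
    INSERT INTO pq.`test_topic_output`
    SELECT key, COUNT(*) AS cnt
    FROM pq.`test_topic_input`
    GROUP BY
        HOP(CAST(ts AS Timestamp), "PT10S", "PT30S", "PT20S"),  -- hop, interval, delay
        key;
    """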
[fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (15.77s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j44c4udl/topic_3.txt' has failed with code 100.
E Errors:
E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f4ffa9a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f4ffa999ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4ffa223d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4ffa321464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4ffa321464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4ffa321464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4ffa222b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4ffa2cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4ffa1c7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4ffa1c7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4ffa1c7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4ffa31ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4ffa31ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (16.87s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656:
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l_x44pxu/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff022a303b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff022a29ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff0222b3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff0223b1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff0223b1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff0223b1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff0222b2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff02235f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff022257e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff022257e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff022257c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff0223aee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff0223aee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (16.50s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4mvyxoru/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd18a4203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd18a419ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd189ca3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd189da1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd189da1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd189da1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd189ca2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd189d4f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd189c47e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd189c47e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd189c47c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd189d9ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd189d9ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (17.40s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z6ewjgi5/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff09faf03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff09fae9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff09f373d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff09f471464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff09f471464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff09f471464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff09f372b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff09f41f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff09f317e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff09f317e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff09f317c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff09f46ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff09f46ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423687 byte(s) leaked in 8242 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:123.09s - recipes:0.64s test:121.53s recipes:0.68s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (25.59s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ndas0xm/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa60ce503b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa60ce49ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa60c6d3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa60c7d1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa60c7d1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa60c7d1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa60c6d2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa60c77f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa60c677e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa60c677e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa60c677c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa60c7cee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa60c7cee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423357 byte(s) leaked in 8235 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (14.54s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in 
yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yc6b5cev/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe84ef603b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe84ef59ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe84e7e3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe84e8e1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe84e8e1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe84e8e1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe84e7e2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe84e88f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe84e787e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe84e787e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe84e787c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe84e8dee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe84e8dee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 409042 byte(s) leaked in 7924 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (13.98s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ube7bnfr/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f05dea403b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f05dea39ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f05de2c3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f05de3c1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f05de3c1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f05de3c1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f05de2c2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f05de36f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f05de267e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f05de267e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f05de267c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f05de3bee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f05de3bee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (14.83s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nzr3xvhj/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7560c203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7560c19ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f75604a3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f75605a1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f75605a1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f75605a1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f75604a2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f756054f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7560447e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7560447e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7560447c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f756059ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f756059ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (16.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3nedg7m/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa7ffad03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa7ffac9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa7ff353d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa7ff451464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa7ff451464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa7ff451464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa7ff352b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa7ff3ff90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa7ff2f7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa7ff2f7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa7ff2f7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa7ff44ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa7ff44ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (15.79s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ahuzkc2a/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f112e2d03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f112e2c9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f112db53d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f112dc51464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f112dc51464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f112dc51464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f112db52b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f112dbff90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f112daf7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f112daf7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f112daf7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f112dc4ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f112dc4ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (15.91s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y3y754yz/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f40056303b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f4005629ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4004eb3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4004fb1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4004fb1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4004fb1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4004eb2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4004f5f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4004e57e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4004e57e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4004e57c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4004faee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4004faee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:121.22s - recipes:0.56s test:119.92s recipes:0.62s) [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (25.74s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0lz5rm60/topic_3.txt' has failed with code 100. 
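
The leak report is effectively identical across tests: the same stack through __Pyx_Import into PyImport_ImportModuleLevelObject during sitecustomize initialization, and in most entries the same 423744 byte(s) in 8243 allocation(s). That points at a single interpreter-startup leak rather than anything test-specific. If that leak were triaged as known and acceptable, LeakSanitizer's standard suppression mechanism could mute it; a sketch of that approach (the suppression file path and the frame chosen for matching are assumptions to adapt to the symbolized report):

    import os
    import subprocess

    # Hypothetical suppression: mute leaks whose stack passes through the
    # Cython import shim seen in these reports. Frame names must match the
    # symbolized stack, so adjust to what the actual report prints.
    with open("lsan.supp", "w") as f:
        f.write("leak:__Pyx_Import\n")

    env = dict(os.environ,
               LSAN_OPTIONS="suppressions=lsan.supp:print_suppressions=1")
    # Illustrative invocation; real runs pass the full flag set shown above.
    subprocess.run(["./fqrun", "--action=explain"], env=env)
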
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f51ff0b03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f51ff0a9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f51fe933d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f51fea31464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f51fea31464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f51fea31464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f51fe932b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f51fe9df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f51fe8d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f51fe8d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f51fe8d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f51fea2ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f51fea2ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (14.09s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x1c50a6k/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f896d6a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f896d699ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f896cf23d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f896d021464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f896d021464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f896d021464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f896cf22b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f896cfcf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f896cec7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f896cec7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f896cec7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f896d01ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f896d01ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 408988 byte(s) leaked in 7923 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (12.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wanp_rca/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fbe49d203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fbe49d19ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fbe495a3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fbe496a1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fbe496a1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fbe496a1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fbe495a2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fbe4964f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fbe49547e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fbe49547e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fbe49547c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fbe4969ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fbe4969ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (14.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3htbdsrq/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7feb177a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7feb17799ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7feb17023d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7feb17121464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7feb17121464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7feb17121464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7feb17022b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7feb170cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7feb16fc7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7feb16fc7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7feb16fc7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7feb1711ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7feb1711ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423598 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (16.68s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c0vnw_uk/topic_3.txt' has failed with code 100. 
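
With a dozen near-identical entries, it is easier to collapse the log to its distinct leak signatures before reading further. A small helper sketch that scans a log file like this one for the ASan summary lines and tallies the unique (bytes, allocations) pairs (the log path and the output shown in the comment are illustrative):

    import re
    from collections import Counter

    SUMMARY = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
    )

    def leak_signatures(log_path):
        # Count distinct (bytes, allocations) pairs across all failures.
        with open(log_path, encoding="utf-8", errors="replace") as f:
            return Counter(SUMMARY.findall(f.read()))

    # For a section like this one, something on the order of:
    # Counter({('423744', '8243'): 12, ('408988', '7923'): 1, ('423598', '8240'): 1})
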
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f45c5dc03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f45c5db9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f45c5643d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f45c5741464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f45c5741464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f45c5741464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f45c5642b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f45c56ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f45c55e7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f45c55e7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f45c55e7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f45c573ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f45c573ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (16.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2etv_ila/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f215a7c03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f215a7b9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f215a043d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f215a141464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f215a141464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f215a141464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f215a042b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f215a0ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2159fe7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2159fe7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2159fe7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f215a13ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f215a13ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (15.70s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i532o_4l/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa1c00503b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa1c0049ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa1bf8d3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa1bf9d1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa1bf9d1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa1bf9d1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa1bf8d2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa1bf97f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa1bf877e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa1bf877e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa1bf877c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa1bf9cee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa1bf9cee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:120.20s - recipes:0.55s test:118.82s recipes:0.67s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (25.75s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qbs25j9k/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2d260a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2d26099ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2d25923d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2d25a21464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2d25a21464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2d25a21464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2d25922b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2d259cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2d258c7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2d258c7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2d258c7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2d25a1ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2d25a1ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (14.00s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: 
in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_s82mo8xy/topic_3.txt' has failed with code 100. 
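
Each entry prints the complete fqrun command line inside the quotes of "Command '…' has failed with code 100", so reproducing a single failure is a matter of pulling that string back out and splitting it the same way the harness did (cmd.strip().split()). A sketch, assuming the single-quoted command contains no embedded quotes — which holds for the paths in this log:

    import re
    import subprocess

    CMD = re.compile(r"Command '([^']+)' has failed with code (\d+)")

    def rerun_first_failure(log_text):
        m = CMD.search(log_text)
        if m is None:
            raise ValueError("no failed command found in log")
        argv = m.group(1).split()  # same whitespace splitting the test harness used
        return subprocess.run(argv, capture_output=True, text=True)
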
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd3f4bc03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd3f4bb9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd3f4443d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd3f4541464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd3f4541464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd3f4541464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd3f4442b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd3f44ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd3f43e7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd3f43e7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd3f43e7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd3f453ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd3f453ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (13.11s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_dfl956hi/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7efd5e1f03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7efd5e1e9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7efd5da73d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7efd5db71464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7efd5db71464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7efd5db71464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7efd5da72b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7efd5db1f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7efd5da17e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7efd5da17e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7efd5da17c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7efd5db6ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7efd5db6ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (12.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1vt4h9ym/topic_3.txt' has failed with code 100. 
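The leak report is byte-for-byte identical in every failure above (423744 bytes in 8243 allocations, all frames inside the bundled Python import machinery), which suggests a single leak in the runtime rather than one per test. If triage confirms it is benign, LeakSanitizer accepts a suppressions file; the rerun helper below is a sketch under that assumption — the "leak:<pattern>" syntax is standard LSAN, but the fqrun path and the choice to suppress are hypothetical, not the repo's actual policy:

    import os
    import subprocess
    import tempfile

    # Write a LeakSanitizer suppression matching the PyImport frames seen in
    # the reports above ("leak:<pattern>" is standard LSAN suppression syntax).
    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as supp:
        supp.write("leak:PyImport_ImportModuleLevelObject\n")

    # Re-run the failing binary with the suppression active; the relative
    # path to fqrun is an assumption for illustration.
    env = dict(os.environ, LSAN_OPTIONS="suppressions=" + supp.name)
    subprocess.run(["./fqrun", "--action=explain"], env=env, check=False)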
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7c36df03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7c36de9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7c36673d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7c36771464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7c36771464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7c36771464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7c36672b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7c3671f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7c36617e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7c36617e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7c36617c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7c3676ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7c3676ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (15.48s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fmn1ajoh/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ffaa16b03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ffaa16a9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ffaa0f33d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ffaa1031464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ffaa1031464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ffaa1031464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ffaa0f32b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ffaa0fdf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ffaa0ed7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ffaa0ed7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ffaa0ed7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ffaa102ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ffaa102ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (17.27s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ktkubpco/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc3186f03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc3186e9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc317f73d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc318071464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc318071464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc318071464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc317f72b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc31801f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc317f17e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc317f17e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc317f17c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc31806ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc31806ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (16.47s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ueu3haj1/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe108ab03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe108aa9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe108333d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe108431464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe108431464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe108431464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe108332b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe1083df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe1082d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe1082d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe1082d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe10842ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe10842ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:135.46s - setup:0.01s test:134.99s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (128.53s) ydb/tests/functional/serverless/test_serverless.py:450: in 
test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`."
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:139.14s - test:138.66s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (131.78s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match.
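Both serverless failures are assertion mismatches rather than quota malfunctions: the write is rejected with issue_code 2033 exactly as the test intends, but test_serverless.py:449 matches the status-name regex '.*DISK_SPACE_EXHAUSTED.*' while the server returns the human-readable message shown in the Input line. A hypothetical adjustment matching the message text instead (the helper name is a stand-in; the real change would go in test_serverless.py):

    import pytest
    import ydb

    def write_key_until_quota_exceeded():
        # Stand-in for the async_write_key(...) call above, re-raising the
        # server response captured in this log (abridged).
        raise ydb.issues.Unavailable(
            'message: "Disk space exhausted." issue_code: 2033 severity: 1'
        )

    def test_database_with_disk_quotas():
        # Match on the message text the server actually sends, not on the
        # DISK_SPACE_EXHAUSTED status identifier that no longer appears.
        with pytest.raises(ydb.Unavailable, match=r"Disk space exhausted"):
            write_key_until_quota_exceeded()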
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/tpc/medium [size:medium] ------ [test_tpch.py] chunk ran 22 tests (total:614.65s - setup:0.01s test:600.05s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 366.98s test_tpch.py::TestTpchS1::test_tpch[5] (good) duration: 49.68s test_tpch.py::TestTpchS1::test_tpch[3] (good) duration: 48.77s test_tpch.py::TestTpchS1::test_tpch[4] (good) duration: 47.75s test_tpch.py::TestTpchS1::test_tpch[8] (fail) duration: 32.50s test_tpch.py::TestTpchS1::test_tpch[7] (good) duration: 31.03s test_tpch.py::TestTpchS1::test_tpch[6] (good) duration: 19.67s test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 11.49s 14 tests were not launched inside chunk. Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr [fail] test_tpch.py::TestTpchS1::test_tpch[8] [default-linux-x86_64-release-asan] (32.50s) ydb/tests/olap/load/lib/tpch.py:48: in test_tpch self.run_workload_test(self._get_path(), query_num) ydb/tests/olap/load/lib/conftest.py:298: in run_workload_test self.process_query_result(result, query_num, qparams.iterations, True) ydb/tests/olap/load/lib/conftest.py:245: in process_query_result raise exc ydb/tests/olap/lib/ydb_cli.py:283: in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:400: in wait _wait() library/python/testing/yatest_common/yatest/common/process.py:335: in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) library/python/pytest/plugins/ya.py:347: in _graceful_shutdown _graceful_shutdown_on_log(not capman.is_globally_capturing()) library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log pytest.exit("Graceful shutdown requested") E Failed: Graceful shutdown requested Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.8.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff ------ TIMEOUT: 7 - GOOD, 1 - FAIL, 14 - NOT_LAUNCHED ydb/tests/functional/tpc/medium ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests 
(total:619.63s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 614.60s alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr [timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (614.60s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression ydb/tests/olap/s3_import [size:medium] ------ sole chunk ran 1 test (total:626.82s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 621.56s Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.1G (13704540K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 2082050 44.9M 44.9M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2082159 41.4M 29.9M 17.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2082163 779M 782M 697M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 2082951 11.6G 12.2G 11.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr [timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (621.56s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 4 tests (total:621.91s - test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: 
test_alter_compression.py::TestAlterCompression::test[alter_compression] (fail) duration: 363.79s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 210.46s test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 39.63s test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (39.63s) ydb/tests/olap/scenario/conftest.py:88: in test ctx.executable(self, ctx) ydb/tests/olap/scenario/test_alter_tiering.py:297: in scenario_many_tables self._setup_tiering_test(ctx) ydb/tests/olap/scenario/test_alter_tiering.py:161: in _setup_tiering_test self._override_external_data_source(sth, self.sources[-1], s3_config) ydb/tests/olap/scenario/test_alter_tiering.py:177: in _override_external_data_source sth.execute_scheme_query(CreateExternalDataSource(path, config, True)) ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:430: in execute_scheme_query self._run_with_expected_status( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:357: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}') E Failed: Unexpected status: must be in {}, but get SchemeError('message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 362 } message: "Executing CREATE OBJECT EXTERNAL_DATA_SOURCE" end_position { row: 1 column: 362 } severity: 1 issues { message: "
: Error: (NKikimr::NExternalSource::TExternalSourceException) External source with type ObjectStorage is disabled. Please contact your system administrator to enable it, code: 2003\\n" issue_code: 2003 severity: 1 } } (server_code: 400070)') Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff [fail] test_alter_compression.py::TestAlterCompression::test[alter_compression] [default-linux-x86_64-release-asan] (363.79s) ydb/tests/olap/scenario/conftest.py:112: in test ScenarioTestHelper(None).remove_path(test_path, ctx.suite) ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:726: in remove_path self._run_with_expected_status( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:357: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}') E Failed: Unexpected status: must be in {}, but get ConnectionLost('Rpc error, reason <_InactiveRpcError of RPC that terminated with:\n\tstatus = StatusCode.UNAVAILABLE\n\tdetails = "Socket closed"\n\tdebug_error_string = "UNKNOWN:Error received from peer {grpc_message:"Socket closed", grpc_status:14, created_time:"2025-04-10T12:18:12.125360942+00:00"}"\n>')teardown failed: ydb/tests/olap/scenario/conftest.py:80: in teardown_class ScenarioTestHelper(None).remove_path(cls.get_suite_name()) ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:718: in remove_path for e in self.list_path(path, folder): ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:677: in list_path self_descr = YdbCluster._describe_path_impl(os.path.join(root_path, path)) ydb/tests/olap/lib/ydb_cluster.py:194: in _describe_path_impl return cls.get_ydb_driver().scheme_client.describe_path(path) contrib/python/ydb/py3/ydb/scheme.py:507: in describe_path return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:438: in __call__ connection = self._store.get(preferred_endpoint) contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:144: in get raise issues.ConnectionLost("Couldn't find valid connection") E ydb.issues.ConnectionLost: Couldn't find valid connection Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (210.46s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 2 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:2 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:615.96s - setup:0.02s test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in 
the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 610.28s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (610.28s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:615.56s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 489.48s ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 119.72s ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (489.48s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:204: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:202: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:178: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:195: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037936) scan failed, reason: cannot build metadata withno 
ranges/Snapshot too old: {1744287519000:max}. CS min read snapshot: {1744287520000:max}. now: 2025-04-10T12:18:45.209747Z" issue_code: 2017 severity: 1 (server_code: 400010) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (119.72s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 1 test (total:242.78s - test:242.64s) Info: Test run has exceeded 32.0G (33554432K) memory limit with 34.7G (36366536K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 2087358 44.9M 44.9M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2087398 34.5M 22.6M 9.9M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2087400 736M 740M 659M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f 2088565 3.2G 3.2G 2.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2088577 2.6G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101025 2.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test 2088593 2.6G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101021 2.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test 2088605 2.5G 2.5G 2.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101024 2.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test 2088607 0b 0b 0b ├─ 2088608 2.4G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101023 2.4G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test 2088609 2.8G 2.8G 2.3G ├─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2088610 2.4G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101028 2.4G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test 2088612 2.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 2101026 2.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/test Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:13.77s - test:13.74s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.31s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:14.08s - test:14.05s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.40s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:13.18s - test:13.16s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.30s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:14.08s - test:14.05s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.32s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:23.48s - test:23.45s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==2066973==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7ffdaaddf980 sp 0x7ffdaaddf7e0 T0) ==2066973==The signal is caused by a READ memory access. ==2066973==Hint: address points to the zero page. 
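
NOTE on the KqpCost::OlapWriteRow crash reported here: a SEGV on a near-null address (0x8) with the faulting frame inside protobuf's repeated_ptr_field.h typically means an index into an empty repeated field — RepeatedPtrField::Get() is only DCHECK-guarded, so in this release build reading element 0 of an empty table_access list dereferences near null. A minimal guard, assuming only the standard accessors protobuf generates for ydb_query_stats.pb.h (the *_size() method plus the indexed getter):

    #include <ydb/public/api/protos/ydb_query_stats.pb.h>

    // Hypothetical helper: return the first table_access entry of a query
    // phase, or nullptr instead of crashing when the stats are empty.
    const Ydb::TableStats::TableAccessStats* FirstTableAccess(
            const Ydb::TableStats::QueryStats& stats, int phase) {
        if (phase >= stats.query_phases_size()) {
            return nullptr;                   // phase index out of range
        }
        const auto& p = stats.query_phases(phase);
        if (p.table_access_size() == 0) {
            return nullptr;                   // table_access(0) on an empty field is the unchecked read
        }
        return &p.table_access(0);
    }

The stack that follows shows the unchecked path: kqp_cost_ut.cpp:636 -> table_access -> RepeatedPtrField::Get.
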
2025-04-10T12:10:22.968863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491655696939903168:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:10:22.968981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:10:33.342888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:10:33.342920Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> ..[snippet truncated].. 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f25fa160d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f25fa160e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x164bd028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==2066973==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [3/50] chunk ran 1 test (total:62.75s - setup:0.01s test:62.70s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (57.98s) assertion failed at 
ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B82265 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BAB3BC 15. ??:0: ?? @ 0x7FB0809D7D8F 16. ??:0: ?? @ 0x7FB0809D7E3F 17. ??:0: ?? @ 0x16611028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 3 - GOOD, 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [1/50] chunk ran 1 test (total:22.17s - test:22.12s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (17.25s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18F652BE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18F43F0A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18F4B387 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F4B387 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F4B387 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18F4A553 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FB642753D8F 18. ??:0: ?? @ 0x7FB642753E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [10/50] chunk ran 1 test (total:22.23s - test:22.19s) [fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (16.96s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FEC1B126D8F 18. ??:0: ?? @ 0x7FEC1B126E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out ------ [11/50] chunk ran 1 test (total:22.25s - test:22.22s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (17.70s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F5A57FCED8F 18. ??:0: ?? @ 0x7F5A57FCEE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out ------ [2/50] chunk ran 1 test (total:26.28s - test:26.23s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (18.99s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4C78B18D8F 18. ??:0: ?? @ 0x7F4C78B18E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [3/50] chunk ran 1 test (total:25.04s - test:25.01s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (19.06s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F8228FCAD8F 18. ??:0: ?? @ 0x7F8228FCAE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out ------ [4/50] chunk ran 1 test (total:26.15s - test:26.12s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (19.36s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F7D87F83D8F 18. ??:0: ?? @ 0x7F7D87F83E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [5/50] chunk ran 1 test (total:25.21s - test:25.14s) [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (20.55s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FA340A23D8F 18. ??:0: ?? @ 0x7FA340A23E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out ------ [6/50] chunk ran 1 test (total:24.83s - test:24.79s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (18.79s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4689B3FD8F 18. ??:0: ?? @ 0x7F4689B3FE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out ------ [7/50] chunk ran 1 test (total:26.37s - test:26.34s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (18.55s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F090A7B7D8F 18. ??:0: ?? @ 0x7F090A7B7E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [8/50] chunk ran 1 test (total:24.93s - test:24.89s) [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (18.64s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7EFD2ECC8D8F 18. ??:0: ?? @ 0x7EFD2ECC8E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out ------ [9/50] chunk ran 1 test (total:26.13s - test:26.09s) [fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (19.03s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F49842B2D8F 18. ??:0: ?? @ 0x7F49842B2E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out ------ FAIL: 1 - GOOD, 11 - FAIL ydb/core/kqp/ut/tx ydb/core/persqueue/ut/ut_with_sdk [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:47.55s - test:47.44s) [fail] TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [default-linux-x86_64-release-asan] (36.67s) assertion failed at ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:1484, virtual void NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic::Execute_(NUnitTest::TTestContext &): (stats->GetCommittedOffset() == 8) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic.out ------ [1/10] chunk ran 1 test (total:35.19s - test:35.13s) [crashed] WithSDK::DescribeConsumer [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==2073450==ERROR: AddressSanitizer: heap-use-after-free on address 0x50300353bb19 at pc 0x000018eec552 bp 0x7ffd57531e30 sp 0x7ffd575315f0 READ of size 9 at 0x50300353bb19 thread T0 2025-04-10T12:10:49.446617Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655816040543180:2471], Partition 0, Sender [0:0:0], Recipient [1:7491655820335510518:2474], Cookie: 0 2025-04-10T12:10:49.446907Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655820335510518:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:10:49.447005Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:10:49.447254Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:10:49.447439Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:10:49.447488Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:10:49.447629Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:10:49.547861Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491655816040543180:2471], Partition 0, Sender [0:0:0], Recipient [1:7491655820335510518:2474], Cookie: 0 2025-04-10T12:10:49.548083Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491655820335510518:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:10:49.548145Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:10:49.548216Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:10:49.548292Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:10:49.548360Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:10:49.548404Z node 1 :PERSQUEUE TRACE: [PQ: 72 ..[snippet truncated].. 
nittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7f2ae0072d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7f2ae0072e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x50300353b880: fa fa fa fa fa fa fa fa fa fa fd fd fd fd fa fa 0x50300353b900: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353b980: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353ba00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x50300353ba80: fd fd fd fd fa fa fa fa fa fa fa fa fa fa fa fa =>0x50300353bb00: fa fa fd[fd]fd fd fa fa fa fa fa fa fa fa fa fa 0x50300353bb80: fa fa fa fa fd fd fd fd fa fa fa fa fa fa fa fa 0x50300353bc00: fa fa fa fa fa fa fd fd fd fd fa fa 00 00 00 fa 0x50300353bc80: fa fa fd fd fd fd fa fa fa fa fa fa fa fa fa fa 0x50300353bd00: fa fa fa fa 00 00 00 fa fa fa fa fa fa fa fa fa 0x50300353bd80: fd fd fd fa fa fa fa fa fa fa fa fa fd fd fd fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==2073450==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.out ------ FAIL: 1 - FAIL, 1 - CRASHED ydb/core/persqueue/ut/ut_with_sdk ydb/core/quoter/ut [size:medium] ------ sole chunk ran 1 test (total:18.62s - test:18.58s) [fail] QuoterWithKesusTest::PrefetchCoefficient [default-linux-x86_64-release-asan] (4.75s) assertion failed at ydb/core/quoter/ut_helpers.cpp:121, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult) at /-S/ydb/core/quoter/ut_helpers.cpp:121:5 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18 UnRef at /-S/util/generic/ptr.h:624:13 operator() at 
/-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.out ------ FAIL: 1 - FAIL ydb/core/quoter/ut ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:601.90s - test:600.08s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 600.61s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (600.61s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out ------ [1/60] chunk ran 1 test (total:602.35s - test:600.05s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 600.96s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (600.96s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out ------ TIMEOUT: 1 - 
GOOD, 2 - TIMEOUT ydb/core/tx/columnshard/ut_rw ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:202.73s - test:201.96s) [crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (167.96s) Test crashed (return code: 100) ==2068945==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 56224 byte(s) in 2 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x1ca92df2 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x1ca92df2 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x1ca92df2 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x1ca92df2 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x1ca92df2 in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:789:25 #6 0x1ca92df2 in void std::__y1::vector>::__assign_with_size[abi:fe190000](NKikimr::NOlap::TUnifiedBlobId*, NKikimr::NOlap::TUnifiedBlobId*, long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1378:5 #7 0x1ca8b285 in assign /-S/contrib/libs/cxxsupp/libcxx/include/vector:1359:3 #8 0x1ca8b285 in operator= /-S/contrib/libs/cxxsupp/libcxx/include/vector:1330:5 #9 0x1ca8b285 in NKikimr::NOlap::TPortionMetaConstructor::Build() /-S/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp:53:20 #10 0x1ca4cad1 in ..[snippet truncated].. () /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #34 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #37 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #39 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #40 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #41 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #42 0x7f78c262ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 3125688 byte(s) leaked in 55830 allocation(s). 
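
NOTE on the LeakSanitizer reports in ydb/core/tx/columnshard/ut_schema (this TTL variant and the second one below): under --sanitize=address, LSan runs its check at process exit, so a test whose assertions all pass still exits non-zero (return code 100 here) and is reported as "crashed". Until the leak is fixed, a deliberately immortal allocation can be excluded through the public LSan interface; a minimal sketch, assuming the object really is meant to live for the whole process:

    #include <sanitizer/lsan_interface.h>

    // Hypothetical example: mark a never-freed object so the exit-time
    // leak check skips it (and everything reachable from it).
    void* AllocateImmortalBuffer() {
        void* p = new char[256];
        __lsan_ignore_object(p);
        return p;
    }

__lsan_ignore_object, __lsan_disable/__lsan_enable, and LSAN_OPTIONS=suppressions=<file> are the standard escape hatches; for the 3125688 bytes across 55830 allocations reported above, a real fix around the TPortionMetaConstructor::Build allocation site looks warranted rather than a suppression.
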
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out ------ [1/60] chunk ran 1 test (total:204.03s - test:203.27s) [crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (167.99s) Test crashed (return code: 100) ==2068946==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. Indirect leak of 41088 byte(s) in 1284 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x18622f15 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x18622f15 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x18622f15 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x18622f15 in get_node /-S/util/generic/hash_table.h:503:43 #5 0x18622f15 in new_node /-S/util/generic/hash_table.h:953:19 #6 0x18622f15 in std::__y1::pair<__yhashtable_iterator, bool> THashTable, TIdentity, TEqualTo, std::__y1::allocator>::insert_unique_noresize(NKikimr::TLogoBlobID const&) /-S/util/generic/hash_table.h:1059:17 #7 0x2a1a1b98 in insert_unique /-S/util/generic/hash_table.h:680:16 #8 0x2a1a1b98 in insert /-S/util/generic/hash_set.h:162:51 #9 0x2a1a1b98 in operator() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp #10 0x2a1a1b98 in ExtractTo<(lambda at /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:276:23)> /-S/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.h ..[snippet truncated].. () /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #38 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7f27da4dcd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2637722 byte(s) leaked in 62743 allocation(s). 
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out ------ FAIL: 2 - CRASHED ydb/core/tx/columnshard/ut_schema ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 1 test (total:159.54s - test:159.45s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (141.79s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2 ------ [0/2] chunk ran 1 test (total:28.43s - test:28.39s) [crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (7.88s) Test crashed (return code: 100) ==2075156==ERROR: LeakSanitizer: detected memory leaks Direct leak of 224 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2b21b420 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 #2 0x2b21b420 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27 #3 0x2b2149d6 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37 #4 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #5 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #6 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #7 0x20817234 in NKikimr::NTabletFlatExecutor ..[snippet truncated].. in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #27 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #28 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #29 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #30 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #31 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #32 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #33 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #34 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #35 0x7f745af1fd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). 
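
NOTE on the DataShardVolatile leak above: the direct leak is a TEvWriteResult built via make_unique in BuildCompleted (allocation site TExecuteWriteUnit::Execute per the stack), which suggests a reply constructed and then dropped on the graceful-restart path. A generic sketch of that bug class, with hypothetical names — not the actual datashard fix:

    #include <memory>

    struct TEvent { int Payload = 0; };      // stand-in event type

    void Deliver(TEvent* ev) { delete ev; }  // stand-in for the actor system taking ownership

    void Reply(bool canReply) {
        auto ev = std::make_unique<TEvent>();  // keep ownership in unique_ptr...
        if (!canReply) {
            return;                            // ...so every early return frees it
        }
        Deliver(ev.release());                 // ...until the explicit hand-off
    }
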
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:39.87s - test:39.83s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/services/ydb/sdk_sessions_ut [size:medium] nchunks:10
------ [0/10] chunk ran 1 test (total:19.88s - test:19.85s)
[fail] YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [default-linux-x86_64-release-asan] (6.26s)
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=OGFiZjAyYWItZGQ1NDFlZmYtZWFlYWM5ZjEtYTczMzRmNzY=" != "ydb://session/3?node_id=1&id=ODdhNWRlMzgtMTJmMmQ5ODYtMjg4YjE4N2QtYTQ5YTU4ZjU=")
, with diff: "ydb://session/3?node_id=1&id=O(GFiZjAyY|DdhN)W(I|RlMzg)t(ZG|MTJmMm)Q(1N|5O)D(FlZm|)Yt(ZWFl|Mjg4)Y(WM5Z|)jE(|4N2Q)tYT(czMzRmNz|Q5)Y(|TU4ZjU)="
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.out
------ FAIL: 1 - FAIL ydb/services/ydb/sdk_sessions_ut
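The quoted "with diff" string above is the framework interleaving the two session ids character by character: the test expected the pool to hand back the session with the remembered id after the bad stream call, but received a different one. As a sketch (not the test's actual code), the failing check is a plain string-equality assertion:

    // Hypothetical helper mirroring the failing check: the id of the session
    // obtained after the bad stream call must equal the remembered one.
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>

    void ExpectSameSession(const TString& gotId, const TString& expectedId) {
        // Prints both ids and their interleaved diff on mismatch.
        UNIT_ASSERT_VALUES_EQUAL(gotId, expectedId);
    }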
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:20.48s - test:20.41s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (4.41s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
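PRECONDITION_FAILED with "Column stores are not supported" means the server rejected the LogStore DDL before executing it, while the test asserted SUCCESS unconditionally. A hedged sketch of such a status check against an SDK-style result object (a TStatus-like API as used in the assertion above; not the test's actual code):

    // Illustrative: require success and surface server issues in the failure
    // message, so the log shows why the status differed (as the
    // ": Error: ..." line above does).
    #include <library/cpp/testing/unittest/registar.h>

    template <typename TResult> // e.g. a result type deriving from NYdb::TStatus
    void ExpectSuccess(const TResult& res) {
        UNIT_ASSERT_C(res.IsSuccess(), res.GetIssues().ToString());
    }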
Total 40 suites:
  19 - GOOD
  15 - FAIL
  6 - TIMEOUT
Total 142 tests:
  56 - GOOD
  56 - FAIL
  17 - NOT_LAUNCHED
  7 - TIMEOUT
  6 - CRASHED
Cache efficiency ratio is 97.60% (43073 of 44131). Local: 433 (0.98%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42640 (96.62%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[627267 ms] [TM] [rnd-j1a193ra27s8l80p asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 0 (1744287079976), finished: 627267 (1744287707243)]
Time from start: 723822.7280273438 ms, time elapsed by graph 627267 ms, time diff 96555.72802734375 ms.
The longest 10 tasks:
[627267 ms] [TM] [rnd-j1a193ra27s8l80p asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744287079976, finished: 1744287707243]
[622360 ms] [TM] [rnd-6gx8d5wwe4cgrknz asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744287080994, finished: 1744287703354]
[620027 ms] [TM] [rnd-zonwn5ldt6qvcqyz asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744287080248, finished: 1744287700275]
[616391 ms] [TM] [rnd-8793078171425761536 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744287029674, finished: 1744287646065]
[616033 ms] [TM] [rnd-13803350446551977508 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744287029680, finished: 1744287645713]
[615037 ms] [TM] [rnd-2732920974072325737 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744287081322, finished: 1744287696359]
[602705 ms] [TM] [rnd-11399888642543182103 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744287027890, finished: 1744287630595]
[602269 ms] [TM] [rnd-16807017896877051072 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744287010052, finished: 1744287612321]
[503870 ms] [TM] [rnd-2876966206782776450 asan default-linux-x86_64 release]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1744287054911, finished: 1744287558781]
[444548 ms] [TM] [rnd-9574490975704076154 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744287024791, finished: 1744287469339]
Total time by type:
[12652478 ms] [TM] [count: 1017, ave time 12440.98 msec]
[  119280 ms] [prepare:get from local cache] [count: 433, ave time 275.47 msec]
[   56412 ms] [TS] [count: 10, ave time 5641.20 msec]
[   37277 ms] [prepare:AC] [count: 2, ave time 18638.50 msec]
[   32774 ms] [prepare:put to dist cache] [count: 423, ave time 77.48 msec]
[   25743 ms] [TA] [count: 31, ave time 830.42 msec]
[    6954 ms] [prepare:bazel-store] [count: 1, ave time 6954.00 msec]
[    6506 ms] [prepare:tools] [count: 16, ave time 406.62 msec]
[     694 ms] [prepare:clean] [count: 3, ave time 231.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 12734633 ms (100.00%)
Total run tasks time - 12734633 ms
Configure time - 28.9 s
Statistics overhead 1510 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.KC84DoWNuA --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [8442/8442 modules configured] [0 ymakes processing] [8442/8442 modules configured] [4837/4837 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution | 1.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut | 1.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests | 1.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown | 1.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table | 2.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan | 2.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut | 1.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq | 2.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 | 2.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat | 2.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume | 4.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering | 7.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica | 7.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms | 8.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut | 8.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark | 8.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs | 8.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable | 9.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut | 9.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut | 9.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut | 9.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage | 9.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |10.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |10.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |10.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |10.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |11.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |11.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |12.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |12.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |12.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |14.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |14.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |14.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |14.8%| PREPARE $(CLANG-1922233694) |14.9%| PREPARE $(CLANG_FORMAT-2212207123) |15.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |15.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |15.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |15.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |16.1%| PREPARE $(JDK17-472926544) |15.5%| PREPARE $(CLANG18-3363451693) |15.6%| PREPARE $(WITH_JDK17-sbr:7832760150) |15.9%| PREPARE $(WITH_JDK-sbr:7832760150) |16.2%| PREPARE $(YMAKE_PYTHON3-4256832079) |16.4%| PREPARE $(PYTHON) |16.5%| PREPARE $(TEST_TOOL_HOST-sbr:8444524403) |16.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |17.1%| PREPARE $(GDB) |17.5%| PREPARE $(JDK_DEFAULT-472926544) |17.8%| PREPARE $(FLAKE8_PY3-715603131) |17.9%| PREPARE $(CLANG-2518231432) |18.3%| PREPARE $(CLANG16-1380963495) |20.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |21.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |21.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |22.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |22.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |22.5%| PREPARE $(LLD_ROOT-3808007503) |22.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |24.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |24.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |24.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |25.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |25.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |25.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |25.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |25.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |26.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |26.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |26.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |26.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |26.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |32.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |32.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |32.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |32.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |32.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |33.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |36.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |36.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |36.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |36.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |36.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |41.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |46.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |46.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |46.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |46.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |46.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |47.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |46.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |47.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |47.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |48.2%| CLEANING SYMRES |48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |48.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |48.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |49.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |49.3%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |49.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |49.4%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |49.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |49.8%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |50.1%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |50.2%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |50.4%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |50.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |50.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |50.5%| [LD] 
{RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |50.6%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |51.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |51.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |51.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |51.4%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |51.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |51.7%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |51.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |51.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |52.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |52.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |52.0%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |52.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |52.3%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |52.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |52.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |52.5%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |52.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |52.8%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |52.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |53.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |53.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |53.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |53.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |53.7%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |53.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |53.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |54.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |54.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |53.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |54.0%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |54.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |54.3%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |54.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |54.4%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |54.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |54.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |54.8%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |54.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |54.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |55.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |55.3%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |55.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |55.5%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |55.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |55.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |55.8%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |55.9%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |56.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |56.1%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |56.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |56.7%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |56.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |57.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |57.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |57.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |57.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |57.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink |57.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |58.7%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |59.5%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 |59.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |59.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/ut_rw/unittest |59.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |60.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |61.4%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |63.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |62.8%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |63.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |63.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |64.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |64.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |64.2%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |64.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |64.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |64.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |64.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |65.0%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |65.5%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |65.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |65.9%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |66.1%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |66.4%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |66.9%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |67.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |67.6%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |68.3%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |68.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |68.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |69.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |60.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |61.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |61.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |61.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |62.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |62.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |62.4%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |56.2%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |56.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |56.4%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |56.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |56.6%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |56.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |56.7%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |56.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |56.8%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |56.9%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |56.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |57.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |57.1%| COMPACTING CACHE 935.4GiB |57.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |57.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |57.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |57.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |57.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |57.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |57.5%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |57.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |57.7%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |57.8%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |57.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |57.9%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |57.9%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |58.1%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |58.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.2%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |58.2%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |58.3%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |58.3%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.4%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |58.5%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |58.6%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |58.6%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |58.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |58.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |58.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |58.9%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |59.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |59.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |59.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |59.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |59.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |59.2%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |59.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |59.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |59.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |59.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.7%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |59.8%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |59.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |59.9%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |60.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |60.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |60.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |60.2%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |60.3%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |60.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |60.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |60.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |60.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |60.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |60.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |60.8%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |60.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |60.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |61.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |61.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |61.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |61.1%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |61.2%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |61.3%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |61.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |61.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |61.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |61.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |61.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |61.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |61.8%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TKeyValueTracingTest::ReadSmall >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> TKeyValueTracingTest::ReadHuge >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |61.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |61.9%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |62.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |62.2%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |62.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |62.4%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |62.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |62.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |62.6%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |62.6%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |62.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |62.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |62.8%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |62.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |62.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |63.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |63.0%| [LD] {RESULT} 
$(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |63.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |63.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |63.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |63.2%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::WriteSmall >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] >> TKeyValueTracingTest::WriteHuge |63.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |63.4%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |63.7%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |63.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |63.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |64.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |64.1%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |64.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |64.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |64.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |64.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |64.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |64.8%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |64.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |65.0%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |65.0%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |65.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |65.1%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |65.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.3%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |65.4%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |65.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.7%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |65.7%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |65.8%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |65.8%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |65.9%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |65.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn |66.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |66.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |66.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |67.1%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |67.2%| [LD] {RESULT} 
$(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |67.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |67.5%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |67.6%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |68.1%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |68.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |68.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.5%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |68.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.7%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.8%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |68.9%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |69.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |69.2%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |69.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |69.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |69.5%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |69.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |69.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |69.9%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |69.9%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |70.0%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |70.1%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |70.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |70.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |70.3%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |70.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |70.4%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |70.5%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |70.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |70.7%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |70.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |70.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |70.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |70.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |71.0%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |71.1%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |71.1%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |71.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |71.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |71.3%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |71.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |71.5%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |71.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |71.6%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |71.6%| [LD] {RESULT} 
$(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |71.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |71.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |71.8%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |71.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |71.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |72.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |72.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |72.2%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |72.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |72.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |72.4%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |72.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |72.6%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |72.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |72.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |72.8%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |72.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |73.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |73.0%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |73.4%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |73.5%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.6%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |73.8%| [LD] {RESULT} 
$(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |73.8%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |74.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |74.2%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |74.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |74.3%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |74.4%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |74.6%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.7%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |74.8%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |74.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |74.9%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |75.0%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |75.1%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |75.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |75.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |75.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |75.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |75.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |75.6%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |75.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |76.0%| [LD] {RESULT} 
$(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |76.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |76.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |76.2%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |76.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |76.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |76.4%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |76.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |76.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |76.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |76.6%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |76.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |76.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |76.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |76.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |77.1%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |77.1%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |77.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |77.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |77.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |77.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |77.6%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::WriteSmall [FAIL] |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> ColumnShardTiers::TTLUsage |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/query/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFE53D38)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7FAF580AFD90)
__libc_start_main+128 (0x7FAF580AFE40)
_start+41 (0xD7D1029)
|82.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFE5449E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7F30E007DD90)
__libc_start_main+128 (0x7F30E007DE40)
_start+41 (0xD7D1029)
|82.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFE4D8CC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFE540AE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7F2F98B68D90)
__libc_start_main+128 (0x7F2F98B68E40)
_start+41 (0xD7D1029)
|82.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1020541C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x106C12B0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFE47EFD)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFE53A28)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFE67C78)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x106EF216)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x106C7E29)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFE66B24)
NUnitTest::TTestFactory::Execute()+2438 (0x106C96F6)
NUnitTest::RunMain(int, char**)+5213 (0x106E978D)
??+0 (0x7F8B9F5E5D90)
__libc_start_main+128 (0x7F8B9F5E5E40)
_start+41 (0xD7D1029)
|83.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.2%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
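All four TKeyValueTracingTest failures above trip the same check in ydb/core/keyvalue/keyvalue_ut_trace.cpp: after a single KeyValue request the test expects its Wilson trace uploader to hold exactly one trace, but two were uploaded. Below is a minimal, self-contained sketch of that assertion shape, assuming the library/cpp/testing/unittest macros that the backtraces point at (NTestSuite.../TTestCase... are what Y_UNIT_TEST_SUITE/Y_UNIT_TEST generate); TFakeUploader is a hypothetical stand-in for the env.WilsonUploader fixture, not the real test code.

#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/vector.h>

// Hypothetical stand-in for the env.WilsonUploader seen in the backtraces.
struct TFakeUploader {
    TVector<int> Traces; // one element per uploaded trace
};

Y_UNIT_TEST_SUITE(TTracingAssertionShape) {
    Y_UNIT_TEST(OneTracePerRequest) {
        TFakeUploader uploader;
        uploader.Traces.push_back(1); // pretend exactly one trace arrived
        // Same shape as the failing check: if a second trace had been
        // uploaded, this would report "(2 != 1)", as in the log above.
        UNIT_ASSERT_VALUES_EQUAL(uploader.Traces.size(), 1);
    }
}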
results_accumulator.log} |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.7%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/cost/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> YdbLogStore::AlterLogTable [FAIL] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 24763, MsgBus: 3528 2025-04-10T12:24:22.825771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659325890733034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:22.825865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/a650/0001a6/r3tmp/tmpFuCuYJ/pdisk_1.dat 2025-04-10T12:24:23.097985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:23.115557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:23.115657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:23.117995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24763, node 1 2025-04-10T12:24:23.179988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:23.180021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:23.180039Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:23.180219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3528 TClient is connected to server localhost:3528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:23.683437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:25.441890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659338775635591:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:25.441967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659338775635583:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:25.442107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:25.446671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:25.457057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659338775635597:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:25.544793Z node 1 :TX_PROXY ERROR: Actor# [1:7491659338775635648:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:25.847983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:25.958288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.894583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:27.826192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659325890733034:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:27.826254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:28.326797Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY4YzUzZDQtOTk5MmRjYTgtNDEwNWVjNDctZDI3NGZiYWM=, ActorId: [1:7491659351660546108:2968], ActorState: ExecuteState, TraceId: 01jrfth97s57z6v0j3ax86fj25, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6CE09E0D8F 18. ??:0: ?? @ 0x7F6CE09E0E3F 19. ??:0: ?? @ 0x1656B028 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2025-04-10T12:24:31.609686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659362327943525:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:31.609751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001d6/r3tmp/tmpfnD6UV/pdisk_1.dat 2025-04-10T12:24:31.954200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7300, node 1 2025-04-10T12:24:31.996311Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:31.996348Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:32.012495Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:32.012558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:32.012571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:32.012714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:32.013163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:32.013320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:32.031477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:32.254928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:33.913637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659370917879127:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:33.913805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:34.116569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:24:34.262450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659375212846617:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:34.262529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:34.262551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659375212846622:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:34.272068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-10T12:24:34.289061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659375212846624:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-10T12:24:34.367272Z node 1 :TX_PROXY ERROR: Actor# [1:7491659375212846701:2816] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:34.474484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jrfthf6n8gv2thyep66ptvvf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMzMWY3NTAtYmUyMmFjZDMtNzAwMzBiNWMtNDZhYjI0MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-10T12:24:34.633544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jrfthfj70bszev3fz11pdngm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI2Y2JlNjUtOWQ5ODdhMjItYjBmN2EzYjEtZGY5YTkzZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> KqpSnapshotIsolation::TSimpleOltpNoSink |94.3%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-10T12:24:29.683869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659356327342715:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:29.685128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00024b/r3tmp/tmp9FisS7/pdisk_1.dat 2025-04-10T12:24:30.083641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:30.094080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:30.094212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12010, node 1 2025-04-10T12:24:30.138453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:30.139157Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:30.139184Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:30.156453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:30.156487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:30.156497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:30.156654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:30.409061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:30.557286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:37210" , at schemeshard: 72057594046644480 2025-04-10T12:24:30.557748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.557785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-10T12:24:30.560192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-10T12:24:30.560338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-10T12:24:30.562693Z node 1 :TX_PROXY ERROR: Actor# [1:7491659360622310859:2602] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C8C03EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CD7D4A0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C3F27D1) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C41B668) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CDB44C6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CD84019) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C41A834) NUnitTest::TTestFactory::Execute()+2438 (0x1CD858E6) NUnitTest::RunMain(int, char**)+5213 (0x1CDAEA3D) ??+0 (0x7F41C4192D90) __libc_start_main+128 (0x7F41C4192E40) _start+41 (0x1924C029) |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> KqpSnapshotIsolation::TReadOnlyOltp |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.2%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSnapshotIsolation::TConflictReadWriteOlap >> KqpSnapshotIsolation::TConflictWriteOltp >> KqpSnapshotIsolation::TConflictReadWriteOltp |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> WithSDK::DescribeConsumer >> QuoterWithKesusTest::PrefetchCoefficient >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test_tpch.py::TestTpchS1::test_tpch[8] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 5607, MsgBus: 26340 2025-04-10T12:24:33.071255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659371535891242:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:33.071400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001f1/r3tmp/tmpN9V22S/pdisk_1.dat 2025-04-10T12:24:33.380667Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5607, node 1 2025-04-10T12:24:33.443607Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:33.443643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
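Analysis: the recurring KqpSnapshotIsolation failures in this run (TReadOnlyOltpNoSink reported above; TSimpleOltp, TConflictWriteOltp and the other variants queued alongside it) all fail the same way. The test opens a SnapshotRW transaction against a row-oriented (OLTP) table, the server answers PRECONDITION_FAILED with "SnapshotRW can only be used with olap tables", and the equality assertion on result.GetStatus() prints the diff (PRE|SUC)C(ONDITION_FAIL|)E(D|SS). Below is a minimal C++ sketch of that client-side pattern, not the test source; it assumes the in-tree ydb_table SDK, and TTxSettings::SnapshotRW() in particular is an assumed spelling inferred from the server error text.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

// Hedged sketch. SnapshotRW() is an assumption; SerializableRW() and
// SnapshotRO() are the settings known to exist in this SDK.
NYdb::EStatus RunUnderSnapshotRw(NYdb::NTable::TSession& session) {
    auto result = session.ExecuteDataQuery(
        "SELECT * FROM `/Root/KV` WHERE Key = 1u;",   // hypothetical row table
        NYdb::NTable::TTxControl::BeginTx(
            NYdb::NTable::TTxSettings::SnapshotRW()).CommitTx())
        .GetValueSync();
    // Against a row (OLTP) table the server currently returns
    // PRECONDITION_FAILED, which is what trips these tests; against a
    // column (OLAP) table they expect SUCCESS.
    return result.GetStatus();
}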
2025-04-10T12:24:33.443658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:33.443807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:33.457642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:33.457780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:33.459896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26340 TClient is connected to server localhost:26340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:33.917149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:33.942396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:34.090356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:34.261196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:34.336469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:35.767049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659380125827629:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:35.767179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:36.054349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.091071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.123166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.154380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.185748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.218103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:24:36.263960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659384420795436:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:36.264056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:36.264081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659384420795441:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:36.268209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T12:24:36.277155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659384420795443:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T12:24:36.336241Z node 1 :TX_PROXY ERROR: Actor# [1:7491659384420795496:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:37.247757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-10T12:24:37.378051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:37.378283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:37.378558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:37.378605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:37.378627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:37.378787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:37.378874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:37.378965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:37.379011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:37.379113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:37.379123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
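Analysis: the YdbLogStore::AlterLogTable failure reported earlier in this section is a precondition error, not a crash. Schemeshard rejects the ESchemeOpCreateColumnStore proposal with StatusPreconditionFailed ("Column stores are not supported"), so the test fails during setup, before the ALTER it is meant to exercise. The sketch below reconstructs the shape of that setup call from the backtrace; the NYdb::NLogStore draft client and the CreateLogStore signature are assumptions, not verified API.

#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/draft/ydb_logstore.h>

// Hedged sketch; names and signatures in the draft logstore SDK are assumed.
void CreateLogStoreOrFail(NYdb::NLogStore::TLogStoreClient& client,
                          const NYdb::NLogStore::TLogStoreDescription& store) {
    // The log shows a "default" schema preset keyed on
    // timestamp/resource_type/resource_id/uid; its construction is elided.
    auto res = client.CreateLogStore("/Root/LogStore", store).GetValueSync();
    // On this build the server answers PRECONDITION_FAILED
    // ("Column stores are not supported") before any log table exists.
    UNIT_ASSERT_C(res.GetStatus() == NYdb::EStatus::SUCCESS,
                  res.GetIssues().ToString());
}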
2025-04-10T12:24:37.379230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:37.379232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:37.379325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:37.379348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659388715763260:2509];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:37.379426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7491659388715763250:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract. ... WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:24:37.551827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:24:37.551828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:24:37.551858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:24:37.552031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:24:37.552076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:24:37.552199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:24:37.552238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:24:37.552255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:24:37.552281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:24:37.552370Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:24:37.552393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:24:37.552399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:24:37.552418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:24:37.552511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:24:37.552543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:24:37.552707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:24:37.552743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:24:37.552863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:24:37.552890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:24:37.607968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.608356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.614972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.615485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.620759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.622803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.626319Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.629535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.631716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.636065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710671; 2025-04-10T12:24:37.757463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7491659388715763278:2512];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037923; 2025-04-10T12:24:37.757553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T12:24:37.758188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T12:24:37.758550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710673; 2025-04-10T12:24:37.888685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:212;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 7725 cpu_time_us: 2032 affected_shards: 1 } query_phases { duration_us: 8628 cpu_time_us: 324 affected_shards: 1 } compilation { duration_us: 102554 cpu_time_us: 99002 } process_cpu_time_us: 577 total_duration_us: 122703 total_cpu_time_us: 101935 AddressSanitizer:DEADLYSIGNAL ================================================================= ==2113376==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7ffd6faf6bc0 sp 0x7ffd6faf6a20 T0) ==2113376==The signal is caused by a READ memory access. ==2113376==Hint: address points to the zero page. 
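Note on the crash above: a READ SEGV at 0x000000000008 is a near-null dereference, i.e. a member read through a null object pointer at offset 8. The stack that follows (interleaved with unrelated METADATA_PROVIDER timeouts) points into RepeatedPtrField::Get called from the generated table_access() accessor of ydb_query_stats.pb.h, reached from kqp_cost_ut.cpp:636. That is consistent with indexing a repeated stats field without first checking its size; protobuf's Get(i) is unchecked in release builds. A minimal defensive sketch, assuming the generated header for ydb/public/api/protos/ydb_query_stats.proto (the helper itself is hypothetical, not the test's actual code):

#include <cstdint>
#include <ydb/public/api/protos/ydb_query_stats.pb.h>

// Returns rows read by table access `j` of query phase `i`, or 0 when either
// repeated field is shorter than expected. Calling
// stats.query_phases(i).table_access(j) without these size checks reads past
// the end of the repeated field, which is exactly the kind of near-null READ
// that AddressSanitizer reports above.
inline uint64_t TableAccessRowsOrZero(const Ydb::TableStats::QueryStats& stats,
                                      int i, int j) {
    if (i < 0 || i >= stats.query_phases_size()) {
        return 0;
    }
    const auto& phase = stats.query_phases(i);
    if (j < 0 || j >= phase.table_access_size()) {
        return 0;
    }
    return phase.table_access(j).reads().rows();
}

The query_phases blocks printed just before the crash show two phases with affected_shards but no table_access entries, so an unchecked table_access(0) on such a phase would fail in this way.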
2025-04-10T12:24:38.070973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659371535891242:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.071312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; #0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18d5fe47 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18d5fe47 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #10 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f6f9e436d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f6f9e436e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x164bd028 in _start (/home/runner/.ya/build/build_root/a650/0001f1/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b) AddressSanitizer can not provide additional info. 
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==2113376==ABORTING >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] 2025-04-10 12:24:44,207 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |96.3%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |96.4%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] Test command err: 2025-04-10T12:24:39.425641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659395570801705:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:39.425780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:24:39.461096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7491659399340198376:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/000188/r3tmp/tmpulHxxP/pdisk_1.dat 2025-04-10T12:24:39.671471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:24:39.788503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.788598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.793007Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-10T12:24:39.794135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:39.818027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:39.866524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.866624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.869318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26580 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-10T12:24:40.016491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.033816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.050734Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72075186224037888 2025-04-10T12:24:40.050833Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Execute 2025-04-10T12:24:40.053526Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInitSchema::Complete 2025-04-10T12:24:40.053615Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Execute 2025-04-10T12:24:40.055612Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxInit::Complete 2025-04-10T12:24:40.092681Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Execute (sender=[1:7491659395570802115:2276], cookie=1, path="/dc-1/KesusQuoter") 2025-04-10T12:24:40.095863Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxConfigSet::Complete (sender=[1:7491659395570802115:2276], cookie=1, status=SUCCESS) AddQuoterResource: { Resource { ResourcePath: "Resource" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 } } } TClient::Ls request: /dc-1/KesusQuoter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287880146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... 
(TRUNCATED) 2025-04-10T12:24:40.124086Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491659399865769800:2581], cookie=0, path="Resource", config={ MaxUnitsPerSecond: 10 }) 2025-04-10T12:24:40.124419Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 1 "Resource" 2025-04-10T12:24:40.126829Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491659399865769800:2581], cookie=0) AddQuoterResource: { Resource { ResourcePath: "root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 PrefetchCoefficient: 1000 } } } TClient::Ls request: /dc-1/KesusQuoter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287880146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... (TRUNCATED) 2025-04-10T12:24:40.128884Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491659399865769800:2581], cookie=0, path="root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 1000 }) 2025-04-10T12:24:40.129077Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 2 "root" 2025-04-10T12:24:40.131534Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491659399865769800:2581], cookie=0) AddQuoterResource: { Resource { ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { } } } TClient::Ls request: /dc-1/KesusQuoter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "KesusQuoter" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1744287880146 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 P... 
(TRUNCATED) 2025-04-10T12:24:40.133574Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Execute (sender=[1:7491659399865769800:2581], cookie=0, path="root/leaf", config={ }) 2025-04-10T12:24:40.133782Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Created new quoter resource 3 "root/leaf" 2025-04-10T12:24:40.136055Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] TTxQuoterResourceAdd::Complete (sender=[1:7491659399865769800:2581], cookie=0) 2025-04-10T12:24:40.136287Z node 1 :QUOTER_SERVICE TRACE: Request({ Operator: And Deadline: no Cookie: 0 [ { 1, "/dc-1/KesusQuoter":"root/leaf" } ] }) 2025-04-10T12:24:40.136379Z node 1 :QUOTER_SERVICE INFO: resolve new quoter /dc-1/KesusQuoter 2025-04-10T12:24:40.136464Z node 1 :QUOTER_SERVICE INFO: path resolved as Kesus /dc-1/KesusQuoter 2025-04-10T12:24:40.136531Z node 1 :QUOTER_SERVICE INFO: resolve resource root/leaf on quoter /dc-1/KesusQuoter 2025-04-10T12:24:40.136573Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Created kesus quoter proxy. Tablet id: 72075186224037888 2025-04-10T12:24:40.136710Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Connecting to kesus 2025-04-10T12:24:40.136759Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: ProxyRequest "root/leaf" 2025-04-10T12:24:40.137091Z node 1 :QUOTER_PROXY DEBUG: [/dc-1/KesusQuoter]: Successfully connected to tablet 2025-04-10T12:24:40.137532Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvSubscribeOnResourcesResult to [1:7491659399865769824:2304]. Cookie: 1. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1000 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-10T12:24:40.137554Z node 1 :KESUS_TABLET DEBUG: [72075186224037888] Subscribe on quoter resources (sender=[1:7491659399865769824:2304], cookie=1) 2025-04-10T12:24:40.137777Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: SubscribeOnResourceResult({ Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "root/leaf" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1000 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }) 2025-04-10T12:24:40.137795Z node 1 :QUOTER_PROXY INFO: [/dc-1/KesusQuoter]: Initialized new session with resource "root/leaf" 2025-04-10T12:24:40.137825Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxySession("root/leaf", 3) 2025-04-10T12:24:40.137900Z node 1 :QUOTER_SERVIC ... e: 0 2025-04-10T12:24:41.503822Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:41.503935Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:41.503971Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:41.504005Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:41.504273Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:41.504295Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:41.602425Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:41.602677Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:41.602710Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:41.602748Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:41.602840Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:41.602874Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:41.702389Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:41.702470Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:41.702490Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:41.702514Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:41.702580Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:41.702603Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:41.801095Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:41.801229Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:41.801256Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:41.801290Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:41.801391Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:41.801422Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:41.902219Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:41.902296Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:41.902319Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:41.902347Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:41.902430Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:41.902452Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.000722Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.000803Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.000838Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.000882Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.000937Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.000961Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.100723Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.100843Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.100866Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.100915Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.100985Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.101017Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.202444Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.202568Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.202630Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.202657Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.202775Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.202802Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.300848Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.300973Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.300998Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.301034Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.301132Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.301165Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.400892Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.401009Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.401033Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.401066Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.401148Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.401170Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. FreeBalance: 0 2025-04-10T12:24:42.500783Z node 1 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [1:7491659399865769824:2304]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-04-10T12:24:42.500970Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 3 Amount: 10 StateNotification { Status: SUCCESS } } }) 2025-04-10T12:24:42.501018Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"root/leaf", 10} 2025-04-10T12:24:42.501063Z node 1 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "root/leaf", Normal, {0: Sustained(0, 0)} }]) 2025-04-10T12:24:42.501149Z node 1 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-10T12:24:42.501174Z node 1 :QUOTER_SERVICE TRACE: Feed resource "root/leaf". Balance: 0. 
FreeBalance: 0 assertion failed at ydb/core/quoter/ut_helpers.cpp:121, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline) TBackTrace::Capture()+28 (0x18E4FBDC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1930CD10) NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+3257 (0x18A53289) NKikimr::TKesusQuoterTestSetup::GetQuota(TBasicString> const&, TBasicString> const&, unsigned long, TDuration, NKikimr::TEvQuota::TEvClearance::EResult)+575 (0x18A5480F) NKikimr::NTestSuiteQuoterWithKesusTest::TTestCasePrefetchCoefficient::Execute_(NUnitTest::TTestContext&)+2163 (0x1899A883) std::__y1::__function::__func, void ()>::operator()()+280 (0x189D92F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19343D56) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19313889) NKikimr::NTestSuiteQuoterWithKesusTest::TCurrentTest::Execute()+1204 (0x189D82A4) NUnitTest::TTestFactory::Execute()+2438 (0x19315156) NUnitTest::RunMain(int, char**)+5213 (0x1933E2CD) ??+0 (0x7FBBEC535D90) __libc_start_main+128 (0x7FBBEC535E40) _start+41 (0x16320029) |96.7%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 14461, MsgBus: 5002 2025-04-10T12:24:37.592903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659390788162581:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:37.592962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001be/r3tmp/tmpZSzYkO/pdisk_1.dat 2025-04-10T12:24:37.935083Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:37.965045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:37.965153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:37.966968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14461, node 1 2025-04-10T12:24:38.025038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:38.025068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:38.025074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
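Note on the QuoterWithKesusTest::PrefetchCoefficient failure above: the assertion expected a clearance of Success but got Deadline. One plausible reading of the trace is that the leaf resource ("root/leaf", effective MaxUnitsPerSecond 100, PrefetchCoefficient 1000) kept reporting Balance 0 / FreeBalance 0 while Kesus allocated only 10 units per ~100 ms tick, so a request sized against the prefetch buffer could not be satisfied before its deadline. A minimal sketch of that bookkeeping, assuming a DRR-style proxy that targets rate times PrefetchCoefficient of locally buffered quota (names and structure are illustrative; the real logic lives in ydb/core/quoter):

#include <cstdio>

// Illustrative prefetch bookkeeping: the proxy tries to keep
// Rate * PrefetchCoefficient units buffered, but Kesus refills at base rate.
struct TPrefetchBucket {
    double Rate = 100.0;                 // MaxUnitsPerSecond from the config above
    double PrefetchCoefficient = 1000.0; // from HierarchicalDRRResourceConfig
    double Balance = 0.0;                // buffered, not yet consumed units

    double PrefetchTarget() const { return Rate * PrefetchCoefficient; } // 100000
    void OnAllocated(double amount) { Balance += amount; }
    bool TryConsume(double amount) {
        if (Balance < amount) {
            return false; // not enough buffered quota yet
        }
        Balance -= amount;
        return true;
    }
};

int main() {
    TPrefetchBucket bucket;
    // ~5 seconds of allocations at 10 units per 100 ms, as in the log:
    for (int tick = 0; tick < 50; ++tick) {
        bucket.OnAllocated(10.0);
    }
    // A large request still fails: refills at base rate cannot approach the
    // 100000-unit prefetch target within a test-sized deadline.
    std::printf("balance=%.0f target=%.0f granted=%d\n",
                bucket.Balance, bucket.PrefetchTarget(),
                static_cast<int>(bucket.TryConsume(1000.0)));
    return 0;
}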
2025-04-10T12:24:38.025175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5002 TClient is connected to server localhost:5002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:38.517503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.083105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659403673065142:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.083159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659403673065134:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.083289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.087211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.095984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659403673065148:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:40.172748Z node 1 :TX_PROXY ERROR: Actor# [1:7491659403673065199:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:40.462523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:40.572628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.396447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.593061Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659390788162581:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:42.593128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:42.720435Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWJlN2I2ZjktNDI2MDhiZDctNjQyMTVmM2QtOTJhNTU5NWI=, ActorId: [1:7491659412263008237:2968], ActorState: ExecuteState, TraceId: 01jrfthq9j21bar0f1tnwn566t, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0EAFA42D8F 18. ??:0: ?? @ 0x7F0EAFA42E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 14570, MsgBus: 12333 2025-04-10T12:24:37.856928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659388972894399:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:37.856995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001af/r3tmp/tmp92Xq4X/pdisk_1.dat 2025-04-10T12:24:38.161068Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14570, node 1 2025-04-10T12:24:38.225790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:38.225828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:38.225840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:38.226022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:38.227728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:38.227886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:38.230141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12333 TClient is connected to server localhost:12333 
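Note on the KqpSnapshotIsolation failures: the report above and the sibling reports that follow all reduce to the same PRECONDITION_FAILED issue, "SnapshotRW can only be used with olap tables". The tests create row (OLTP) tables and then run under SnapshotRW isolation, which the server rejects before execution; the remaining entries in the FAIL list are most likely the same pattern in the other suite variants. The "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" tail is library/cpp/testing/unittest's inline character diff between the actual and expected values. A hedged sketch of the assertion shape that produces this output format (the real checks are in ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp; the status strings here are stand-ins for the SDK's EStatus values):

#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/string.h>

// UNIT_ASSERT_VALUES_EQUAL_C(actual, expected, comment) reports a mismatch as
// "(actual != expected)" plus the comment and a character-level diff, which is
// the same "(PRECONDITION_FAILED != SUCCESS) ... with diff: ..." text seen above.
Y_UNIT_TEST_SUITE(SnapshotIsolationReportSketch) {
    Y_UNIT_TEST(RowTableRejectsSnapshotRW) {
        const TString actualStatus = "PRECONDITION_FAILED"; // what the server returned
        UNIT_ASSERT_VALUES_EQUAL_C(actualStatus, "SUCCESS",
            "SnapshotRW can only be used with olap tables.");
    }
}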
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:38.671870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.460677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659401857796747:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.460803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659401857796727:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.461174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.465059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.474421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659401857796756:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-10T12:24:40.529891Z node 1 :TX_PROXY ERROR: Actor# [1:7491659401857796807:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:40.822377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:40.939683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.812162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.861631Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659388972894399:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:42.861815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:43.197594Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTM0YzdhZGQtNDg1MTA4OWYtY2ZkNjg2ODMtYWNiMTA5MTk=, ActorId: [1:7491659414742707280:2968], ActorState: ExecuteState, TraceId: 01jrfthqt72wfcjef2t8wxz35v, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F5EC7749D8F 18. ??:0: ?? @ 0x7F5EC7749E3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 3564, MsgBus: 1591 2025-04-10T12:24:37.273245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659387596795139:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:37.273335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/000195/r3tmp/tmpPYPlyd/pdisk_1.dat 2025-04-10T12:24:37.572615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3564, node 1 2025-04-10T12:24:37.646916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:37.646952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:37.646988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:37.647159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:37.664242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:37.664365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:37.665595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1591 TClient is connected to server localhost:1591 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:38.161595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.153053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400481697670:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.153215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400481697691:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.153352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.157840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.169193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659400481697708:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:40.223456Z node 1 :TX_PROXY ERROR: Actor# [1:7491659400481697759:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:40.558656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:40.673286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.568601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.319354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659387596795139:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:42.324507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:42.901979Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgyZjMzZDEtZDRmY2E4NjItMjZlZjdlZDAtNTg5NWE0YjM=, ActorId: [1:7491659409071641009:2968], ActorState: ExecuteState, TraceId: 01jrfthqhyfg2m81kt56zmm7ws, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F3E131E9D8F 18. ??:0: ?? @ 0x7F3E131E9E3F 19. ??:0: ?? @ 0x1656B028 >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-04-10T12:24:41.366050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:24:41.366297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:24:41.366496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0002bf/r3tmp/tmpzLeVnR/pdisk_1.dat 2025-04-10T12:24:41.758492Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:594:2519] 2025-04-10T12:24:41.758544Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:594:2519] 2025-04-10T12:24:41.758602Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:594:2519] 2025-04-10T12:24:41.758667Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:594:2519] 2025-04-10T12:24:41.758850Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:594:2519] 2025-04-10T12:24:41.758901Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:594:2519] 2025-04-10T12:24:41.759603Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:594:2519] 2025-04-10T12:24:41.759655Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:594:2519] 2025-04-10T12:24:41.759681Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:594:2519] 2025-04-10T12:24:41.759720Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:594:2519] 2025-04-10T12:24:41.759747Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:594:2519] 2025-04-10T12:24:41.759917Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-10T12:24:41.759974Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:24:41.761907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.762329Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-10T12:24:41.762465Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.762968Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-10T12:24:41.773048Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:24:41.773135Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-10T12:24:41.773408Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T12:24:41.773507Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.774000Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited cookie 1 for step 6 2025-04-10T12:24:41.774167Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:24:41.774242Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:24:41.774434Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-10T12:24:41.774544Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.774850Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-10T12:24:41.775007Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-10T12:24:41.775051Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:24:41.775217Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-10T12:24:41.775267Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.775540Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-10T12:24:41.775764Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] ::Bootstrap [1:601:2525] 2025-04-10T12:24:41.775799Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] lookup [1:601:2525] 2025-04-10T12:24:41.775881Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] queue send [1:601:2525] 2025-04-10T12:24:41.776161Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] forward result local node, try to connect [1:601:2525] 2025-04-10T12:24:41.776206Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545]::SendEvent [1:601:2525] 2025-04-10T12:24:41.776393Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] connected with status OK role: Leader [1:601:2525] 2025-04-10T12:24:41.776436Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send queued [1:601:2525] 2025-04-10T12:24:41.776464Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [1:601:2525] 2025-04-10T12:24:41.776842Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:604:2527] 
2025-04-10T12:24:41.776891Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:604:2527] 2025-04-10T12:24:41.776963Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:604:2527] 2025-04-10T12:24:41.777029Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:604:2527] 2025-04-10T12:24:41.777090Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:604:2527] 2025-04-10T12:24:41.777136Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:604:2527] 2025-04-10T12:24:41.777311Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:604:2527] 2025-04-10T12:24:41.777355Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:604:2527] 2025-04-10T12:24:41.777389Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:604:2527] 2025-04-10T12:24:41.777433Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:604:2527] 2025-04-10T12:24:41.777467Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:604:2527] 2025-04-10T12:24:41.777606Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-10T12:24:41.777684Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:24:41.777767Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-10T12:24:41.777809Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.788267Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] client retry [1:91:2137] 2025-04-10T12:24:41.788369Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2137] 2025-04-10T12:24:41.788453Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [1:342:2378] 2025-04-10T12:24:41.788478Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [1:342:2378] 2025-04-10T12:24:41.788570Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [1:91:2137] 2025-04-10T12:24:41.788619Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [1:91:2137] 2025-04-10T12:24:41.788660Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result local node, try to connect [1:342:2378] 2025-04-10T12:24:41.788705Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131]::SendEvent [1:342:2378] 2025-04-10T12:24:41.788937Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] connected with status OK role: Leader [1:342:2378] 2025-04-10T12:24:41.788980Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send queued [1:342:2378] 2025-04-10T12:24:41.789024Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:342:2378] 2025-04-10T12:24:41.789131Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [1:91:2137] 2025-04-10T12:24:41.789158Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [1:91:2137] 2025-04-10T12:24:41.789195Z node 
1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [1:91:2137] 2025-04-10T12:24:41.789401Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-10T12:24:41.789482Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-10T12:24:41.789757Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-10T12:24:41.789839Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-10T12:24:41.790304Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send [1:342:2378] 2025-04-10T12:24:41.790348Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:342:2378] 2025-0 ... atExecutor::ITransaction, TDelete>, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #16 0x21e45361 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #17 0x21dedd4b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #18 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #19 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #20 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #21 0x36eab423 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #22 0x37073ae3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #23 0x37073082 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #24 0x37073082 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #25 0x3706b2e2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #26 0x3706aeba in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #27 0x18f9e247 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #28 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #29 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #30 0x18fb3b07 in __call<(lambda at 
/-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #31 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #38 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7ff2a54c1d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x19e3a60e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20 #2 0x19e3a60e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38 #3 0x2ae05b65 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16 #4 0x2ae05b65 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56 #5 0x2ae05b65 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34 #6 0x2b215464 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13 #7 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #8 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #9 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #10 0x20817234 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13 #11 0x2081e42e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5 #12 0x207b4a9a in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #13 0x207b4a9a in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) 
/-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #14 0x21e45361 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #15 0x21dedd4b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #16 0x1a71942c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #17 0x36ea8834 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #18 0x36ea10a9 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #19 0x36eab423 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #20 0x37073ae3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #21 0x37073082 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #22 0x37073082 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #23 0x3706b2e2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #24 0x3706aeba in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:298:9 #25 0x18f9e247 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #26 0x18fb3b07 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18fb3b07 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18fb3b07 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18fb3b07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7ff2a54c1d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) 
(BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 3238, MsgBus: 29446 2025-04-10T12:24:38.426809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659391726884581:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.426988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001a2/r3tmp/tmpj8aFp7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3238, node 1 2025-04-10T12:24:38.722980Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:38.723100Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:38.723858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:38.751334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:38.751361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:38.751368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:38.751501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:38.758973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:38.759108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:38.760753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29446 TClient is connected to server localhost:29446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.276240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.879406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400316819845:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.879417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400316819837:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.879529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.884048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.893517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659400316819851:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:40.988867Z node 1 :TX_PROXY ERROR: Actor# [1:7491659400316819902:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:41.296637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.403531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.253680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:43.426775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659391726884581:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:43.426847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:43.658531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWE0MGNhLWU0ZjA2NTE5LWYyYjRjNzA2LTdmYTdiOTMw, ActorId: [1:7491659413201730475:2968], ActorState: ExecuteState, TraceId: 01jrfthr6q8121pwx0v830tzd3, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F35C0AC0D8F 18. ??:0: ?? @ 0x7F35C0AC0E3F 19. ??:0: ?? @ 0x1656B028 >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 15335, MsgBus: 5686 2025-04-10T12:24:37.537675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659388635489963:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:37.537747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001c0/r3tmp/tmpK5EPKz/pdisk_1.dat 2025-04-10T12:24:37.913933Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15335, node 1 2025-04-10T12:24:37.957939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:37.958124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:37.963472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:37.994957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:37.994984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:37.994992Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:37.995132Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5686 TClient is connected to server localhost:5686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:38.441968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:40.248767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659401520392500:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.248979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659401520392508:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.249047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.253099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.262348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659401520392514:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:40.362605Z node 1 :TX_PROXY ERROR: Actor# [1:7491659401520392565:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:40.644374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:40.764106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.644087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.557704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659388635489963:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:42.559420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:43.008315Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjQ3YmY2MTQtNWFhMjQ2NDUtNjExMDIxZDctZjllMjU2ODI=, ActorId: [1:7491659410110335638:2968], ActorState: ExecuteState, TraceId: 01jrfthqjka1zgc0yt71j0n43y, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0615C04D8F 18. ??:0: ?? @ 0x7F0615C04E3F 19. ??:0: ?? @ 0x1656B028 |97.1%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 27652, MsgBus: 9136 2025-04-10T12:24:39.036580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659396966405690:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:39.036679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/000198/r3tmp/tmp4FznwI/pdisk_1.dat 2025-04-10T12:24:39.302832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27652, node 1 2025-04-10T12:24:39.389245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.389333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.390847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:39.390975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:39.391003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:39.391117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:39.391351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9136 TClient is connected to server localhost:9136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.910799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:41.577042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659405556340939:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.577195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659405556340912:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.577277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.581937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.592322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659405556340941:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:41.656625Z node 1 :TX_PROXY ERROR: Actor# [1:7491659405556340992:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:41.982060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.116047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:43.025453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:44.036580Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659396966405690:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:44.036647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:44.286459Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgzYjQ4MzctZDVlNzBiNjQtN2FmNzk1MDYtNjcyOWIxNDY=, ActorId: [1:7491659418441251369:2969], ActorState: ExecuteState, TraceId: 01jrfthrwqf9xdfdm6b7vm77ng, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4253E1BD8F 18. ??:0: ?? @ 0x7F4253E1BE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 21245, MsgBus: 18453 2025-04-10T12:24:38.956415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659393158310738:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.956532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00019e/r3tmp/tmpyGLxYX/pdisk_1.dat 2025-04-10T12:24:39.271783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:39.285789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.285877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.287698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21245, node 1 2025-04-10T12:24:39.346742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:39.346781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:39.346790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:39.346909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18453 TClient is connected to server localhost:18453 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.817307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:41.523995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659406043213284:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.524138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659406043213296:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.524203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.528475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.541069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659406043213298:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:41.634360Z node 1 :TX_PROXY ERROR: Actor# [1:7491659406043213349:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:41.918738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.031504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.909878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:43.956412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659393158310738:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:43.956556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:44.176067Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzNkYmM3ODEtYWIzN2I1ZmQtZWYxZGNiNTEtODkzYmFhYTI=, ActorId: [1:7491659418928123619:2969], ActorState: ExecuteState, TraceId: 01jrfthrrvb94b5q17cah3xvxv, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FF76E488D8F 18. ??:0: ?? @ 0x7FF76E488E3F 19. ??:0: ?? @ 0x1656B028 >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 4811, MsgBus: 17385 2025-04-10T12:24:38.811125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659390970275201:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.811231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0001a0/r3tmp/tmp4qH15J/pdisk_1.dat 2025-04-10T12:24:39.123794Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4811, node 1 2025-04-10T12:24:39.195124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:39.195163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:39.195179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:39.195353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:39.200841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.200957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.202326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:17385 TClient is connected to server localhost:17385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.735825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:41.254599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659403855177752:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.254707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.255553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659403855177764:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.259939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.271289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659403855177766:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:41.329986Z node 1 :TX_PROXY ERROR: Actor# [1:7491659403855177817:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:41.575473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.748010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:41.748010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:41.748252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:41.748405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:41.748580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:41.748615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:41.748865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:41.748919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:41.748987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:41.749061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:41.749148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-10T12:24:41.749181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:41.749259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:41.749283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:41.749372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:41.749404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:41.749509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:41.749525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:41.749973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:41.750018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:41.750112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:41.750166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:41.750232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7491659403855178021:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:41.750247Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7491659403855178008:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:41.787690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659403855178025:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:41.787767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659403855178025:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:41.788026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_ ... t_id=72075186224038013;self_id=[1:7491659421035053436:3318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.200046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7491659421035053436:3318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.200359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7491659421035053335:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.200560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7491659421035053335:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.202952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7491659421035052972:3242];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.203119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7491659421035052972:3242];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.203397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7491659421035052650:3157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.203543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7491659421035052650:3157];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.205963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7491659421035053305:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.206059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7491659421035052673:3166];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.206093Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038046;self_id=[1:7491659421035053305:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.206300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7491659421035052673:3166];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.208018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491659421035052885:3203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.208221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7491659421035052885:3203];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.208528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7491659421035052897:3208];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.208672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7491659421035052897:3208];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.210561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491659421035053569:3325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.210684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491659421035053569:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.211111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7491659421035053439:3319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.211332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7491659421035053439:3319];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.213198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7491659421035053156:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.213366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7491659421035053156:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.213621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7491659421035053398:3316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.213759Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038003;self_id=[1:7491659421035053398:3316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.215718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491659421035053326:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.215834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491659421035053326:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.216222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7491659421035053328:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.216370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7491659421035053328:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.216781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7491659421035052477:3121];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.217009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7491659421035052477:3121];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.218308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7491659421035053236:3289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.218488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7491659421035053236:3289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.218779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491659421035053397:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.218953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491659421035053397:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.220320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7491659421035053333:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:51.220491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7491659421035053333:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18F652BE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18F43F0A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18F4B387 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F4B387 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F4B387 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18F4A553 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F30E1037D8F 18. ??:0: ?? @ 0x7F30E1037E3F 19. ??:0: ?? @ 0x1656B028 >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 9725, MsgBus: 62472 2025-04-10T12:24:38.972854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659393731625240:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.973070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00019c/r3tmp/tmpkJcK9u/pdisk_1.dat 2025-04-10T12:24:39.340631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9725, node 1 2025-04-10T12:24:39.368431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.368566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.377561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:39.415902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:39.415927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:39.415937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:39.416065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:62472 TClient is connected to server localhost:62472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.918340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:39.927170Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-10T12:24:41.816239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659406616527775:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.816408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.816793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659406616527802:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.821061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.833962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659406616527804:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:41.918768Z node 1 :TX_PROXY ERROR: Actor# [1:7491659406616527855:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:42.244848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.403139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:42.403347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:42.403609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:42.403702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:42.403849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:42.403995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:42.404148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:42.404278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:42.404403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:42.404416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:42.404441Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:42.404514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:42.404558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:42.404637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:42.404646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:42.404711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:42.404771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7491659410911495351:2347];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:42.404793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:42.404898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:42.405023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:42.405137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:42.405298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:42.405391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:42.405546Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7491659410911495353:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:42.408377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:24:42.408451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:24:42.408680Z ... 6224037919;self_id=[1:7491659415206464299:2548];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.169551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7491659415206464299:2548];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.169761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7491659415206464283:2543];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037930;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.169854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[1:7491659415206464036:2513];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.169896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7491659415206464283:2543];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037930;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.170020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[1:7491659415206464036:2513];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037972;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.170261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7491659415206463824:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037904;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.170394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7491659415206463824:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037904;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.170837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[1:7491659415206464392:2583];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.170847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[1:7491659415206464069:2519];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037973;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171005Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037916;self_id=[1:7491659415206464392:2583];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[1:7491659415206464069:2519];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037973;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7491659415206464273:2542];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659415206464301:2549];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7491659415206464273:2542];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.171555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7491659415206464301:2549];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.178511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7491659415206464285:2544];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.178730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7491659415206464285:2544];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.178999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7491659415206464306:2550];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[1:7491659415206464102:2526];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7491659415206464306:2550];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037937;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[1:7491659415206464102:2526];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7491659415206464561:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037905;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179533Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037977;self_id=[1:7491659415206464162:2528];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037977;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7491659415206464561:2597];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.179673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[1:7491659415206464162:2528];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037977;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[1:7491659415206464323:2556];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7491659415206464463:2590];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[1:7491659415206464323:2556];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7491659415206464463:2590];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[1:7491659415206463986:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037988;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[1:7491659415206463971:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037987;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[1:7491659415206463986:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037988;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.180691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[1:7491659415206463971:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037987;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.181017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[1:7491659415206464061:2516];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037982;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:53.181147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[1:7491659415206464061:2516];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037982;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F95C9F81D8F 18. ??:0: ?? @ 0x7F95C9F81E3F 19. ??:0: ?? 
@ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::DescribeConsumer Test command err: 2025-04-10T12:24:39.200848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659398694007646:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:39.201106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/000190/r3tmp/tmpt8YubE/pdisk_1.dat 2025-04-10T12:24:39.344689Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-10T12:24:39.480847Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18631, node 1 2025-04-10T12:24:39.518817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/a650/000190/r3tmp/yandexdUeFJ6.tmp 2025-04-10T12:24:39.518846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/a650/000190/r3tmp/yandexdUeFJ6.tmp 2025-04-10T12:24:39.519025Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/a650/000190/r3tmp/yandexdUeFJ6.tmp 2025-04-10T12:24:39.519190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:39.538776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.538883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.540663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:39.577880Z INFO: TTestServer started on Port 24905 GrpcPort 18631 TClient is connected to server localhost:24905 PQClient connected to localhost:18631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:39.863482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T12:24:39.895379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
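The KQP assertion failures above — (BAD_REQUEST != PRECONDITION_FAILED) in KqpSinkTx::TInvalidateOnError and (ABORTED != SUCCESS) in KqpSnapshotIsolation::TConflictReadWrite — share one shape: a unittest status check on an SDK result whose mismatch is printed together with the server issues ("Bad request. Table: `/Root/KV`.", "Transaction locks invalidated. Table: `/Root/Test`."). A minimal sketch of that check pattern follows; AssertStatus and TResult are hypothetical names introduced here, and the SDK header path is an assumption — this is illustrative, not the actual test source:

    #include <library/cpp/testing/unittest/registar.h>
    // NYdb::EStatus comes from the public C++ SDK; the exact header path
    // depends on the SDK layout (assumption).

    // Hypothetical helper mirroring the failed checks above; TResult stands
    // in for the SDK query/commit result type.
    template <typename TResult>
    void AssertStatus(const TResult& result, NYdb::EStatus expected) {
        // On mismatch this raises the "(ACTUAL != EXPECTED) ... with diff:"
        // error seen in the traces above; the _C variant appends the server
        // issues text to the failure message.
        UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), expected,
                                   result.GetIssues().ToString());
    }

The "(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)" fragment in the trace is the framework's character-level diff of the two enum names, which is why that part of the raw log looks garbled rather than truncated.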
2025-04-10T12:24:41.742566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659407283942854:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.742681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659407283942866:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.742776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.747444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.769432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659407283942868:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T12:24:42.061545Z node 1 :TX_PROXY ERROR: Actor# [1:7491659407283942932:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:42.097716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.173475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.193848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491659411578910237:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:24:42.194560Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjg4OTY4ZTEtNzY2NGRlZGItNmQyYzBiOC0xZTAwMDdmYQ==, ActorId: [1:7491659407283942851:2336], ActorState: ExecuteState, TraceId: 01jrfthpfc7wqpvk0ertxrjrhp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:24:42.196202Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:24:42.280655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491659411578910529:2629] 2025-04-10T12:24:44.197656Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659398694007646:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:44.197791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T12:24:48.456123Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T12:24:48.467681Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T12:24:48.468893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491659437348714595:2787], Recipient [1:7491659398694007860:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:48.468931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:48.468945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:24:48.468980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491659437348714591:2784], Recipient [1:7491659398694007860:2189]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T12:24:48.468993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:24:48.515628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T12:24:48.515995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:24:48.516197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T12:24:48.516235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-10T12:24:48.516260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-10T12:24:48.516300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-10T12:24:48.516341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 
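The ESchemeOpCreatePersQueueGroup propose above shows the topic the WithSDK::DescribeConsumer test creates: /Root/test-topic with one partition (TotalGroupCount: 1), 86400-second retention, and a "test-consumer" reader. A sketch of issuing the same creation through the public C++ topic SDK, assuming the gRPC endpoint logged for this run (18631); the header path and the exact fluent setter names vary between SDK versions, so treat this as a sketch to verify against the headers in use:

    #include <ydb-cpp-sdk/client/topic/client.h> // path is an assumption; differs per SDK layout
    #include <util/generic/yexception.h>

    void CreateTestTopic() {
        auto driverConfig = NYdb::TDriverConfig()
            .SetEndpoint("localhost:18631")  // GrpcPort from this test run
            .SetDatabase("/Root");
        NYdb::TDriver driver(driverConfig);
        NYdb::NTopic::TTopicClient client(driver);

        auto settings = NYdb::NTopic::TCreateTopicSettings()
            .RetentionPeriod(TDuration::Days(1))  // LifetimeSeconds: 86400
            .BeginConfigurePartitioningSettings()
                .MinActivePartitions(1)           // TotalGroupCount: 1
            .EndConfigurePartitioningSettings()
            .BeginAddConsumer("test-consumer")    // Consumers { Name: "test-consumer" }
            .EndAddConsumer();

        auto status = client.CreateTopic("/Root/test-topic", settings).ExtractValueSync();
        Y_ENSURE(status.IsSuccess(), status.GetIssues().ToString());
    }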
2025-04-10T12:24:48.516487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T12:24:48.516853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:24:48.516896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementP ... to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:24:55.412712Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:24:55.412727Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 #0 0x18eec551 in __asan_memcpy /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 #1 0x19172701 in MemCopy /-S/util/generic/mem_copy.h:18:9 #2 0x19172701 in TBuffer::Append(char const*, unsigned long) /-S/util/generic/buffer.cpp:54:5 #3 0x1fc4af46 in Acquire /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.h:208:18 #4 0x1fc4af46 in NYdb::Dev::NTopic::TWriteSessionImpl::FlushWriteIfRequiredImpl() /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:1366:64 #5 0x1fc4a4f0 in NYdb::Dev::NTopic::TWriteSessionImpl::WriteInternal(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session_impl.cpp:660:9 #6 0x1fc23871 in NYdb::Dev::NTopic::TWriteSession::Write(NYdb::Dev::NTopic::TContinuationToken&&, NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:72:19 #7 0x1fc2581e in NYdb::Dev::NTopic::TSimpleBlockingWriteSession::Write(NYdb::Dev::NTopic::TWriteMessage&&, NYdb::Dev::NTable::TTransaction*, TDuration const&) /-S/ydb/public/sdk/cpp/src/client/topic/impl/write_session.cpp:131:17 #8 0x18e500cc in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:52:13 #9 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #10 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #11 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #12 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #13 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #14 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #16 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #17 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #18 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #19 0x18e55bf3 in 
NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #20 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7ff157244d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7ff157244e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/.ya/build/build_root/a650/000190/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) 0x5030066349c9 is located 9 bytes inside of 32-byte region [0x5030066349c0,0x5030066349e0) freed by thread T0 here: #0 0x18f22a3d in operator delete(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:143:3 #1 0x18e50034 in UnRef /-S/util/generic/string.h:100:13 #2 0x18e50034 in UnRef /-S/util/generic/string.h:53:16 #3 0x18e50034 in UnRef /-S/util/generic/ptr.h:625:13 #4 0x18e50034 in ~TIntrusivePtr /-S/util/generic/ptr.h:523:9 #5 0x18e50034 in ~TBasicString /-S/util/generic/fwd.h:8:7 #6 0x18e50034 in ~TStringBuilder /-S/util/string/builder.h:8:11 #7 0x18e50034 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:27 #8 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #9 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #10 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #11 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #12 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #13 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #14 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #15 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #16 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #17 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #18 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #19 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #20 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #21 0x7ff157244d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) previously allocated by thread T0 here: #0 0x18f221dd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x165b3305 in Construct, std::__y1::allocator > > &> /-S/util/generic/string.h:207:17 #2 
0x165b3305 in TBasicString>::Clone() /-S/util/generic/string.h:228:9 #3 0x191c3ecc in Detach /-S/util/generic/string.h:376:13 #4 0x191c3ecc in MutRef /-S/util/generic/string.h:249:9 #5 0x191c3ecc in append /-S/util/generic/string.h:784:9 #6 0x191c3ecc in TStringOutput::DoWrite(void const*, unsigned long) /-S/util/stream/str.cpp:37:9 #7 0x18e4fedd in Write /-S/util/stream/output.h:74:13 #8 0x18e4fedd in Write /-S/util/stream/output.h:84:9 #9 0x18e4fedd in Out /-S/util/stream/output.h:199:11 #10 0x18e4fedd in operator<< /-S/util/stream/output.h:218:5 #11 0x18e4fedd in operator<< /-S/util/string/builder.h:33:21 #12 0x18e4fedd in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)::$_1::operator()(unsigned long) const /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:50:48 #13 0x18e29842 in NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:76:9 #14 0x18e56a27 in operator() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #15 0x18e56a27 in __invoke<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #16 0x18e56a27 in __call<(lambda at /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #17 0x18e56a27 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #18 0x18e56a27 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #19 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #20 0x196e6995 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #21 0x196e6995 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #22 0x196b64c8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #23 0x18e55bf3 in NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute() /-S/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp:31:1 #24 0x196b7d95 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #25 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #26 0x7ff157244d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x503006634700: 00 fa fa fa fd fd fd fd fa fa fd fd fd fd fa fa 0x503006634780: 00 00 00 fa fa fa fd fd fd fd fa fa fd fd fd fd 0x503006634800: fa fa fd fd fd fd fa fa fd fd fd fd fa fa fd fd 0x503006634880: fd fd fa fa fd fd fd fd fa fa fd fd fd fd fa fa 0x503006634900: fd fd fd fd fa fa fa fa fa fa fa fa fa fa fa fa =>0x503006634980: fa fa fa fa fa fa fa fa fd[fd]fd fd fa fa fa fa 0x503006634a00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634a80: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634b00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fd fd 0x503006634b80: fd fd fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634c00: fa fa fa fa fa fa fa fa fa fa fa fa fd fd fd fd Shadow byte legend (one shadow byte represents 8 application 
bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==2116888==ABORTING ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 19999, MsgBus: 25705 2025-04-10T12:24:38.989819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659391820781596:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.989931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/000199/r3tmp/tmpCzonns/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19999, node 1 2025-04-10T12:24:39.351556Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:39.351584Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:39.357968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:39.386578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:39.386712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:39.388345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:39.404700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:39.404750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:39.404765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:39.404951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25705 TClient is connected to server localhost:25705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
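The heap-use-after-free above (WithSDK::DescribeConsumer) is fully localized by the two stack traces: the 32-byte string region is allocated by operator<< into a TStringBuilder at topic_ut.cpp:50:48, freed by that builder's destructor at topic_ut.cpp:50:27, and later copied out of by __asan_memcpy. That combination points at a view or pointer taken from a string materialized out of a temporary TStringBuilder and read after the temporary dies. A minimal sketch of that failure mode, with illustrative names rather than the actual test code:

    #include <util/generic/strbuf.h>
    #include <util/string/builder.h>

    TStringBuf MakeConsumerName(size_t i) {
        // TStringBuilder is a TString underneath; the buffer it builds lives
        // only until the end of this full expression. The returned TStringBuf
        // is a non-owning view, so any later use reads freed memory.
        return TStringBuf(TStringBuilder() << "consumer-" << i); // dangles
    }

    TString MakeConsumerNameFixed(size_t i) {
        // Returning an owning TString keeps the buffer alive in the caller.
        return TStringBuilder() << "consumer-" << i;
    }

A later read through the stale view (the memcpy in the first trace) then lands 9 bytes inside the freed 32-byte region, exactly as the report states.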
2025-04-10T12:24:39.913623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:41.662762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659404705684144:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.662900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659404705684131:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.663662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:41.667245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-10T12:24:41.679234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659404705684160:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-10T12:24:41.764464Z node 1 :TX_PROXY ERROR: Actor# [1:7491659404705684211:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:42.151543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-10T12:24:42.320911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:42.320909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:42.321133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:42.321438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:42.321551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:42.321643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:42.321652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:42.321890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:42.321921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:42.322010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:42.322066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-04-10T12:24:42.322092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:42.322193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:42.322254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:42.322302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:42.322370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:42.322415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:42.322498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:42.322524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:42.322619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7491659409000651735:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:42.322620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:42.322736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:42.322837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:42.322937Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7491659409000651697:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:42.357695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491659409000651702:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:42.357757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7491659409000651702:2346];tablet_id=72075186224037889;process=TTxInitSchema::Exe ... cast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.158314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7491659426180527271:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.158470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7491659426180527271:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.158824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491659426180527264:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.158973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7491659426180527264:3337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.160352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7491659426180526720:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.160508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7491659426180526720:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.160936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7491659426180526668:3279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.161085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7491659426180526668:3279];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.166114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491659426180526638:3264];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.166324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7491659426180526638:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.170649Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038004;self_id=[1:7491659426180527244:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.170878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7491659426180527244:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.172675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7491659426180526675:3283];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.172860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7491659426180526675:3283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.173737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7491659426180527260:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.173935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7491659426180527260:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.174116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491659426180527273:3339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.174279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7491659426180527273:3339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.176358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7491659426180527304:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.176601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7491659426180527304:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.176737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[1:7491659426180526715:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.176953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[1:7491659426180526715:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.178792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7491659426180526708:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.178938Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038057;self_id=[1:7491659426180526708:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.185001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7491659426180527241:3332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.185181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7491659426180527241:3332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.195779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7491659426180526706:3297];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.196013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7491659426180526706:3297];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.196189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7491659426180527290:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.196301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7491659426180527290:3342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.203419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7491659426180527278:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.203631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7491659426180527278:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.204410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7491659426180526736:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.204523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7491659426180526736:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.219830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[1:7491659426180526729:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-04-10T12:24:55.220110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[1:7491659426180526729:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F1373FFBD8F 18. ??:0: ?? @ 0x7F1373FFBE3F 19. ??:0: ?? @ 0x1656B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-10T12:24:29.373399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:24:29.373605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:24:29.373749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00025d/r3tmp/tmp4knFDG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65183, node 1 TClient is connected to server localhost:14612 2025-04-10T12:24:29.926122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:29.968195Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:29.973298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:29.973368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:29.973403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:29.973737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:30.010309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:30.010457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:30.022454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:30.152842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-10T12:24:30.267409Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvBoot 2025-04-10T12:24:30.268938Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvRestored 2025-04-10T12:24:30.269278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:24:30.295576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:744:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:24:30.295987Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-10T12:24:30.305255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:30.305602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:30.305893Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:30.306021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:30.306132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:30.306303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:30.306435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:30.306578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:30.306709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:30.306844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:30.306988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:30.307115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:744:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:30.331487Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:744:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-10T12:24:30.333531Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-10T12:24:30.333891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:24:30.333978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:24:30.334209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:24:30.334400Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:24:30.334483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:24:30.334534Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:24:30.334633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:24:30.334698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:24:30.334743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:24:30.334782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:24:30.335007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:24:30.335080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:24:30.335127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:24:30.335174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:24:30.335274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:24:30.335365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:24:30.335429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:24:30.335480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:24:30.335559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:24:30.335600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:24:30.335636Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:24:30.335694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-10T12:24:30.335755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:24:30.335789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:24:30.336302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=84; 2025-04-10T12:24:30.336401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-10T12:24:30.336483Z node 1 :TX_COLUMNSHARD INFO: tablet_id ... 45528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-10T12:24:56.727934Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1282:3083], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-04-10T12:24:56.728354Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1283:3084], Recipient [1:744:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-04-10T12:24:56.728382Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-10T12:24:56.728546Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[29] (CS::GENERAL) apply at tablet 72075186224037888 2025-04-10T12:24:56.731223Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1 2025-04-10T12:24:56.731364Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911848;raw_bytes=96858239;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=16012912;raw_bytes=539964581;count=8;records=453277} inactive {blob_bytes=24530552;raw_bytes=822698724;count=16;records=696683} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:75/0:size=4378;count=18;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:75/0:size=4447;count=19;;1:size=57067;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445608;count=1;;7:size=1445920;count=1;;8:size=1445360;count=1;;9:size=2966864;count=4;;10:size=1445744;count=1;;11:size=1445408;count=1;;12:size=1445360;count=1;;13:size=1445528;count=1;;14:size=2598864;count=4;;15:size=1210528;count=1;;16:size=994992;count=1;;17:size=1445928;count=1;;18:size=1445448;count=1;;19:size=1445400;count=1;;20:size=2164288;count=3;;21:size=1420056;count=1;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-10T12:24:56.743459Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-10T12:24:56.743530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=with_appended.cpp:65;portions=27,;task_id=cf9714ee-160611f0-96c82c40-5d1140ce; 2025-04-10T12:24:56.743822Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:27;path_id:3;records_count:100082;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3527144;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-10T12:24:56.744014Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/491609.000000s;; 2025-04-10T12:24:56.744123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::cf9714ee-160611f0-96c82c40-5d1140ce; 2025-04-10T12:24:56.744192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:18931160;portions_count:27;); 2025-04-10T12:24:56.744241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:24:56.744301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:24:56.744361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-04-10T12:24:56.744422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-10T12:24:56.744468Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:24:56.744516Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:24:56.744555Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:24:56.744624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s; 2025-04-10T12:24:56.744672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T12:24:56.744877Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:19 Blob count: 1 VERIFY failed (2025-04-10T12:24:56.745124Z): 
tablet_id=72075186224037888;task_id=cf9714ee-160611f0-96c82c40-5d1140ce;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18D9C6A9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18D8A93B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x1A0A28D6) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48A2F961) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x3079BF8D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1EAB4133) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E997B10) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E7DDF04) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E77AC85) NActors::IActor::Receive(TAutoPtr&)+237 (0x19FD40AD) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x35AD47C5) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x35ACD03A) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x35AC9DE1) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+7904 (0x36452850) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4568 (0x1897A088) std::__y1::__function::__func, void ()>::operator()()+280 (0x1898C378) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192493C6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19218EF9) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1898B324) NUnitTest::TTestFactory::Execute()+2438 (0x1921A7C6) NUnitTest::RunMain(int, char**)+5213 (0x1924393D) ??+0 (0x7F02DAC03D90) __libc_start_main+128 (0x7F02DAC03E40) _start+41 (0x162EC029) |97.6%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> KqpStats::SysViewClientLost [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 19700, MsgBus: 10292 2025-04-10T12:24:27.442102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659347512093698:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:27.442217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/0002cd/r3tmp/tmpkh2Z8H/pdisk_1.dat 2025-04-10T12:24:27.804755Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19700, node 1 2025-04-10T12:24:27.846922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:27.851524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:27.855473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:27.877558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-10T12:24:27.877586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-10T12:24:27.877598Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-10T12:24:27.877754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10292 TClient is connected to server localhost:10292 WaitRootIsUp 'Root'... 
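Stripped of the tablet noise, the KqpSnapshotIsolation::TConflictWriteOlap abort above is a single status mismatch: under snapshot isolation the second of two conflicting writes must come back ABORTED, and the OLAP (column table) path returned SUCCESS. Judging by the "(result.GetStatus() == EStatus::ABORTED) failed ... with diff" wording, the check at kqp_snapshot_isolation_ut.cpp:92 has this shape (the variable name and the surrounding query execution are assumptions, not quoted code):

    // result is the outcome of the conflicting write, executed while another
    // transaction holds a snapshot over the same column table.
    UNIT_ASSERT_VALUES_EQUAL(result.GetStatus(), EStatus::ABORTED);
    // This run: GetStatus() was SUCCESS, hence the "(SUCCESS != ABORTED)" diff.

The ColumnShardTiers::TTLUsage crash next to it is a different kind of failure: a VERIFY in the test's TReadOnlyController tripped (verification=CompactionsLimit.Dec() >= 0, ro_controller.cpp:39), meaning the compaction budget counter went negative, an invariant guard firing rather than a status mismatch.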
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:28.430174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:28.459103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:28.634096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:28.777901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:28.854140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:30.459066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659360396997359:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:30.459188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:30.731240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.753992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.781231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.811623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.840550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.869785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-10T12:24:30.907159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659360396997866:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:30.907288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:30.907313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659360396997871:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:30.910813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-10T12:24:30.920051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659360396997873:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-10T12:24:30.983855Z node 1 :TX_PROXY ERROR: Actor# [1:7491659360396997927:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:31.710308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-10T12:24:32.442264Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659347512093698:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:32.442333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-10T12:24:42.794555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-10T12:24:42.794600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:57.564933Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287897556, txId: 281474976710672] shutting down 2025-04-10T12:24:57.619881Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-10T12:24:58.826041Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287898813, txId: 281474976710674] shutting down 2025-04-10T12:24:59.979830Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287899973, txId: 281474976710676] shutting down 2025-04-10T12:25:01.153040Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287901147, txId: 281474976710678] shutting down 2025-04-10T12:25:02.296327Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287902290, txId: 281474976710680] shutting down 2025-04-10T12:25:03.472337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287903465, txId: 281474976710682] shutting down 2025-04-10T12:25:04.649195Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287904643, txId: 281474976710684] shutting down 2025-04-10T12:25:05.823700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287905817, txId: 281474976710686] shutting down 2025-04-10T12:25:06.960215Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287906954, txId: 281474976710688] shutting down 2025-04-10T12:25:08.111854Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287908105, txId: 281474976710690] shutting down 2025-04-10T12:25:09.260219Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1744287909254, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B82265 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BAB3BC 15. ??:0: ?? @ 0x7F0003A79D8F 16. ??:0: ?? @ 0x7F0003A79E3F 17. ??:0: ?? @ 0x16611028 |97.9%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_DistributedTxCommit_CheckOffsetCommitForDifferentCases_SplitedTopic [GOOD] Test command err: 2025-04-10T12:24:38.330869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7491659391939366681:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.331078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-10T12:24:38.472999Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00018d/r3tmp/tmpDkoiBU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9992, node 1 2025-04-10T12:24:38.649632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:38.654716Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:38.658323Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-10T12:24:38.683942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/a650/00018d/r3tmp/yandexrK15W8.tmp 2025-04-10T12:24:38.683993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/a650/00018d/r3tmp/yandexrK15W8.tmp 2025-04-10T12:24:38.684238Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/a650/00018d/r3tmp/yandexrK15W8.tmp 2025-04-10T12:24:38.684461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-10T12:24:38.715915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:38.716064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:38.717661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:38.736492Z INFO: TTestServer started on Port 32294 GrpcPort 9992 TClient is connected to server localhost:32294 PQClient connected to localhost:9992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-10T12:24:38.999314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-10T12:24:39.032595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-10T12:24:40.639138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400529302098:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.639182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400529302074:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.639265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.643244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-10T12:24:40.646300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7491659400529302135:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.646397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-10T12:24:40.654493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7491659400529302103:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-10T12:24:40.900456Z node 1 :TX_PROXY ERROR: Actor# [1:7491659400529302159:2447] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-10T12:24:40.927462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-10T12:24:40.957957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-10T12:24:41.024145Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7491659400529302175:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-10T12:24:41.025942Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQ4NDM1NjUtYTRjZTI4YmEtZmJiYzgzY2YtOGYyYzhmOWI=, ActorId: [1:7491659400529302071:2336], ActorState: ExecuteState, TraceId: 01jrfthndp9rhx0g2vgtv902b7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-10T12:24:41.028124Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-10T12:24:41.030189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7491659404824269761:2628] 2025-04-10T12:24:43.331020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659391939366681:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:43.331085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-10T12:24:47.372624Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-10T12:24:47.383378Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-10T12:24:47.384466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7491659430594073829:2787], Recipient [1:7491659391939367108:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:47.384503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:47.384539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:24:47.384584Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7491659430594073825:2784], Recipient [1:7491659391939367108:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-10T12:24:47.384599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:24:47.452334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 2 ScaleUpPartitionWriteSpeedThresholdPercent: 2 ScaleDownPartitionWriteSpeedThresholdPercent: 1 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-10T12:24:47.452744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-10T12:24:47.452980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-10T12:24:47.453018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction tar ... 1078Z :DEBUG: [/Root] [/Root] [bab98359-7941f9a9-668b5c78-99165424] [] Returning serverBytesSize = 0 to budget 2025-04-10T12:25:16.731234Z :DEBUG: [/Root] [/Root] [bab98359-7941f9a9-668b5c78-99165424] [] Abort session to cluster 2025-04-10T12:25:16.732292Z :NOTICE: [/Root] [/Root] [bab98359-7941f9a9-668b5c78-99165424] Aborting read session. 
Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-10T12:25:16.732409Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_1_1_11844242880670106709_v1 grpc read done: success# 0, data# { } 2025-04-10T12:25:16.732464Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_1_1_11844242880670106709_v1 grpc read failed 2025-04-10T12:25:16.732518Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_1_1_11844242880670106709_v1 grpc closed 2025-04-10T12:25:16.732592Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_1_1_11844242880670106709_v1 is DEAD 2025-04-10T12:25:16.732982Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7491659533673291836:4194], Recipient [1:7491659469248781023:2842]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733014Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733034Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733070Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [1:7491659533673291834:3219] destroyed 2025-04-10T12:25:16.733145Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7491659533673291835:4193], Recipient [1:7491659469248781024:2843]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733168Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733179Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733193Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733211Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [1:7491659533673291833:3217] destroyed 2025-04-10T12:25:16.733248Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7491659533673291785:4171], Recipient [1:7491659430594073878:2470]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733261Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733272Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.733285Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733301Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7491659533673291784:3202] destroyed 2025-04-10T12:25:16.733354Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733373Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733389Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733455Z :INFO: [/Root] TraceId [] SessionId [producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0] PartitionId [1] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-10T12:25:16.733496Z :INFO: [/Root] TraceId [] SessionId [producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0] PartitionId [1] Generation [1] Write session will now close 2025-04-10T12:25:16.733543Z :DEBUG: [/Root] TraceId [] SessionId [producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0] PartitionId [1] Generation [1] Write session: aborting 2025-04-10T12:25:16.733802Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [1:7491659533673291781:3199] disconnected; active server actors: 1 2025-04-10T12:25:16.733841Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [1:7491659533673291781:3199] client test-consumer disconnected session test-consumer_1_1_11844242880670106709_v1 2025-04-10T12:25:16.733981Z :INFO: [/Root] TraceId [] SessionId [producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0] PartitionId [1] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-10T12:25:16.734025Z :DEBUG: [/Root] TraceId [] SessionId [producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0] PartitionId [1] Generation [1] Write session: destroy 2025-04-10T12:25:16.734165Z :DEBUG: [/Root] TraceId [] SessionId [producer-2|533b6878-de0034ab-56708d59-38850410_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-10T12:25:16.734895Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|8b4af3dd-b6c03c22-2b543a79-cddc0082_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-10T12:25:16.737262Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0 grpc read done: success: 0 data: 2025-04-10T12:25:16.737320Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0 grpc read failed 2025-04-10T12:25:16.737356Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0 grpc closed 2025-04-10T12:25:16.737369Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: producer-2|122cf2da-41bd7210-ca366fd4-4720111c_0 is DEAD 2025-04-10T12:25:16.738381Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [1:7491659533673291734:3194], Recipient [1:7491659533673291736:3194]: NActors::TEvents::TEvPoison 2025-04-10T12:25:16.738451Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037897 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-10T12:25:16.738669Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7491659533673291774:4168], Recipient [1:7491659469248781024:2843]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.738703Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.738719Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-10T12:25:16.738769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [1:7491659533673291773:3194] destroyed 2025-04-10T12:25:16.738839Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [1:7491659469248781024:2843], Partition 1, Sender [1:7491659469248781024:2843], Recipient [1:7491659469248781100:2849], Cookie: 0 2025-04-10T12:25:16.738896Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [1:7491659469248781024:2843], Recipient [1:7491659469248781100:2849]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-10T12:25:16.738929Z node 1 :PERSQUEUE TRACE: 
StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-10T12:25:16.738960Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::DropOwner. 2025-04-10T12:25:16.738999Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-10T12:25:16.739035Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:25:16.739103Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:25:16.739136Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:25:16.739164Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:25:16.809208Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491659430594073878:2470], Partition 0, Sender [0:0:0], Recipient [1:7491659430594073914:2473], Cookie: 0 2025-04-10T12:25:16.809207Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491659469248781024:2843], Partition 1, Sender [0:0:0], Recipient [1:7491659469248781100:2849], Cookie: 0 2025-04-10T12:25:16.809277Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491659469248781100:2849]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809293Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491659430594073914:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809304Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809313Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809353Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:25:16.809353Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:25:16.809423Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:25:16.809424Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:25:16.809444Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:25:16.809445Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:25:16.809472Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:25:16.809472Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-10T12:25:16.809625Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491659469248781023:2842], Partition 2, Sender [0:0:0], Recipient [1:7491659469248781103:2851], Cookie: 0 2025-04-10T12:25:16.809673Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491659469248781103:2851]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809690Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:25:16.809714Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:25:16.809742Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:25:16.809762Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:25:16.809778Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |98.0%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> 
test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |98.4%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-04-10T12:24:25.576706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-10T12:24:25.576900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-10T12:24:25.577072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/a650/00035e/r3tmp/tmpY8tkEW/pdisk_1.dat 2025-04-10T12:24:25.906965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:25.907058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:25.907101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:24:25.907256Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-10T12:24:25.907286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-10T12:24:26.039749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-10T12:24:26.039994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.040224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-10T12:24:26.040454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-10T12:24:26.040518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.040611Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:24:26.041326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-10T12:24:26.041521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-10T12:24:26.041573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:24:26.041606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:24:26.041785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.041824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.041908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.041959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-10T12:24:26.042003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-10T12:24:26.042034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-10T12:24:26.042176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:24:26.042640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:24:26.042680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:24:26.042808Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.042852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.042910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.042954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-10T12:24:26.043010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-10T12:24:26.043095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:24:26.043447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:24:26.043474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-10T12:24:26.043571Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.043606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-10T12:24:26.043649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.043680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-10T12:24:26.043729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-10T12:24:26.043756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-10T12:24:26.043798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-10T12:24:26.047616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-10T12:24:26.048194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:24:26.048242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
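The repeated "Change state for txid 1:0 2 -> 3" and "3 -> 128" lines above trace the schemeshard suboperation state machine: shards are created (or skipped, "no shards to create, do next state"), parts are configured, and the transaction is then proposed to the coordinator at tablet 72057594046316545. A simplified model of that progression, using only the state numbers visible in this trace (the enum labels and the terminal value are illustrative assumptions, not the real schemeshard definitions):

// Illustrative only: mirrors the 2 -> 3 -> 128 transitions in the log above.
enum ETxState {
    CreateParts    = 2,    // allocate shards; may be a no-op ("no shards to create")
    ConfigureParts = 3,    // push configuration to the affected tablets
    Propose        = 128,  // hand the tx to the coordinator for planning
    Done           = 255,  // assumed terminal marker; value not shown in the log
};

ETxState NextState(ETxState state) {
    switch (state) {
        case CreateParts:    return ConfigureParts; // "Change state for txid 1:0 2 -> 3"
        case ConfigureParts: return Propose;        // "Change state for txid 1:0 3 -> 128"
        case Propose:        return Done;           // coordinator assigns a plan step
        default:             return state;          // terminal states are sticky
    }
}

Each transition in the trace is bracketed by "TSideEffects ApplyOnExecute / ApplyOnComplete", consistent with every state change being made durable so the operation can resume from the same state after a tablet restart.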
2025-04-10T12:24:26.048428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-10T12:24:26.049753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-10T12:24:26.049804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-10T12:24:26.049844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-10T12:24:26.049983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-10T12:24:26.050417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:26.050464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:26.050496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:24:26.050629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-10T12:24:26.050656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-10T12:24:26.050723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-10T12:24:26.050764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-10T12:24:26.050806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-10T12:24:26.087544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-10T12:24:26.087673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-04-10T12:24:26.087710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-10T12:24:26.088313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-10T12:24:26.088385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-10T12:24:26.134319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-10T12:24:26.134450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-10T12:24:26.146024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-10T12:24:26.225205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-04-10T12:24:26.226069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:26.226125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-10T12:24:26.226188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-10T12:24:26.226380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-10T12:24:26.226422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-10T12:24:26.226536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-10T12:24:26.226696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... es: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-10T12:26:34.776072Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-10T12:26:34.796857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:26:34.796930Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:26:34.796957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T12:26:34.797023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T12:26:34.797051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-10T12:26:34.797132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-04-10T12:26:34.797192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0, RowCount 0, DataSize 0 2025-04-10T12:26:34.797217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-10T12:26:34.797275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-10T12:26:34.797358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-10T12:26:34.807721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-10T12:26:34.807781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-10T12:26:34.807808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-10T12:26:34.880862Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1
2025-04-10T12:26:34.881290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1580:3193], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 20 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0
2025-04-10T12:26:34.881328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats
2025-04-10T12:26:34.881367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.002
2025-04-10T12:26:34.881446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0
2025-04-10T12:26:34.881473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1
2025-04-10T12:26:34.934654Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats
2025-04-10T12:26:34.934718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats
2025-04-10T12:26:34.934745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1
2025-04-10T12:26:34.934800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1
2025-04-10T12:26:34.934828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1
2025-04-10T12:26:34.934895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2
2025-04-10T12:26:34.934955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0, RowCount 2, DataSize 54
2025-04-10T12:26:34.934981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037895, followerId 0
2025-04-10T12:26:34.935037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480
2025-04-10T12:26:34.935130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-10T12:26:34.945488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats
2025-04-10T12:26:34.945560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats
2025-04-10T12:26:34.945588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0
2025-04-10T12:26:35.006956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.007024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.007085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-10T12:26:35.007108Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-10T12:26:35.028607Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T12:26:35.113423Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1
2025-04-10T12:26:35.198256Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.198327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.198391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-10T12:26:35.198413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-10T12:26:35.219526Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T12:26:35.303515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-10T12:26:35.387236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.387302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-10T12:26:35.387508Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQ2YjQ0ZjktZmFmMjAzYTItMzFmZWQ3ZGQtNmQ3OTJjMDc=, ActorId: [1:2034:3520], ActorState: ExecuteState, TraceId: 01jrfthc06938b68ddkgcjne9h, Create QueryResponse for error on request, msg:
2025-04-10T12:26:35.387693Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=MjQ2YjQ0ZjktZmFmMjAzYTItMzFmZWQ3ZGQtNmQ3OTJjMDc=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
2025-04-10T12:26:35.388767Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-10T12:26:35.388800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture()+28 (0x191926EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1964F830)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x4905B847)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26179 (0x18DE2DB3)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18D9DEF8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19686856)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x196563A9)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18D9CDA4)
NUnitTest::TTestFactory::Execute()+2438 (0x19657C76)
NUnitTest::RunMain(int, char**)+5213 (0x19680DCD)
??+0 (0x7F1E1B768D90)
__libc_start_main+128 (0x7F1E1B768E40)
_start+41 (0x16510029)
|98.6%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
|98.8%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD]
Test command err:
2025-04-10T12:24:24.638177Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T12:24:24.726074Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot
2025-04-10T12:24:24.730454Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored
2025-04-10T12:24:24.730846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T12:24:24.755886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T12:24:24.756193Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T12:24:24.764605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:24:24.764841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:24:24.765063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T12:24:24.765180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T12:24:24.765311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T12:24:24.765453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:24:24.765567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T12:24:24.765684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T12:24:24.765791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T12:24:24.765925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.766047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T12:24:24.766179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T12:24:24.792180Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T12:24:24.796768Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T12:24:24.797141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T12:24:24.797195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T12:24:24.797381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:24.797551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T12:24:24.797649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T12:24:24.797712Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T12:24:24.797830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T12:24:24.797905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T12:24:24.797950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T12:24:24.797982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T12:24:24.798176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:24.798251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T12:24:24.798294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T12:24:24.798337Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T12:24:24.798459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T12:24:24.798543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T12:24:24.798588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T12:24:24.798617Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T12:24:24.798688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T12:24:24.798745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T12:24:24.798780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T12:24:24.798831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T12:24:24.798879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T12:24:24.798908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T12:24:24.799328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48;
2025-04-10T12:24:24.799409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36;
2025-04-10T12:24:24.799498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44;
2025-04-10T12:24:24.799616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56;
2025-04-10T12:24:24.799788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T12:24:24.799845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T12:24:24.799880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T12:24:24.800135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T12:24:24.800184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.800216Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.800395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T12:24:24.800440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T12:24:24.800475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T12:24:24.800649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T12:24:24.800687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T12:24:24.800718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T1 ...
ctors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:143:16
#31 0xff9c125 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9
#32 0xff9879f in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13
#33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#45 0x7f78fc4bbd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
Indirect leak of 8 byte(s) in 1 object(s) allocated from:
#0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0xf4fb11d in NObjectFactory::TFactoryObjectCreator::Create() const /-S/library/cpp/object_factory/object_factory.h:38:20
#2 0x1c9aded9 in MakeHolder /-S/library/cpp/object_factory/object_factory.h:137:38
#3 0x1c9aded9 in NKikimr::NOlap::NStorageOptimizer::IOptimizerPlannerConstructor::BuildDefault() /-S/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.h:204:23
#4 0x1c9ac26f in NKikimr::NOlap::TIndexInfo::DeserializeOptionsFromProto(NKikimrSchemeOp::TColumnTableSchemeOptions const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:209:40
#5 0x1c9b292b in NKikimr::NOlap::TIndexInfo::DeserializeFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:250:5
#6 0x1c9bf54b in NKikimr::NOlap::TIndexInfo::BuildFromProto(NKikimrSchemeOp::TColumnTableSchema const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/scheme/index_info.cpp:333:17
#7 0x25b38dab in NKikimr::NOlap::TColumnEngineForLogs::RegisterSchemaVersion(NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:103:29
#8 0x25b3486e in NKikimr::NOlap::TColumnEngineForLogs::TColumnEngineForLogs(unsigned long, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&, NKikimr::NOlap::TSnapshot const&, unsigned long, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData const&, std::__y1::shared_ptr const&) /-S/ydb/core/tx/columnshard/engines/column_engine_logs.cpp:42:5
#9 0x2a213c7a in make_unique &, std::__y1::shared_ptr &, std::__y1::shared_ptr &, const NKikimr::NOlap::TSnapshot &, const unsigned int &, NKikimr::NOlap::IColumnEngine::TSchemaInitializationData, std::__y1::shared_ptr &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:30
#10 0x2a213c7a in NKikimr::NColumnShard::TTablesManager::AddSchemaVersion(unsigned int, NKikimr::NOlap::TSnapshot const&, NKikimrSchemeOp::TColumnTableSchema const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:282:24
#11 0x2a215507 in NKikimr::NColumnShard::TTablesManager::AddTableVersion(NKikimr::NColumnShard::TInternalPathId, NKikimr::NOlap::TSnapshot const&, NKikimrTxColumnShard::TTableVersionInfo const&, std::__y1::optional const&, NKikimr::NIceDb::TNiceDb&) /-S/ydb/core/tx/columnshard/tables_manager.cpp:320:9
#12 0x2a0e6a09 in NKikimr::NColumnShard::TColumnShard::RunEnsureTable(NKikimrTxColumnShard::TCreateTable const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:431:19
#13 0x2a0e5370 in NKikimr::NColumnShard::TColumnShard::RunInit(NKikimrTxColumnShard::TInitShard const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:373:9
#14 0x2a0e4ae1 in NKikimr::NColumnShard::TColumnShard::RunSchemaTx(NKikimrTxColumnShard::TSchemaTxBody const&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/columnshard_impl.cpp:328:13
#15 0xff0a29e in NKikimr::NColumnShard::TSchemaTransactionOperator::ProgressOnExecute(NKikimr::NColumnShard::TColumnShard&, NKikimr::NOlap::TSnapshot const&, NKikimr::NTabletFlatExecutor::TTransactionContext&) /-S/ydb/core/tx/columnshard/transactions/operators/schema.h:94:19
#16 0x25cccf3d in NKikimr::NColumnShard::TColumnShard::TTxProgressTx::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__progress_tx.cpp:80:13
#17 0x1849b274 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35
#18 0x184609f6 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4143:9
#19 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
#20 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33
#21 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45
#22 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22
#23 0x3148df63 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13
#24 0x3146cf82 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20
#25 0x3146cf82 in NKikimr::TEvTxProcessing::TEvPlanStepAck::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20
#26 0x3146be97 in NKikimr::NTxUT::PlanSchemaTx(NActors::TTestBasicRuntime&, NActors::TActorId const&, NKikimr::NOlap::TSnapshot) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:79:5
#27 0x31483b82 in NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, NKikimr::NOlap::TSnapshot const&, bool) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:480:13
#28 0xff9b983 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:219:5
#29 0xff9878c in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13
#30 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#31 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#32 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#33 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#34 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#35 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#37 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#38 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
SUMMARY: AddressSanitizer: 2643314 byte(s) leaked in 62813 allocation(s).
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD]
Test command err:
2025-04-10T12:24:34.610549Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T12:24:34.701952Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot
2025-04-10T12:24:34.705710Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored
2025-04-10T12:24:34.706009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T12:24:34.722025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T12:24:34.722298Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T12:24:34.728982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:24:34.729135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:24:34.729299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T12:24:34.729390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T12:24:34.729465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T12:24:34.729550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:24:34.729615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T12:24:34.729687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T12:24:34.729759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T12:24:34.729839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T12:24:34.729918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T12:24:34.729986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T12:24:34.750133Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-10T12:24:34.754452Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T12:24:34.754849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T12:24:34.754912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T12:24:34.755137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:34.755333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T12:24:34.755416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T12:24:34.755465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T12:24:34.755592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T12:24:34.755672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T12:24:34.755726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T12:24:34.755770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T12:24:34.755969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:34.756055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T12:24:34.756099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T12:24:34.756134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T12:24:34.756263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T12:24:34.756325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T12:24:34.756375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T12:24:34.756409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T12:24:34.756480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T12:24:34.756541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T12:24:34.756575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T12:24:34.756631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T12:24:34.756676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T12:24:34.756705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T12:24:34.757174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52;
2025-04-10T12:24:34.757287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41;
2025-04-10T12:24:34.757386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44;
2025-04-10T12:24:34.757508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54;
2025-04-10T12:24:34.757679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T12:24:34.757738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T12:24:34.757777Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T12:24:34.758045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T12:24:34.758101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T12:24:34.758135Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T12:24:34.758308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T12:24:34.758351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T12:24:34.758389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T12:24:34.758597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T12:24:34.758653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T12:24:34.758684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T1 ...
anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9
#32 0xff98849 in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13
#33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#45 0x7fd8e6403d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
Indirect leak of 8 byte(s) in 1 object(s) allocated from:
#0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x2a1b7206 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10
#2 0x2a1b7206 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
#3 0x2a1b7206 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
#4 0x2a1b7206 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19
#5 0x2a1b7206 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:358:25
#6 0x2a1b7206 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2144:51
#7 0x2a1a9544 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1528:5
#8 0x2a1a9544 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23
#9 0x2a18eb4d in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30
#10 0x2a18eb4d in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24
#11 0x2a1842ba in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9
#12 0x2a1842ba in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5
#13 0x2a1842ba in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51
#14 0x2a1842ba in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10
#15 0x2a1842ba in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12
#16 0x25bd53a5 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23
#17 0x25c2d860 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53
#18 0x25de8b94 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:26:21
#19 0x25dea128 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:45:15
#20 0x25d99e92 in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:615:22
#21 0x25d1cbc6 in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:449:13
#22 0x115b762c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
#23 0x2cc61cf4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33
#24 0x2cc5a569 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45
#25 0x2cc648e3 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22
#26 0x3148fad2 in NKikimr::NEvents::TDataEvents::TEvWriteResult* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:446:13
#27 0x3146f664 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:510:20
#28 0x3146f664 in NKikimr::NTxUT::WaitWriteResult(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector>*) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:102:26
#29 0x31470de1 in NKikimr::NTxUT::WriteDataImpl(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, unsigned long, TBasicString> const&, std::__y1::shared_ptr const&, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:128:16
#30 0x31471e10 in NKikimr::NTxUT::WriteData(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, unsigned long, TBasicString> const&, std::__y1::vector> const&, bool, std::__y1::vector>*, NKikimr::NEvWrite::EModificationType, unsigned long) /-S/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp:143:16
#31 0xff9c125 in NKikimr::(anonymous namespace)::TestTtl(bool, bool, bool, unsigned short) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:232:9
#32 0xff9888b in void NKikimr::NTestSuiteTColumnShardTestSchema::TTL(NUnitTest::TTestContext&) /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1184:13
#33 0xff931e7 in operator() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#34 0xff931e7 in __invoke<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#35 0xff931e7 in __call<(lambda at /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#36 0xff931e7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#37 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#38 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#39 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#40 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#41 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#42 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1
#43 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#44 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#45 0x7fd8e6403d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 2623242 byte(s) leaked in 61938 allocation(s).
|99.0%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log}
|99.0%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[9]
>> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[10]
>> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[11]
>> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[12]
>> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[13]
>> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[14]
>> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[15]
>> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[16]
>> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[17]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[18]
>> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[19]
>> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[20]
>> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[21]
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
Test command err:
2025-04-10T12:24:24.327558Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-10T12:24:24.401906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-10T12:24:24.424503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-10T12:24:24.424806Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-10T12:24:24.432713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-10T12:24:24.432959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-10T12:24:24.433170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-10T12:24:24.433309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-10T12:24:24.433419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-10T12:24:24.433508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-10T12:24:24.433624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-10T12:24:24.433724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-10T12:24:24.433823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-10T12:24:24.433931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.434037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-10T12:24:24.434124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-10T12:24:24.462727Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-04-10T12:24:24.463124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-10T12:24:24.463179Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-10T12:24:24.463356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:24.463489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-10T12:24:24.463570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-10T12:24:24.463607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-10T12:24:24.463701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-10T12:24:24.463773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-10T12:24:24.463818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-10T12:24:24.463848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-10T12:24:24.464070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-10T12:24:24.464128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-10T12:24:24.464162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-10T12:24:24.464189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-04-10T12:24:24.464268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-04-10T12:24:24.464313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-10T12:24:24.464379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-10T12:24:24.464403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-04-10T12:24:24.464462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-10T12:24:24.464500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-10T12:24:24.464530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-04-10T12:24:24.464580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-10T12:24:24.464614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-10T12:24:24.464643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-04-10T12:24:24.465015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55;
2025-04-10T12:24:24.465094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33;
2025-04-10T12:24:24.465182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29;
2025-04-10T12:24:24.465291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42;
2025-04-10T12:24:24.465452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-10T12:24:24.465525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-10T12:24:24.465560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-04-10T12:24:24.465753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-10T12:24:24.465798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.465824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-04-10T12:24:24.465962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-10T12:24:24.466007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-10T12:24:24.466045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-04-10T12:24:24.466273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-10T12:24:24.466320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-10T12:24:24.466347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-04-10T12:24:24.466460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-10T12:24:24.466497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-10T12:24:24.466538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ...
UMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=326182;
2025-04-10T12:32:55.281507Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13942:15885];tablet_id=9437184;parent=[1:13903:15853];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};;
2025-04-10T12:32:55.284036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13903:15853];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1;
2025-04-10T12:32:55.294332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13903:15853];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
2025-04-10T12:32:55.882591Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-04-10T12:32:55.882699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13;
2025-04-10T12:32:55.887408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4614;
2025-04-10T12:32:55.892542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3897;
2025-04-10T12:32:55.892648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=5153;
2025-04-10T12:32:55.892813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=92;
2025-04-10T12:32:55.892950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=92;
2025-04-10T12:32:55.893133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=137;
2025-04-10T12:32:55.893278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=101;
2025-04-10T12:32:55.893441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=124;
2025-04-10T12:32:55.893469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=10724;
2025-04-10T12:32:55.900391Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-04-10T12:32:55.900454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=8;
2025-04-10T12:32:55.934192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=33658;
2025-04-10T12:32:55.982482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=48181;
2025-04-10T12:32:55.982601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=30;
2025-04-10T12:32:55.982663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25;
2025-04-10T12:32:55.982709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7;
2025-04-10T12:32:55.982755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10;
2025-04-10T12:32:55.982803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7;
2025-04-10T12:32:55.982870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35;
2025-04-10T12:32:55.982910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7;
2025-04-10T12:32:55.983018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66;
2025-04-10T12:32:55.983065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7;
2025-04-10T12:32:55.983132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33;
2025-04-10T12:32:55.983237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=67;
2025-04-10T12:32:55.983329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49;
2025-04-10T12:32:55.983370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=82867;
2025-04-10T12:32:55.983573Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-04-10T12:32:55.984080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13903:15853];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-04-10T12:32:55.984140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13903:15853];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-04-10T12:32:55.984208Z node 1 :TX_COLUMNSHARD DEBUG:
tablet_id=9437184;self_id=[1:13903:15853];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T12:32:55.984248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T12:32:55.984432Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:32:55.984485Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:55.984688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-10T12:32:55.984741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:32:55.984783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:32:55.984835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:55.984870Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:55.984959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T12:32:55.990071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:55.994166Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T12:32:55.997002Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T12:32:55.997051Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
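
The TX_COLUMNSHARD records above use a semicolon-delimited key=value layout (tablet_id=...;process=...;fline=...;event=...;). A minimal parsing sketch for triaging such output — the field layout is taken from the log itself, while the function name and sample string are illustrative:

    # Slice a TX_COLUMNSHARD record like the ones above into a dict.
    # Field layout inferred from this log, not from YDB sources.
    def parse_columnshard_record(record: str) -> dict:
        fields = {}
        for part in record.strip().strip(";").split(";"):
            if "=" in part:
                key, _, value = part.partition("=")
                fields[key] = value  # keeps nested '=' (e.g. CLASS_NAME=...) intact
        return fields

    sample = ("tablet_id=9437184;process=TTxUpdateSchema::Execute;"
              "fline=abstract.cpp:62;event=normalizer_finished;"
              "description=CLASS_NAME=TablesCleaner;id=4;")
    assert parse_columnshard_record(sample)["event"] == "normalizer_finished"
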
2025-04-10T12:32:55.997084Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T12:32:55.997145Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:32:55.997224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:55.997529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-10T12:32:55.997616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:32:55.997678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:32:55.997738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:55.997799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:55.997898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-10T12:32:55.997963Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13903:15853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-04-10T12:24:23.021829Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-10T12:24:23.125294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-10T12:24:23.149832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-10T12:24:23.150128Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-10T12:24:23.158233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-10T12:24:23.158462Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-10T12:24:23.158705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-10T12:24:23.158856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-10T12:24:23.158978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-10T12:24:23.159097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-10T12:24:23.159224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-10T12:24:23.159336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-10T12:24:23.159460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-10T12:24:23.159587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-10T12:24:23.159699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-10T12:24:23.159815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-10T12:24:23.184511Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-10T12:24:23.184905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-10T12:24:23.184963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-10T12:24:23.185146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:24:23.185324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-10T12:24:23.185432Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-10T12:24:23.185476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-10T12:24:23.185564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-10T12:24:23.185642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-10T12:24:23.185699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-10T12:24:23.185735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-10T12:24:23.185917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-10T12:24:23.185986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-10T12:24:23.186028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-10T12:24:23.186058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-10T12:24:23.186161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-10T12:24:23.186218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-10T12:24:23.186268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-10T12:24:23.186295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-10T12:24:23.186365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-10T12:24:23.186423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-10T12:24:23.186453Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-10T12:24:23.186505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
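
Each normalizer above runs through a fixed cycle: normalizer_switched names the next class, normalizer_init records its seq_id, and normalizer_finished closes it before the chain switches again. A hedged sketch of a consistency check over parsed events — the event names come from the log, while the checker and its field assumptions are illustrative:

    # Verify that a parsed event stream never switches to a new normalizer
    # while the previous one is still open. Events are assumed to be dicts
    # carrying the "event" and "type" fields seen in the records above.
    def check_normalizer_cycle(events: list) -> None:
        open_normalizer = None
        for ev in events:
            kind = ev.get("event")
            if kind == "normalizer_init":
                open_normalizer = ev.get("type")
            elif kind == "normalizer_finished":
                open_normalizer = None
            elif kind == "normalizer_switched" and open_normalizer is not None:
                raise AssertionError(
                    "switched away from unfinished normalizer %s" % open_normalizer)
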
2025-04-10T12:24:23.186540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-10T12:24:23.186566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-10T12:24:23.186906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-10T12:24:23.187026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-10T12:24:23.187133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-10T12:24:23.187233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-10T12:24:23.187407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-10T12:24:23.187475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-10T12:24:23.187511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-10T12:24:23.187700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-10T12:24:23.187754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-10T12:24:23.187784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-10T12:24:23.187948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-10T12:24:23.187992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-10T12:24:23.188038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-10T12:24:23.188228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-10T12:24:23.188270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-10T12:24:23.188302Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-10T12:24:23.188430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-10T12:24:23.188474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-10T12:24:23.188514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... UMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=333895; 2025-04-10T12:32:56.227348Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13944:15887];tablet_id=9437184;parent=[1:13905:15855];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};; 2025-04-10T12:32:56.229132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13905:15855];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1; 2025-04-10T12:32:56.240890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13905:15855];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-04-10T12:32:56.842035Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T12:32:56.842205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-10T12:32:56.845801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3478; 2025-04-10T12:32:56.850365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3152; 2025-04-10T12:32:56.850487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4576; 2025-04-10T12:32:56.850649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2025-04-10T12:32:56.850759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-04-10T12:32:56.850945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=127; 2025-04-10T12:32:56.851135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=135; 2025-04-10T12:32:56.851424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=212; 2025-04-10T12:32:56.851481Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=9202; 2025-04-10T12:32:56.856423Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-10T12:32:56.856521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-04-10T12:32:56.885683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=29047; 2025-04-10T12:32:56.933870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=48039; 2025-04-10T12:32:56.934045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=60; 2025-04-10T12:32:56.934133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-10T12:32:56.934211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2025-04-10T12:32:56.934260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-10T12:32:56.934318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-10T12:32:56.934424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2025-04-10T12:32:56.934476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-10T12:32:56.934580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=67; 2025-04-10T12:32:56.934636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-10T12:32:56.934713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-10T12:32:56.934819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-04-10T12:32:56.934913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=51; 2025-04-10T12:32:56.934952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=78365; 2025-04-10T12:32:56.935145Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-10T12:32:56.935702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13905:15855];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-10T12:32:56.935779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13905:15855];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-10T12:32:56.935872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-10T12:32:56.935933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-04-10T12:32:56.936180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:32:56.936250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:56.936491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-10T12:32:56.936567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:32:56.936611Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:32:56.936665Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:56.936705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:56.936815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-04-10T12:32:56.941304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:56.943938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-10T12:32:56.945353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-10T12:32:56.945403Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
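
The composite_init records above expose per-stage PRECHARGE/EXECUTE LoadingTime counters next to an overall EXECUTE:composite_initLoadingTime. A small sketch that ranks the EXECUTE stages to find the dominant one; the counter names are from this log, and treating the values as microseconds is an assumption of mine — the log does not state a unit:

    import re

    # Rank EXECUTE:<stage>LoadingTime counters found in a chunk of log text.
    # Values are assumed (not confirmed) to be microseconds; composite_init
    # is the overall total and will rank alongside its own stages.
    STAGE_RE = re.compile(r"EXECUTE:(\w+)LoadingTime=(\d+)")

    def rank_loading_stages(log_text: str) -> list:
        totals = {}
        for stage, value in STAGE_RE.findall(log_text):
            totals[stage] = totals.get(stage, 0) + int(value)
        return sorted(totals.items(), key=lambda kv: kv[1], reverse=True)

On the second TTxInit pass above this would put storages_manager (48039) well ahead of every other stage inside composite_init (78365).
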
2025-04-10T12:32:56.945431Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-10T12:32:56.945491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-04-10T12:32:56.945576Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-04-10T12:32:56.945656Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-04-10T12:32:56.945739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-10T12:32:56.945796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-10T12:32:56.945857Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:56.945921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-04-10T12:32:56.946011Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-10T12:32:56.946065Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13905:15855];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; |99.2%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] 2025-04-10 12:33:39,188 WARNING libarchive: File (test_tpch.py.TestTpchS1.test_tpch.8/cluster/slot_1/logfile_8k168tjf.log) size has changed. Can't write more data than was declared in the tar header (233291054). (probably file was changed during archiving) |99.4%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-04-10 12:34:35,566 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:34:35,753 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 2115988 729M 734M 648M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/a650/0002dc/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 2117894 2.6G 2.6G 2.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/a650/0002dc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_ 2119259 482M 482M 448M └─ moto_server s3 --port 14613 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in 
lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 73, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ..._root/a650/0002dc', '--source-root', '/home/runner/.ya/build/build_root/a650/0002dc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/0002dc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, 
timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("..._root/a650/0002dc', '--source-root', '/home/runner/.ya/build/build_root/a650/0002dc/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/0002dc/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete 2025-04-10 12:34:35,578 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:34:35,935 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
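
The data_migration_when_alter_ttl traceback above parks in a one-second polling loop (base.py line 73, time.sleep(1)) until the outer 600-second wrapper kills the run. A hedged reconstruction of such a helper with an explicit deadline — consistent with the frames shown, not a copy of the actual ydb/tests/olap/ttl_tiering/base.py:

    import time

    # Hypothetical polling helper matching the wait_for/time.sleep(1) frames
    # above; the real base.py implementation may differ.
    def wait_for(predicate, timeout_seconds):
        deadline = time.monotonic() + timeout_seconds
        while time.monotonic() < deadline:
            if predicate():
                return True
            time.sleep(1)  # the traceback shows the loop parked here
        return False  # caller treats False as a failed condition

The `if not self.wait_for(` frame suggests the test treats a False return as a failure; here the wrapper's own 600-second budget expires first, so the run is reported as a timeout rather than an assertion.
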
Process tree before termination: pid rss ref pdirt 2115984 713M 716M 630M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/a650/0002df/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 2117889 6.8G 6.8G 6.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/a650/0002df/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_d 2119170 393M 393M 359M └─ moto_server s3 --port 20071 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/a650/0002df', '--source-root', '/home/runner/.ya/build/build_root/a650/0002df/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/0002df/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 
'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/a650/0002df', '--source-root', '/home/runner/.ya/build/build_root/a650/0002df/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/0002df/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) |99.6%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-04-10 12:34:37,864 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:34:38,139 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
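
Both the ttl_delete_s3 and (below) alter_compression tracebacks stop inside ydb's QuerySessionPool.execute_with_retries while gRPC waits for the next result chunk. A minimal usage sketch of that entry point; the endpoint, database, and query are placeholders, while the pool API itself is the one named in the frames (contrib/python/ydb/py3/ydb/query/pool.py):

    import ydb

    # Placeholder connection parameters; only execute_with_retries itself is
    # taken from the traceback frames above.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    # execute_with_retries materializes every result set before returning
    # ("return [result_set for result_set in it]" in the frames), which is
    # why a large reply keeps the test inside grpc _next() until the deadline.
    result_sets = pool.execute_with_retries("SELECT 1 AS x")
    print(result_sets[0].rows[0].x)
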
Process tree before termination: pid rss ref pdirt 2116568 787M 794M 710M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/a650/000290/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 2118268 5.9G 5.9G 5.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/a650/000290/ydb/tests/olap/column_family/compression/test-results/py3test/testing_o Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 110, in test_all_supported_compression volumes: tuple[int, int] = tables[i].get_volumes_column("value") File "ydb/tests/olap/common/column_table_helper.py", line 69, in get_volumes_column raw_bytes, bytes = self._coollect_volumes_column(column_name) File "ydb/tests/olap/common/column_table_helper.py", line 59, in _coollect_volumes_column result_set = self.ydb_client.query(query) File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/a650/000290/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/000290', '--source-root', '/home/runner/.ya/build/build_root/a650/000290/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/000290/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', 
'--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/a650/000290/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/000290', '--source-root', '/home/runner/.ya/build/build_root/a650/000290/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/000290/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.8%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-04-10 12:34:37,762 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:34:37,914 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
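The compression test above did not fail an assertion; it blocked indefinitely inside execute_with_retries on a gRPC stream read (the grpc/_channel.py wait frames), and only the 600-second wrapper budget terminated it. One hypothetical mitigation is a hard per-query deadline on the test side; a sketch under that assumption — query_with_deadline is invented for illustration and is not part of ydb_client or the ydb SDK:

# Hypothetical guard around a blocking query call: run it in a worker
# thread and give up after deadline_s instead of relying on the outer
# 600 s wrapper timeout.
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import TimeoutError as FutureTimeout


def query_with_deadline(client, statement, deadline_s=60.0):
    pool = ThreadPoolExecutor(max_workers=1)
    future = pool.submit(client.query, statement)
    try:
        return future.result(timeout=deadline_s)
    except FutureTimeout:
        raise TimeoutError(f"query exceeded {deadline_s}s: {statement[:80]!r}")
    finally:
        # wait=False: never block on a worker stuck in a gRPC read.
        # Caveat: a truly stuck non-daemon thread still delays interpreter exit.
        pool.shutdown(wait=False)

This only converts a silent hang into a fast, attributable failure; it does not fix whatever made the server stop answering.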
Process tree before termination: pid rss ref pdirt 2116532 996M 1001M 902M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/a650/00028f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 2128076 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/a650/00028f/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 2001 30% 2001 50% 2001 90% 2001 99% 2001 ms Write: 10% 2236 30% 2236 50% 2236 90% 2236 99% 2236 ms Write: 10% 3301 30% 3301 50% 3301 90% 3301 99% 3301 ms Write: 10% 9476 30% 9476 50% 9476 90% 9476 99% 9476 ms Write: 10% 9840 30% 9840 50% 9840 90% 9840 99% 9840 ms Write: 10% 9471 30% 9471 50% 9471 90% 9471 99% 9471 ms Write: 10% 9801 30% 9801 50% 9801 90% 9801 99% 9801 ms Write: 10% 9697 30% 9697 50% 9697 90% 9697 99% 9697 ms Write: 10% 9515 30% 9515 50% 9515 90% 9515 99% 9515 ms Write: 10% 9178 30% 9178 50% 9178 90% 9178 99% 9178 ms Write: 10% 9751 30% 9751 50% 9751 90% 9751 99% 9751 ms Write: 10% 9292 30% 9292 50% 9292 90% 9292 99% 9292 ms Write: 10% 8951 30% 8951 50% 8951 90% 8951 99% 8951 ms Write: 10% 9332 30% 9332 50% 9332 90% 9332 99% 9332 ms Write: 10% 8721 30% 8721 50% 8721 90% 8721 99% 8721 ms Write: 10% 8230 30% 8230 50% 8230 90% 8230 99% 8230 ms Write: 10% 8448 30% 8448 50% 8448 90% 8448 99% 8448 ms Write: 10% 8319 30% 8319 50% 8319 90% 8319 99% 8319 ms Write: 10% 8182 30% 8182 50% 8182 90% 8182 99% 8182 ms Write: 10% 8064 30% 8064 50% 8064 90% 8064 99% 8064 ms Write: 10% 7879 30% 7879 50% 7879 90% 7879 99% 7879 ms Write: 10% 7563 30% 7563 50% 7563 90% 7563 99% 7563 ms Write: 10% 7665 30% 7665 50% 7665 90% 7665 99% 7665 ms Write: 10% 6201 30% 6201 50% 6201 90% 6201 99% 6201 ms Write: 10% 7210 30% 7210 50% 7210 90% 7210 99% 7210 ms Write: 10% 7074 30% 7074 50% 7074 90% 7074 99% 7074 ms Write: 10% 6511 30% 6511 50% 6511 90% 6511 99% 6511 ms Write: 10% 3887 30% 3887 50% 3887 90% 3887 99% 3887 ms Write: 10% 5659 30% 5659 50% 5659 90% 5659 99% 5659 ms Write: 10% 7028 30% 7028 50% 7028 90% 7028 99% 7028 ms Write: 10% 4891 30% 4891 50% 4891 90% 4891 99% 4891 ms Write: 10% 3826 30% 3826 50% 3826 90% 3826 99% 3826 ms Write: 10% 3862 30% 3862 50% 3862 90% 3862 99% 3862 ms Write: 10% 4349 30% 4349 50% 4349 90% 4349 99% 4349 ms Write: 10% 3690 30% 3690 50% 3690 90% 3690 99% 3690 ms Write: 10% 3172 30% 3172 50% 3172 90% 3172 99% 3172 ms Write: 10% 3523 30% 3523 50% 3523 90% 3523 99% 3523 ms Write: 10% 3467 30% 3467 50% 3467 90% 3467 99% 3467 ms Write: 10% 3029 30% 3029 50% 3029 90% 3029 99% 3029 ms Write: 10% 2948 30% 2948 50% 2948 90% 2948 99% 2948 ms Write: 10% 2556 30% 2556 50% 2556 90% 2556 99% 2556 ms Write: 10% 2633 30% 2633 50% 2633 90% 2633 99% 2633 ms Write: 10% 2487 30% 2487 50% 2487 90% 2487 99% 2487 ms Write: 10% 3167 30% 3167 50% 3167 90% 3167 99% 3167 ms Write: 10% 2247 30% 2247 50% 2247 90% 2247 99% 2247 ms Write: 10% 3038 30% 3038 50% 3038 90% 3038 99% 3038 ms Write: 10% 2538 30% 2538 50% 2538 90% 2538 99% 2538 ms Write: 10% 2533 30% 2533 50% 2533 90% 2533 99% 2533 ms Write: 10% 2653 30% 2653 50% 2653 90% 2653 99% 2653 ms Write: 10% 2204 30% 2204 50% 2204 90% 2204 99% 2204 ms Write: 10% 2903 30% 2903 50% 2903 90% 2903 99% 2903 ms Write: 10% 2169 30% 2169 50% 2169 90% 2169 99% 2169 ms Write: 10% 2282 30% 2282 50% 2282 90% 2282 99% 2282 ms Write: 10% 2225 30% 2225 50% 2225 90% 2225 99% 2225 ms Write: 10% 2137 30% 2137 50% 2137 90% 2137 99% 2137 ms Write: 
10% 1966 30% 1966 50% 1966 90% 1966 99% 1966 ms Write: 10% 2004 30% 2004 50% 2004 90% 2004 99% 2004 ms Write: 10% 1780 30% 1780 50% 1780 90% 1780 99% 1780 ms Write: 10% 1864 30% 1864 50% 1864 90% 1864 99% 1864 ms Write: 10% 1985 30% 1985 50% 1985 90% 1985 99% 1985 ms Write: 10% 2078 30% 2078 50% 2078 90% 2078 99% 2078 ms Write: 10% 1987 30% 1987 50% 1987 90% 1987 99% 1987 ms Write: 10% 3063 30% 3063 50% 3063 90% 3063 99% 3063 ms Write: 10% 2039 30% 2039 50% 2039 90% 2039 99% 2039 ms Step 2. read write Write: 10% 5317 30% 5317 50% 5317 90% 5317 99% 5317 ms Write: 10% 10471 30% 10471 50% 10471 90% 10471 99% 10471 ms Write: 10% 11461 30% 11461 50% 11461 90% 11461 99% 11461 ms Write: 10% 11326 30% 11326 50% 11326 90% 11326 99% 11326 ms Write: 10% 11624 30% 11624 50% 11624 90% 11624 99% 11624 ms Write: 10% 11353 30% 11353 50% 11353 90% 11353 99% 11353 ms Write: 10% 11059 30% 11059 50% 11059 90% 11059 99% 11059 ms Write: 10% 11204 30% 11204 50% 11204 90% 11204 99% 11204 ms Write: 10% 11575 30% 11575 50% 11575 90% 11575 99% 11575 ms Write: 10% 11207 30% 11207 50% 11207 90% 11207 99% 11207 ms Write: 10% 10975 30% 10975 50% 10975 90% 10975 99% 10975 ms Write: 10% 10225 30% 10225 50% 10225 90% 10225 99% 10225 ms Write: 10% 11385 30% 11385 50% 11385 90% 11385 99% 11385 ms Write: 10% 10182 30% 10182 50% 10182 90% 10182 99% 10182 ms Write: 10% 10935 30% 10935 50% 10935 90% 10935 99% 10935 ms Write: 10% 10122 30% 10122 50% 10122 90% 10122 99% 10122 ms Write: 10% 10112 30% 10112 50% 10112 90% 10112 99% 10112 ms Write: 10% 9457 30% 9457 50% 9457 90% 9457 99% 9457 ms Write: 10% 9629 30% 9629 50% 9629 90% 9629 99% 9629 ms Write: 10% 9282 30% 9282 50% 9282 90% 9282 99% 9282 ms Write: 10% 9270 30% 9270 50% 9270 90% 9270 99% 9270 ms Write: 10% 9288 30% 9288 50% 9288 90% 9288 99% 9288 ms Write: 10% 8891 30% 8891 50% 8891 90% 8891 99% 8891 ms Write: 10% 9362 30% 9362 50% 9362 90% 9362 99% 9362 ms Write: 10% 8374 30% 8374 50% 8374 90% 8374 99% 8374 ms Write: 10% 8814 30% 8814 50% 8814 90% 8814 99% 8814 ms Write: 10% 4873 30% 4873 50% 4873 90% 4873 99% 4873 ms Write: 10% 8017 30% 8017 50% 8017 90% 8017 99% 8017 ms Write: 10% 7723 30% 7723 50% 7723 90% 7723 99% 7723 ms Write: 10% 7872 30% 7872 50% 7872 90% 7872 99% 7872 ms Write: 10% 7725 30% 7725 50% 7725 90% 7725 99% 7725 ms Write: 10% 7330 30% 7330 50% 7330 90% 7330 99% 7330 ms Write: 10% 7287 30% 7287 50% 7287 90% 7287 99% 7287 ms Write: 10% 7265 30% 7265 50% 7265 90% 7265 99% 7265 ms Write: 10% 7295 30% 7295 50% 7295 90% 7295 99% 7295 ms Write: 10% 7195 30% 7195 50% 7195 90% 7195 99% 7195 ms Write: 10% 6962 30% 6962 50% 6962 90% 6962 99% 6962 ms Write: 10% 6619 30% 6619 50% 6619 90% 6619 99% 6619 ms Write: 10% 6674 30% 6674 50% 6674 90% 6674 99% 6674 ms Write: 10% 6355 30% 6355 50% 6355 90% 6355 99% 6355 ms Write: 10% 6154 30% 6154 50% 6154 90% 6154 99% 6154 ms Write: 10% 6046 30% 6046 50% 6046 90% 6046 99% 6046 ms Write: 10% 5258 30% 5258 50% 5258 90% 5258 99% 5258 ms Write: 10% 5086 30% 5086 50% 5086 90% 5086 99% 5086 ms Write: 10% 3873 30% 3873 50% 3873 90% 3873 99% 3873 ms Write: 10% 3982 30% 3982 50% 3982 90% 3982 99% 3982 ms Write: 10% 4795 30% 4795 50% 4795 90% 4795 99% 4795 ms Write: 10% 4505 30% 4505 50% 4505 90% 4505 99% 4505 ms Write: 10% 4837 30% 4837 50% 4837 90% 4837 99% 4837 ms Write: 10% 3009 30% 3009 50% 3009 90% 3009 99% 3009 ms Write: 10% 3851 30% 3851 50% 3851 90% 3851 99% 3851 ms Write: 10% 3796 30% 3796 50% 3796 90% 3796 99% 3796 ms Write: 10% 4000 30% 4000 50% 4000 90% 4000 99% 4000 ms Write: 10% 4394 30% 4394 50% 4394 90% 4394 99% 4394 
ms Write: 10% 2555 30% 2555 50% 2555 90% 2555 99% 2555 ms Write: 10% 1893 30% 1893 50% 1893 90% 1893 99% 1893 ms Write: 10% 1922 30% 1922 50% 1922 90% 1922 99% 1922 ms Write: 10% 2197 30% 2197 50% 2197 90% 2197 99% 2197 ms Write: 10% 2313 30% 2313 50% 2313 90% 2313 99% 2313 ms Write: 10% 1538 30% 1538 50% 1538 90% 1538 99% 1538 ms Write: 10% 1833 30% 1833 50% 1833 90% 1833 99% 1833 ms Write: 10% 3431 30% 3431 50% 3431 90% 3431 99% 3431 ms Write: 10% 2498 30% 2498 50% 2498 90% 2498 99% 2498 ms Write: 10% 1730 30% 1730 50% 1730 90% 1730 99% 1730 ms Read: 10% 4452 30% 6897 50% 9341 90% 14229 99% 15329 ms Step 3. write modify Write: 10% 5794 30% 5794 50% 5794 90% 5794 99% 5794 ms Write: 10% 11488 30% 11488 50% 11488 90% 11488 99% 11488 ms Write: 10% 11048 30% 11048 50% 11048 90% 11048 99% 11048 ms Write: 10% 10110 30% 10110 50% 10110 90% 10110 99% 10110 ms Write: 10% 11581 30% 11581 50% 11581 90% 11581 99% 11581 ms Write: 10% 10451 30% 10451 50% 10451 90% 10451 99% 10451 ms Write: 10% 10749 30% 10749 50% 10749 90% 10749 99% 10749 ms Write: 10% 10879 30% 10879 50% 10879 90% 10879 99% 10879 ms Write: 10% 10775 30% 10775 50% 10775 90% 10775 99% 10775 ms Write: 10% 10868 30% 10868 50% 10868 90% 10868 99% 10868 ms Write: 10% 10862 30% 10862 50% 10862 90% 10862 99% 10862 ms Write: 10% 11076 30% 11076 50% 11076 90% 11076 99% 11076 ms Write: 10% 10122 30% 10122 50% 10122 90% 10122 99% 10122 ms Write: 10% 10537 30% 10537 50% 10537 90% 10537 99% 10537 ms Write: 10% 10059 30% 10059 50% 10059 90% 10059 99% 10059 ms Write: 10% 10239 30% 10239 50% 10239 90% 10239 99% 10239 ms Write: 10% 9944 30% 9944 50% 9944 90% 9944 99% 9944 ms Write: 10% 9969 30% 9969 50% 9969 90% 9969 99% 9969 ms Write: 10% 9887 30% 9887 50% 9887 90% 9887 99% 9887 ms Write: 10% 9452 30% 9452 50% 9452 90% 9452 99% 9452 ms Write: 10% 9250 30% 9250 50% 9250 90% 9250 99% 9250 ms Write: 10% 8904 30% 8904 50% 8904 90% 8904 99% 8904 ms Write: 10% 9251 30% 9251 50% 9251 90% 9251 99% 9251 ms Write: 10% 9362 30% 9362 50% 9362 90% 9362 99% 9362 ms Write: 10% 8397 30% 8397 50% 8397 90% 8397 99% 8397 ms Write: 10% 8692 30% 8692 50% 8692 90% 8692 99% 8692 ms Write: 10% 9661 30% 9661 50% 9661 90% 9661 99% 9661 ms Write: 10% 8364 30% 8364 50% 8364 90% 8364 99% 8364 ms Write: 10% 7453 30% 7453 50% 7453 90% 7453 99% 7453 ms Write: 10% 8120 30% 8120 50% 8120 90% 8120 99% 8120 ms Write: 10% 8060 30% 8060 50% 8060 90% 8060 99% 8060 ms Write: 10% 7327 30% 7327 50% 7327 90% 7327 99% 7327 ms Write: 10% 8155 30% 8155 50% 8155 90% 8155 99% 8155 ms Write: 10% 7100 30% 7100 50% 7100 90% 7100 99% 7100 ms Write: 10% 5459 30% 5459 50% 5459 90% 5459 99% 5459 ms Write: 10% 7018 30% 7018 50% 7018 90% 7018 99% 7018 ms Write: 10% 6664 30% 6664 50% 6664 90% 6664 99% 6664 ms Write: 10% 7285 30% 7285 50% 7285 90% 7285 99% 7285 ms Write: 10% 5121 30% 5121 50% 5121 90% 5121 99% 5121 ms Write: 10% 4456 30% 4456 50% 4456 90% 4456 99% 4456 ms Write: 10% 4465 30% 4465 50% 4465 90% 4465 99% 4465 ms Write: 10% 4594 30% 4594 50% 4594 90% 4594 99% 4594 ms Write: 10% 4340 30% 4340 50% 4340 90% 4340 99% 4340 ms Write: 10% 3907 30% 3907 50% 3907 90% 3907 99% 3907 ms Write: 10% 4259 30% 4259 50% 4259 90% 4259 99% 4259 ms Write: 10% 5180 30% 5180 50% 5180 90% 5180 99% 5180 ms Write: 10% 4025 30% 4025 50% 4025 90% 4025 99% 4025 ms Write: 10% 4883 30% 4883 50% 4883 90% 4883 99% 4883 ms Write: 10% 5940 30% 5940 50% 5940 90% 5940 99% 5940 ms Write: 10% 5064 30% 5064 50% 5064 90% 5064 99% 5064 ms Write: 10% 3871 30% 3871 50% 3871 90% 3871 99% 3871 ms Write: 10% 3578 30% 3578 50% 3578 90% 3578 
99% 3578 ms Write: 10% 3711 30% 3711 50% 3711 90% 3711 99% 3711 ms Write: 10% 4984 30% 4984 50% 4984 90% 4984 99% 4984 ms Write: 10% 3594 30% 3594 50% 3594 90% 3594 99% 3594 ms Write: 10% 3731 30% 3731 50% 3731 90% 3731 99% 3731 ms Write: 10% 3689 30% 3689 50% 3689 90% 3689 99% 3689 ms Write: 10% 4119 30% 4119 50% 4119 90% 4119 99% 4119 ms Write: 10% 4348 30% 4348 50% 4348 90% 4348 99% 4348 ms Write: 10% 3818 30% 3818 50% 3818 90% 3818 99% 3818 ms Write: 10% 4048 30% 4048 50% 4048 90% 4048 99% 4048 ms Write: 10% 3736 30% 3736 50% 3736 90% 3736 99% 3736 ms Write: 10% 3853 30% 3853 50% 3853 90% 3853 99% 3853 ms Write: 10% 3601 30% 3601 50% 3601 90% 3601 99% 3601 ms Update: 10% 970 30% 970 50% 970 90% 970 99% 970 ms Step 4. read modify write Write: 10% 821 30% 821 50% 821 90% 821 99% 821 ms Write: 10% 5607 30% 5607 50% 5607 90% 5607 99% 5607 ms Write: 10% 6775 30% 6775 50% 6775 90% 6775 99% 6775 ms Was written: 24.609375 MiB, Speed: 0.41015625 MiB/s Write: 10% 9557 30% 9557 50% 9557 90% 9557 99% 9557 ms Write: 10% 10301 30% 10301 50% 10301 90% 10301 99% 10301 ms Write: 10% 9769 30% 9769 50% 9769 90% 9769 99% 9769 ms Write: 10% 10145 30% 10145 50% 10145 90% 10145 99% 10145 ms Write: 10% 10015 30% 10015 50% 10015 90% 10015 99% 10015 ms Write: 10% 9751 30% 9751 50% 9751 90% 9751 99% 9751 ms Write: 10% 9902 30% 9902 50% 9902 90% 9902 99% 9902 ms Write: 10% 9860 30% 9860 50% 9860 90% 9860 99% 9860 ms Write: 10% 9925 30% 9925 50% 9925 90% 9925 99% 9925 ms Write: 10% 9785 30% 9785 50% 9785 90% 9785 99% 9785 ms Write: 10% 9943 30% 9943 50% 9943 90% 9943 99% 9943 ms Write: 10% 9842 30% 9842 50% 9842 90% 9842 99% 9842 ms Write: 10% 9726 30% 9726 50% 9726 90% 9726 99% 9726 ms Write: 10% 9214 30% 9214 50% 9214 90% 9214 99% 9214 ms Write: 10% 9368 30% 9368 50% 9368 90% 9368 99% 9368 ms Write: 10% 9503 30% 9503 50% 9503 90% 9503 99% 9503 ms Write: 10% 9376 30% 9376 50% 9376 90% 9376 99% 9376 ms Write: 10% 9242 30% 9242 50% 9242 90% 9242 99% 9242 ms Write: 10% 9307 30% 9307 50% 9307 90% 9307 99% 9307 ms Write: 10% 9179 30% 9179 50% 9179 90% 9179 99% 9179 ms Write: 10% 8988 30% 8988 50% 8988 90% 8988 99% 8988 ms Write: 10% 9290 30% 9290 50% 9290 90% 9290 99% 9290 ms Write: 10% 8586 30% 8586 50% 8586 90% 8586 99% 8586 ms Write: 10% 8461 30% 8461 50% 8461 90% 8461 99% 8461 ms Write: 10% 9080 30% 9080 50% 9080 90% 9080 99% 9080 ms Write: 10% 8537 30% 8537 50% 8537 90% 8537 99% 8537 ms Write: 10% 8078 30% 8078 50% 8078 90% 8078 99% 8078 ms Write: 10% 8544 30% 8544 50% 8544 90% 8544 99% 8544 ms Write: 10% 7718 30% 7718 50% 7718 90% 7718 99% 7718 ms Write: 10% 8279 30% 8279 50% 8279 90% 8279 99% 8279 ms Write: 10% 7506 30% 7506 50% 7506 90% 7506 99% 7506 ms Write: 10% 7696 30% 7696 50% 7696 90% 7696 99% 7696 ms Write: 10% 6809 30% 6809 50% 6809 90% 6809 99% 6809 ms Write: 10% 6984 30% 6984 50% 6984 90% 6984 99% 6984 ms Write: 10% 6470 30% 6470 50% 6470 90% 6470 99% 6470 ms Write: 10% 6516 30% 6516 50% 6516 90% 6516 99% 6516 ms Write: 10% 6509 30% 6509 50% 6509 90% 6509 99% 6509 ms Write: 10% 6285 30% 6285 50% 6285 90% 6285 99% 6285 ms Write: 10% 6011 30% 6011 50% 6011 90% 6011 99% 6011 ms Write: 10% 5711 30% 5711 50% 5711 90% 5711 99% 5711 ms Write: 10% 6389 30% 6389 50% 6389 90% 6389 99% 6389 ms Write: 10% 5720 30% 5720 50% 5720 90% 5720 99% 5720 ms Write: 10% 5564 30% 5564 50% 5564 90% 5564 99% 5564 ms Write: 10% 4611 30% 4611 50% 4611 90% 4611 99% 4611 ms Write: 10% 4770 30% 4770 50% 4770 90% 4770 99% 4770 ms Write: 10% 4564 30% 4564 50% 4564 90% 4564 99% 4564 ms Write: 10% 5302 30% 5302 50% 5302 90% 5302 99% 
5302 ms Write: 10% 5357 30% 5357 50% 5357 90% 5357 99% 5357 ms Write: 10% 4106 30% 4106 50% 4106 90% 4106 99% 4106 ms Write: 10% 4475 30% 4475 50% 4475 90% 4475 99% 4475 ms Write: 10% 3640 30% 3640 50% 3640 90% 3640 99% 3640 ms Write: 10% 4408 30% 4408 50% 4408 90% 4408 99% 4408 ms Write: 10% 3718 30% 3718 50% 3718 90% 3718 99% 3718 ms Write: 10% 3560 30% 3560 50% 3560 90% 3560 99% 3560 ms Write: 10% 3298 30% 3298 50% 3298 90% 3298 99% 3298 ms Write: 10% 3530 30% 3530 50% 3530 90% 3530 99% 3530 ms Write: 10% 4425 30% 4425 50% 4425 90% 4425 99% 4425 ms Write: 10% 3129 30% 3129 50% 3129 90% 3129 99% 3129 ms Write: 10% 3114 30% 3114 50% 3114 90% 3114 99% 3114 ms Write: 10% 3145 30% 3145 50% 3145 90% 3145 99% 3145 ms Write: 10% 3576 30% 3576 50% 3576 90% 3576 99% 3576 ms Read: 10% 17411 30% 17411 50% 17411 90% 17411 99% 17411 ms Update: 10% 898 30% 898 50% 898 90% 898 99% 898 ms File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, 
reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/a650/00028f/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...st-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/00028f', '--source-root', '/home/runner/.ya/build/build_root/a650/00028f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/00028f/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', 
'--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...st-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/00028f', '--source-root', '/home/runner/.ya/build/build_root/a650/00028f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/00028f/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-04-10 12:34:38,008 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-10 12:34:38,497 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
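The scenario py3test above stalled in thread_helper.join_all (ydb/tests/olap/common/thread_helper.py), where join() waits on the writer/reader threads with no effective bound, so the read_data_during_bulk_upsert case only ended when the wrapper killed it. A hedged sketch of a deadline-aware variant — the function and budget are illustrative, not the repository's actual helper:

# Illustrative deadline-aware join_all: share one wall-clock budget across
# all threads and fail loudly instead of blocking until the wrapper timeout.
import threading
import time


def join_all(threads, budget_s=300.0):
    """Join every thread within one shared wall-clock budget."""
    deadline = time.monotonic() + budget_s
    for t in threads:
        t.join(timeout=max(0.0, deadline - time.monotonic()))
        if t.is_alive():
            raise TimeoutError(f"{t.name} still alive after {budget_s}s budget")


# usage sketch with dummy workers:
workers = [threading.Thread(target=time.sleep, args=(1,)) for _ in range(2)]
for w in workers:
    w.start()
join_all(workers, budget_s=5.0)

Sharing one deadline across all joins (rather than giving each thread its own timeout) keeps the worst-case wait bounded regardless of how many threads the scenario spawns.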
Process tree before termination: pid rss ref pdirt 2116604 607M 610M 524M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/a650/00028e/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul 2118239 10.5G 10.5G 10.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/a650/00028e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tp 2119897 616M 658M 624M └─ moto_server s3 --port 22475 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, 
in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 99, in test_import_and_export self.validate_table("s3_table") File "ydb/tests/olap/s3_import/test_tpch_import.py", line 12, in validate_table result_sets = self.ydb_client.query(f""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/a650/00028e/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/a650/00028e/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/00028e', '--source-root', '/home/runner/.ya/build/build_root/a650/00028e/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/00028e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', 
'/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1752, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/a650/00028e/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/a650/00028e/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/a650/00028e', '--source-root', '/home/runner/.ya/build/build_root/a650/00028e/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/a650/00028e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8444524403/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test |99.9%| CLEANING BUILD ROOT ydb/library/yaml_config/ut_transform [size:medium] ------ sole chunk ran 1 test (total:6.49s - setup:0.01s test:5.66s canon:0.63s) [fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-release-asan] (1.98s) Test results differ from canonical: test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/library/yaml_config/ut_transform ydb/tests/fq/streaming_optimize [size:medium] nchunks:4 ------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:90.91s - recipes:0.64s test:89.74s recipes:0.42s) [fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (13.50s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gjaodpg7/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe8b7eb03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe8b7ea9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe8b7733d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe8b7831464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe8b7831464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe8b7831464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe8b7732b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe8b77df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe8b76d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe8b76d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe8b76d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe8b782ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe8b782ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (12.75s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l4hdcqdw/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f1e57ab03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1e57aa9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f1e57333d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1e57431464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1e57431464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1e57431464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f1e57332b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1e573df90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1e572d7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1e572d7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1e572d7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1e5742ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1e5742ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (12.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_19dzajja/topic_3.txt' has failed with code 100. 
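Each fqrun invocation in this suite exits with code 100 because the harness's verify_sanitize_errors step finds an AddressSanitizer leak summary ("SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)") in the captured stderr, as shown in the error blocks here. A minimal sketch of that kind of check — the regex and raised type are assumptions for illustration, not the actual yatest implementation:

# Assumed sketch of a sanitizer-output check: scan captured stderr for a
# sanitizer SUMMARY line and turn it into a hard test failure.
import re

_SAN_SUMMARY = re.compile(
    r"SUMMARY: (Address|Leak|Memory|Thread|UndefinedBehavior)Sanitizer")


def verify_sanitize_errors(stderr_text: str) -> None:
    match = _SAN_SUMMARY.search(stderr_text)
    if match:
        raise RuntimeError(
            f"{match.group(1)}Sanitizer report found in test stderr")

Scanning stderr is what makes these tests fail uniformly even though the queries themselves ran: the leak is in the Python runtime startup path (PyImport_ImportModuleLevelObject frames), so every fqrun process trips the same check.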
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7e795303b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7e79529ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7e78db3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7e78eb1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7e78eb1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7e78eb1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7e78db2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7e78e5f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7e78d57e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7e78d57e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7e78d57c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7e78eaee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7e78eaee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (11.36s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/plan.json 
[fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (11.36s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sln0b1ma/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7d8c8f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7d8c8f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7d8c17bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7d8c279464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7d8c279464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7d8c279464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7d8c17ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7d8c22790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7d8c11fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7d8c11fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7d8c11fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7d8c276e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7d8c276e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (13.81s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7g5ikev0/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb8e5b103b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb8e5b09ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb8e5393d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb8e5491464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb8e5491464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb8e5491464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb8e5392b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb8e543f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb8e5337e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb8e5337e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb8e5337c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb8e548ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb8e548ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
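The frames at process.py:411, 425 and 454 explain why check_exit_code=False does not suppress these failures: wait() always finalises the execution, and finalisation scans the captured output for sanitizer reports before any exit-code check, raising ExecutionError when one is found. A schematic rendering of that chain (class and method bodies are illustrative, not the actual yatest source):

    class ExecutionError(Exception):
        pass

    class Execution:
        # Schematic model of yatest.common.process execution finalisation.
        def __init__(self, std_err):
            self._std_err = std_err

        def wait(self, check_exit_code=True):
            self._finalise(check_exit_code)   # process.py:411

        def _finalise(self, check_exit_code):
            # Sanitizer verification runs regardless of check_exit_code.
            self.verify_sanitize_errors()     # process.py:425
            if check_exit_code:
                pass                          # exit-code validation would go here

        def verify_sanitize_errors(self):     # process.py:454
            if "SUMMARY: AddressSanitizer" in self._std_err:
                raise ExecutionError(self)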
[fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (11.83s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8ia1tql6/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f191e6a03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f191e699ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f191df23d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f191e021464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f191e021464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f191e021464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f191df22b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f191dfcf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f191dec7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f191dec7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f191dec7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f191e01ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f191e01ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (10.90s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3hn3eqh7/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff94d4803b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff94d479ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff94cd03d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff94ce01464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff94ce01464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff94ce01464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff94cd02b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff94cdaf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff94cca7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff94cca7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff94cca7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff94cdfee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff94cdfee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:90.54s - recipes:0.63s test:89.40s recipes:0.41s)
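All of the failures above and below report the identical LeakSanitizer summary, 423744 byte(s) leaked in 8243 allocation(s), with a stack rooted in PyImport_ImportModuleLevelObject while the embedded interpreter imports sitecustomize. That points at a one-time leak in Python runtime startup inside fqrun rather than at anything specific to the individual streaming queries. The standard LeakSanitizer mechanism for quarantining a known startup leak is a suppressions file wired in through LSAN_OPTIONS; the pattern below is an assumption about what would match this stack, not a suppression the repository actually ships:

    # Contents of a hypothetical lsan.supp (LSAN matches frames by symbol name):
    #     leak:PyImport_ImportModuleLevelObject
    import os
    import subprocess

    # Hypothetical wiring: point the child process at the suppressions file.
    env = dict(os.environ, LSAN_OPTIONS="suppressions=lsan.supp")
    subprocess.run(["./fqrun", "--action=explain"], env=env, check=False)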
[fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (13.47s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_tvkdy24c/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f4ee86f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f4ee86f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4ee7f7bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4ee8079464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4ee8079464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4ee8079464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4ee7f7ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4ee802790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4ee7f1fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4ee7f1fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4ee7f1fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4ee8076e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4ee8076e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (12.71s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_k8klhk3f/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2a9f0d03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2a9f0c9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2a9e953d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2a9ea51464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2a9ea51464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2a9ea51464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2a9e952b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2a9e9ff90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2a9e8f7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2a9e8f7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2a9e8f7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2a9ea4ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2a9ea4ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
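Each command binds four emulated PQ topics with the name@path convention of --emulate-pq (test_topic_input, test_topic_input2, test_topic_output, test_topic_output2, backed by topic_0.txt through topic_3.txt in the per-test scratch directory). A small parser, written here only to document the observed flag format rather than fqrun's actual argument handling:

    def parse_emulate_pq(argv):
        # Map emulated topic names to their backing files, e.g.
        # "--emulate-pq=test_topic_input@/path/topic_0.txt" ->
        # {"test_topic_input": "/path/topic_0.txt"}.
        topics = {}
        prefix = "--emulate-pq="
        for arg in argv:
            if arg.startswith(prefix):
                name, _, path = arg[len(prefix):].partition("@")
                topics[name] = path
        return topics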
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (11.67s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j9yatidx/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fedb64403b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fedb6439ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fedb5cc3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fedb5dc1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fedb5dc1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fedb5dc1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fedb5cc2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fedb5d6f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fedb5c67e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fedb5c67e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fedb5c67c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fedb5dbee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fedb5dbee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (11.55s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6vuxunfj/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f51b72203b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f51b7219ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f51b6aa3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f51b6ba1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f51b6ba1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f51b6ba1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f51b6aa2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f51b6b4f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f51b6a47e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f51b6a47e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f51b6a47c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f51b6b9ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f51b6b9ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
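Because the ExecutionError message records the complete command line, the per-test artifacts (--result-file, --ast-file, --plan-file, --log-file) can be recovered straight from a failure entry like the ones above when triaging. A throwaway helper for that, assuming the command string is available exactly as logged:

    def artifact_paths(cmd_line):
        # Pull output-file flags out of a logged fqrun command line.
        wanted = ("--result-file=", "--ast-file=", "--plan-file=", "--log-file=")
        paths = {}
        for token in cmd_line.split():
            for prefix in wanted:
                if token.startswith(prefix):
                    paths[prefix.strip("-=")] = token[len(prefix):]
        return paths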
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (13.94s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re0mgyyf/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fcae31103b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fcae3109ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fcae2993d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fcae2a91464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fcae2a91464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fcae2a91464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fcae2992b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fcae2a3f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fcae2937e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fcae2937e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fcae2937c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fcae2a8ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fcae2a8ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (11.92s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ejf32o5d/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9dbe2c03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9dbe2b9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9dbdb43d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9dbdc41464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9dbdc41464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9dbdc41464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9dbdb42b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9dbdbef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9dbdae7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9dbdae7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9dbdae7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9dbdc3ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9dbdc3ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
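Every invocation gets a fresh scratch directory under testing_out_stuff/yql with a randomized test_fqrun_<suffix> name (sln0b1ma, 7g5ikev0, and so on), so reruns never collide. When reproducing locally, the most recent run can be found by modification time; a sketch assuming the directory layout shown in these paths:

    import glob
    import os

    def latest_fqrun_dir(testing_out_stuff):
        # Newest test_fqrun_* scratch directory, or None if there are no runs.
        runs = glob.glob(os.path.join(testing_out_stuff, "yql", "test_fqrun_*"))
        return max(runs, key=os.path.getmtime) if runs else None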
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (10.59s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_36fv1tqj/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc4573603b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc457359ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc456be3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc456ce1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc456ce1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc456ce1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc456be2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc456c8f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc456b87e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc456b87e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc456b87c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc456cdee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc456cdee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:89.11s - recipes:0.63s test:87.94s recipes:0.45s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (12.88s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5mcychir/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0c3e5103b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0c3e509ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0c3dd93d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0c3de91464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0c3de91464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0c3de91464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0c3dd92b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0c3de3f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0c3dd37e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0c3dd37e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0c3dd37c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0c3de8ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0c3de8ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
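The report interleaves per-test [fail] lines, each carrying a wall-clock duration, with per-chunk summaries such as "chunk ran 7 tests". A quick tally of failed suites and their durations can be scraped from this log with a regex matching the exact [fail] format used above:

    import re

    FAIL_RE = re.compile(
        r"\[fail\] test_sql_streaming\.py::test\[suites-(\S+?)-default\.txt\]"
        r".*?\((\d+\.\d+)s\)")

    def failed_suites(log_text):
        # Yield (suite_name, seconds) for every [fail] entry in the report.
        for match in FAIL_RE.finditer(log_text):
            yield match.group(1), float(match.group(2))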
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mxdvx917/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fca739f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fca739f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fca7327bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fca73379464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fca73379464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fca73379464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fca7327ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fca7332790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fca7321fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fca7321fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fca7321fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fca73376e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fca73376e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (11.85s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nb6koa0z/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f61984403b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6198439ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6197cc3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6197dc1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6197dc1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6197dc1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6197cc2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6197d6f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6197c67e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6197c67e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6197c67c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6197dbee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6197dbee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (11.25s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u6whdsy7/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f03f6af03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f03f6ae9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f03f6373d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f03f6471464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f03f6471464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f03f6471464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f03f6372b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f03f641f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f03f6317e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f03f6317e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f03f6317c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f03f646ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f03f646ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (13.90s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t86u1vw9/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f963f5903b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f963f589ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f963ee13d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f963ef11464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f963ef11464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f963ef11464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f963ee12b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f963eebf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f963edb7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f963edb7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f963edb7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f963ef0ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f963ef0ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (11.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d34vbxu7/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f4fc9ea03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f4fc9e99ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4fc9723d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4fc9821464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4fc9821464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4fc9821464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4fc9722b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4fc97cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4fc96c7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4fc96c7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4fc96c7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4fc981ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4fc981ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (9.85s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_14t07tp3/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9b528603b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9b52859ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9b520e3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9b521e1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9b521e1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9b521e1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9b520e2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9b5218f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9b52087e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9b52087e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9b52087c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9b521dee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9b521dee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:89.25s - recipes:0.58s test:88.12s recipes:0.45s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (12.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_boupsneu/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7a5eaa03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7a5ea99ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7a5e323d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7a5e421464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7a5e421464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7a5e421464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7a5e322b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7a5e3cf90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7a5e2c7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7a5e2c7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7a5e2c7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7a5e41ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7a5e41ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (13.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: 
in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yhrlkh7/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f91803003b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f91802f9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f917fb83d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f917fc81464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f917fc81464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f917fc81464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f917fb82b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f917fc2f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f917fb27e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f917fb27e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f917fb27c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f917fc7ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f917fc7ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (11.88s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a917gri4/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f1f2fc803b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1f2fc79ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f1f2f503d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1f2f601464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1f2f601464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1f2f601464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f1f2f502b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1f2f5af90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1f2f4a7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1f2f4a7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1f2f4a7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1f2f5fee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1f2f5fee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (11.15s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p1o1qw_j/topic_3.txt' has failed with code 100. 
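Every leak stack in these reports bottoms out in CPython's import machinery (PyImport_ImportModuleLevelObject, reached from the embedded sitecustomize module), and the totals are essentially identical from run to run (423744 bytes in 8243 allocations for most tests), which points at a deterministic import-time leak in the fqrun binary's embedded Python runtime rather than at the individual streaming queries. If such a leak were ever accepted as known rather than fixed, LeakSanitizer supports suppression patterns matched against frames of the reported stacks; the sketch below is an assumption about how that could be wired up for a local reproduction, not something this log shows the CI doing.

# Sketch (assumption): rerun the failing fqrun command with an LSan
# suppressions file so the known Python import-time leaks are ignored.
import os
import subprocess
import tempfile

# Each "leak:" pattern matches a substring of a frame in the leak stacks above.
SUPPRESSIONS = (
    "leak:PyImport_ImportModuleLevelObject\n"
    "leak:__pyx_pymod_exec_sitecustomize\n"
)

with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as supp:
    supp.write(SUPPRESSIONS)

env = dict(os.environ, LSAN_OPTIONS="suppressions=%s" % supp.name)
# fqrun path and flags exactly as in the failing command above (elided here).
subprocess.run(["./fqrun", "--action=explain"], env=env, check=False)

A suppression only hides the symptom, of course; until the leak is suppressed or fixed, verify_sanitize_errors() keeps turning the report into an ExecutionError for every test in the chunk.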
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f920b9403b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f920b939ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f920b1c3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f920b2c1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f920b2c1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f920b2c1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f920b1c2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f920b26f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f920b167e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f920b167e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f920b167c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f920b2bee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f920b2bee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (13.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4f6ge09k/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe68db803b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe68db79ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe68d403d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe68d501464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe68d501464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe68d501464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe68d402b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe68d4af90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe68d3a7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe68d3a7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe68d3a7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe68d4fee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe68d4fee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (11.46s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9svwd72t/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f83140c03b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f83140b9ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f8313943d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8313a41464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8313a41464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8313a41464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f8313942b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f83139ef90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f83138e7e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f83138e7e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f83138e7c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f8313a3ee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f8313a3ee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (10.77s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jebhw6je/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff734e403b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff734e39ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff7346c3d6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff7347c1464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff7347c1464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff7347c1464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff7346c2b64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff73476f90a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff734667e0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff734667e0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff734667c50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff7347bee0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff7347bee0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423314 byte(s) leaked in 8234 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:109.60s - test:109.17s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (102.89s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas 
IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`."
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:121.41s - test:121.02s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (114.97s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) <string>:4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match.
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests (total:620.10s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 615.92s alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr [timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (615.92s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression ydb/tests/olap/s3_import [size:medium] ------ sole chunk ran 1 test (total:620.95s - test:600.05s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 616.94s Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.9G (12489712K) used. 
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the test's ya.make; a sketch is shown below.
pid      rss    ref    pdirt
2116444  44.9M  44.9M  6.5M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2116588  37.1M  25.5M  12.8M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2116604  666M   671M   586M   └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
2118239  10.5G  10.5G  10.0G  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t
2119897  594M   661M   626M   └─ moto_server s3 --port 22475
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (616.94s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 4 tests (total:620.94s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 513.06s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 102.50s
2 tests were not launched inside chunk.
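For reference, a minimal sketch of the REQUIREMENTS(ram:X) hint above, assuming the usual PY3TEST layout of these suites. PY3TEST, SIZE, TEST_SRCS and REQUIREMENTS are standard ya.make macros, but the value ram:16 and the exact file contents are illustrative assumptions, not a copy of the real ydb/tests/olap/s3_import/ya.make:

    PY3TEST()

    SIZE(MEDIUM)
    # Assumption: ram:16 (GiB) is an illustrative value chosen above the
    # observed 11.9G peak; this run was limited to 8.0G without it.
    REQUIREMENTS(ram:16)

    TEST_SRCS(
        test_tpch_import.py
    )

    END()

As the process trees above and below show, a suite that keeps multi-gigabyte ydbd processes alive will exceed the 8.0G limit reported here, which is what an explicit REQUIREMENTS(ram:X) is meant to address.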
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (513.06s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 2 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:2 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:610.31s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 606.15s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (606.15s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:615.00s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 389.02s ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 221.41s ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.9G (12449640K) used. 
This may lead to test failure on the Autocheck/CI. You can increase the test's RAM requirement using REQUIREMENTS(ram:X) in the test's ya.make; see the sketch above.
pid      rss    ref    pdirt
2115709  44.9M  44.9M  6.6M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2115976  38.6M  27.1M  14.4M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
2115984  802M   806M   719M   └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do
2117889  5.5G   5.5G   4.9G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
2134565  5.5G   5.5G   4.9G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_st
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (389.02s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:204: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:202: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda> lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:178: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:195: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037938) scan failed, reason: cannot build metadata withno ranges/Snapshot too old: {1744288262000:max}. CS min read snapshot: {1744288263000:max}.
now: 2025-04-10T12:31:08.270734Z" issue_code: 2017 severity: 1 (server_code: 400010) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (221.41s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:5.88s - test:5.85s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.06s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:5.87s - test:5.85s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.09s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:5.71s - test:5.66s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.08s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:6.38s - test:6.35s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.05s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:12.22s - test:12.19s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==2113376==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018d3ad2d bp 0x7ffd6faf6bc0 sp 0x7ffd6faf6a20 T0) ==2113376==The signal is caused by a READ memory access. ==2113376==Hint: address points to the zero page. 2025-04-10T12:24:38.070973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7491659371535891242:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-10T12:24:38.071312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; #0 0x18d3ad2d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18d3ad2d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18d3ad2d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18d3ad2d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18d3ad2d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18d5fe47 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18d5fe47 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18d5fe47 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18d5fe47 in ..[snippet truncated].. 
0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x196b1ff5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x196b1ff5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19681b48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18d5ecf3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19683415 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x196ac56c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f6f9e436d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f6f9e436e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x164bd028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x164bd028) (BuildId: e3319472fc934b0dfc588e72f37ffc5cf69adf9b) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==2113376==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:50.17s - test:50.13s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (46.10s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196B4EEB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B79E1F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1925A458 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1926D467 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1926D467 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1926D467 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1926D467 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BB0E45 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BB0E45 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BB0E45 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B80998 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1926C5EB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B82265 14. 
/tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BAB3BC 15. ??:0: ?? @ 0x7F0003A79D8F 16. ??:0: ?? @ 0x7F0003A79E3F 17. ??:0: ?? @ 0x16611028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:18.12s - test:18.07s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (13.76s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18F652BE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18F43F0A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18F4B387 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18F4B387 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F4B387 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F4B387 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18F4A553 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F30E1037D8F 18. ??:0: ?? @ 0x7F30E1037E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [1/50] chunk ran 1 test (total:19.96s - test:19.91s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (15.74s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18F9B008 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18F826DA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F95C9F81D8F 18. ??:0: ?? @ 0x7F95C9F81E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [10/50] chunk ran 1 test (total:14.71s - test:14.68s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (10.71s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18F817AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0615C04D8F 18. ??:0: ?? @ 0x7F0615C04E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out ------ [2/50] chunk ran 1 test (total:14.11s - setup:0.01s test:14.08s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (9.87s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18F82282 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F4253E1BD8F 18. ??:0: ?? @ 0x7F4253E1BE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out ------ [3/50] chunk ran 1 test (total:14.83s - test:14.80s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (10.50s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18F986F7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18F824AA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F3E131E9D8F 18. ??:0: ?? @ 0x7F3E131E9E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [4/50] chunk ran 1 test (total:21.58s - test:21.52s) [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (17.71s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18F93668 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18F8205A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F1373FFBD8F 18. ??:0: ?? @ 0x7F1373FFBE3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out ------ [5/50] chunk ran 1 test (total:14.32s - test:14.28s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (9.84s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18F81C02 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7FF76E488D8F 18. ??:0: ?? @ 0x7FF76E488E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out ------ [6/50] chunk ran 1 test (total:14.11s - test:14.08s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (10.02s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18F90D57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18F81E2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F5EC7749D8F 18. ??:0: ?? @ 0x7F5EC7749E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [7/50] chunk ran 1 test (total:13.72s - test:13.68s) [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (9.77s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18F82902 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F35C0AC0D8F 18. ??:0: ?? @ 0x7F35C0AC0E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out ------ [8/50] chunk ran 1 test (total:14.81s - test:14.78s) [fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (10.66s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18FA00B3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18F82B2A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F6CE09E0D8F 18. ??:0: ?? @ 0x7F6CE09E0E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out ------ [9/50] chunk ran 1 test (total:13.93s - test:13.89s) [fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (10.12s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x193C7C4B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1988FD5F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18F8A7C7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18ED54DA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18F81582 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18F88937 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18F88937 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18F88937 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18F88937 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x198C6D85 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x198C6D85 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x198C6D85 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x198968D8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18F87B03 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x198981A5 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x198C12FC 17. ??:0: ?? @ 0x7F0EAFA42D8F 18. ??:0: ?? @ 0x7F0EAFA42E3F 19. ??:0: ?? @ 0x1656B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out ------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx ydb/core/persqueue/ut/ut_with_sdk [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:19.86s - test:19.81s) [crashed] WithSDK::DescribeConsumer [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==2116888==ERROR: AddressSanitizer: heap-use-after-free on address 0x5030066349c9 at pc 0x000018eec552 bp 0x7fff27ab4470 sp 0x7fff27ab3c30 READ of size 9 at 0x5030066349c9 thread T0 2025-04-10T12:24:51.697384Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7491659437348714635:2469], Partition 0, Sender [0:0:0], Recipient [1:7491659437348714677:2472], Cookie: 0 2025-04-10T12:24:51.697683Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7491659437348714677:2472]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:24:51.697721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [1:7491659398694007860:2189]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-10T12:24:51.697796Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-10T12:24:51.697871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-10T12:24:51.697962Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-10T12:24:51.697964Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-10T12:24:51.698009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-10T12:24:51.698233Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-10T12:24:51.698259Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-10T12:24:51.698349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-10T12:24:51.698414Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-10T12:24:51.699897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [1:7491659398694007860:2189]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-10T12:24:51.699939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-10T12:24:51.700038Z node 1 :FLAT_TX_SCHEM ..[snippet truncated].. nittest/registar.cpp:495:19 #21 0x196e0f0c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #22 0x7ff157244d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #23 0x7ff157244e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #24 0x165b1028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk+0x165b1028) (BuildId: 34a7903e4bedd81b1cc1447f91dca5a1a7035efc) SUMMARY: AddressSanitizer: heap-use-after-free /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors_memintrinsics.cpp:63:3 in __asan_memcpy Shadow bytes around the buggy address: 0x503006634700: 00 fa fa fa fd fd fd fd fa fa fd fd fd fd fa fa 0x503006634780: 00 00 00 fa fa fa fd fd fd fd fa fa fd fd fd fd 0x503006634800: fa fa fd fd fd fd fa fa fd fd fd fd fa fa fd fd 0x503006634880: fd fd fa fa fd fd fd fd fa fa fd fd fd fd fa fa 0x503006634900: fd fd fd fd fa fa fa fa fa fa fa fa fa fa fa fa =>0x503006634980: fa fa fa fa fa fa fa fa fd[fd]fd fd fa fa fa fa 0x503006634a00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634a80: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634b00: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fd fd 0x503006634b80: fd fd fa fa fa fa fa fa fa fa fa fa fa fa fa fa 0x503006634c00: fa fa fa fa fa fa fa fa fa fa fa fa fd fd fd fd Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==2116888==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/WithSDK.DescribeConsumer.out ------ FAIL: 1 - GOOD, 1 - CRASHED ydb/core/persqueue/ut/ut_with_sdk ydb/core/quoter/ut [size:medium] ------ sole chunk ran 1 test (total:9.50s - test:9.47s) [fail] QuoterWithKesusTest::PrefetchCoefficient [default-linux-x86_64-release-asan] (4.05s) assertion failed at ydb/core/quoter/ut_helpers.cpp:121, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult) at /-S/ydb/core/quoter/ut_helpers.cpp:121:5 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.out ------ FAIL: 1 - FAIL ydb/core/quoter/ut ydb/core/tx/columnshard/ut_schema [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:147.03s - test:146.40s) [crashed] TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [default-linux-x86_64-release-asan] (125.85s) Test crashed (return code: 100) ==2114620==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. 
Indirect leak of 31192 byte(s) in 557 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2a1b4034 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x2a1b4034 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x2a1b4034 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x2a1b4034 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:281:16 #5 0x2a1b4034 in __construct_node /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1812:21 #6 0x2a1b4034 in std::__y1::pair*, long>, bool> std::__y1::__tree>::__emplace_unique_key_args(NKikimr::TLogoBlobID const&, NKikimr::TLogoBlobID const&) /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1779:25 #7 0x2a1ab549 in __emplace_unique_extract_key /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1054:12 #8 0x2a1ab549 in __emplace_unique /-S/contrib/libs/cxxsupp/libcxx/include/__tree:1032:12 #9 0x2a1ab549 in emplace, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #36 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #37 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #38 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #39 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #40 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #41 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #42 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #43 0x7fd8e6403d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2623242 byte(s) leaked in 61938 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot.Internal-FirstPkColumn.out ------ [1/60] chunk ran 1 test (total:148.86s - test:148.18s) [crashed] TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [default-linux-x86_64-release-asan] (127.58s) Test crashed (return code: 100) ==2110501==ERROR: LeakSanitizer: detected memory leaks Too many leaks! Only the first 5000 leaks encountered will be reported. 
Indirect leak of 41088 byte(s) in 1284 object(s) allocated from: #0 0x100b418d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x18622f15 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x18622f15 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x18622f15 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x18622f15 in get_node /-S/util/generic/hash_table.h:503:43 #5 0x18622f15 in new_node /-S/util/generic/hash_table.h:953:19 #6 0x18622f15 in std::__y1::pair<__yhashtable_iterator, bool> THashTable, TIdentity, TEqualTo, std::__y1::allocator>::insert_unique_noresize(NKikimr::TLogoBlobID const&) /-S/util/generic/hash_table.h:1059:17 #7 0x2a1a1b98 in insert_unique /-S/util/generic/hash_table.h:680:16 #8 0x2a1a1b98 in insert /-S/util/generic/hash_set.h:162:51 #9 0x2a1a1b98 in operator() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp #10 0x2a1a1b98 in ExtractTo<(lambda at /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:276:23)> /-S/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.h ..[snippet truncated].. () /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0xff931e7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x1086fe25 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x1086fe25 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x10848a38 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0xff92093 in NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute() /-S/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp:1093:1 #38 0x1084a305 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x1086a39c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7f78fc4bbd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 2643314 byte(s) leaked in 62813 allocation(s). 
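The two ut_schema crashes above are LeakSanitizer findings rather than assertion failures: an "indirect leak" is memory that is reachable only through another leaked object, here tree and hash-table nodes owned by containers that were themselves never freed. A minimal, self-contained sketch of that bug class follows (plain C++, not YDB code; the element count is chosen only to echo the first report):

```cpp
// Sketch of an "indirect leak": the std::set itself is directly leaked,
// so every tree node it owns (one per element) is reported as an
// indirect leak, like the "Indirect leak of 31192 byte(s) in 557
// object(s)" entry above. Build with -fsanitize=address and run.
#include <set>
#include <string>

int main() {
    auto* blobs = new std::set<std::string>;   // never deleted: direct leak
    for (int i = 0; i < 557; ++i) {
        blobs->insert("blob-" + std::to_string(i));  // nodes: indirect leaks
    }
    return 0;  // LeakSanitizer fires at process exit
}
```

Compiled with -fsanitize=address, this exits with an LSan report of the same shape as the entries above: one direct leak for the container, plus indirect leaks for its nodes.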
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_schema/test-results/unittest/testing_out_stuff/TColumnShardTestSchema.TTL.Reboot-Internal-FirstPkColumn.out ------ FAIL: 2 - CRASHED ydb/core/tx/columnshard/ut_schema ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 1 test (total:137.88s - test:137.80s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (132.92s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2 ------ [0/2] chunk ran 1 test (total:14.03s - test:13.99s) [crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (6.10s) Test crashed (return code: 100) ==2116434==ERROR: LeakSanitizer: detected memory leaks Direct leak of 224 byte(s) in 1 object(s) allocated from: #0 0x1920aa0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2b21b420 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 #2 0x2b21b420 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27 #3 0x2b2149d6 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37 #4 0x2b06d8cc in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #5 0x2ad2e714 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #6 0x2081b894 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #7 0x20817234 in NKikimr::NTabletFlatExecutor ..[snippet truncated].. in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #27 0x18fb3b07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #28 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #29 0x199cac35 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #30 0x199cac35 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #31 0x1999a788 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #32 0x18fb29b3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #33 0x1999c055 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #34 0x199c51ac in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #35 0x7ff2a54c1d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). 
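Unlike the indirect leaks in ut_schema, this one is a direct leak: the trace points at the allocation site (make_unique inside TEvWriteResult::BuildCompleted) of a block whose last owner dropped the pointer. A minimal, self-contained sketch of the pattern (illustrative names only; TEvWriteResult here is a stand-in struct, not the YDB event class):

```cpp
// Sketch of a "direct leak": ownership is released out of a unique_ptr
// and the raw pointer is then dropped, so LSan reports the allocation
// site itself, as in the 224-byte leak above. Build with -fsanitize=address.
#include <memory>

struct TEvWriteResult { char Payload[224]; };  // stand-in, sized to match

TEvWriteResult* BuildCompleted() {
    auto result = std::make_unique<TEvWriteResult>();
    return result.release();  // caller now owns the allocation
}

int main() {
    TEvWriteResult* ev = BuildCompleted();
    (void)ev;   // pointer dropped, block never freed
    return 0;   // LSan reports a direct leak of 224 bytes at exit
}
```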
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.out ------ FAIL: 1 - CRASHED ydb/core/tx/datashard/ut_volatile ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:34.13s - setup:0.03s test:34.07s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut ydb/services/ydb/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:9.88s - test:9.85s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (3.76s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 21 suites: 3 - GOOD, 14 - FAIL, 4 - TIMEOUT
Total 86 tests: 20 - GOOD, 51 - FAIL, 4 - NOT_LAUNCHED, 5 - TIMEOUT, 6 - CRASHED
Cache efficiency ratio is 98.95% (43069 of 43528). Local: 431 (0.99%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42638 (97.96%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[621376 ms] [TM] [rnd-3izi4sbufks985j8 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 0 (1744287877566), finished: 621376 (1744288498942)]
Time from start: 651299.0080566406 ms, time elapsed by graph 621376 ms, time diff 29923.008056640625 ms.
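Most of the 51 FAILs counted above share a single signature: a status assertion in a library/cpp/testing/unittest test, where a query expected to return SUCCESS came back as PRECONDITION_FAILED (typically with the issue "SnapshotRW can only be used with olap tables"). The "(PRECONDITION_FAILED != SUCCESS) ... with diff:" text is the framework's value-comparison output. A hedged sketch of that assertion pattern (stand-in string values; the real tests compare NYdb::EStatus from a query result):

```cpp
// Sketch of the assertion pattern behind the recurring failures above,
// using the library/cpp/testing/unittest macros the failing suites are
// built on. The strings are stand-ins for the status enum values.
#include <library/cpp/testing/unittest/registar.h>

Y_UNIT_TEST_SUITE(StatusCheckExample) {
    Y_UNIT_TEST(ExpectsSuccess) {
        const TString actual = "PRECONDITION_FAILED";  // what the query returned
        const TString issues = "SnapshotRW can only be used with olap tables.";
        // On mismatch the framework raises the error shown in this log:
        // both values, the comment passed as the third argument, and a
        // compact diff of the two strings.
        UNIT_ASSERT_VALUES_EQUAL_C(actual, "SUCCESS", issues);
    }
}
```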
The longest 10 tasks:
[621376 ms] [TM] [rnd-3izi4sbufks985j8 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1744287877566, finished: 1744288498942]
[621288 ms] [TM] [rnd-5xdtb003otg0qagr asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1744287877403, finished: 1744288498691]
[620496 ms] [TM] [rnd-cbms9ur8pcegtscp asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1744287877483, finished: 1744288497979]
[615354 ms] [TM] [rnd-16074842517648569092 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744287875190, finished: 1744288490544]
[610680 ms] [TM] [rnd-13689386687951001105 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1744287875193, finished: 1744288485873]
[541699 ms] [TM] [rnd-3663720783026587496 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1744287877601, finished: 1744288419300]
[518519 ms] [TM] [rnd-2285979815965720044 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744287861204, finished: 1744288379723]
[516102 ms] [TM] [rnd-15219972476607569325 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1744287862494, finished: 1744288378596]
[149255 ms] [TM] [rnd-9049720445573201635 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_schema/unittest [started: 1744287862562, finished: 1744288011817]
[147410 ms] [TM] [rnd-4229017733272876871 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_schema/unittest [started: 1744287872469, finished: 1744288019879]
Total time by type:
[6765216 ms] [TM] [count: 443, ave time 15271.37 msec]
[ 88356 ms] [prepare:get from local cache] [count: 431, ave time 205.00 msec]
[ 28614 ms] [prepare:AC] [count: 2, ave time 14307.00 msec]
[ 24812 ms] [prepare:put to dist cache] [count: 423, ave time 58.66 msec]
[ 12929 ms] [TA] [count: 16, ave time 808.06 msec]
[ 7925 ms] [prepare:bazel-store] [count: 1, ave time 7925.00 msec]
[ 5396 ms] [prepare:tools] [count: 16, ave time 337.25 msec]
[ 1983 ms] [prepare:clean] [count: 3, ave time 661.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 6778145 ms (100.00%)
Total run tasks time - 6778145 ms
Configure time - 22.9 s
Statistics overhead 1389 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0
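The remaining crash class in this run is the heap-use-after-free in WithSDK::DescribeConsumer, where __asan_memcpy read 9 bytes from a freed heap block (the fd shadow bytes in the dump mark freed memory). A minimal, self-contained reproduction of that bug class (plain C++, unrelated to the persqueue code; sizes chosen only to match the report):

```cpp
// Sketch of a heap-use-after-free: a pointer into a heap block is kept
// across the block's deallocation and then read through memcpy, which is
// exactly where the interceptor __asan_memcpy aborts in the report above.
// Build with -fsanitize=address and run.
#include <cstring>

int main() {
    char* block = new char[9];
    std::memcpy(block, "12345678", 9);  // 8 chars plus NUL: 9 bytes
    const char* stale = block;
    delete[] block;                     // block is now freed ("fd" shadow)
    char out[9];
    std::memcpy(out, stale, 9);         // READ of size 9 at a freed address
    return out[0];
}
```

Under -fsanitize=address this aborts with a "READ of size 9" heap-use-after-free report of the same shape as the one logged for the persqueue test.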